Dataset schema (column name: type, min to max size):
  repository_name: string, length 7 to 55
  func_path_in_repository: string, length 4 to 223
  func_name: string, length 1 to 134
  whole_func_string: string, length 75 to 104k
  language: string, 1 distinct value
  func_code_string: string, length 75 to 104k
  func_code_tokens: sequence, length 19 to 28.4k
  func_documentation_string: string, length 1 to 46.9k
  func_documentation_tokens: sequence, length 1 to 1.97k
  split_name: string, 1 distinct value
  func_code_url: string, length 87 to 315
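For orientation, a minimal sketch of how a dataset with this schema might be loaded and inspected with the Hugging Face datasets library; the dataset identifier below is a placeholder, not the real one, and only the column names listed above are taken from this page.

# Minimal sketch, assuming the rows below come from a Hugging Face dataset
# with the columns listed above. "your-org/code-functions" is a placeholder id.
from datasets import load_dataset

ds = load_dataset("your-org/code-functions", split="train")  # split_name is "train" in the rows below
row = ds[0]
print(row["repository_name"], row["func_path_in_repository"], row["func_name"])
print(row["func_documentation_string"])
print(row["func_code_url"])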
mjirik/io3d
io3d/dcmreaddata.py
DicomDirectory.print_series_info
def print_series_info(self, series_info, minimal_series_number=1):
    """
    Print series_info from dcmdirstats
    """
    strinfo = ''
    if len(series_info) > minimal_series_number:
        for serie_number in series_info.keys():
            strl = get_one_serie_info(series_info, serie_number)
            strinfo = strinfo + strl + '\n'
            # rint strl

    return strinfo
python
[ "def", "print_series_info", "(", "self", ",", "series_info", ",", "minimal_series_number", "=", "1", ")", ":", "strinfo", "=", "''", "if", "len", "(", "series_info", ")", ">", "minimal_series_number", ":", "for", "serie_number", "in", "series_info", ".", "keys", "(", ")", ":", "strl", "=", "get_one_serie_info", "(", "series_info", ",", "serie_number", ")", "strinfo", "=", "strinfo", "+", "strl", "+", "'\\n'", "# rint strl", "return", "strinfo" ]
Print series_info from dcmdirstats
[ "Print", "series_info", "from", "dcmdirstats" ]
train
https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/dcmreaddata.py#L645-L656
mjirik/io3d
io3d/dcmreaddata.py
DicomDirectory.__prepare_info_from_dicomdir_file
def __prepare_info_from_dicomdir_file(self, writedicomdirfile=True):
    """
    Check if exists dicomdir file and load it or cerate it

    dcmdir = get_dir(dirpath)

    dcmdir: list with filenames, SeriesNumber and SliceLocation
    """
    createdcmdir = True
    dicomdirfile = os.path.join(self.dirpath, self.dicomdir_filename)
    ftype = 'pickle'
    # if exist dicomdir file and is in correct version, use it
    if os.path.exists(dicomdirfile):
        try:
            dcmdirplus = misc.obj_from_file(dicomdirfile, ftype)
            if dcmdirplus['version'] == __version__:
                createdcmdir = False
                dcmdir = dcmdirplus['filesinfo']
        except Exception:
            logger.debug('Found dicomdir.pkl with wrong version')
            createdcmdir = True

    if createdcmdir or self.force_create_dicomdir:
        dcmdirplus = self._create_dicomdir_info()
        dcmdir = dcmdirplus['filesinfo']
        if (writedicomdirfile) and len(dcmdir) > 0:
            # obj_to_file(dcmdirplus, dicomdirfile, ftype)
            try:
                misc.obj_to_file(dcmdirplus, dicomdirfile, ftype)
            except:
                logger.warning('Cannot write dcmdir file')
                traceback.print_exc()
            # bj_to_file(dcmdir, dcmdiryamlpath )

    dcmdir = dcmdirplus['filesinfo']
    self.dcmdirplus = dcmdirplus
    self.files_with_info = dcmdir
    return dcmdir
python
[ "def", "__prepare_info_from_dicomdir_file", "(", "self", ",", "writedicomdirfile", "=", "True", ")", ":", "createdcmdir", "=", "True", "dicomdirfile", "=", "os", ".", "path", ".", "join", "(", "self", ".", "dirpath", ",", "self", ".", "dicomdir_filename", ")", "ftype", "=", "'pickle'", "# if exist dicomdir file and is in correct version, use it", "if", "os", ".", "path", ".", "exists", "(", "dicomdirfile", ")", ":", "try", ":", "dcmdirplus", "=", "misc", ".", "obj_from_file", "(", "dicomdirfile", ",", "ftype", ")", "if", "dcmdirplus", "[", "'version'", "]", "==", "__version__", ":", "createdcmdir", "=", "False", "dcmdir", "=", "dcmdirplus", "[", "'filesinfo'", "]", "except", "Exception", ":", "logger", ".", "debug", "(", "'Found dicomdir.pkl with wrong version'", ")", "createdcmdir", "=", "True", "if", "createdcmdir", "or", "self", ".", "force_create_dicomdir", ":", "dcmdirplus", "=", "self", ".", "_create_dicomdir_info", "(", ")", "dcmdir", "=", "dcmdirplus", "[", "'filesinfo'", "]", "if", "(", "writedicomdirfile", ")", "and", "len", "(", "dcmdir", ")", ">", "0", ":", "# obj_to_file(dcmdirplus, dicomdirfile, ftype)", "try", ":", "misc", ".", "obj_to_file", "(", "dcmdirplus", ",", "dicomdirfile", ",", "ftype", ")", "except", ":", "logger", ".", "warning", "(", "'Cannot write dcmdir file'", ")", "traceback", ".", "print_exc", "(", ")", "# bj_to_file(dcmdir, dcmdiryamlpath )", "dcmdir", "=", "dcmdirplus", "[", "'filesinfo'", "]", "self", ".", "dcmdirplus", "=", "dcmdirplus", "self", ".", "files_with_info", "=", "dcmdir", "return", "dcmdir" ]
Check if exists dicomdir file and load it or cerate it dcmdir = get_dir(dirpath) dcmdir: list with filenames, SeriesNumber and SliceLocation
[ "Check", "if", "exists", "dicomdir", "file", "and", "load", "it", "or", "cerate", "it" ]
train
https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/dcmreaddata.py#L679-L718
mjirik/io3d
io3d/dcmreaddata.py
DicomDirectory.series_in_dir
def series_in_dir(self):
    """input is dcmdir, not dirpath """
    # none_count = 0
    countsd = {}
    # dcmdirseries = []
    for line in self.files_with_info:
        if "SeriesNumber" in line:
            sn = line['SeriesNumber']
        else:
            sn = None
        if sn in countsd:
            countsd[sn] += 1
        else:
            countsd[sn] = 1
    bins = list(countsd)
    counts = list(countsd.values())

    # try:
    #     dcmdirseries = [line['SeriesNumber'] for line in self.files_with_info]
    # except:
    #     return [0], [0]
    # bins, counts = np.unique(dcmdirseries, return_counts=True)
    # binslist = bins.tolist()
    # if None in binslist:
    #     if len(binslist) == 1:
    #         return [0], [0]
    #     else:
    #         logger.warning
    # kvůli správným intervalům mezi biny je nutno jeden přidat na konce
    # mxb = np.max(bins)
    # if mxb is None:
    #     mxb = 1
    # else:
    #     mxb = mxb + 1
    #
    # binslist.append(mxb)
    # counts, binsvyhodit = np.histogram(dcmdirseries, bins=binslist)
    # return counts.tolist(), bins.tolist()
    return counts, bins
python
[ "def", "series_in_dir", "(", "self", ")", ":", "# none_count = 0", "countsd", "=", "{", "}", "# dcmdirseries = []", "for", "line", "in", "self", ".", "files_with_info", ":", "if", "\"SeriesNumber\"", "in", "line", ":", "sn", "=", "line", "[", "'SeriesNumber'", "]", "else", ":", "sn", "=", "None", "if", "sn", "in", "countsd", ":", "countsd", "[", "sn", "]", "+=", "1", "else", ":", "countsd", "[", "sn", "]", "=", "1", "bins", "=", "list", "(", "countsd", ")", "counts", "=", "list", "(", "countsd", ".", "values", "(", ")", ")", "# try:", "# dcmdirseries = [line['SeriesNumber'] for line in self.files_with_info]", "# except:", "# return [0], [0]", "# bins, counts = np.unique(dcmdirseries, return_counts=True)", "# binslist = bins.tolist()", "# if None in binslist:", "# if len(binslist) == 1:", "# return [0], [0]", "# else:", "# logger.warning", "# kvůli správným intervalům mezi biny je nutno jeden přidat na konce", "# mxb = np.max(bins)", "# if mxb is None:", "# mxb = 1", "# else:", "# mxb = mxb + 1", "#", "# binslist.append(mxb)", "# counts, binsvyhodit = np.histogram(dcmdirseries, bins=binslist)", "# return counts.tolist(), bins.tolist()", "return", "counts", ",", "bins" ]
input is dcmdir, not dirpath
[ "input", "is", "dcmdir", "not", "dirpath" ]
train
https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/dcmreaddata.py#L720-L765
mjirik/io3d
io3d/dcmreaddata.py
DicomDirectory.get_sorted_series_files
def get_sorted_series_files(self, startpath="", series_number=None, return_files_with_info=False,
                            sort_keys="SliceLocation", return_files=True,
                            remove_doubled_slice_locations=True):
    """
    Function returns sorted list of dicom files. File paths are organized
    by SeriesUID, StudyUID and FrameUID

    :param startpath: path prefix. E.g. "~/data"
    :param series_number: ID of series used for filtering the data
    :param return_files_with_info: return more complex information about sorted files
    :param return_files: return simple list of sorted files
    :type sort_keys: One key or list of keys used for sorting method by the order of keys.
    """
    dcmdir = self.files_with_info[:]

    # select sublist with SeriesNumber
    if series_number is not None:
        dcmdir = [
            line for line in dcmdir
            if line['SeriesNumber'] == series_number
        ]
    dcmdir = sort_list_of_dicts(dcmdir, keys=sort_keys)

    logger.debug('SeriesNumber: ' + str(series_number))

    if remove_doubled_slice_locations:
        dcmdir = self._remove_doubled_slice_locations(dcmdir)

    filelist = []
    for onefile in dcmdir:
        filelist.append(os.path.join(startpath, self.dirpath, onefile['filename']))
        # head, tail = os.path.split(onefile['filename'])

    retval = []
    if return_files:
        retval.append(filelist)
    if return_files_with_info:
        retval.append(dcmdir)
    if len(retval) == 0:
        retval = None
    elif len(retval) == 1:
        retval = retval[0]
    else:
        retval = tuple(retval)

    return retval
python
[ "def", "get_sorted_series_files", "(", "self", ",", "startpath", "=", "\"\"", ",", "series_number", "=", "None", ",", "return_files_with_info", "=", "False", ",", "sort_keys", "=", "\"SliceLocation\"", ",", "return_files", "=", "True", ",", "remove_doubled_slice_locations", "=", "True", ")", ":", "dcmdir", "=", "self", ".", "files_with_info", "[", ":", "]", "# select sublist with SeriesNumber", "if", "series_number", "is", "not", "None", ":", "dcmdir", "=", "[", "line", "for", "line", "in", "dcmdir", "if", "line", "[", "'SeriesNumber'", "]", "==", "series_number", "]", "dcmdir", "=", "sort_list_of_dicts", "(", "dcmdir", ",", "keys", "=", "sort_keys", ")", "logger", ".", "debug", "(", "'SeriesNumber: '", "+", "str", "(", "series_number", ")", ")", "if", "remove_doubled_slice_locations", ":", "dcmdir", "=", "self", ".", "_remove_doubled_slice_locations", "(", "dcmdir", ")", "filelist", "=", "[", "]", "for", "onefile", "in", "dcmdir", ":", "filelist", ".", "append", "(", "os", ".", "path", ".", "join", "(", "startpath", ",", "self", ".", "dirpath", ",", "onefile", "[", "'filename'", "]", ")", ")", "# head, tail = os.path.split(onefile['filename'])", "retval", "=", "[", "]", "if", "return_files", ":", "retval", ".", "append", "(", "filelist", ")", "if", "return_files_with_info", ":", "retval", ".", "append", "(", "dcmdir", ")", "if", "len", "(", "retval", ")", "==", "0", ":", "retval", "=", "None", "elif", "len", "(", "retval", ")", "==", "1", ":", "retval", "=", "retval", "[", "0", "]", "else", ":", "retval", "=", "tuple", "(", "retval", ")", "return", "retval" ]
Function returns sorted list of dicom files. File paths are organized by SeriesUID, StudyUID and FrameUID :param startpath: path prefix. E.g. "~/data" :param series_number: ID of series used for filtering the data :param return_files_with_info: return more complex information about sorted files :param return_files: return simple list of sorted files :type sort_keys: One key or list of keys used for sorting method by the order of keys.
[ "Function", "returns", "sorted", "list", "of", "dicom", "files", ".", "File", "paths", "are", "organized", "by", "SeriesUID", "StudyUID", "and", "FrameUID" ]
train
https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/dcmreaddata.py#L767-L815
mjirik/io3d
io3d/dcmreaddata.py
DicomDirectory._create_dicomdir_info
def _create_dicomdir_info(self):
    """
    Function crates list of all files in dicom dir with all IDs
    """
    filelist = files_in_dir(self.dirpath)
    files = []
    metadataline = {}

    for filepath in filelist:
        head, teil = os.path.split(filepath)
        dcmdata = None

        if os.path.isdir(filepath):
            logger.debug("Subdirectory found in series dir is ignored: " + str(filepath))
            continue

        try:
            dcmdata = pydicom.read_file(filepath)
        except pydicom.errors.InvalidDicomError as e:
            # some files doesnt have DICM marker
            try:
                dcmdata = pydicom.read_file(filepath, force=self.force_read)
                # if e.[0].startswith("File is missing \\'DICM\\' marker. Use force=True to force reading")
            except Exception as e:
                if teil != self.dicomdir_filename:
                    # print('Dicom read problem with file ' + filepath)
                    logger.info('Dicom read problem with file ' + filepath)
                    import traceback
                    logger.debug(traceback.format_exc())

        if hasattr(dcmdata, "DirectoryRecordSequence"):
            # file is DICOMDIR - metainfo about files in directory
            # we are not using this info
            dcmdata = None

        if dcmdata is not None:
            metadataline = _prepare_metadata_line(dcmdata, teil)
            files.append(metadataline)

    # if SliceLocation is None, it is sorted to the end
    # this is not necessary it can be deleted
    files.sort(key=lambda x: (x['SliceLocation'] is None, x["SliceLocation"]))
    dcmdirplus = {'version': __version__, 'filesinfo': files, }
    if "StudyDate" in metadataline:
        dcmdirplus["StudyDate"] = metadataline["StudyDate"]
    return dcmdirplus
python
[ "def", "_create_dicomdir_info", "(", "self", ")", ":", "filelist", "=", "files_in_dir", "(", "self", ".", "dirpath", ")", "files", "=", "[", "]", "metadataline", "=", "{", "}", "for", "filepath", "in", "filelist", ":", "head", ",", "teil", "=", "os", ".", "path", ".", "split", "(", "filepath", ")", "dcmdata", "=", "None", "if", "os", ".", "path", ".", "isdir", "(", "filepath", ")", ":", "logger", ".", "debug", "(", "\"Subdirectory found in series dir is ignored: \"", "+", "str", "(", "filepath", ")", ")", "continue", "try", ":", "dcmdata", "=", "pydicom", ".", "read_file", "(", "filepath", ")", "except", "pydicom", ".", "errors", ".", "InvalidDicomError", "as", "e", ":", "# some files doesnt have DICM marker", "try", ":", "dcmdata", "=", "pydicom", ".", "read_file", "(", "filepath", ",", "force", "=", "self", ".", "force_read", ")", "# if e.[0].startswith(\"File is missing \\\\'DICM\\\\' marker. Use force=True to force reading\")", "except", "Exception", "as", "e", ":", "if", "teil", "!=", "self", ".", "dicomdir_filename", ":", "# print('Dicom read problem with file ' + filepath)", "logger", ".", "info", "(", "'Dicom read problem with file '", "+", "filepath", ")", "import", "traceback", "logger", ".", "debug", "(", "traceback", ".", "format_exc", "(", ")", ")", "if", "hasattr", "(", "dcmdata", ",", "\"DirectoryRecordSequence\"", ")", ":", "# file is DICOMDIR - metainfo about files in directory", "# we are not using this info", "dcmdata", "=", "None", "if", "dcmdata", "is", "not", "None", ":", "metadataline", "=", "_prepare_metadata_line", "(", "dcmdata", ",", "teil", ")", "files", ".", "append", "(", "metadataline", ")", "# if SliceLocation is None, it is sorted to the end", "# this is not necessary it can be deleted", "files", ".", "sort", "(", "key", "=", "lambda", "x", ":", "(", "x", "[", "'SliceLocation'", "]", "is", "None", ",", "x", "[", "\"SliceLocation\"", "]", ")", ")", "dcmdirplus", "=", "{", "'version'", ":", "__version__", ",", "'filesinfo'", ":", "files", ",", "}", "if", "\"StudyDate\"", "in", "metadataline", ":", "dcmdirplus", "[", "\"StudyDate\"", "]", "=", "metadataline", "[", "\"StudyDate\"", "]", "return", "dcmdirplus" ]
Function crates list of all files in dicom dir with all IDs
[ "Function", "crates", "list", "of", "all", "files", "in", "dicom", "dir", "with", "all", "IDs" ]
train
https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/dcmreaddata.py#L828-L874
ClericPy/torequests
torequests/parsers.py
get_one
def get_one(seq, default=None, skip_string_iter=True):
    """
    Return one item from seq or None(by default).
    """
    if skip_string_iter and isinstance(seq, (str, unicode, bytes, bytearray)):
        return seq
    if not seq:
        return ''
    try:
        return next(iter(seq))
    except TypeError:
        # not hasattr __iter__/__getitem__
        return default
python
[ "def", "get_one", "(", "seq", ",", "default", "=", "None", ",", "skip_string_iter", "=", "True", ")", ":", "if", "skip_string_iter", "and", "isinstance", "(", "seq", ",", "(", "str", ",", "unicode", ",", "bytes", ",", "bytearray", ")", ")", ":", "return", "seq", "if", "not", "seq", ":", "return", "''", "try", ":", "return", "next", "(", "iter", "(", "seq", ")", ")", "except", "TypeError", ":", "# not hasattr __iter__/__getitem__", "return", "default" ]
Return one item from seq or None(by default).
[ "Return", "one", "item", "from", "seq", "or", "None", "(", "by", "default", ")", "." ]
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/parsers.py#L15-L27
ClericPy/torequests
torequests/parsers.py
SimpleParser.ensure_list
def ensure_list(obj):
    """
    null obj -> return [];
    str, unicode, bytes, bytearray -> [obj];
    else -> list(obj)
    """
    if not obj:
        return []
    elif isinstance(obj, (str, unicode, bytes, bytearray)):
        return [obj]
    elif hasattr(obj, '__iter__') or hasattr(obj, '__getitem__'):
        return list(obj)
    else:
        return [obj]
python
[ "def", "ensure_list", "(", "obj", ")", ":", "if", "not", "obj", ":", "return", "[", "]", "elif", "isinstance", "(", "obj", ",", "(", "str", ",", "unicode", ",", "bytes", ",", "bytearray", ")", ")", ":", "return", "[", "obj", "]", "elif", "hasattr", "(", "obj", ",", "'__iter__'", ")", "or", "hasattr", "(", "obj", ",", "'__getitem__'", ")", ":", "return", "list", "(", "obj", ")", "else", ":", "return", "[", "obj", "]" ]
null obj -> return []; str, unicode, bytes, bytearray -> [obj]; else -> list(obj)
[ "null", "obj", "-", ">", "return", "[]", ";" ]
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/parsers.py#L167-L182
ClericPy/torequests
torequests/parsers.py
SimpleParser.python_parser
def python_parser(self, obj, *args):
    """operate a python obj"""
    attr, args = args[0], args[1:]
    item = getattr(obj, attr)
    if callable(item):
        item = item(*args)
    return [item]
python
[ "def", "python_parser", "(", "self", ",", "obj", ",", "*", "args", ")", ":", "attr", ",", "args", "=", "args", "[", "0", "]", ",", "args", "[", "1", ":", "]", "item", "=", "getattr", "(", "obj", ",", "attr", ")", "if", "callable", "(", "item", ")", ":", "item", "=", "item", "(", "*", "args", ")", "return", "[", "item", "]" ]
operate a python obj
[ "operate", "a", "python", "obj" ]
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/parsers.py#L194-L200
ClericPy/torequests
torequests/parsers.py
SimpleParser.re_parser
def re_parser(self, scode, *args):
    """
    args: [arg1, arg2]

    arg[0] = a valid regex pattern

    arg[1] : if startswith('@') call sub; if startswith('$') call finditer,
             $0, $1 means group index.

    return an ensure_list
    """

    def gen_match(matches, num):
        for match in matches:
            yield match.group(num)

    scode = self.ensure_str(scode)
    assert self._re.match(
        '^@|^\$\d+', args[1]), ValueError('args1 should match ^@|^\$\d+')
    arg1, arg2 = args[1][0], args[1][1:]
    com = self._re.compile(args[0])
    if arg1 == '@':
        result = com.sub(arg2, scode)
        return self.ensure_list(result)
    else:
        result = com.finditer(scode)
        return gen_match(result, int(arg2))
python
[ "def", "re_parser", "(", "self", ",", "scode", ",", "*", "args", ")", ":", "def", "gen_match", "(", "matches", ",", "num", ")", ":", "for", "match", "in", "matches", ":", "yield", "match", ".", "group", "(", "num", ")", "scode", "=", "self", ".", "ensure_str", "(", "scode", ")", "assert", "self", ".", "_re", ".", "match", "(", "'^@|^\\$\\d+'", ",", "args", "[", "1", "]", ")", ",", "ValueError", "(", "'args1 should match ^@|^\\$\\d+'", ")", "arg1", ",", "arg2", "=", "args", "[", "1", "]", "[", "0", "]", ",", "args", "[", "1", "]", "[", "1", ":", "]", "com", "=", "self", ".", "_re", ".", "compile", "(", "args", "[", "0", "]", ")", "if", "arg1", "==", "'@'", ":", "result", "=", "com", ".", "sub", "(", "arg2", ",", "scode", ")", "return", "self", ".", "ensure_list", "(", "result", ")", "else", ":", "result", "=", "com", ".", "finditer", "(", "scode", ")", "return", "gen_match", "(", "result", ",", "int", "(", "arg2", ")", ")" ]
args: [arg1, arg2] arg[0] = a valid regex pattern arg[1] : if startswith('@') call sub; if startswith('$') call finditer, $0, $1 means group index. return an ensure_list
[ "args", ":", "[", "arg1", "arg2", "]" ]
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/parsers.py#L202-L228
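A hedged usage sketch of the re_parser signature shown in the row above; it assumes SimpleParser from torequests.parsers can be instantiated with no arguments, which this row does not show.

# Illustrative only, based on the function body above; SimpleParser() with no
# constructor arguments is an assumption.
from torequests.parsers import SimpleParser

p = SimpleParser()
# '$1' selects group 1 from each finditer match
print(list(p.re_parser('<a>x</a><b>y</b>', r'<(\w)>', '$1')))  # ['a', 'b']
# '@_' calls re.sub with '_' as the replacement and wraps the result in a list
print(p.re_parser('<a>x</a>', r'<.*?>', '@_'))  # ['_x_']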
ClericPy/torequests
torequests/parsers.py
SimpleParser.html_parser
def html_parser(self, scode, *args):
    """
    args[0] = cssselector

    args[1] = text / html / xml / @attribute_name
    """
    allow_method = ('text', 'html', 'xml')
    css_path, method = args
    assert method in allow_method or method.startswith(
        '@'), 'method allow: %s or @attr' % allow_method
    result = self.ensure_list(
        self._fromstring(scode, parser=self._html_parser).cssselect(css_path))
    if method.startswith('@'):
        result = [item.get(method[1:]) for item in result]
    else:
        result = [
            self._tostring(
                item, method=method, with_tail=0, encoding='unicode')
            for item in result
        ]
    return result
python
[ "def", "html_parser", "(", "self", ",", "scode", ",", "*", "args", ")", ":", "allow_method", "=", "(", "'text'", ",", "'html'", ",", "'xml'", ")", "css_path", ",", "method", "=", "args", "assert", "method", "in", "allow_method", "or", "method", ".", "startswith", "(", "'@'", ")", ",", "'method allow: %s or @attr'", "%", "allow_method", "result", "=", "self", ".", "ensure_list", "(", "self", ".", "_fromstring", "(", "scode", ",", "parser", "=", "self", ".", "_html_parser", ")", ".", "cssselect", "(", "css_path", ")", ")", "if", "method", ".", "startswith", "(", "'@'", ")", ":", "result", "=", "[", "item", ".", "get", "(", "method", "[", "1", ":", "]", ")", "for", "item", "in", "result", "]", "else", ":", "result", "=", "[", "self", ".", "_tostring", "(", "item", ",", "method", "=", "method", ",", "with_tail", "=", "0", ",", "encoding", "=", "'unicode'", ")", "for", "item", "in", "result", "]", "return", "result" ]
args[0] = cssselector args[1] = text / html / xml / @attribute_name
[ "args", "[", "0", "]", "=", "cssselector" ]
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/parsers.py#L230-L251
ClericPy/torequests
torequests/parsers.py
SimpleParser.xml_parser
def xml_parser(self, scode, *args):
    """
    args[0]: xpath

    args[1]: text / html / xml
    """
    allow_method = ('text', 'html', 'xml')
    xpath_string, method = args
    assert method in allow_method, 'method allow: %s' % allow_method
    result = self.ensure_list(
        self._fromstring(scode, parser=self._xml_parser).xpath(xpath_string))
    result = [
        self._tostring(
            item, method=method, with_tail=0, encoding='unicode')
        for item in result
    ]
    return result
python
[ "def", "xml_parser", "(", "self", ",", "scode", ",", "*", "args", ")", ":", "allow_method", "=", "(", "'text'", ",", "'html'", ",", "'xml'", ")", "xpath_string", ",", "method", "=", "args", "assert", "method", "in", "allow_method", ",", "'method allow: %s'", "%", "allow_method", "result", "=", "self", ".", "ensure_list", "(", "self", ".", "_fromstring", "(", "scode", ",", "parser", "=", "self", ".", "_xml_parser", ")", ".", "xpath", "(", "xpath_string", ")", ")", "result", "=", "[", "self", ".", "_tostring", "(", "item", ",", "method", "=", "method", ",", "with_tail", "=", "0", ",", "encoding", "=", "'unicode'", ")", "for", "item", "in", "result", "]", "return", "result" ]
args[0]: xpath args[1]: text / html / xml
[ "args", "[", "0", "]", ":", "xpath" ]
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/parsers.py#L253-L270
ClericPy/torequests
torequests/parsers.py
SimpleParser.parse
def parse(self, scode, args_chain=None, join_with=None, default=''):
    """
    single arg:
        [one_to_many, parser_name, *args]
    args_chain:
        [['1-n', 're', 'search', '(<.*?>)', '\\1']]
        [['1-n', 'html', 'p', 'html'], ['n-n', 'html', 'p', 'text']]
    """
    assert args_chain and isinstance(
        args_chain, (list, tuple)
    ) and isinstance(args_chain[0], (list, tuple)), ValueError(
        'args_chain type should be list of list, like: [["1-n", "html", "p", "html"], ["n-n", "html", "p", "text"]].'
    )
    for arg in args_chain:
        # py2 not support * unpack
        one_to_many, parser_name, parse_args = (arg[0], arg[1], arg[2:])
        assert self._re.match(
            '^[1n]-[1n]$',
            one_to_many), 'one_to_many should be one of 1-1, 1-n, n-n, n-1'
        input_count, output_count = one_to_many.split('-')
        parser = self._choose_parser(parser_name)
        # input data to parse.
        if input_count == 'n':
            scode = list(map(lambda item: parser(item, *parse_args), scode))
        if input_count == '1':
            if parser not in (self.jsonpath_parser, self.objectpath_parser,
                              self.python_parser):
                # json may remain multi-items
                scode = get_one(scode, default=default)
            scode = parser(scode, *parse_args)
        # ensure result match n or 1 after parsing.
        if parser in (self.objectpath_parser,):
            # objectpath not need
            continue
        if output_count == '1':
            # 1-1 or n-1
            scode = get_one(scode, default=default)
        elif input_count == 'n':
            # n-n
            scode = [get_one(i, default=default) for i in scode]
        else:
            # 1-n
            scode = list(scode)
    if join_with:
        scode = join_with.join(map(str, scode))
    return scode
python
[ "def", "parse", "(", "self", ",", "scode", ",", "args_chain", "=", "None", ",", "join_with", "=", "None", ",", "default", "=", "''", ")", ":", "assert", "args_chain", "and", "isinstance", "(", "args_chain", ",", "(", "list", ",", "tuple", ")", ")", "and", "isinstance", "(", "args_chain", "[", "0", "]", ",", "(", "list", ",", "tuple", ")", ")", ",", "ValueError", "(", "'args_chain type should be list of list, like: [[\"1-n\", \"html\", \"p\", \"html\"], [\"n-n\", \"html\", \"p\", \"text\"]].'", ")", "for", "arg", "in", "args_chain", ":", "# py2 not support * unpack", "one_to_many", ",", "parser_name", ",", "parse_args", "=", "(", "arg", "[", "0", "]", ",", "arg", "[", "1", "]", ",", "arg", "[", "2", ":", "]", ")", "assert", "self", ".", "_re", ".", "match", "(", "'^[1n]-[1n]$'", ",", "one_to_many", ")", ",", "'one_to_many should be one of 1-1, 1-n, n-n, n-1'", "input_count", ",", "output_count", "=", "one_to_many", ".", "split", "(", "'-'", ")", "parser", "=", "self", ".", "_choose_parser", "(", "parser_name", ")", "# input data to parse.", "if", "input_count", "==", "'n'", ":", "scode", "=", "list", "(", "map", "(", "lambda", "item", ":", "parser", "(", "item", ",", "*", "parse_args", ")", ",", "scode", ")", ")", "if", "input_count", "==", "'1'", ":", "if", "parser", "not", "in", "(", "self", ".", "jsonpath_parser", ",", "self", ".", "objectpath_parser", ",", "self", ".", "python_parser", ")", ":", "# json may remain multi-items", "scode", "=", "get_one", "(", "scode", ",", "default", "=", "default", ")", "scode", "=", "parser", "(", "scode", ",", "*", "parse_args", ")", "# ensure result match n or 1 after parsing.", "if", "parser", "in", "(", "self", ".", "objectpath_parser", ",", ")", ":", "# objectpath not need", "continue", "if", "output_count", "==", "'1'", ":", "# 1-1 or n-1", "scode", "=", "get_one", "(", "scode", ",", "default", "=", "default", ")", "elif", "input_count", "==", "'n'", ":", "# n-n", "scode", "=", "[", "get_one", "(", "i", ",", "default", "=", "default", ")", "for", "i", "in", "scode", "]", "else", ":", "# 1-n", "scode", "=", "list", "(", "scode", ")", "if", "join_with", ":", "scode", "=", "join_with", ".", "join", "(", "map", "(", "str", ",", "scode", ")", ")", "return", "scode" ]
single arg: [one_to_many, parser_name, *args] args_chain: [['1-n', 're', 'search', '(<.*?>)', '\\1']] [['1-n', 'html', 'p', 'html'], ['n-n', 'html', 'p', 'text']]
[ "single", "arg", ":", "[", "one_to_many", "parser_name", "*", "args", "]", "args_chain", ":", "[[", "1", "-", "n", "re", "search", "(", "<", ".", "*", "?", ">", ")", "\\\\", "1", "]]", "[[", "1", "-", "n", "html", "p", "html", "]", "[", "n", "-", "n", "html", "p", "text", "]]" ]
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/parsers.py#L291-L336
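A hedged sketch of the args_chain format documented in the row above; it assumes SimpleParser() needs no constructor arguments and that the 'html' parser name resolves to the css-selector based html_parser shown earlier in this listing.

# Illustrative only; constructor arguments and the 'html' parser mapping are assumptions.
from torequests.parsers import SimpleParser

p = SimpleParser()
html = '<div><p>a</p><p>b</p></div>'
# '1-n': one input document, many outputs; 'html' parser, css selector 'p', extract text
print(p.parse(html, [['1-n', 'html', 'p', 'text']], join_with=','))  # expected: a,b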
jurismarches/chopper
chopper/css/extractor.py
CSSExtractor.parse
def parse(self):
    """
    Parses the CSS contents and returns
    the cleaned CSS as a string

    :returns: The cleaned CSS
    :rtype: str
    """
    # Build the HTML tree
    self.tree = self._build_tree(self.html_contents)

    # Parse the CSS contents
    self.stylesheet = self.parser.parse_stylesheet(self.css_contents)

    # Get the cleaned CSS contents
    self.cleaned_css = self._clean_css()
python
[ "def", "parse", "(", "self", ")", ":", "# Build the HTML tree", "self", ".", "tree", "=", "self", ".", "_build_tree", "(", "self", ".", "html_contents", ")", "# Parse the CSS contents", "self", ".", "stylesheet", "=", "self", ".", "parser", ".", "parse_stylesheet", "(", "self", ".", "css_contents", ")", "# Get the cleaned CSS contents", "self", ".", "cleaned_css", "=", "self", ".", "_clean_css", "(", ")" ]
Parses the CSS contents and returns the cleaned CSS as a string :returns: The cleaned CSS :rtype: str
[ "Parses", "the", "CSS", "contents", "and", "returns", "the", "cleaned", "CSS", "as", "a", "string" ]
train
https://github.com/jurismarches/chopper/blob/53c5489a53e3a5d205a5cb207df751c09633e7ce/chopper/css/extractor.py#L42-L56
jurismarches/chopper
chopper/css/extractor.py
CSSExtractor.rel_to_abs
def rel_to_abs(self, base_url):
    """
    Converts relative links from css contents
    to absolute links

    :param base_url: The base page url to use for building absolute links
    :type base_url: str
    :param css_contents: The CSS contents to parse
    :type css_contents: str
    """
    self.cleaned_css = self.rel_to_abs_re.sub(
        lambda match: "url('%s')" % urljoin(
            base_url, match.group('path').strip('\'"')),
        self.cleaned_css)
python
[ "def", "rel_to_abs", "(", "self", ",", "base_url", ")", ":", "self", ".", "cleaned_css", "=", "self", ".", "rel_to_abs_re", ".", "sub", "(", "lambda", "match", ":", "\"url('%s')\"", "%", "urljoin", "(", "base_url", ",", "match", ".", "group", "(", "'path'", ")", ".", "strip", "(", "'\\'\"'", ")", ")", ",", "self", ".", "cleaned_css", ")" ]
Converts relative links from css contents to absolute links :param base_url: The base page url to use for building absolute links :type base_url: str :param css_contents: The CSS contents to parse :type css_contents: str
[ "Converts", "relative", "links", "from", "css", "contents", "to", "absolute", "links" ]
train
https://github.com/jurismarches/chopper/blob/53c5489a53e3a5d205a5cb207df751c09633e7ce/chopper/css/extractor.py#L58-L70
jurismarches/chopper
chopper/css/extractor.py
CSSExtractor._clean_css
def _clean_css(self):
    """
    Returns the cleaned CSS

    :param stylesheet: The Stylesheet object to parse
    :type stylesheet: tinycss.css21.Stylesheet
    """
    # Init the cleaned CSS rules and contents string
    css_rules = []

    # For every rule in the CSS
    for rule in self.stylesheet.rules:
        try:
            # Clean the CSS rule
            cleaned_rule = self._clean_rule(rule)

            # Append the rule to matched CSS rules
            if cleaned_rule is not None:
                css_rules.append(cleaned_rule)
        except:
            # On error, assume the rule matched the tree
            css_rules.append(rule)

    return self._build_css(css_rules)
python
[ "def", "_clean_css", "(", "self", ")", ":", "# Init the cleaned CSS rules and contents string", "css_rules", "=", "[", "]", "# For every rule in the CSS", "for", "rule", "in", "self", ".", "stylesheet", ".", "rules", ":", "try", ":", "# Clean the CSS rule", "cleaned_rule", "=", "self", ".", "_clean_rule", "(", "rule", ")", "# Append the rule to matched CSS rules", "if", "cleaned_rule", "is", "not", "None", ":", "css_rules", ".", "append", "(", "cleaned_rule", ")", "except", ":", "# On error, assume the rule matched the tree", "css_rules", ".", "append", "(", "rule", ")", "return", "self", ".", "_build_css", "(", "css_rules", ")" ]
Returns the cleaned CSS :param stylesheet: The Stylesheet object to parse :type stylesheet: tinycss.css21.Stylesheet
[ "Returns", "the", "cleaned", "CSS" ]
train
https://github.com/jurismarches/chopper/blob/53c5489a53e3a5d205a5cb207df751c09633e7ce/chopper/css/extractor.py#L85-L110
jurismarches/chopper
chopper/css/extractor.py
CSSExtractor._clean_rule
def _clean_rule(self, rule):
    """
    Cleans a css Rule by removing Selectors without matches on the tree
    Returns None if the whole rule do not match

    :param rule: CSS Rule to check
    :type rule: A tinycss Rule object
    :returns: A cleaned tinycss Rule with only Selectors matching the tree or None
    :rtype: tinycss Rule or None
    """
    # Always match @ rules
    if rule.at_keyword is not None:
        return rule

    # Clean selectors
    cleaned_token_list = []

    for token_list in split_on_comma(rule.selector):

        # If the token list matches the tree
        if self._token_list_matches_tree(token_list):

            # Add a Comma if multiple token lists matched
            if len(cleaned_token_list) > 0:
                cleaned_token_list.append(
                    cssselect.parser.Token('DELIM', ',', len(cleaned_token_list) + 1))

            # Append it to the list of cleaned token list
            cleaned_token_list += token_list

    # Return None if selectors list is empty
    if not cleaned_token_list:
        return None

    # Update rule token list
    rule.selector = cleaned_token_list

    # Return cleaned rule
    return rule
python
[ "def", "_clean_rule", "(", "self", ",", "rule", ")", ":", "# Always match @ rules", "if", "rule", ".", "at_keyword", "is", "not", "None", ":", "return", "rule", "# Clean selectors", "cleaned_token_list", "=", "[", "]", "for", "token_list", "in", "split_on_comma", "(", "rule", ".", "selector", ")", ":", "# If the token list matches the tree", "if", "self", ".", "_token_list_matches_tree", "(", "token_list", ")", ":", "# Add a Comma if multiple token lists matched", "if", "len", "(", "cleaned_token_list", ")", ">", "0", ":", "cleaned_token_list", ".", "append", "(", "cssselect", ".", "parser", ".", "Token", "(", "'DELIM'", ",", "','", ",", "len", "(", "cleaned_token_list", ")", "+", "1", ")", ")", "# Append it to the list of cleaned token list", "cleaned_token_list", "+=", "token_list", "# Return None if selectors list is empty", "if", "not", "cleaned_token_list", ":", "return", "None", "# Update rule token list", "rule", ".", "selector", "=", "cleaned_token_list", "# Return cleaned rule", "return", "rule" ]
Cleans a css Rule by removing Selectors without matches on the tree Returns None if the whole rule do not match :param rule: CSS Rule to check :type rule: A tinycss Rule object :returns: A cleaned tinycss Rule with only Selectors matching the tree or None :rtype: tinycss Rule or None
[ "Cleans", "a", "css", "Rule", "by", "removing", "Selectors", "without", "matches", "on", "the", "tree", "Returns", "None", "if", "the", "whole", "rule", "do", "not", "match" ]
train
https://github.com/jurismarches/chopper/blob/53c5489a53e3a5d205a5cb207df751c09633e7ce/chopper/css/extractor.py#L112-L150
jurismarches/chopper
chopper/css/extractor.py
CSSExtractor._token_list_matches_tree
def _token_list_matches_tree(self, token_list):
    """
    Returns whether the token list matches the HTML tree

    :param selector: A Token list to check
    :type selector: list of Token objects
    :returns: True if the token list has matches in self.tree
    :rtype: bool
    """
    try:
        parsed_selector = cssselect.parse(
            ''.join(token.as_css() for token in token_list))[0]

        return bool(
            self.tree.xpath(
                self.xpath_translator.selector_to_xpath(parsed_selector)))
    except:
        # On error, assume the selector matches the tree
        return True
python
[ "def", "_token_list_matches_tree", "(", "self", ",", "token_list", ")", ":", "try", ":", "parsed_selector", "=", "cssselect", ".", "parse", "(", "''", ".", "join", "(", "token", ".", "as_css", "(", ")", "for", "token", "in", "token_list", ")", ")", "[", "0", "]", "return", "bool", "(", "self", ".", "tree", ".", "xpath", "(", "self", ".", "xpath_translator", ".", "selector_to_xpath", "(", "parsed_selector", ")", ")", ")", "except", ":", "# On error, assume the selector matches the tree", "return", "True" ]
Returns whether the token list matches the HTML tree :param selector: A Token list to check :type selector: list of Token objects :returns: True if the token list has matches in self.tree :rtype: bool
[ "Returns", "whether", "the", "token", "list", "matches", "the", "HTML", "tree" ]
train
https://github.com/jurismarches/chopper/blob/53c5489a53e3a5d205a5cb207df751c09633e7ce/chopper/css/extractor.py#L152-L170
jurismarches/chopper
chopper/css/extractor.py
CSSExtractor._rule_as_string
def _rule_as_string(self, rule):
    """
    Converts a tinycss rule to a formatted CSS string

    :param rule: The rule to format
    :type rule: tinycss Rule object
    :returns: The Rule as a CSS string
    :rtype: str
    """
    if isinstance(rule, RuleSet):
        # Simple CSS rule : a { color: red; }
        return '%s{%s}' % (
            self._selector_as_string(rule.selector),
            self._declarations_as_string(rule.declarations))

    elif isinstance(rule, ImportRule):
        # @import rule
        return "@import url('%s') %s;" % (
            rule.uri, ','.join(rule.media))

    elif isinstance(rule, FontFaceRule):
        # @font-face rule
        return "@font-face{%s}" % self._declarations_as_string(rule.declarations)

    elif isinstance(rule, MediaRule):
        # @media rule
        return "@media %s{%s}" % (
            ','.join(rule.media),
            ''.join(self._rule_as_string(r) for r in rule.rules))

    elif isinstance(rule, PageRule):
        # @page rule
        selector, pseudo = rule.selector

        return "@page%s%s{%s}" % (
            ' %s' % selector if selector else '',
            ' :%s' % pseudo if pseudo else '',
            self._declarations_as_string(rule.declarations))

    return ''
python
[ "def", "_rule_as_string", "(", "self", ",", "rule", ")", ":", "if", "isinstance", "(", "rule", ",", "RuleSet", ")", ":", "# Simple CSS rule : a { color: red; }", "return", "'%s{%s}'", "%", "(", "self", ".", "_selector_as_string", "(", "rule", ".", "selector", ")", ",", "self", ".", "_declarations_as_string", "(", "rule", ".", "declarations", ")", ")", "elif", "isinstance", "(", "rule", ",", "ImportRule", ")", ":", "# @import rule", "return", "\"@import url('%s') %s;\"", "%", "(", "rule", ".", "uri", ",", "','", ".", "join", "(", "rule", ".", "media", ")", ")", "elif", "isinstance", "(", "rule", ",", "FontFaceRule", ")", ":", "# @font-face rule", "return", "\"@font-face{%s}\"", "%", "self", ".", "_declarations_as_string", "(", "rule", ".", "declarations", ")", "elif", "isinstance", "(", "rule", ",", "MediaRule", ")", ":", "# @media rule", "return", "\"@media %s{%s}\"", "%", "(", "','", ".", "join", "(", "rule", ".", "media", ")", ",", "''", ".", "join", "(", "self", ".", "_rule_as_string", "(", "r", ")", "for", "r", "in", "rule", ".", "rules", ")", ")", "elif", "isinstance", "(", "rule", ",", "PageRule", ")", ":", "# @page rule", "selector", ",", "pseudo", "=", "rule", ".", "selector", "return", "\"@page%s%s{%s}\"", "%", "(", "' %s'", "%", "selector", "if", "selector", "else", "''", ",", "' :%s'", "%", "pseudo", "if", "pseudo", "else", "''", ",", "self", ".", "_declarations_as_string", "(", "rule", ".", "declarations", ")", ")", "return", "''" ]
Converts a tinycss rule to a formatted CSS string :param rule: The rule to format :type rule: tinycss Rule object :returns: The Rule as a CSS string :rtype: str
[ "Converts", "a", "tinycss", "rule", "to", "a", "formatted", "CSS", "string" ]
train
https://github.com/jurismarches/chopper/blob/53c5489a53e3a5d205a5cb207df751c09633e7ce/chopper/css/extractor.py#L184-L223
jurismarches/chopper
chopper/css/extractor.py
CSSExtractor._selector_as_string
def _selector_as_string(self, selector):
    """
    Returns a selector as a CSS string

    :param selector: A list of tinycss Tokens
    :type selector: list
    :returns: The CSS string for the selector
    :rtype: str
    """
    return ','.join(
        ''.join(token.as_css() for token in strip_whitespace(token_list))
        for token_list in split_on_comma(selector))
python
[ "def", "_selector_as_string", "(", "self", ",", "selector", ")", ":", "return", "','", ".", "join", "(", "''", ".", "join", "(", "token", ".", "as_css", "(", ")", "for", "token", "in", "strip_whitespace", "(", "token_list", ")", ")", "for", "token_list", "in", "split_on_comma", "(", "selector", ")", ")" ]
Returns a selector as a CSS string :param selector: A list of tinycss Tokens :type selector: list :returns: The CSS string for the selector :rtype: str
[ "Returns", "a", "selector", "as", "a", "CSS", "string" ]
train
https://github.com/jurismarches/chopper/blob/53c5489a53e3a5d205a5cb207df751c09633e7ce/chopper/css/extractor.py#L225-L236
jurismarches/chopper
chopper/css/extractor.py
CSSExtractor._declarations_as_string
def _declarations_as_string(self, declarations):
    """
    Returns a list of declarations as a formatted CSS string

    :param declarations: The list of tinycss Declarations to format
    :type declarations: list of tinycss.css21.Declaration
    :returns: The CSS string for the declarations list
    :rtype: str
    """
    return ''.join('%s:%s%s;' % (
        d.name, d.value.as_css(),
        ' !' + d.priority if d.priority else '') for d in declarations)
python
[ "def", "_declarations_as_string", "(", "self", ",", "declarations", ")", ":", "return", "''", ".", "join", "(", "'%s:%s%s;'", "%", "(", "d", ".", "name", ",", "d", ".", "value", ".", "as_css", "(", ")", ",", "' !'", "+", "d", ".", "priority", "if", "d", ".", "priority", "else", "''", ")", "for", "d", "in", "declarations", ")" ]
Returns a list of declarations as a formatted CSS string :param declarations: The list of tinycss Declarations to format :type declarations: list of tinycss.css21.Declaration :returns: The CSS string for the declarations list :rtype: str
[ "Returns", "a", "list", "of", "declarations", "as", "a", "formatted", "CSS", "string" ]
train
https://github.com/jurismarches/chopper/blob/53c5489a53e3a5d205a5cb207df751c09633e7ce/chopper/css/extractor.py#L238-L250
vsoch/helpme
helpme/utils/format.py
envars_to_markdown
def envars_to_markdown(envars, title = "Environment"):
    '''generate a markdown list of a list of environment variable tuples

       Parameters
       ==========
       title: A title for the section (defaults to "Environment"
       envars: a list of tuples for the environment, e.g.:
               [('TERM', 'xterm-256color'),
                ('SHELL', '/bin/bash'),
                ('USER', 'vanessa'),
                ('LD_LIBRARY_PATH', ':/usr/local/pulse')]
    '''
    markdown = ''
    if envars not in [None, '', []]:
        markdown += '\n## %s\n' % title
        for envar in envars:
            markdown += ' - **%s**: %s\n' %(envar[0], envar[1])
    return markdown
python
[ "def", "envars_to_markdown", "(", "envars", ",", "title", "=", "\"Environment\"", ")", ":", "markdown", "=", "''", "if", "envars", "not", "in", "[", "None", ",", "''", ",", "[", "]", "]", ":", "markdown", "+=", "'\\n## %s\\n'", "%", "title", "for", "envar", "in", "envars", ":", "markdown", "+=", "' - **%s**: %s\\n'", "%", "(", "envar", "[", "0", "]", ",", "envar", "[", "1", "]", ")", "return", "markdown" ]
generate a markdown list of a list of environment variable tuples Parameters ========== title: A title for the section (defaults to "Environment" envars: a list of tuples for the environment, e.g.: [('TERM', 'xterm-256color'), ('SHELL', '/bin/bash'), ('USER', 'vanessa'), ('LD_LIBRARY_PATH', ':/usr/local/pulse')]
[ "generate", "a", "markdown", "list", "of", "a", "list", "of", "environment", "variable", "tuples" ]
train
https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/utils/format.py#L29-L48
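A small usage sketch of the function in the row above; the import path is inferred from the file path helpme/utils/format.py and is an assumption.

# Exercises the function shown above; the module path is assumed from the repo layout.
from helpme.utils.format import envars_to_markdown

print(envars_to_markdown([('TERM', 'xterm-256color'), ('USER', 'vanessa')]))
# (leading blank line)
# ## Environment
#  - **TERM**: xterm-256color
#  - **USER**: vanessa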
DreamLab/VmShepherd
src/vmshepherd/iaas/abstract.py
AbstractIaasDriver.create_vm
async def create_vm(self, preset_name: str, image: str, flavor: str, security_groups: List=None,
                    userdata: Dict=None, key_name: str=None, availability_zone: str=None,
                    subnets: List=None) -> Any:
    """
    Create (boot) a new server.

    :arg string preset_name: Name of vm group where vm is created.
    :arg string image: Image name.
    :arg string flavor: Flavor (or instance_type in AWS) name.
    :arg list security_groups: A list of security group names.
    :arg dict userdata: A dict of arbitrary key/value metadata to store in grains.
    :arg string key_name: (optional extension) name of previously created keypair
                          to inject into the instance.
    :arg string availability_zone: Name of the availability zone for instance placement.
    :arg string subnets: List of the subnets for instance placement.

    Returns Any vm_id.
    """
    raise NotImplementedError
python
async def create_vm(self, preset_name: str, image: str, flavor: str, security_groups: List=None, userdata: Dict=None, key_name: str=None, availability_zone: str=None, subnets: List=None) -> Any: """ Create (boot) a new server. :arg string preset_name: Name of vm group where vm is created. :arg string image: Image name. :arg string flavor: Flavor (or instance_type in AWS) name. :arg list security_groups: A list of security group names. :arg dict userdata: A dict of arbitrary key/value metadata to store in grains. :arg string key_name: (optional extension) name of previously created keypair to inject into the instance. :arg string availability_zone: Name of the availability zone for instance placement. :arg string subnets: List of the subnets for instance placement. Returns Any vm_id. """ raise NotImplementedError
[ "async", "def", "create_vm", "(", "self", ",", "preset_name", ":", "str", ",", "image", ":", "str", ",", "flavor", ":", "str", ",", "security_groups", ":", "List", "=", "None", ",", "userdata", ":", "Dict", "=", "None", ",", "key_name", ":", "str", "=", "None", ",", "availability_zone", ":", "str", "=", "None", ",", "subnets", ":", "List", "=", "None", ")", "->", "Any", ":", "raise", "NotImplementedError" ]
Create (boot) a new server. :arg string preset_name: Name of vm group where vm is created. :arg string image: Image name. :arg string flavor: Flavor (or instance_type in AWS) name. :arg list security_groups: A list of security group names. :arg dict userdata: A dict of arbitrary key/value metadata to store in grains. :arg string key_name: (optional extension) name of previously created keypair to inject into the instance. :arg string availability_zone: Name of the availability zone for instance placement. :arg string subnets: List of the subnets for instance placement. Returns Any vm_id.
[ "Create", "(", "boot", ")", "a", "new", "server", "." ]
train
https://github.com/DreamLab/VmShepherd/blob/709a412c372b897d53808039c5c64a8b69c12c8d/src/vmshepherd/iaas/abstract.py#L26-L46
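Since create_vm above is abstract, the useful part is the signature contract. The following self-contained sketch shows a purely hypothetical in-memory driver honouring that signature; it deliberately does not inherit from the real AbstractIaasDriver, whose remaining abstract methods are not part of this record:

import asyncio
import uuid
from typing import Any, Dict, List

class InMemoryIaasDriver:
    """Hypothetical driver that only records what would have been booted."""

    def __init__(self):
        self.booted = {}

    async def create_vm(self, preset_name: str, image: str, flavor: str,
                        security_groups: List = None, userdata: Dict = None,
                        key_name: str = None, availability_zone: str = None,
                        subnets: List = None) -> Any:
        # A real driver would call its cloud API here; this one just stores the request.
        vm_id = str(uuid.uuid4())
        self.booted[vm_id] = {
            'preset': preset_name, 'image': image, 'flavor': flavor,
            'security_groups': security_groups or [], 'userdata': userdata or {},
            'key_name': key_name, 'availability_zone': availability_zone,
            'subnets': subnets or [],
        }
        return vm_id

async def main():
    driver = InMemoryIaasDriver()
    vm_id = await driver.create_vm('frontend', 'ubuntu-20.04', 'm1.small',
                                   security_groups=['default'])
    print(vm_id, driver.booted[vm_id]['flavor'])

asyncio.run(main())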
SetBased/py-stratum
pystratum/application/PyStratumApplication.py
PyStratumApplication.get_default_commands
def get_default_commands(self): """ Returns the default commands of this application. :rtype: list[cleo.Command] """ commands = Application.get_default_commands(self) self.add(ConstantsCommand()) self.add(LoaderCommand()) self.add(PyStratumCommand()) self.add(WrapperCommand()) return commands
python
def get_default_commands(self): """ Returns the default commands of this application. :rtype: list[cleo.Command] """ commands = Application.get_default_commands(self) self.add(ConstantsCommand()) self.add(LoaderCommand()) self.add(PyStratumCommand()) self.add(WrapperCommand()) return commands
[ "def", "get_default_commands", "(", "self", ")", ":", "commands", "=", "Application", ".", "get_default_commands", "(", "self", ")", "self", ".", "add", "(", "ConstantsCommand", "(", ")", ")", "self", ".", "add", "(", "LoaderCommand", "(", ")", ")", "self", ".", "add", "(", "PyStratumCommand", "(", ")", ")", "self", ".", "add", "(", "WrapperCommand", "(", ")", ")", "return", "commands" ]
Returns the default commands of this application. :rtype: list[cleo.Command]
[ "Returns", "the", "default", "commands", "of", "this", "application", "." ]
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/application/PyStratumApplication.py#L25-L38
bpannier/simpletr64
simpletr64/discover.py
Discover.discover
def discover(service="ssdp:all", timeout=1, retries=2, ipAddress="239.255.255.250", port=1900): """Discovers UPnP devices in the local network. Try to discover all devices in the local network which do support UPnP. The discovery process can fail for various reasons and it is recommended to do at least two discoveries, which you can specify with the ``retries`` parameter. The default ``service`` parameter tries to address all devices also if you know which kind of service type you are looking for you should set it as some devices do not respond or respond differently otherwise. :param service: the service type or list of service types of devices you look for :type service: str or list[str] :param float timeout: the socket timeout for each try :param int retries: how often should be a discovery request send :param str ipAddress: the multicast ip address to use :param int port: the port to use :return: a list of DiscoveryResponse objects or empty if no device was found :rtype: list[DiscoveryResponse] Example: :: results = discover() for result in results: print("Host: " + result.locationHost + " Port: " + result.locationPort + " Device definitions: " + \\ result.location) .. seealso:: :class:`~simpletr64.DiscoveryResponse`, :meth:`~simpletr64.Discover.discoverParticularHost` """ socket.setdefaulttimeout(timeout) messages = [] if isinstance(service, str): services = [service] elif isinstance(service, list): services = service for service in services: message = 'M-SEARCH * HTTP/1.1\r\nMX: 5\r\nMAN: "ssdp:discover"\r\nHOST: ' + \ ipAddress + ':' + str(port) + '\r\n' message += "ST: " + service + "\r\n\r\n" messages.append(message) responses = {} for _ in range(retries): # setup the socket sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 2) # noinspection PyAssignmentToLoopOrWithParameter for _ in range(2): # send the messages with different service types for message in messages: # send message more often to make sure all devices will get it sock.sendto(message.encode('utf-8'), (ipAddress, port)) while True: try: # read the message until timeout data = sock.recv(1024) except socket.timeout: break else: # no time out, read the response data and create response object response = DiscoveryResponse(data) # filter duplicated responses responses[response.location] = response # return a list of all responses return list(responses.values())
python
def discover(service="ssdp:all", timeout=1, retries=2, ipAddress="239.255.255.250", port=1900): """Discovers UPnP devices in the local network. Try to discover all devices in the local network which do support UPnP. The discovery process can fail for various reasons and it is recommended to do at least two discoveries, which you can specify with the ``retries`` parameter. The default ``service`` parameter tries to address all devices also if you know which kind of service type you are looking for you should set it as some devices do not respond or respond differently otherwise. :param service: the service type or list of service types of devices you look for :type service: str or list[str] :param float timeout: the socket timeout for each try :param int retries: how often should be a discovery request send :param str ipAddress: the multicast ip address to use :param int port: the port to use :return: a list of DiscoveryResponse objects or empty if no device was found :rtype: list[DiscoveryResponse] Example: :: results = discover() for result in results: print("Host: " + result.locationHost + " Port: " + result.locationPort + " Device definitions: " + \\ result.location) .. seealso:: :class:`~simpletr64.DiscoveryResponse`, :meth:`~simpletr64.Discover.discoverParticularHost` """ socket.setdefaulttimeout(timeout) messages = [] if isinstance(service, str): services = [service] elif isinstance(service, list): services = service for service in services: message = 'M-SEARCH * HTTP/1.1\r\nMX: 5\r\nMAN: "ssdp:discover"\r\nHOST: ' + \ ipAddress + ':' + str(port) + '\r\n' message += "ST: " + service + "\r\n\r\n" messages.append(message) responses = {} for _ in range(retries): # setup the socket sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 2) # noinspection PyAssignmentToLoopOrWithParameter for _ in range(2): # send the messages with different service types for message in messages: # send message more often to make sure all devices will get it sock.sendto(message.encode('utf-8'), (ipAddress, port)) while True: try: # read the message until timeout data = sock.recv(1024) except socket.timeout: break else: # no time out, read the response data and create response object response = DiscoveryResponse(data) # filter duplicated responses responses[response.location] = response # return a list of all responses return list(responses.values())
[ "def", "discover", "(", "service", "=", "\"ssdp:all\"", ",", "timeout", "=", "1", ",", "retries", "=", "2", ",", "ipAddress", "=", "\"239.255.255.250\"", ",", "port", "=", "1900", ")", ":", "socket", ".", "setdefaulttimeout", "(", "timeout", ")", "messages", "=", "[", "]", "if", "isinstance", "(", "service", ",", "str", ")", ":", "services", "=", "[", "service", "]", "elif", "isinstance", "(", "service", ",", "list", ")", ":", "services", "=", "service", "for", "service", "in", "services", ":", "message", "=", "'M-SEARCH * HTTP/1.1\\r\\nMX: 5\\r\\nMAN: \"ssdp:discover\"\\r\\nHOST: '", "+", "ipAddress", "+", "':'", "+", "str", "(", "port", ")", "+", "'\\r\\n'", "message", "+=", "\"ST: \"", "+", "service", "+", "\"\\r\\n\\r\\n\"", "messages", ".", "append", "(", "message", ")", "responses", "=", "{", "}", "for", "_", "in", "range", "(", "retries", ")", ":", "# setup the socket", "sock", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_DGRAM", ",", "socket", ".", "IPPROTO_UDP", ")", "sock", ".", "setsockopt", "(", "socket", ".", "SOL_SOCKET", ",", "socket", ".", "SO_REUSEADDR", ",", "1", ")", "sock", ".", "setsockopt", "(", "socket", ".", "IPPROTO_IP", ",", "socket", ".", "IP_MULTICAST_TTL", ",", "2", ")", "# noinspection PyAssignmentToLoopOrWithParameter", "for", "_", "in", "range", "(", "2", ")", ":", "# send the messages with different service types", "for", "message", "in", "messages", ":", "# send message more often to make sure all devices will get it", "sock", ".", "sendto", "(", "message", ".", "encode", "(", "'utf-8'", ")", ",", "(", "ipAddress", ",", "port", ")", ")", "while", "True", ":", "try", ":", "# read the message until timeout", "data", "=", "sock", ".", "recv", "(", "1024", ")", "except", "socket", ".", "timeout", ":", "break", "else", ":", "# no time out, read the response data and create response object", "response", "=", "DiscoveryResponse", "(", "data", ")", "# filter duplicated responses", "responses", "[", "response", ".", "location", "]", "=", "response", "# return a list of all responses", "return", "list", "(", "responses", ".", "values", "(", ")", ")" ]
Discovers UPnP devices in the local network. Try to discover all devices in the local network which do support UPnP. The discovery process can fail for various reasons and it is recommended to do at least two discoveries, which you can specify with the ``retries`` parameter. The default ``service`` parameter tries to address all devices also if you know which kind of service type you are looking for you should set it as some devices do not respond or respond differently otherwise. :param service: the service type or list of service types of devices you look for :type service: str or list[str] :param float timeout: the socket timeout for each try :param int retries: how often should be a discovery request send :param str ipAddress: the multicast ip address to use :param int port: the port to use :return: a list of DiscoveryResponse objects or empty if no device was found :rtype: list[DiscoveryResponse] Example: :: results = discover() for result in results: print("Host: " + result.locationHost + " Port: " + result.locationPort + " Device definitions: " + \\ result.location) .. seealso:: :class:`~simpletr64.DiscoveryResponse`, :meth:`~simpletr64.Discover.discoverParticularHost`
[ "Discovers", "UPnP", "devices", "in", "the", "local", "network", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/discover.py#L34-L111
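For reference, a stripped-down sketch of the SSDP exchange the discover method performs: one M-SEARCH datagram, then responses read until the socket times out. It omits retries, multiple service types, and the DiscoveryResponse parsing, and its output obviously depends on the local network:

import socket

MCAST_ADDR, MCAST_PORT = "239.255.255.250", 1900
message = ('M-SEARCH * HTTP/1.1\r\n'
           'HOST: %s:%d\r\n'
           'MAN: "ssdp:discover"\r\n'
           'MX: 5\r\n'
           'ST: ssdp:all\r\n\r\n' % (MCAST_ADDR, MCAST_PORT))

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
sock.settimeout(1.0)
sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 2)
sock.sendto(message.encode('utf-8'), (MCAST_ADDR, MCAST_PORT))

locations = set()
try:
    while True:
        data, addr = sock.recvfrom(1024)
        # Each reply is an HTTP-style header block; LOCATION points at the
        # device description XML that DiscoveryResponse would parse.
        for line in data.decode('utf-8', errors='replace').split('\r\n'):
            if line.lower().startswith('location:'):
                locations.add(line.split(':', 1)[1].strip())
except socket.timeout:
    pass

for location in sorted(locations):
    print(location)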
bpannier/simpletr64
simpletr64/discover.py
Discover.discoverParticularHost
def discoverParticularHost(host, service="ssdp:all", deviceDefinitionURL=None, timeout=1, retries=2, ipAddress="239.255.255.250", port=1900, proxies=None): """Discover a particular host and find the best response. This tries to find the most specific discovery result for the given host. Only the discovery result contains the URL to the XML tree which initializes the device definition. If an URL is already known it should be provided to avoid additional latency for a broader first device discovery. This method also do some magic to find the best result for the given host as UPnP devices behave sometimes strangely. This call is costly the result if any should be cached. :param str host: the host to find :param service: the service type or list of service types if known to search for :type service: str or list[str] :param str deviceDefinitionURL: if provided it is used to skip a first device discovery :param float timeout: the time to wait for each retry :param int retries: the amount of times how often the device is tried to discover :param str ipAddress: the multicast ip address to discover devices :param int port: the port to discover devices :param str proxies: proxy definition as defined here: `Proxy definition <http://docs.python-requests.org/en/latest/user/advanced/#proxies>`_ :return: If the device have been found the response is returned otherwise None :rtype: DiscoveryResponse :raises ValueError: if problems with reading or parsing the xml device definition occurs :raises requests.exceptions.ConnectionError: when the device definitions can not be downloaded :raises requests.exceptions.ConnectTimeout: when download time out Example: :: proxies = {"http": "http://localhost:8888"} result = discoverParticularHost("192.168.0.1", proxies=proxies) if result is not None: print("Host: " + result.locationHost + " Port: " + result.locationPort + " Device definitions: " + \\ result.location) .. seealso:: :class:`~simpletr64.DiscoveryResponse`, :meth:`~simpletr64.Discover.discover` """ # get all IP addresses for the given host ipResults = socket.getaddrinfo(host, 80) if len(ipResults) == 0: return None ipAddresses = [] # remember all ip addresses for the given host for ipAdrTupple in ipResults: ipAddresses.append(ipAdrTupple[4][0]) bestPick = None services = [] if deviceDefinitionURL is None: # no xml definition given, so lets search for one # search for all devices first discoverResults = Discover.discover(service=service, timeout=timeout, retries=retries, ipAddress=ipAddress, port=port) for result in discoverResults: if result.locationHost in ipAddresses: # now we found a result for that host, pick the best service type if multiple results for the host # are found if Discover.rateServiceTypeInResult(result) > Discover.rateServiceTypeInResult(bestPick): bestPick = result # remember all services if result.service not in services: services.append(result.service) if bestPick is None: return None else: # create response with given parameter bestPick = DiscoveryResponse.create(deviceDefinitionURL, service=service) # some routers do not advice their TR64 capabilities but their UPnp which is only a subset of actions. # Try to find out if the given XML definition path will give us a better service type. 
# load xml definition # some devices response differently without a User-Agent headers = {"User-Agent": "Mozilla/5.0; SimpleTR64-3"} request = requests.get(bestPick.location, proxies=proxies, headers=headers, timeout=float(timeout)) if request.status_code != 200: errorStr = DeviceTR64._extractErrorString(request) raise ValueError('Could not get CPE definitions for "' + bestPick.location + '": ' + str(request.status_code) + ' - ' + request.reason + " -- " + errorStr) # parse xml try: root = ET.fromstring(request.text.encode('utf-8')) except Exception as e: raise ValueError("Could not parse CPE definitions for '" + bestPick.location + "': " + str(e)) # find the first deviceType in the document tree for element in root.getiterator(): # check if element tag name ends on deviceType, skip XML namespace if element.tag.lower().endswith("devicetype"): serviceFound = element.text # remember the service found if it does not exist yet if serviceFound not in services: services.append(serviceFound) # create a specific service just to check if we found it already serviceFound = serviceFound.replace("schemas-upnp-org", "dslforum-org") # test if we already have the best service type then we dont need to do an other discovery request if serviceFound == bestPick.service: return bestPick for service in services: # we search for the specific device tyoe version as of specified in TR64 protocol. # some devices returns different results depending on the given service type, so lets be # very specific specificService = service.replace("schemas-upnp-org", "dslforum-org") if specificService not in services: services.append(specificService) # we do an other discovery request with more specific service/device type discoverResultsSpecific = Discover.discover(service=services, timeout=float(timeout), retries=retries, ipAddress=ipAddress, port=port) # iterate through all results to find the most specific one evenBetterPick = None for specificResult in discoverResultsSpecific: if specificResult.locationHost in ipAddresses: if Discover.rateServiceTypeInResult(specificResult) > \ Discover.rateServiceTypeInResult(evenBetterPick): evenBetterPick = specificResult if evenBetterPick is not None: # best we could find return evenBetterPick # we found first deviceType tag in the XML structure, no need to go further break if deviceDefinitionURL is not None: # we created our own response, so no result found return None # we found only an unspecific result, return it anyway return bestPick
python
def discoverParticularHost(host, service="ssdp:all", deviceDefinitionURL=None, timeout=1, retries=2, ipAddress="239.255.255.250", port=1900, proxies=None): """Discover a particular host and find the best response. This tries to find the most specific discovery result for the given host. Only the discovery result contains the URL to the XML tree which initializes the device definition. If an URL is already known it should be provided to avoid additional latency for a broader first device discovery. This method also do some magic to find the best result for the given host as UPnP devices behave sometimes strangely. This call is costly the result if any should be cached. :param str host: the host to find :param service: the service type or list of service types if known to search for :type service: str or list[str] :param str deviceDefinitionURL: if provided it is used to skip a first device discovery :param float timeout: the time to wait for each retry :param int retries: the amount of times how often the device is tried to discover :param str ipAddress: the multicast ip address to discover devices :param int port: the port to discover devices :param str proxies: proxy definition as defined here: `Proxy definition <http://docs.python-requests.org/en/latest/user/advanced/#proxies>`_ :return: If the device have been found the response is returned otherwise None :rtype: DiscoveryResponse :raises ValueError: if problems with reading or parsing the xml device definition occurs :raises requests.exceptions.ConnectionError: when the device definitions can not be downloaded :raises requests.exceptions.ConnectTimeout: when download time out Example: :: proxies = {"http": "http://localhost:8888"} result = discoverParticularHost("192.168.0.1", proxies=proxies) if result is not None: print("Host: " + result.locationHost + " Port: " + result.locationPort + " Device definitions: " + \\ result.location) .. seealso:: :class:`~simpletr64.DiscoveryResponse`, :meth:`~simpletr64.Discover.discover` """ # get all IP addresses for the given host ipResults = socket.getaddrinfo(host, 80) if len(ipResults) == 0: return None ipAddresses = [] # remember all ip addresses for the given host for ipAdrTupple in ipResults: ipAddresses.append(ipAdrTupple[4][0]) bestPick = None services = [] if deviceDefinitionURL is None: # no xml definition given, so lets search for one # search for all devices first discoverResults = Discover.discover(service=service, timeout=timeout, retries=retries, ipAddress=ipAddress, port=port) for result in discoverResults: if result.locationHost in ipAddresses: # now we found a result for that host, pick the best service type if multiple results for the host # are found if Discover.rateServiceTypeInResult(result) > Discover.rateServiceTypeInResult(bestPick): bestPick = result # remember all services if result.service not in services: services.append(result.service) if bestPick is None: return None else: # create response with given parameter bestPick = DiscoveryResponse.create(deviceDefinitionURL, service=service) # some routers do not advice their TR64 capabilities but their UPnp which is only a subset of actions. # Try to find out if the given XML definition path will give us a better service type. 
# load xml definition # some devices response differently without a User-Agent headers = {"User-Agent": "Mozilla/5.0; SimpleTR64-3"} request = requests.get(bestPick.location, proxies=proxies, headers=headers, timeout=float(timeout)) if request.status_code != 200: errorStr = DeviceTR64._extractErrorString(request) raise ValueError('Could not get CPE definitions for "' + bestPick.location + '": ' + str(request.status_code) + ' - ' + request.reason + " -- " + errorStr) # parse xml try: root = ET.fromstring(request.text.encode('utf-8')) except Exception as e: raise ValueError("Could not parse CPE definitions for '" + bestPick.location + "': " + str(e)) # find the first deviceType in the document tree for element in root.getiterator(): # check if element tag name ends on deviceType, skip XML namespace if element.tag.lower().endswith("devicetype"): serviceFound = element.text # remember the service found if it does not exist yet if serviceFound not in services: services.append(serviceFound) # create a specific service just to check if we found it already serviceFound = serviceFound.replace("schemas-upnp-org", "dslforum-org") # test if we already have the best service type then we dont need to do an other discovery request if serviceFound == bestPick.service: return bestPick for service in services: # we search for the specific device tyoe version as of specified in TR64 protocol. # some devices returns different results depending on the given service type, so lets be # very specific specificService = service.replace("schemas-upnp-org", "dslforum-org") if specificService not in services: services.append(specificService) # we do an other discovery request with more specific service/device type discoverResultsSpecific = Discover.discover(service=services, timeout=float(timeout), retries=retries, ipAddress=ipAddress, port=port) # iterate through all results to find the most specific one evenBetterPick = None for specificResult in discoverResultsSpecific: if specificResult.locationHost in ipAddresses: if Discover.rateServiceTypeInResult(specificResult) > \ Discover.rateServiceTypeInResult(evenBetterPick): evenBetterPick = specificResult if evenBetterPick is not None: # best we could find return evenBetterPick # we found first deviceType tag in the XML structure, no need to go further break if deviceDefinitionURL is not None: # we created our own response, so no result found return None # we found only an unspecific result, return it anyway return bestPick
[ "def", "discoverParticularHost", "(", "host", ",", "service", "=", "\"ssdp:all\"", ",", "deviceDefinitionURL", "=", "None", ",", "timeout", "=", "1", ",", "retries", "=", "2", ",", "ipAddress", "=", "\"239.255.255.250\"", ",", "port", "=", "1900", ",", "proxies", "=", "None", ")", ":", "# get all IP addresses for the given host", "ipResults", "=", "socket", ".", "getaddrinfo", "(", "host", ",", "80", ")", "if", "len", "(", "ipResults", ")", "==", "0", ":", "return", "None", "ipAddresses", "=", "[", "]", "# remember all ip addresses for the given host", "for", "ipAdrTupple", "in", "ipResults", ":", "ipAddresses", ".", "append", "(", "ipAdrTupple", "[", "4", "]", "[", "0", "]", ")", "bestPick", "=", "None", "services", "=", "[", "]", "if", "deviceDefinitionURL", "is", "None", ":", "# no xml definition given, so lets search for one", "# search for all devices first", "discoverResults", "=", "Discover", ".", "discover", "(", "service", "=", "service", ",", "timeout", "=", "timeout", ",", "retries", "=", "retries", ",", "ipAddress", "=", "ipAddress", ",", "port", "=", "port", ")", "for", "result", "in", "discoverResults", ":", "if", "result", ".", "locationHost", "in", "ipAddresses", ":", "# now we found a result for that host, pick the best service type if multiple results for the host", "# are found", "if", "Discover", ".", "rateServiceTypeInResult", "(", "result", ")", ">", "Discover", ".", "rateServiceTypeInResult", "(", "bestPick", ")", ":", "bestPick", "=", "result", "# remember all services", "if", "result", ".", "service", "not", "in", "services", ":", "services", ".", "append", "(", "result", ".", "service", ")", "if", "bestPick", "is", "None", ":", "return", "None", "else", ":", "# create response with given parameter", "bestPick", "=", "DiscoveryResponse", ".", "create", "(", "deviceDefinitionURL", ",", "service", "=", "service", ")", "# some routers do not advice their TR64 capabilities but their UPnp which is only a subset of actions.", "# Try to find out if the given XML definition path will give us a better service type.", "# load xml definition", "# some devices response differently without a User-Agent", "headers", "=", "{", "\"User-Agent\"", ":", "\"Mozilla/5.0; SimpleTR64-3\"", "}", "request", "=", "requests", ".", "get", "(", "bestPick", ".", "location", ",", "proxies", "=", "proxies", ",", "headers", "=", "headers", ",", "timeout", "=", "float", "(", "timeout", ")", ")", "if", "request", ".", "status_code", "!=", "200", ":", "errorStr", "=", "DeviceTR64", ".", "_extractErrorString", "(", "request", ")", "raise", "ValueError", "(", "'Could not get CPE definitions for \"'", "+", "bestPick", ".", "location", "+", "'\": '", "+", "str", "(", "request", ".", "status_code", ")", "+", "' - '", "+", "request", ".", "reason", "+", "\" -- \"", "+", "errorStr", ")", "# parse xml", "try", ":", "root", "=", "ET", ".", "fromstring", "(", "request", ".", "text", ".", "encode", "(", "'utf-8'", ")", ")", "except", "Exception", "as", "e", ":", "raise", "ValueError", "(", "\"Could not parse CPE definitions for '\"", "+", "bestPick", ".", "location", "+", "\"': \"", "+", "str", "(", "e", ")", ")", "# find the first deviceType in the document tree", "for", "element", "in", "root", ".", "getiterator", "(", ")", ":", "# check if element tag name ends on deviceType, skip XML namespace", "if", "element", ".", "tag", ".", "lower", "(", ")", ".", "endswith", "(", "\"devicetype\"", ")", ":", "serviceFound", "=", "element", ".", "text", "# remember the service found if it does not exist yet", "if", 
"serviceFound", "not", "in", "services", ":", "services", ".", "append", "(", "serviceFound", ")", "# create a specific service just to check if we found it already", "serviceFound", "=", "serviceFound", ".", "replace", "(", "\"schemas-upnp-org\"", ",", "\"dslforum-org\"", ")", "# test if we already have the best service type then we dont need to do an other discovery request", "if", "serviceFound", "==", "bestPick", ".", "service", ":", "return", "bestPick", "for", "service", "in", "services", ":", "# we search for the specific device tyoe version as of specified in TR64 protocol.", "# some devices returns different results depending on the given service type, so lets be", "# very specific", "specificService", "=", "service", ".", "replace", "(", "\"schemas-upnp-org\"", ",", "\"dslforum-org\"", ")", "if", "specificService", "not", "in", "services", ":", "services", ".", "append", "(", "specificService", ")", "# we do an other discovery request with more specific service/device type", "discoverResultsSpecific", "=", "Discover", ".", "discover", "(", "service", "=", "services", ",", "timeout", "=", "float", "(", "timeout", ")", ",", "retries", "=", "retries", ",", "ipAddress", "=", "ipAddress", ",", "port", "=", "port", ")", "# iterate through all results to find the most specific one", "evenBetterPick", "=", "None", "for", "specificResult", "in", "discoverResultsSpecific", ":", "if", "specificResult", ".", "locationHost", "in", "ipAddresses", ":", "if", "Discover", ".", "rateServiceTypeInResult", "(", "specificResult", ")", ">", "Discover", ".", "rateServiceTypeInResult", "(", "evenBetterPick", ")", ":", "evenBetterPick", "=", "specificResult", "if", "evenBetterPick", "is", "not", "None", ":", "# best we could find", "return", "evenBetterPick", "# we found first deviceType tag in the XML structure, no need to go further", "break", "if", "deviceDefinitionURL", "is", "not", "None", ":", "# we created our own response, so no result found", "return", "None", "# we found only an unspecific result, return it anyway", "return", "bestPick" ]
Discover a particular host and find the best response. This tries to find the most specific discovery result for the given host. Only the discovery result contains the URL to the XML tree which initializes the device definition. If an URL is already known it should be provided to avoid additional latency for a broader first device discovery. This method also do some magic to find the best result for the given host as UPnP devices behave sometimes strangely. This call is costly the result if any should be cached. :param str host: the host to find :param service: the service type or list of service types if known to search for :type service: str or list[str] :param str deviceDefinitionURL: if provided it is used to skip a first device discovery :param float timeout: the time to wait for each retry :param int retries: the amount of times how often the device is tried to discover :param str ipAddress: the multicast ip address to discover devices :param int port: the port to discover devices :param str proxies: proxy definition as defined here: `Proxy definition <http://docs.python-requests.org/en/latest/user/advanced/#proxies>`_ :return: If the device have been found the response is returned otherwise None :rtype: DiscoveryResponse :raises ValueError: if problems with reading or parsing the xml device definition occurs :raises requests.exceptions.ConnectionError: when the device definitions can not be downloaded :raises requests.exceptions.ConnectTimeout: when download time out Example: :: proxies = {"http": "http://localhost:8888"} result = discoverParticularHost("192.168.0.1", proxies=proxies) if result is not None: print("Host: " + result.locationHost + " Port: " + result.locationPort + " Device definitions: " + \\ result.location) .. seealso:: :class:`~simpletr64.DiscoveryResponse`, :meth:`~simpletr64.Discover.discover`
[ "Discover", "a", "particular", "host", "and", "find", "the", "best", "response", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/discover.py#L114-L265
bpannier/simpletr64
simpletr64/discover.py
Discover.rateServiceTypeInResult
def rateServiceTypeInResult(discoveryResponse): """Gives a quality rating for a given service type in a result, higher is better. Several UpnP devices reply to a discovery request with multiple responses with different service type announcements. To find the most specific one we need to be able rate the service types against each other. Usually this is an internal method and just exported for convenience reasons. :param DiscoveryResponse discoveryResponse: the response to rate :return: a rating of the quality of the given service type :rtype: int """ if discoveryResponse is None: return 0 serviceType = discoveryResponse.service if serviceType.startswith("urn:dslforum-org:device"): return 11 if serviceType.startswith("urn:dslforum-org:service"): return 10 if serviceType.startswith("urn:dslforum-org:"): return 9 if serviceType.startswith("urn:schemas-upnp-org:device"): return 8 if serviceType.startswith("urn:schemas-upnp-org:service"): return 7 if serviceType.startswith("urn:schemas-upnp-org:"): return 6 if serviceType.startswith("urn:schemas-"): # other schemas, schema-any-com for example return 5 if serviceType.startswith("urn:"): return 4 if serviceType.startswith("upnp:rootdevice"): return 3 if serviceType.startswith("uuid:"): # no service, just the uuid given return 2 return 1
python
def rateServiceTypeInResult(discoveryResponse): """Gives a quality rating for a given service type in a result, higher is better. Several UpnP devices reply to a discovery request with multiple responses with different service type announcements. To find the most specific one we need to be able rate the service types against each other. Usually this is an internal method and just exported for convenience reasons. :param DiscoveryResponse discoveryResponse: the response to rate :return: a rating of the quality of the given service type :rtype: int """ if discoveryResponse is None: return 0 serviceType = discoveryResponse.service if serviceType.startswith("urn:dslforum-org:device"): return 11 if serviceType.startswith("urn:dslforum-org:service"): return 10 if serviceType.startswith("urn:dslforum-org:"): return 9 if serviceType.startswith("urn:schemas-upnp-org:device"): return 8 if serviceType.startswith("urn:schemas-upnp-org:service"): return 7 if serviceType.startswith("urn:schemas-upnp-org:"): return 6 if serviceType.startswith("urn:schemas-"): # other schemas, schema-any-com for example return 5 if serviceType.startswith("urn:"): return 4 if serviceType.startswith("upnp:rootdevice"): return 3 if serviceType.startswith("uuid:"): # no service, just the uuid given return 2 return 1
[ "def", "rateServiceTypeInResult", "(", "discoveryResponse", ")", ":", "if", "discoveryResponse", "is", "None", ":", "return", "0", "serviceType", "=", "discoveryResponse", ".", "service", "if", "serviceType", ".", "startswith", "(", "\"urn:dslforum-org:device\"", ")", ":", "return", "11", "if", "serviceType", ".", "startswith", "(", "\"urn:dslforum-org:service\"", ")", ":", "return", "10", "if", "serviceType", ".", "startswith", "(", "\"urn:dslforum-org:\"", ")", ":", "return", "9", "if", "serviceType", ".", "startswith", "(", "\"urn:schemas-upnp-org:device\"", ")", ":", "return", "8", "if", "serviceType", ".", "startswith", "(", "\"urn:schemas-upnp-org:service\"", ")", ":", "return", "7", "if", "serviceType", ".", "startswith", "(", "\"urn:schemas-upnp-org:\"", ")", ":", "return", "6", "if", "serviceType", ".", "startswith", "(", "\"urn:schemas-\"", ")", ":", "# other schemas, schema-any-com for example", "return", "5", "if", "serviceType", ".", "startswith", "(", "\"urn:\"", ")", ":", "return", "4", "if", "serviceType", ".", "startswith", "(", "\"upnp:rootdevice\"", ")", ":", "return", "3", "if", "serviceType", ".", "startswith", "(", "\"uuid:\"", ")", ":", "# no service, just the uuid given", "return", "2", "return", "1" ]
Gives a quality rating for a given service type in a result, higher is better. Several UpnP devices reply to a discovery request with multiple responses with different service type announcements. To find the most specific one we need to be able to rate the service types against each other. Usually this is an internal method and just exported for convenience reasons. :param DiscoveryResponse discoveryResponse: the response to rate :return: a rating of the quality of the given service type :rtype: int
[ "Gives", "a", "quality", "rating", "for", "a", "given", "service", "type", "in", "a", "result", "higher", "is", "better", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/discover.py#L268-L304
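The rating above is essentially a prefix preference list. A standalone sketch (a simplified re-expression, not the library code) shows the same ordering and how it can be used to pick the most specific announcement among several for one host:

# Prefixes ordered from most to least specific, mirroring the method above.
PREFERENCE = [
    "urn:dslforum-org:device",
    "urn:dslforum-org:service",
    "urn:dslforum-org:",
    "urn:schemas-upnp-org:device",
    "urn:schemas-upnp-org:service",
    "urn:schemas-upnp-org:",
    "urn:schemas-",
    "urn:",
    "upnp:rootdevice",
    "uuid:",
]

def rate(service_type):
    for position, prefix in enumerate(PREFERENCE):
        if service_type.startswith(prefix):
            return len(PREFERENCE) - position + 1   # 11 down to 2, as above
    return 1                                        # anything else

announcements = [
    "upnp:rootdevice",
    "urn:schemas-upnp-org:device:InternetGatewayDevice:1",
    "urn:dslforum-org:device:InternetGatewayDevice:1",
]
print(max(announcements, key=rate))
# -> urn:dslforum-org:device:InternetGatewayDevice:1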
vsoch/helpme
helpme/main/base/http.py
download
def download(self, url, file_name, headers=None, show_progress=True): '''stream to a temporary file, rename on successful completion Parameters ========== file_name: the file name to stream to url: the url to stream from headers: additional headers to add force: If the final image exists, don't overwrite ''' fd, tmp_file = tempfile.mkstemp(prefix=("%s.tmp." % file_name)) os.close(fd) # Should we verify the request? verify = self._verify() # Check here if exists if requests.head(url, verify=verify).status_code in [200, 401]: response = self.stream(url, headers=headers, stream_to=tmp_file) if isinstance(response, HTTPError): bot.error("Error downloading %s, exiting." %url) sys.exit(1) shutil.move(tmp_file, file_name) else: bot.error("Invalid url or permissions %s" %url) return file_name
python
def download(self, url, file_name, headers=None, show_progress=True): '''stream to a temporary file, rename on successful completion Parameters ========== file_name: the file name to stream to url: the url to stream from headers: additional headers to add force: If the final image exists, don't overwrite ''' fd, tmp_file = tempfile.mkstemp(prefix=("%s.tmp." % file_name)) os.close(fd) # Should we verify the request? verify = self._verify() # Check here if exists if requests.head(url, verify=verify).status_code in [200, 401]: response = self.stream(url, headers=headers, stream_to=tmp_file) if isinstance(response, HTTPError): bot.error("Error downloading %s, exiting." %url) sys.exit(1) shutil.move(tmp_file, file_name) else: bot.error("Invalid url or permissions %s" %url) return file_name
[ "def", "download", "(", "self", ",", "url", ",", "file_name", ",", "headers", "=", "None", ",", "show_progress", "=", "True", ")", ":", "fd", ",", "tmp_file", "=", "tempfile", ".", "mkstemp", "(", "prefix", "=", "(", "\"%s.tmp.\"", "%", "file_name", ")", ")", "os", ".", "close", "(", "fd", ")", "# Should we verify the request?", "verify", "=", "self", ".", "_verify", "(", ")", "# Check here if exists", "if", "requests", ".", "head", "(", "url", ",", "verify", "=", "verify", ")", ".", "status_code", "in", "[", "200", ",", "401", "]", ":", "response", "=", "self", ".", "stream", "(", "url", ",", "headers", "=", "headers", ",", "stream_to", "=", "tmp_file", ")", "if", "isinstance", "(", "response", ",", "HTTPError", ")", ":", "bot", ".", "error", "(", "\"Error downloading %s, exiting.\"", "%", "url", ")", "sys", ".", "exit", "(", "1", ")", "shutil", ".", "move", "(", "tmp_file", ",", "file_name", ")", "else", ":", "bot", ".", "error", "(", "\"Invalid url or permissions %s\"", "%", "url", ")", "return", "file_name" ]
stream to a temporary file, rename on successful completion Parameters ========== file_name: the file name to stream to url: the url to stream from headers: additional headers to add force: If the final image exists, don't overwrite
[ "stream", "to", "a", "temporary", "file", "rename", "on", "successful", "completion" ]
train
https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/main/base/http.py#L172-L204
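A compact standalone sketch of the same pattern, stream to a temporary file and rename only on success, without the helper class, certificate handling, or progress bar (the URL in the commented call is a placeholder):

import os
import shutil
import tempfile

import requests

def download(url, file_name, chunk_size=1 << 20, timeout=30):
    # Stream into a sibling temporary file so a half-finished download
    # never ends up under the final name.
    fd, tmp_file = tempfile.mkstemp(prefix="%s.tmp." % os.path.basename(file_name),
                                    dir=os.path.dirname(file_name) or ".")
    os.close(fd)
    try:
        with requests.get(url, stream=True, timeout=timeout) as response:
            response.raise_for_status()
            with open(tmp_file, "wb") as handle:
                for chunk in response.iter_content(chunk_size=chunk_size):
                    handle.write(chunk)
        shutil.move(tmp_file, file_name)
    finally:
        if os.path.exists(tmp_file):
            os.remove(tmp_file)
    return file_name

# download("https://example.com/archive.tar.gz", "archive.tar.gz")  # placeholder URL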
vsoch/helpme
helpme/main/base/http.py
stream_response
def stream_response(self, response, stream_to=None): ''' stream response is one level higher up than stream, starting with a response object and then performing the stream without making the requests.get. The expectation is that the request was successful (status code 20*). Parameters ========== response: a response that is ready to be iterated over to download in streamed chunks stream_to: the file to stream to ''' if response.status_code == 200: # Keep user updated with Progress Bar content_size = None if 'Content-Length' in response.headers: progress = 0 content_size = int(response.headers['Content-Length']) bot.show_progress(progress, content_size, length=35) chunk_size = 1 << 20 with open(stream_to, 'wb') as filey: for chunk in response.iter_content(chunk_size=chunk_size): filey.write(chunk) if content_size is not None: progress+=chunk_size bot.show_progress(iteration=progress, total=content_size, length=35, carriage_return=False) # Newline to finish download sys.stdout.write('\n') return stream_to bot.error("Problem with stream, response %s" %(response.status_code)) sys.exit(1)
python
def stream_response(self, response, stream_to=None): ''' stream response is one level higher up than stream, starting with a response object and then performing the stream without making the requests.get. The expectation is that the request was successful (status code 20*). Parameters ========== response: a response that is ready to be iterated over to download in streamed chunks stream_to: the file to stream to ''' if response.status_code == 200: # Keep user updated with Progress Bar content_size = None if 'Content-Length' in response.headers: progress = 0 content_size = int(response.headers['Content-Length']) bot.show_progress(progress, content_size, length=35) chunk_size = 1 << 20 with open(stream_to, 'wb') as filey: for chunk in response.iter_content(chunk_size=chunk_size): filey.write(chunk) if content_size is not None: progress+=chunk_size bot.show_progress(iteration=progress, total=content_size, length=35, carriage_return=False) # Newline to finish download sys.stdout.write('\n') return stream_to bot.error("Problem with stream, response %s" %(response.status_code)) sys.exit(1)
[ "def", "stream_response", "(", "self", ",", "response", ",", "stream_to", "=", "None", ")", ":", "if", "response", ".", "status_code", "==", "200", ":", "# Keep user updated with Progress Bar", "content_size", "=", "None", "if", "'Content-Length'", "in", "response", ".", "headers", ":", "progress", "=", "0", "content_size", "=", "int", "(", "response", ".", "headers", "[", "'Content-Length'", "]", ")", "bot", ".", "show_progress", "(", "progress", ",", "content_size", ",", "length", "=", "35", ")", "chunk_size", "=", "1", "<<", "20", "with", "open", "(", "stream_to", ",", "'wb'", ")", "as", "filey", ":", "for", "chunk", "in", "response", ".", "iter_content", "(", "chunk_size", "=", "chunk_size", ")", ":", "filey", ".", "write", "(", "chunk", ")", "if", "content_size", "is", "not", "None", ":", "progress", "+=", "chunk_size", "bot", ".", "show_progress", "(", "iteration", "=", "progress", ",", "total", "=", "content_size", ",", "length", "=", "35", ",", "carriage_return", "=", "False", ")", "# Newline to finish download", "sys", ".", "stdout", ".", "write", "(", "'\\n'", ")", "return", "stream_to", "bot", ".", "error", "(", "\"Problem with stream, response %s\"", "%", "(", "response", ".", "status_code", ")", ")", "sys", ".", "exit", "(", "1", ")" ]
stream response is one level higher up than stream, starting with a response object and then performing the stream without making the requests.get. The expectation is that the request was successful (status code 20*). Parameters ========== response: a response that is ready to be iterated over to download in streamed chunks stream_to: the file to stream to
[ "stream", "response", "is", "one", "level", "higher", "up", "than", "stream", "starting", "with", "a", "response", "object", "and", "then", "performing", "the", "stream", "without", "making", "the", "requests", ".", "get", ".", "The", "expectation", "is", "that", "the", "request", "was", "successful", "(", "status", "code", "20", "*", ")", "." ]
train
https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/main/base/http.py#L255-L297
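The record above relies on a bot.show_progress helper that is not part of this record; a hypothetical minimal equivalent, fed with the running byte count and the Content-Length total, could look like this:

import sys

def show_progress(iteration, total, length=35):
    # Hypothetical stand-in: draws a fixed-width bar plus a percentage,
    # rewriting the same terminal line via a carriage return.
    fraction = min(1.0, float(iteration) / total) if total else 0.0
    filled = int(length * fraction)
    bar = "=" * filled + "-" * (length - filled)
    sys.stdout.write("\r[%s] %5.1f%%" % (bar, 100.0 * fraction))
    sys.stdout.flush()

total = 5 * (1 << 20)                      # pretend Content-Length of 5 MiB
for done in range(0, total + 1, 1 << 20):  # pretend 1 MiB chunks
    show_progress(done, total)
sys.stdout.write("\n")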
bpannier/simpletr64
simpletr64/devicetr64.py
DeviceTR64.getSCDPURL
def getSCDPURL(self, serviceType, default=None): """Returns the SCDP (Service Control Protocol Document) URL for a given service type. When the device definitions have been loaded with :meth:`~simpletr64.DeviceTR64.loadDeviceDefinitions` this method returns for a given service type/namespace the associated URL to the SCDP. If the device definitions have not been loaded a default value can be given which gets returned instead. The SCDP specifies all the interaction functionality a device provides. :param serviceType: the service type to look up for :param default: the default return value in case the service type is not found and device definitions are not loaded :type default: str or None :return: the URL/URI :rtype: str or None :raises ValueError: if the device did load device definitions and the service type is not known. .. seealso:: :meth:`~simpletr64.DeviceTR64.loadDeviceDefinitions` """ if serviceType in self.__deviceServiceDefinitions.keys(): return self.__deviceServiceDefinitions[serviceType]["scpdURL"] # check if definitions have been loaded, then dont return the default if self.__deviceXMLInitialized: raise ValueError("Device do not support given serviceType: " + serviceType) return default
python
def getSCDPURL(self, serviceType, default=None): """Returns the SCDP (Service Control Protocol Document) URL for a given service type. When the device definitions have been loaded with :meth:`~simpletr64.DeviceTR64.loadDeviceDefinitions` this method returns for a given service type/namespace the associated URL to the SCDP. If the device definitions have not been loaded a default value can be given which gets returned instead. The SCDP specifies all the interaction functionality a device provides. :param serviceType: the service type to look up for :param default: the default return value in case the service type is not found and device definitions are not loaded :type default: str or None :return: the URL/URI :rtype: str or None :raises ValueError: if the device did load device definitions and the service type is not known. .. seealso:: :meth:`~simpletr64.DeviceTR64.loadDeviceDefinitions` """ if serviceType in self.__deviceServiceDefinitions.keys(): return self.__deviceServiceDefinitions[serviceType]["scpdURL"] # check if definitions have been loaded, then dont return the default if self.__deviceXMLInitialized: raise ValueError("Device do not support given serviceType: " + serviceType) return default
[ "def", "getSCDPURL", "(", "self", ",", "serviceType", ",", "default", "=", "None", ")", ":", "if", "serviceType", "in", "self", ".", "__deviceServiceDefinitions", ".", "keys", "(", ")", ":", "return", "self", ".", "__deviceServiceDefinitions", "[", "serviceType", "]", "[", "\"scpdURL\"", "]", "# check if definitions have been loaded, then dont return the default", "if", "self", ".", "__deviceXMLInitialized", ":", "raise", "ValueError", "(", "\"Device do not support given serviceType: \"", "+", "serviceType", ")", "return", "default" ]
Returns the SCDP (Service Control Protocol Document) URL for a given service type. When the device definitions have been loaded with :meth:`~simpletr64.DeviceTR64.loadDeviceDefinitions` this method returns for a given service type/namespace the associated URL to the SCDP. If the device definitions have not been loaded a default value can be given which gets returned instead. The SCDP specifies all the interaction functionality a device provides. :param serviceType: the service type to look up for :param default: the default return value in case the service type is not found and device definitions are not loaded :type default: str or None :return: the URL/URI :rtype: str or None :raises ValueError: if the device did load device definitions and the service type is not known. .. seealso:: :meth:`~simpletr64.DeviceTR64.loadDeviceDefinitions`
[ "Returns", "the", "SCDP", "(", "Service", "Control", "Protocol", "Document", ")", "URL", "for", "a", "given", "service", "type", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/devicetr64.py#L270-L297
bpannier/simpletr64
simpletr64/devicetr64.py
DeviceTR64.getControlURL
def getControlURL(self, serviceType, default=None): """Returns the control URL for a given service type. When the device definitions have been loaded with :meth:`~simpletr64.DeviceTR64.loadDeviceDefinitions` this method returns for a given service type/namespace the associated control URL. If the device definitions have not been loaded a default value can be given which gets returned instead. The control URL is used to execute actions for a dedicated service type/namespace. :param serviceType: the service type to look up for :param default: the default return value in case the service type is not found and device definitions are not loaded :type default: str or None :return: the URL/URI :rtype: str or None :raises ValueError: if the device did load device definitions and the service type is not known. .. seealso:: :meth:`~simpletr64.DeviceTR64.loadDeviceDefinitions` """ if serviceType in self.__deviceServiceDefinitions.keys(): return self.__deviceServiceDefinitions[serviceType]["controlURL"] # check if definitions have been loaded, then dont return the default if self.__deviceXMLInitialized: raise ValueError("Device do not support given serviceType: " + serviceType) return default
python
def getControlURL(self, serviceType, default=None): """Returns the control URL for a given service type. When the device definitions have been loaded with :meth:`~simpletr64.DeviceTR64.loadDeviceDefinitions` this method returns for a given service type/namespace the associated control URL. If the device definitions have not been loaded a default value can be given which gets returned instead. The control URL is used to execute actions for a dedicated service type/namespace. :param serviceType: the service type to look up for :param default: the default return value in case the service type is not found and device definitions are not loaded :type default: str or None :return: the URL/URI :rtype: str or None :raises ValueError: if the device did load device definitions and the service type is not known. .. seealso:: :meth:`~simpletr64.DeviceTR64.loadDeviceDefinitions` """ if serviceType in self.__deviceServiceDefinitions.keys(): return self.__deviceServiceDefinitions[serviceType]["controlURL"] # check if definitions have been loaded, then dont return the default if self.__deviceXMLInitialized: raise ValueError("Device do not support given serviceType: " + serviceType) return default
[ "def", "getControlURL", "(", "self", ",", "serviceType", ",", "default", "=", "None", ")", ":", "if", "serviceType", "in", "self", ".", "__deviceServiceDefinitions", ".", "keys", "(", ")", ":", "return", "self", ".", "__deviceServiceDefinitions", "[", "serviceType", "]", "[", "\"controlURL\"", "]", "# check if definitions have been loaded, then dont return the default", "if", "self", ".", "__deviceXMLInitialized", ":", "raise", "ValueError", "(", "\"Device do not support given serviceType: \"", "+", "serviceType", ")", "return", "default" ]
Returns the control URL for a given service type. When the device definitions have been loaded with :meth:`~simpletr64.DeviceTR64.loadDeviceDefinitions` this method returns for a given service type/namespace the associated control URL. If the device definitions have not been loaded a default value can be given which gets returned instead. The control URL is used to execute actions for a dedicated service type/namespace. :param serviceType: the service type to look up for :param default: the default return value in case the service type is not found and device definitions are not loaded :type default: str or None :return: the URL/URI :rtype: str or None :raises ValueError: if the device did load device definitions and the service type is not known. .. seealso:: :meth:`~simpletr64.DeviceTR64.loadDeviceDefinitions`
[ "Returns", "the", "control", "URL", "for", "a", "given", "service", "type", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/devicetr64.py#L299-L326
bpannier/simpletr64
simpletr64/devicetr64.py
DeviceTR64.getEventSubURL
def getEventSubURL(self, serviceType, default=None): """Returns the event URL for a given service type. When the device definitions have been loaded with :meth:`~simpletr64.DeviceTR64.loadDeviceDefinitions` this method returns for a given service type/namespace the associated event URL. If the device definitions have not been loaded a default value can be given which gets returned instead. :param serviceType: the service type to look up for :param default: the default return value in case the service type is not found and device definitions are not loaded :type default: str or None :return: the URL/URI :rtype: str or None :raises ValueError: if the device did load device definitions and the service type is not known. .. seealso:: :meth:`~simpletr64.DeviceTR64.loadDeviceDefinitions` """ if serviceType in self.__deviceServiceDefinitions.keys(): return self.__deviceServiceDefinitions[serviceType]["eventSubURL"] # check if definitions have been loaded, then dont return the default if self.__deviceXMLInitialized: raise ValueError("Device do not support given serviceType: " + serviceType) return default
python
def getEventSubURL(self, serviceType, default=None): """Returns the event URL for a given service type. When the device definitions have been loaded with :meth:`~simpletr64.DeviceTR64.loadDeviceDefinitions` this method returns for a given service type/namespace the associated event URL. If the device definitions have not been loaded a default value can be given which gets returned instead. :param serviceType: the service type to look up for :param default: the default return value in case the service type is not found and device definitions are not loaded :type default: str or None :return: the URL/URI :rtype: str or None :raises ValueError: if the device did load device definitions and the service type is not known. .. seealso:: :meth:`~simpletr64.DeviceTR64.loadDeviceDefinitions` """ if serviceType in self.__deviceServiceDefinitions.keys(): return self.__deviceServiceDefinitions[serviceType]["eventSubURL"] # check if definitions have been loaded, then dont return the default if self.__deviceXMLInitialized: raise ValueError("Device do not support given serviceType: " + serviceType) return default
[ "def", "getEventSubURL", "(", "self", ",", "serviceType", ",", "default", "=", "None", ")", ":", "if", "serviceType", "in", "self", ".", "__deviceServiceDefinitions", ".", "keys", "(", ")", ":", "return", "self", ".", "__deviceServiceDefinitions", "[", "serviceType", "]", "[", "\"eventSubURL\"", "]", "# check if definitions have been loaded, then dont return the default", "if", "self", ".", "__deviceXMLInitialized", ":", "raise", "ValueError", "(", "\"Device do not support given serviceType: \"", "+", "serviceType", ")", "return", "default" ]
Returns the event URL for a given service type. When the device definitions have been loaded with :meth:`~simpletr64.DeviceTR64.loadDeviceDefinitions` this method returns for a given service type/namespace the associated event URL. If the device definitions have not been loaded a default value can be given which gets returned instead. :param serviceType: the service type to look up for :param default: the default return value in case the service type is not found and device definitions are not loaded :type default: str or None :return: the URL/URI :rtype: str or None :raises ValueError: if the device did load device definitions and the service type is not known. .. seealso:: :meth:`~simpletr64.DeviceTR64.loadDeviceDefinitions`
[ "Returns", "the", "event", "URL", "for", "a", "given", "service", "type", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/devicetr64.py#L328-L354
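The three getters above (getSCDPURL, getControlURL, getEventSubURL) share one lookup-with-default pattern over the per-service dictionary filled by loadDeviceDefinitions. A standalone sketch of that pattern with made-up example data:

# Example service map with the same keys the getters read; values are made up.
service_definitions = {
    "urn:dslforum-org:service:Hosts:1": {
        "scpdURL": "/hostsSCPD.xml",
        "controlURL": "/upnp/control/hosts",
        "eventSubURL": "/upnp/event/hosts",
    }
}
definitions_loaded = True   # would be set after loading device definitions

def lookup(service_type, key, default=None):
    if service_type in service_definitions:
        return service_definitions[service_type][key]
    # Mirror the behaviour above: once definitions are loaded, an unknown
    # service type raises instead of silently returning the default.
    if definitions_loaded:
        raise ValueError("Device does not support given serviceType: " + service_type)
    return default

print(lookup("urn:dslforum-org:service:Hosts:1", "controlURL"))   # /upnp/control/hosts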
bpannier/simpletr64
simpletr64/devicetr64.py
DeviceTR64.execute
def execute(self, uri, namespace, action, timeout=2, **kwargs): """Executes a given action with optional arguments. The execution of an action of an UPnP/TR64 device needs more than just the name of an action. It needs the control URI which is called to place the action and also the namespace aka service type is needed. The namespace defines the scope or service type of the given action, the same action name can appear in different namespaces. The way how to obtain the needed information's is either through the documentation of the vendor of the device. Or through a discovery requests which return's the URL to the root device description XML. :param str uri: the control URI, for example ``/upnp/control/hosts`` :param str namespace: the namespace for the given action, for example ``urn:dslforum-org:service:Hosts:1`` :param str action: the name of the action to call, for example ``GetGenericHostEntry`` :param float timeout: the timeout to wait for the action to be executed :param kwargs: optional arguments for the given action, depends if the action needs parameter. The arguments are given as dict where the key is the parameter name and the value the value of the parameter. :type kwargs: dict[str, str] :return: returns the results of the action, if any. The results are structured as dict where the key is the name of the result argument and the value is the value of the result. :rtype: dict[str,str] :raises ValueError: if parameters are not set correctly :raises requests.exceptions.ConnectionError: when the action can not be placed on the device :raises requests.exceptions.ConnectTimeout: when download time out Example: :: device = DeviceTR64(...) device.execute("/upnp/control/hosts", "urn:dslforum-org:service:Hosts:1", "GetGenericHostEntry", {"NewIndex": 1}) {'NewActive': '0', 'NewIPAddress': '192.168.0.23', 'NewMACAddress': '9C:20:7B:E7:FF:5F', 'NewInterfaceType': 'Ethernet', 'NewHostName': 'Apple-TV', 'NewAddressSource': 'DHCP', 'NewLeaseTimeRemaining': '0'} .. 
seealso:: `Additional short explanation of the UPnP protocol <http://www.upnp-hacks.org/upnp.html>`_ :class:`~simpletr64.Discover`, :meth:`~simpletr64.DeviceTR64.loadDeviceDefinitions`, :meth:`~simpletr64.DeviceTR64.loadSCPD` """ if not uri: raise ValueError("No action URI has been defined.") if not namespace: raise ValueError("No namespace has been defined.") if not action: raise ValueError("No action has been defined.") # soap headers header = {'Content-Type': 'text/xml; charset="UTF-8"', 'Soapaction': '"' + namespace + "#" + action + '"'} # build SOAP body body = '''<?xml version="1.0" encoding="UTF-8"?> <s:Envelope s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/" xmlns:s="http://schemas.xmlsoap.org/soap/envelope/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"> <s:Header/> <s:Body>\n''' body += " <u:" + action + ' xmlns="' + namespace + '">\n' arguments = {} for key in kwargs.keys(): body += " <" + key + ">" + str(kwargs[key]) + "</" + key + ">\n" arguments[key] = str(kwargs[key]) body += " </u:" + action + ">\n" body += '''</s:Body> </s:Envelope>''' # setup proxies proxies = {} if self.__httpsProxy: proxies = {"https": self.__httpsProxy} if self.__httpProxy: proxies = {"http": self.__httpProxy} # setup authentication auth = None if self.__password: auth = HTTPDigestAuth(self.__username, self.__password) # build the URL location = self.__protocol + "://" + self.__hostname + ":" + str(self.port) + uri # Post http request request = requests.post(location, data=body, headers=header, auth=auth, proxies=proxies, timeout=float(timeout), verify=self.__verify) if request.status_code != 200: errorStr = DeviceTR64._extractErrorString(request) raise ValueError('Could not execute "' + action + str(arguments) + '": ' + str(request.status_code) + ' - ' + request.reason + " -- " + errorStr) # parse XML return try: root = ET.fromstring(request.text.encode('utf-8')) except Exception as e: raise ValueError("Can not parse results for the action: " + str(e)) # iterate in the XML structure to get the action result actionNode = root[0][0] # we need to remove XML namespace for the action node namespaceLength = len(namespace) + 2 # add braces tag = actionNode.tag[namespaceLength:] if tag != (action + "Response"): raise ValueError('Soap result structure is wrong, expected action "' + action + 'Response" got "' + tag + '".') # pack all the received results results = {} for resultNode in actionNode: results[resultNode.tag] = resultNode.text return results
python
def execute(self, uri, namespace, action, timeout=2, **kwargs): """Executes a given action with optional arguments. The execution of an action of an UPnP/TR64 device needs more than just the name of an action. It needs the control URI which is called to place the action and also the namespace aka service type is needed. The namespace defines the scope or service type of the given action, the same action name can appear in different namespaces. The way how to obtain the needed information's is either through the documentation of the vendor of the device. Or through a discovery requests which return's the URL to the root device description XML. :param str uri: the control URI, for example ``/upnp/control/hosts`` :param str namespace: the namespace for the given action, for example ``urn:dslforum-org:service:Hosts:1`` :param str action: the name of the action to call, for example ``GetGenericHostEntry`` :param float timeout: the timeout to wait for the action to be executed :param kwargs: optional arguments for the given action, depends if the action needs parameter. The arguments are given as dict where the key is the parameter name and the value the value of the parameter. :type kwargs: dict[str, str] :return: returns the results of the action, if any. The results are structured as dict where the key is the name of the result argument and the value is the value of the result. :rtype: dict[str,str] :raises ValueError: if parameters are not set correctly :raises requests.exceptions.ConnectionError: when the action can not be placed on the device :raises requests.exceptions.ConnectTimeout: when download time out Example: :: device = DeviceTR64(...) device.execute("/upnp/control/hosts", "urn:dslforum-org:service:Hosts:1", "GetGenericHostEntry", {"NewIndex": 1}) {'NewActive': '0', 'NewIPAddress': '192.168.0.23', 'NewMACAddress': '9C:20:7B:E7:FF:5F', 'NewInterfaceType': 'Ethernet', 'NewHostName': 'Apple-TV', 'NewAddressSource': 'DHCP', 'NewLeaseTimeRemaining': '0'} .. 
seealso:: `Additional short explanation of the UPnP protocol <http://www.upnp-hacks.org/upnp.html>`_ :class:`~simpletr64.Discover`, :meth:`~simpletr64.DeviceTR64.loadDeviceDefinitions`, :meth:`~simpletr64.DeviceTR64.loadSCPD` """ if not uri: raise ValueError("No action URI has been defined.") if not namespace: raise ValueError("No namespace has been defined.") if not action: raise ValueError("No action has been defined.") # soap headers header = {'Content-Type': 'text/xml; charset="UTF-8"', 'Soapaction': '"' + namespace + "#" + action + '"'} # build SOAP body body = '''<?xml version="1.0" encoding="UTF-8"?> <s:Envelope s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/" xmlns:s="http://schemas.xmlsoap.org/soap/envelope/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"> <s:Header/> <s:Body>\n''' body += " <u:" + action + ' xmlns="' + namespace + '">\n' arguments = {} for key in kwargs.keys(): body += " <" + key + ">" + str(kwargs[key]) + "</" + key + ">\n" arguments[key] = str(kwargs[key]) body += " </u:" + action + ">\n" body += '''</s:Body> </s:Envelope>''' # setup proxies proxies = {} if self.__httpsProxy: proxies = {"https": self.__httpsProxy} if self.__httpProxy: proxies = {"http": self.__httpProxy} # setup authentication auth = None if self.__password: auth = HTTPDigestAuth(self.__username, self.__password) # build the URL location = self.__protocol + "://" + self.__hostname + ":" + str(self.port) + uri # Post http request request = requests.post(location, data=body, headers=header, auth=auth, proxies=proxies, timeout=float(timeout), verify=self.__verify) if request.status_code != 200: errorStr = DeviceTR64._extractErrorString(request) raise ValueError('Could not execute "' + action + str(arguments) + '": ' + str(request.status_code) + ' - ' + request.reason + " -- " + errorStr) # parse XML return try: root = ET.fromstring(request.text.encode('utf-8')) except Exception as e: raise ValueError("Can not parse results for the action: " + str(e)) # iterate in the XML structure to get the action result actionNode = root[0][0] # we need to remove XML namespace for the action node namespaceLength = len(namespace) + 2 # add braces tag = actionNode.tag[namespaceLength:] if tag != (action + "Response"): raise ValueError('Soap result structure is wrong, expected action "' + action + 'Response" got "' + tag + '".') # pack all the received results results = {} for resultNode in actionNode: results[resultNode.tag] = resultNode.text return results
[ "def", "execute", "(", "self", ",", "uri", ",", "namespace", ",", "action", ",", "timeout", "=", "2", ",", "*", "*", "kwargs", ")", ":", "if", "not", "uri", ":", "raise", "ValueError", "(", "\"No action URI has been defined.\"", ")", "if", "not", "namespace", ":", "raise", "ValueError", "(", "\"No namespace has been defined.\"", ")", "if", "not", "action", ":", "raise", "ValueError", "(", "\"No action has been defined.\"", ")", "# soap headers", "header", "=", "{", "'Content-Type'", ":", "'text/xml; charset=\"UTF-8\"'", ",", "'Soapaction'", ":", "'\"'", "+", "namespace", "+", "\"#\"", "+", "action", "+", "'\"'", "}", "# build SOAP body", "body", "=", "'''<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<s:Envelope\n s:encodingStyle=\"http://schemas.xmlsoap.org/soap/encoding/\"\n xmlns:s=\"http://schemas.xmlsoap.org/soap/envelope/\"\n xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\"\n xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n <s:Header/>\n <s:Body>\\n'''", "body", "+=", "\" <u:\"", "+", "action", "+", "' xmlns=\"'", "+", "namespace", "+", "'\">\\n'", "arguments", "=", "{", "}", "for", "key", "in", "kwargs", ".", "keys", "(", ")", ":", "body", "+=", "\" <\"", "+", "key", "+", "\">\"", "+", "str", "(", "kwargs", "[", "key", "]", ")", "+", "\"</\"", "+", "key", "+", "\">\\n\"", "arguments", "[", "key", "]", "=", "str", "(", "kwargs", "[", "key", "]", ")", "body", "+=", "\" </u:\"", "+", "action", "+", "\">\\n\"", "body", "+=", "'''</s:Body>\n</s:Envelope>'''", "# setup proxies", "proxies", "=", "{", "}", "if", "self", ".", "__httpsProxy", ":", "proxies", "=", "{", "\"https\"", ":", "self", ".", "__httpsProxy", "}", "if", "self", ".", "__httpProxy", ":", "proxies", "=", "{", "\"http\"", ":", "self", ".", "__httpProxy", "}", "# setup authentication", "auth", "=", "None", "if", "self", ".", "__password", ":", "auth", "=", "HTTPDigestAuth", "(", "self", ".", "__username", ",", "self", ".", "__password", ")", "# build the URL", "location", "=", "self", ".", "__protocol", "+", "\"://\"", "+", "self", ".", "__hostname", "+", "\":\"", "+", "str", "(", "self", ".", "port", ")", "+", "uri", "# Post http request", "request", "=", "requests", ".", "post", "(", "location", ",", "data", "=", "body", ",", "headers", "=", "header", ",", "auth", "=", "auth", ",", "proxies", "=", "proxies", ",", "timeout", "=", "float", "(", "timeout", ")", ",", "verify", "=", "self", ".", "__verify", ")", "if", "request", ".", "status_code", "!=", "200", ":", "errorStr", "=", "DeviceTR64", ".", "_extractErrorString", "(", "request", ")", "raise", "ValueError", "(", "'Could not execute \"'", "+", "action", "+", "str", "(", "arguments", ")", "+", "'\": '", "+", "str", "(", "request", ".", "status_code", ")", "+", "' - '", "+", "request", ".", "reason", "+", "\" -- \"", "+", "errorStr", ")", "# parse XML return", "try", ":", "root", "=", "ET", ".", "fromstring", "(", "request", ".", "text", ".", "encode", "(", "'utf-8'", ")", ")", "except", "Exception", "as", "e", ":", "raise", "ValueError", "(", "\"Can not parse results for the action: \"", "+", "str", "(", "e", ")", ")", "# iterate in the XML structure to get the action result", "actionNode", "=", "root", "[", "0", "]", "[", "0", "]", "# we need to remove XML namespace for the action node", "namespaceLength", "=", "len", "(", "namespace", ")", "+", "2", "# add braces", "tag", "=", "actionNode", ".", "tag", "[", "namespaceLength", ":", "]", "if", "tag", "!=", "(", "action", "+", "\"Response\"", ")", ":", "raise", "ValueError", "(", "'Soap result structure is wrong, 
expected action \"'", "+", "action", "+", "'Response\" got \"'", "+", "tag", "+", "'\".'", ")", "# pack all the received results", "results", "=", "{", "}", "for", "resultNode", "in", "actionNode", ":", "results", "[", "resultNode", ".", "tag", "]", "=", "resultNode", ".", "text", "return", "results" ]
Executes a given action with optional arguments. The execution of an action of an UPnP/TR64 device needs more than just the name of an action. It needs the control URI which is called to place the action and also the namespace aka service type is needed. The namespace defines the scope or service type of the given action, the same action name can appear in different namespaces. The way how to obtain the needed information's is either through the documentation of the vendor of the device. Or through a discovery requests which return's the URL to the root device description XML. :param str uri: the control URI, for example ``/upnp/control/hosts`` :param str namespace: the namespace for the given action, for example ``urn:dslforum-org:service:Hosts:1`` :param str action: the name of the action to call, for example ``GetGenericHostEntry`` :param float timeout: the timeout to wait for the action to be executed :param kwargs: optional arguments for the given action, depends if the action needs parameter. The arguments are given as dict where the key is the parameter name and the value the value of the parameter. :type kwargs: dict[str, str] :return: returns the results of the action, if any. The results are structured as dict where the key is the name of the result argument and the value is the value of the result. :rtype: dict[str,str] :raises ValueError: if parameters are not set correctly :raises requests.exceptions.ConnectionError: when the action can not be placed on the device :raises requests.exceptions.ConnectTimeout: when download time out Example: :: device = DeviceTR64(...) device.execute("/upnp/control/hosts", "urn:dslforum-org:service:Hosts:1", "GetGenericHostEntry", {"NewIndex": 1}) {'NewActive': '0', 'NewIPAddress': '192.168.0.23', 'NewMACAddress': '9C:20:7B:E7:FF:5F', 'NewInterfaceType': 'Ethernet', 'NewHostName': 'Apple-TV', 'NewAddressSource': 'DHCP', 'NewLeaseTimeRemaining': '0'} .. seealso:: `Additional short explanation of the UPnP protocol <http://www.upnp-hacks.org/upnp.html>`_ :class:`~simpletr64.Discover`, :meth:`~simpletr64.DeviceTR64.loadDeviceDefinitions`, :meth:`~simpletr64.DeviceTR64.loadSCPD`
[ "Executes", "a", "given", "action", "with", "optional", "arguments", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/devicetr64.py#L356-L483
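Because the optional action arguments of execute() are plain keyword arguments (**kwargs), a sketch using keyword syntax may read more clearly than the dict shown in the docstring example. The host and port are placeholders and the constructor arguments are assumed; URI, namespace and action name are taken from the record above::

    from simpletr64 import DeviceTR64

    device = DeviceTR64("192.168.0.1", 49000)   # placeholder host/port, assumed constructor
    # action parameters are passed as keyword arguments, results come back as a dict
    results = device.execute("/upnp/control/hosts",
                             "urn:dslforum-org:service:Hosts:1",
                             "GetGenericHostEntry",
                             NewIndex=1)
    print(results.get("NewIPAddress"))
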
bpannier/simpletr64
simpletr64/devicetr64.py
DeviceTR64._extractErrorString
def _extractErrorString(request): """Extract error string from a failed UPnP call. :param request: the failed request result :type request: requests.Response :return: an extracted error text or empty str :rtype: str """ errorStr = "" tag = None # noinspection PyBroadException try: # parse XML return root = ET.fromstring(request.text.encode('utf-8')) tag = root[0][0] except: # return an empty string as we can not parse the structure return errorStr for element in tag.getiterator(): tagName = element.tag.lower() if tagName.endswith("string"): errorStr += element.text + " " elif tagName.endswith("description"): errorStr += element.text + " " return errorStr
python
def _extractErrorString(request): """Extract error string from a failed UPnP call. :param request: the failed request result :type request: requests.Response :return: an extracted error text or empty str :rtype: str """ errorStr = "" tag = None # noinspection PyBroadException try: # parse XML return root = ET.fromstring(request.text.encode('utf-8')) tag = root[0][0] except: # return an empty string as we can not parse the structure return errorStr for element in tag.getiterator(): tagName = element.tag.lower() if tagName.endswith("string"): errorStr += element.text + " " elif tagName.endswith("description"): errorStr += element.text + " " return errorStr
[ "def", "_extractErrorString", "(", "request", ")", ":", "errorStr", "=", "\"\"", "tag", "=", "None", "# noinspection PyBroadException", "try", ":", "# parse XML return", "root", "=", "ET", ".", "fromstring", "(", "request", ".", "text", ".", "encode", "(", "'utf-8'", ")", ")", "tag", "=", "root", "[", "0", "]", "[", "0", "]", "except", ":", "# return an empty string as we can not parse the structure", "return", "errorStr", "for", "element", "in", "tag", ".", "getiterator", "(", ")", ":", "tagName", "=", "element", ".", "tag", ".", "lower", "(", ")", "if", "tagName", ".", "endswith", "(", "\"string\"", ")", ":", "errorStr", "+=", "element", ".", "text", "+", "\" \"", "elif", "tagName", ".", "endswith", "(", "\"description\"", ")", ":", "errorStr", "+=", "element", ".", "text", "+", "\" \"", "return", "errorStr" ]
Extract error string from a failed UPnP call. :param request: the failed request result :type request: requests.Response :return: an extracted error text or empty str :rtype: str
[ "Extract", "error", "string", "from", "a", "failed", "UPnP", "call", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/devicetr64.py#L486-L515
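The helper above walks a SOAP fault and concatenates anything that looks like an error string or description. A standalone sketch of that parsing step, using a made-up fault body and iter() in place of the deprecated getiterator()::

    import xml.etree.ElementTree as ET

    # made-up UPnP SOAP fault of the general shape _extractErrorString() expects
    fault = '''<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/">
      <s:Body><s:Fault>
        <detail><UPnPError>
          <errorDescription>Invalid Action</errorDescription>
        </UPnPError></detail>
      </s:Fault></s:Body>
    </s:Envelope>'''

    tag = ET.fromstring(fault)[0][0]            # the Fault element
    errorStr = ""
    for element in tag.iter():
        name = element.tag.lower()
        if name.endswith("string") or name.endswith("description"):
            errorStr += (element.text or "") + " "
    print(errorStr)                              # -> "Invalid Action "
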
bpannier/simpletr64
simpletr64/devicetr64.py
DeviceTR64.setupTR64Device
def setupTR64Device(self, deviceType): """Setup actions for known devices. For convenience reasons for some devices there is no need to discover/load device definitions before the pre defined :doc:`tr64` can be used. The following devices are currently supported (please help to extend): * fritz.box - Any AVM Fritz Box with the latest software version installed :param str deviceType: a known device type :raise ValueError: if the device type is not known. .. seealso:: :doc:`tr64` """ if deviceType.lower() != "fritz.box": raise ValueError("Unknown device type given.") self.__deviceServiceDefinitions = {} self.__deviceXMLInitialized = False # Fritz.box setup self.deviceServiceDefinitions["urn:dslforum-org:service:DeviceConfig:1"] = { "controlURL": "/upnp/control/deviceconfig"} self.deviceServiceDefinitions["urn:dslforum-org:service:ManagementServer:1"] = { "controlURL": "/upnp/control/mgmsrv"} self.deviceServiceDefinitions["urn:dslforum-org:service:LANConfigSecurity:1"] = { "controlURL": "/upnp/control/lanconfigsecurity"} self.deviceServiceDefinitions["urn:dslforum-org:service:Time:1"] = {"controlURL": "/upnp/control/time"} self.deviceServiceDefinitions["urn:dslforum-org:service:LANHostConfigManagement:1"] = { "controlURL": "/upnp/control/lanhostconfigmgm"} self.deviceServiceDefinitions["urn:dslforum-org:service:UserInterface:1"] = { "controlURL": "/upnp/control/userif"} self.deviceServiceDefinitions["urn:dslforum-org:service:DeviceInfo:1"] = { "controlURL": "/upnp/control/deviceinfo"} self.deviceServiceDefinitions["urn:dslforum-org:service:X_AVM-DE_TAM:1"] = {"controlURL": "/upnp/control/x_tam"} self.deviceServiceDefinitions["urn:dslforum-org:service:X_AVM-DE_MyFritz:1"] = { "controlURL": "/upnp/control/x_myfritz"} self.deviceServiceDefinitions["urn:dslforum-org:service:X_AVM-DE_RemoteAccess:1"] = { "controlURL": "/upnp/control/x_remote"} self.deviceServiceDefinitions["urn:dslforum-org:service:WLANConfiguration:1"] = { "controlURL": "/upnp/control/wlanconfig1"} self.deviceServiceDefinitions["urn:dslforum-org:service:WLANConfiguration:3"] = { "controlURL": "/upnp/control/wlanconfig3"} self.deviceServiceDefinitions["urn:dslforum-org:service:WLANConfiguration:2"] = { "controlURL": "/upnp/control/wlanconfig2"} self.deviceServiceDefinitions["urn:dslforum-org:service:X_AVM-DE_WebDAVClient:1"] = { "controlURL": "/upnp/control/x_webdav"} self.deviceServiceDefinitions["urn:dslforum-org:service:WANDSLLinkConfig:1"] = { "controlURL": "/upnp/control/wandsllinkconfig1"} self.deviceServiceDefinitions["urn:dslforum-org:service:Hosts:1"] = {"controlURL": "/upnp/control/hosts"} self.deviceServiceDefinitions["urn:dslforum-org:service:X_VoIP:1"] = {"controlURL": "/upnp/control/x_voip"} self.deviceServiceDefinitions["urn:dslforum-org:service:LANEthernetInterfaceConfig:1"] = { "controlURL": "/upnp/control/lanethernetifcfg"} self.deviceServiceDefinitions["urn:dslforum-org:service:Layer3Forwarding:1"] = { "controlURL": "/upnp/control/layer3forwarding"} self.deviceServiceDefinitions["urn:dslforum-org:service:WANIPConnection:1"] = { "controlURL": "/upnp/control/wanipconnection1"} self.deviceServiceDefinitions["urn:dslforum-org:service:X_AVM-DE_OnTel:1"] = { "controlURL": "/upnp/control/x_contact"} self.deviceServiceDefinitions["urn:dslforum-org:service:WANCommonInterfaceConfig:1"] = { "controlURL": "/upnp/control/wancommonifconfig1"} self.deviceServiceDefinitions["urn:dslforum-org:service:X_AVM-DE_UPnP:1"] = { "controlURL": "/upnp/control/x_upnp"} 
self.deviceServiceDefinitions["urn:dslforum-org:service:WANDSLInterfaceConfig:1"] = { "controlURL": "/upnp/control/wandslifconfig1"} self.deviceServiceDefinitions["urn:dslforum-org:service:WANPPPConnection:1"] = { "controlURL": "/upnp/control/wanpppconn1"} self.deviceServiceDefinitions["urn:dslforum-org:service:X_AVM-DE_Storage:1"] = { "controlURL": "/upnp/control/x_storage"} self.deviceServiceDefinitions["urn:dslforum-org:service:WANEthernetLinkConfig:1"] = { "controlURL": "/upnp/control/wanethlinkconfig1"}
python
def setupTR64Device(self, deviceType): """Setup actions for known devices. For convenience reasons for some devices there is no need to discover/load device definitions before the pre defined :doc:`tr64` can be used. The following devices are currently supported (please help to extend): * fritz.box - Any AVM Fritz Box with the latest software version installed :param str deviceType: a known device type :raise ValueError: if the device type is not known. .. seealso:: :doc:`tr64` """ if deviceType.lower() != "fritz.box": raise ValueError("Unknown device type given.") self.__deviceServiceDefinitions = {} self.__deviceXMLInitialized = False # Fritz.box setup self.deviceServiceDefinitions["urn:dslforum-org:service:DeviceConfig:1"] = { "controlURL": "/upnp/control/deviceconfig"} self.deviceServiceDefinitions["urn:dslforum-org:service:ManagementServer:1"] = { "controlURL": "/upnp/control/mgmsrv"} self.deviceServiceDefinitions["urn:dslforum-org:service:LANConfigSecurity:1"] = { "controlURL": "/upnp/control/lanconfigsecurity"} self.deviceServiceDefinitions["urn:dslforum-org:service:Time:1"] = {"controlURL": "/upnp/control/time"} self.deviceServiceDefinitions["urn:dslforum-org:service:LANHostConfigManagement:1"] = { "controlURL": "/upnp/control/lanhostconfigmgm"} self.deviceServiceDefinitions["urn:dslforum-org:service:UserInterface:1"] = { "controlURL": "/upnp/control/userif"} self.deviceServiceDefinitions["urn:dslforum-org:service:DeviceInfo:1"] = { "controlURL": "/upnp/control/deviceinfo"} self.deviceServiceDefinitions["urn:dslforum-org:service:X_AVM-DE_TAM:1"] = {"controlURL": "/upnp/control/x_tam"} self.deviceServiceDefinitions["urn:dslforum-org:service:X_AVM-DE_MyFritz:1"] = { "controlURL": "/upnp/control/x_myfritz"} self.deviceServiceDefinitions["urn:dslforum-org:service:X_AVM-DE_RemoteAccess:1"] = { "controlURL": "/upnp/control/x_remote"} self.deviceServiceDefinitions["urn:dslforum-org:service:WLANConfiguration:1"] = { "controlURL": "/upnp/control/wlanconfig1"} self.deviceServiceDefinitions["urn:dslforum-org:service:WLANConfiguration:3"] = { "controlURL": "/upnp/control/wlanconfig3"} self.deviceServiceDefinitions["urn:dslforum-org:service:WLANConfiguration:2"] = { "controlURL": "/upnp/control/wlanconfig2"} self.deviceServiceDefinitions["urn:dslforum-org:service:X_AVM-DE_WebDAVClient:1"] = { "controlURL": "/upnp/control/x_webdav"} self.deviceServiceDefinitions["urn:dslforum-org:service:WANDSLLinkConfig:1"] = { "controlURL": "/upnp/control/wandsllinkconfig1"} self.deviceServiceDefinitions["urn:dslforum-org:service:Hosts:1"] = {"controlURL": "/upnp/control/hosts"} self.deviceServiceDefinitions["urn:dslforum-org:service:X_VoIP:1"] = {"controlURL": "/upnp/control/x_voip"} self.deviceServiceDefinitions["urn:dslforum-org:service:LANEthernetInterfaceConfig:1"] = { "controlURL": "/upnp/control/lanethernetifcfg"} self.deviceServiceDefinitions["urn:dslforum-org:service:Layer3Forwarding:1"] = { "controlURL": "/upnp/control/layer3forwarding"} self.deviceServiceDefinitions["urn:dslforum-org:service:WANIPConnection:1"] = { "controlURL": "/upnp/control/wanipconnection1"} self.deviceServiceDefinitions["urn:dslforum-org:service:X_AVM-DE_OnTel:1"] = { "controlURL": "/upnp/control/x_contact"} self.deviceServiceDefinitions["urn:dslforum-org:service:WANCommonInterfaceConfig:1"] = { "controlURL": "/upnp/control/wancommonifconfig1"} self.deviceServiceDefinitions["urn:dslforum-org:service:X_AVM-DE_UPnP:1"] = { "controlURL": "/upnp/control/x_upnp"} 
self.deviceServiceDefinitions["urn:dslforum-org:service:WANDSLInterfaceConfig:1"] = { "controlURL": "/upnp/control/wandslifconfig1"} self.deviceServiceDefinitions["urn:dslforum-org:service:WANPPPConnection:1"] = { "controlURL": "/upnp/control/wanpppconn1"} self.deviceServiceDefinitions["urn:dslforum-org:service:X_AVM-DE_Storage:1"] = { "controlURL": "/upnp/control/x_storage"} self.deviceServiceDefinitions["urn:dslforum-org:service:WANEthernetLinkConfig:1"] = { "controlURL": "/upnp/control/wanethlinkconfig1"}
[ "def", "setupTR64Device", "(", "self", ",", "deviceType", ")", ":", "if", "deviceType", ".", "lower", "(", ")", "!=", "\"fritz.box\"", ":", "raise", "ValueError", "(", "\"Unknown device type given.\"", ")", "self", ".", "__deviceServiceDefinitions", "=", "{", "}", "self", ".", "__deviceXMLInitialized", "=", "False", "# Fritz.box setup", "self", ".", "deviceServiceDefinitions", "[", "\"urn:dslforum-org:service:DeviceConfig:1\"", "]", "=", "{", "\"controlURL\"", ":", "\"/upnp/control/deviceconfig\"", "}", "self", ".", "deviceServiceDefinitions", "[", "\"urn:dslforum-org:service:ManagementServer:1\"", "]", "=", "{", "\"controlURL\"", ":", "\"/upnp/control/mgmsrv\"", "}", "self", ".", "deviceServiceDefinitions", "[", "\"urn:dslforum-org:service:LANConfigSecurity:1\"", "]", "=", "{", "\"controlURL\"", ":", "\"/upnp/control/lanconfigsecurity\"", "}", "self", ".", "deviceServiceDefinitions", "[", "\"urn:dslforum-org:service:Time:1\"", "]", "=", "{", "\"controlURL\"", ":", "\"/upnp/control/time\"", "}", "self", ".", "deviceServiceDefinitions", "[", "\"urn:dslforum-org:service:LANHostConfigManagement:1\"", "]", "=", "{", "\"controlURL\"", ":", "\"/upnp/control/lanhostconfigmgm\"", "}", "self", ".", "deviceServiceDefinitions", "[", "\"urn:dslforum-org:service:UserInterface:1\"", "]", "=", "{", "\"controlURL\"", ":", "\"/upnp/control/userif\"", "}", "self", ".", "deviceServiceDefinitions", "[", "\"urn:dslforum-org:service:DeviceInfo:1\"", "]", "=", "{", "\"controlURL\"", ":", "\"/upnp/control/deviceinfo\"", "}", "self", ".", "deviceServiceDefinitions", "[", "\"urn:dslforum-org:service:X_AVM-DE_TAM:1\"", "]", "=", "{", "\"controlURL\"", ":", "\"/upnp/control/x_tam\"", "}", "self", ".", "deviceServiceDefinitions", "[", "\"urn:dslforum-org:service:X_AVM-DE_MyFritz:1\"", "]", "=", "{", "\"controlURL\"", ":", "\"/upnp/control/x_myfritz\"", "}", "self", ".", "deviceServiceDefinitions", "[", "\"urn:dslforum-org:service:X_AVM-DE_RemoteAccess:1\"", "]", "=", "{", "\"controlURL\"", ":", "\"/upnp/control/x_remote\"", "}", "self", ".", "deviceServiceDefinitions", "[", "\"urn:dslforum-org:service:WLANConfiguration:1\"", "]", "=", "{", "\"controlURL\"", ":", "\"/upnp/control/wlanconfig1\"", "}", "self", ".", "deviceServiceDefinitions", "[", "\"urn:dslforum-org:service:WLANConfiguration:3\"", "]", "=", "{", "\"controlURL\"", ":", "\"/upnp/control/wlanconfig3\"", "}", "self", ".", "deviceServiceDefinitions", "[", "\"urn:dslforum-org:service:WLANConfiguration:2\"", "]", "=", "{", "\"controlURL\"", ":", "\"/upnp/control/wlanconfig2\"", "}", "self", ".", "deviceServiceDefinitions", "[", "\"urn:dslforum-org:service:X_AVM-DE_WebDAVClient:1\"", "]", "=", "{", "\"controlURL\"", ":", "\"/upnp/control/x_webdav\"", "}", "self", ".", "deviceServiceDefinitions", "[", "\"urn:dslforum-org:service:WANDSLLinkConfig:1\"", "]", "=", "{", "\"controlURL\"", ":", "\"/upnp/control/wandsllinkconfig1\"", "}", "self", ".", "deviceServiceDefinitions", "[", "\"urn:dslforum-org:service:Hosts:1\"", "]", "=", "{", "\"controlURL\"", ":", "\"/upnp/control/hosts\"", "}", "self", ".", "deviceServiceDefinitions", "[", "\"urn:dslforum-org:service:X_VoIP:1\"", "]", "=", "{", "\"controlURL\"", ":", "\"/upnp/control/x_voip\"", "}", "self", ".", "deviceServiceDefinitions", "[", "\"urn:dslforum-org:service:LANEthernetInterfaceConfig:1\"", "]", "=", "{", "\"controlURL\"", ":", "\"/upnp/control/lanethernetifcfg\"", "}", "self", ".", "deviceServiceDefinitions", "[", "\"urn:dslforum-org:service:Layer3Forwarding:1\"", "]", "=", "{", "\"controlURL\"", ":", 
"\"/upnp/control/layer3forwarding\"", "}", "self", ".", "deviceServiceDefinitions", "[", "\"urn:dslforum-org:service:WANIPConnection:1\"", "]", "=", "{", "\"controlURL\"", ":", "\"/upnp/control/wanipconnection1\"", "}", "self", ".", "deviceServiceDefinitions", "[", "\"urn:dslforum-org:service:X_AVM-DE_OnTel:1\"", "]", "=", "{", "\"controlURL\"", ":", "\"/upnp/control/x_contact\"", "}", "self", ".", "deviceServiceDefinitions", "[", "\"urn:dslforum-org:service:WANCommonInterfaceConfig:1\"", "]", "=", "{", "\"controlURL\"", ":", "\"/upnp/control/wancommonifconfig1\"", "}", "self", ".", "deviceServiceDefinitions", "[", "\"urn:dslforum-org:service:X_AVM-DE_UPnP:1\"", "]", "=", "{", "\"controlURL\"", ":", "\"/upnp/control/x_upnp\"", "}", "self", ".", "deviceServiceDefinitions", "[", "\"urn:dslforum-org:service:WANDSLInterfaceConfig:1\"", "]", "=", "{", "\"controlURL\"", ":", "\"/upnp/control/wandslifconfig1\"", "}", "self", ".", "deviceServiceDefinitions", "[", "\"urn:dslforum-org:service:WANPPPConnection:1\"", "]", "=", "{", "\"controlURL\"", ":", "\"/upnp/control/wanpppconn1\"", "}", "self", ".", "deviceServiceDefinitions", "[", "\"urn:dslforum-org:service:X_AVM-DE_Storage:1\"", "]", "=", "{", "\"controlURL\"", ":", "\"/upnp/control/x_storage\"", "}", "self", ".", "deviceServiceDefinitions", "[", "\"urn:dslforum-org:service:WANEthernetLinkConfig:1\"", "]", "=", "{", "\"controlURL\"", ":", "\"/upnp/control/wanethlinkconfig1\"", "}" ]
Setup actions for known devices. For convenience reasons for some devices there is no need to discover/load device definitions before the pre defined :doc:`tr64` can be used. The following devices are currently supported (please help to extend): * fritz.box - Any AVM Fritz Box with the latest software version installed :param str deviceType: a known device type :raise ValueError: if the device type is not known. .. seealso:: :doc:`tr64`
[ "Setup", "actions", "for", "known", "devices", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/devicetr64.py#L517-L589
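A sketch of using the pre-defined Fritz box setup documented above. The host, port and chosen action are assumptions; setupTR64Device() and the Hosts control URL come from the record itself, and most devices will additionally require credentials::

    from simpletr64 import DeviceTR64

    box = DeviceTR64("fritz.box", 49000)        # placeholder host/port, assumed constructor
    box.setupTR64Device("fritz.box")            # installs the predefined control URLs, no download needed

    # the control URL of a known service type is now available without discovery
    uri = box.getControlURL("urn:dslforum-org:service:Hosts:1")

    # GetHostNumberOfEntries is a typical TR-064 action, assumed here for illustration
    results = box.execute(uri, "urn:dslforum-org:service:Hosts:1", "GetHostNumberOfEntries")
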
bpannier/simpletr64
simpletr64/devicetr64.py
DeviceTR64.loadDeviceDefinitions
def loadDeviceDefinitions(self, urlOfXMLDefinition, timeout=3): """Loads the device definitions from a given URL which points to the root XML in the device. This loads the device definitions which is needed in case you like to: * get additional information's about the device like manufacture, device type, etc * get all support service types of this device * use the convenient actions classes part of this library in the actions module :param str urlOfXMLDefinition: the URL to the root XML which sets the device definitions. :param float timeout: the timeout for downloading :raises ValueError: if the XML could not be parsed correctly :raises requests.exceptions.ConnectionError: when the device definitions can not be downloaded :raises requests.exceptions.ConnectTimeout: when download time out .. seealso:: :meth:`~simpletr64.DeviceTR64.loadSCPD`, :meth:`~simpletr64.DeviceTR64.deviceServiceDefinitions`, :meth:`~simpletr64.DeviceTR64.deviceInformations`, :meth:`~simpletr64.DeviceTR64.deviceSCPD`, :meth:`~simpletr64.DeviceTR64.getSCDPURL`, :meth:`~simpletr64.DeviceTR64.getControlURL`, :meth:`~simpletr64.DeviceTR64.getEventSubURL` """ # setup proxies proxies = {} if self.__httpsProxy: proxies = {"https": self.__httpsProxy} if self.__httpProxy: proxies = {"http": self.__httpProxy} # some devices response differently without a User-Agent headers = {"User-Agent": "Mozilla/5.0; SimpleTR64-1"} # setup authentication auth = None if self.__password: auth = HTTPDigestAuth(self.__username, self.__password) # get the content request = requests.get(urlOfXMLDefinition, proxies=proxies, headers=headers, timeout=float(timeout), auth=auth, verify=self.__verify) if request.status_code != 200: errorStr = DeviceTR64._extractErrorString(request) raise ValueError('Could not get CPE definitions "' + urlOfXMLDefinition + '" : ' + str(request.status_code) + ' - ' + request.reason + " -- " + errorStr) # parse XML return xml = request.text.encode('utf-8') return self._loadDeviceDefinitions(urlOfXMLDefinition, xml)
python
def loadDeviceDefinitions(self, urlOfXMLDefinition, timeout=3): """Loads the device definitions from a given URL which points to the root XML in the device. This loads the device definitions which is needed in case you like to: * get additional information's about the device like manufacture, device type, etc * get all support service types of this device * use the convenient actions classes part of this library in the actions module :param str urlOfXMLDefinition: the URL to the root XML which sets the device definitions. :param float timeout: the timeout for downloading :raises ValueError: if the XML could not be parsed correctly :raises requests.exceptions.ConnectionError: when the device definitions can not be downloaded :raises requests.exceptions.ConnectTimeout: when download time out .. seealso:: :meth:`~simpletr64.DeviceTR64.loadSCPD`, :meth:`~simpletr64.DeviceTR64.deviceServiceDefinitions`, :meth:`~simpletr64.DeviceTR64.deviceInformations`, :meth:`~simpletr64.DeviceTR64.deviceSCPD`, :meth:`~simpletr64.DeviceTR64.getSCDPURL`, :meth:`~simpletr64.DeviceTR64.getControlURL`, :meth:`~simpletr64.DeviceTR64.getEventSubURL` """ # setup proxies proxies = {} if self.__httpsProxy: proxies = {"https": self.__httpsProxy} if self.__httpProxy: proxies = {"http": self.__httpProxy} # some devices response differently without a User-Agent headers = {"User-Agent": "Mozilla/5.0; SimpleTR64-1"} # setup authentication auth = None if self.__password: auth = HTTPDigestAuth(self.__username, self.__password) # get the content request = requests.get(urlOfXMLDefinition, proxies=proxies, headers=headers, timeout=float(timeout), auth=auth, verify=self.__verify) if request.status_code != 200: errorStr = DeviceTR64._extractErrorString(request) raise ValueError('Could not get CPE definitions "' + urlOfXMLDefinition + '" : ' + str(request.status_code) + ' - ' + request.reason + " -- " + errorStr) # parse XML return xml = request.text.encode('utf-8') return self._loadDeviceDefinitions(urlOfXMLDefinition, xml)
[ "def", "loadDeviceDefinitions", "(", "self", ",", "urlOfXMLDefinition", ",", "timeout", "=", "3", ")", ":", "# setup proxies", "proxies", "=", "{", "}", "if", "self", ".", "__httpsProxy", ":", "proxies", "=", "{", "\"https\"", ":", "self", ".", "__httpsProxy", "}", "if", "self", ".", "__httpProxy", ":", "proxies", "=", "{", "\"http\"", ":", "self", ".", "__httpProxy", "}", "# some devices response differently without a User-Agent", "headers", "=", "{", "\"User-Agent\"", ":", "\"Mozilla/5.0; SimpleTR64-1\"", "}", "# setup authentication", "auth", "=", "None", "if", "self", ".", "__password", ":", "auth", "=", "HTTPDigestAuth", "(", "self", ".", "__username", ",", "self", ".", "__password", ")", "# get the content", "request", "=", "requests", ".", "get", "(", "urlOfXMLDefinition", ",", "proxies", "=", "proxies", ",", "headers", "=", "headers", ",", "timeout", "=", "float", "(", "timeout", ")", ",", "auth", "=", "auth", ",", "verify", "=", "self", ".", "__verify", ")", "if", "request", ".", "status_code", "!=", "200", ":", "errorStr", "=", "DeviceTR64", ".", "_extractErrorString", "(", "request", ")", "raise", "ValueError", "(", "'Could not get CPE definitions \"'", "+", "urlOfXMLDefinition", "+", "'\" : '", "+", "str", "(", "request", ".", "status_code", ")", "+", "' - '", "+", "request", ".", "reason", "+", "\" -- \"", "+", "errorStr", ")", "# parse XML return", "xml", "=", "request", ".", "text", ".", "encode", "(", "'utf-8'", ")", "return", "self", ".", "_loadDeviceDefinitions", "(", "urlOfXMLDefinition", ",", "xml", ")" ]
Loads the device definitions from a given URL which points to the root XML in the device. This loads the device definitions which is needed in case you like to: * get additional information's about the device like manufacture, device type, etc * get all support service types of this device * use the convenient actions classes part of this library in the actions module :param str urlOfXMLDefinition: the URL to the root XML which sets the device definitions. :param float timeout: the timeout for downloading :raises ValueError: if the XML could not be parsed correctly :raises requests.exceptions.ConnectionError: when the device definitions can not be downloaded :raises requests.exceptions.ConnectTimeout: when download time out .. seealso:: :meth:`~simpletr64.DeviceTR64.loadSCPD`, :meth:`~simpletr64.DeviceTR64.deviceServiceDefinitions`, :meth:`~simpletr64.DeviceTR64.deviceInformations`, :meth:`~simpletr64.DeviceTR64.deviceSCPD`, :meth:`~simpletr64.DeviceTR64.getSCDPURL`, :meth:`~simpletr64.DeviceTR64.getControlURL`, :meth:`~simpletr64.DeviceTR64.getEventSubURL`
[ "Loads", "the", "device", "definitions", "from", "a", "given", "URL", "which", "points", "to", "the", "root", "XML", "in", "the", "device", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/devicetr64.py#L591-L642
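A sketch of loading the device definitions and then listing the supported service types. The path to the root description XML differs per device, so tr64desc.xml is only an assumed example, as are host, port and the constructor arguments::

    from simpletr64 import DeviceTR64

    device = DeviceTR64("192.168.0.1", 49000)   # placeholder host/port, assumed constructor
    # the root description XML is device specific; tr64desc.xml is an assumed example path
    device.loadDeviceDefinitions("http://192.168.0.1:49000/tr64desc.xml")

    for serviceType in device.deviceServiceDefinitions:
        print(serviceType, device.getControlURL(serviceType))
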
bpannier/simpletr64
simpletr64/devicetr64.py
DeviceTR64._loadDeviceDefinitions
def _loadDeviceDefinitions(self, urlOfXMLDefinition, xml): """Internal call to parse the XML of the device definition. :param urlOfXMLDefinition: the URL to the XML device defintions :param xml: the XML content to parse """ # extract the base path of the given XML to make sure any relative URL later will be created correctly url = urlparse(urlOfXMLDefinition) baseURIPath = url.path.rpartition('/')[0] + "/" try: root = ET.fromstring(xml) except Exception as e: raise ValueError("Can not parse CPE definitions '" + urlOfXMLDefinition + "': " + str(e)) self.__deviceServiceDefinitions = {} self.__deviceSCPD = {} self.__deviceInformations = {'rootURL': urlOfXMLDefinition} self.__deviceUnknownKeys = {} self.__deviceXMLInitialized = False # iterate through all the informations self._iterateToFindSCPDElements(root, baseURIPath) self.__deviceXMLInitialized = True
python
def _loadDeviceDefinitions(self, urlOfXMLDefinition, xml): """Internal call to parse the XML of the device definition. :param urlOfXMLDefinition: the URL to the XML device defintions :param xml: the XML content to parse """ # extract the base path of the given XML to make sure any relative URL later will be created correctly url = urlparse(urlOfXMLDefinition) baseURIPath = url.path.rpartition('/')[0] + "/" try: root = ET.fromstring(xml) except Exception as e: raise ValueError("Can not parse CPE definitions '" + urlOfXMLDefinition + "': " + str(e)) self.__deviceServiceDefinitions = {} self.__deviceSCPD = {} self.__deviceInformations = {'rootURL': urlOfXMLDefinition} self.__deviceUnknownKeys = {} self.__deviceXMLInitialized = False # iterate through all the informations self._iterateToFindSCPDElements(root, baseURIPath) self.__deviceXMLInitialized = True
[ "def", "_loadDeviceDefinitions", "(", "self", ",", "urlOfXMLDefinition", ",", "xml", ")", ":", "# extract the base path of the given XML to make sure any relative URL later will be created correctly", "url", "=", "urlparse", "(", "urlOfXMLDefinition", ")", "baseURIPath", "=", "url", ".", "path", ".", "rpartition", "(", "'/'", ")", "[", "0", "]", "+", "\"/\"", "try", ":", "root", "=", "ET", ".", "fromstring", "(", "xml", ")", "except", "Exception", "as", "e", ":", "raise", "ValueError", "(", "\"Can not parse CPE definitions '\"", "+", "urlOfXMLDefinition", "+", "\"': \"", "+", "str", "(", "e", ")", ")", "self", ".", "__deviceServiceDefinitions", "=", "{", "}", "self", ".", "__deviceSCPD", "=", "{", "}", "self", ".", "__deviceInformations", "=", "{", "'rootURL'", ":", "urlOfXMLDefinition", "}", "self", ".", "__deviceUnknownKeys", "=", "{", "}", "self", ".", "__deviceXMLInitialized", "=", "False", "# iterate through all the informations", "self", ".", "_iterateToFindSCPDElements", "(", "root", ",", "baseURIPath", ")", "self", ".", "__deviceXMLInitialized", "=", "True" ]
Internal call to parse the XML of the device definition. :param urlOfXMLDefinition: the URL to the XML device defintions :param xml: the XML content to parse
[ "Internal", "call", "to", "parse", "the", "XML", "of", "the", "device", "definition", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/devicetr64.py#L644-L668
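The only non-obvious step in the internal parser above is how the base path for relative URLs is derived from the description URL; a standalone illustration using Python 3's urllib and a hypothetical URL::

    from urllib.parse import urlparse

    url = urlparse("http://192.168.0.1:49000/upnp/tr64desc.xml")   # hypothetical description URL
    baseURIPath = url.path.rpartition('/')[0] + "/"
    print(baseURIPath)                                             # -> "/upnp/"
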
bpannier/simpletr64
simpletr64/devicetr64.py
DeviceTR64._iterateToFindSCPDElements
def _iterateToFindSCPDElements(self, element, baseURIPath): """Internal method to iterate through device definition XML tree. :param element: the XML root node of the device definitions :type element: xml.etree.ElementTree.Element :param str baseURIPath: the base URL """ for child in element.getchildren(): tagName = child.tag.lower() if tagName.endswith('servicelist'): self._processServiceList(child,baseURIPath) elif tagName.endswith('devicetype'): if "deviceType" not in self.__deviceInformations.keys(): self.__deviceInformations["deviceType"] = child.text elif tagName.endswith('friendlyname'): if "friendlyName" not in self.__deviceInformations.keys(): self.__deviceInformations["friendlyName"] = child.text elif tagName.endswith('manufacturer'): if "manufacturer" not in self.__deviceInformations.keys(): self.__deviceInformations["manufacturer"] = child.text elif tagName.endswith('manufacturerurl'): if "manufacturerURL" not in self.__deviceInformations.keys(): self.__deviceInformations["manufacturerURL"] = child.text elif tagName.endswith('modeldescription'): if "modelDescription" not in self.__deviceInformations.keys(): self.__deviceInformations["modelDescription"] = child.text elif tagName.endswith('modelname'): if "modelName" not in self.__deviceInformations.keys(): self.__deviceInformations["modelName"] = child.text elif tagName.endswith('modelurl'): if "modelURL" not in self.__deviceInformations.keys(): self.__deviceInformations["modelURL"] = child.text elif tagName.endswith('modelnumber'): if "modelNumber" not in self.__deviceInformations.keys(): self.__deviceInformations["modelNumber"] = child.text elif tagName.endswith('serialnumber'): if "serialNumber" not in self.__deviceInformations.keys(): self.__deviceInformations["serialNumber"] = child.text elif tagName.endswith('presentationurl'): if "presentationURL" not in self.__deviceInformations.keys(): self.__deviceInformations["presentationURL"] = child.text elif tagName.endswith('udn'): if "UDN" not in self.__deviceInformations.keys(): self.__deviceInformations["UDN"] = child.text elif tagName.endswith('upc'): if "UPC" not in self.__deviceInformations.keys(): self.__deviceInformations["UPC"] = child.text elif tagName.endswith('iconlist') or tagName.endswith('specversion'): # skip these items pass else: if not tagName.endswith('device') and not tagName.endswith('devicelist'): self.__deviceUnknownKeys[child.tag] = child.text self._iterateToFindSCPDElements(child, baseURIPath)
python
def _iterateToFindSCPDElements(self, element, baseURIPath): """Internal method to iterate through device definition XML tree. :param element: the XML root node of the device definitions :type element: xml.etree.ElementTree.Element :param str baseURIPath: the base URL """ for child in element.getchildren(): tagName = child.tag.lower() if tagName.endswith('servicelist'): self._processServiceList(child,baseURIPath) elif tagName.endswith('devicetype'): if "deviceType" not in self.__deviceInformations.keys(): self.__deviceInformations["deviceType"] = child.text elif tagName.endswith('friendlyname'): if "friendlyName" not in self.__deviceInformations.keys(): self.__deviceInformations["friendlyName"] = child.text elif tagName.endswith('manufacturer'): if "manufacturer" not in self.__deviceInformations.keys(): self.__deviceInformations["manufacturer"] = child.text elif tagName.endswith('manufacturerurl'): if "manufacturerURL" not in self.__deviceInformations.keys(): self.__deviceInformations["manufacturerURL"] = child.text elif tagName.endswith('modeldescription'): if "modelDescription" not in self.__deviceInformations.keys(): self.__deviceInformations["modelDescription"] = child.text elif tagName.endswith('modelname'): if "modelName" not in self.__deviceInformations.keys(): self.__deviceInformations["modelName"] = child.text elif tagName.endswith('modelurl'): if "modelURL" not in self.__deviceInformations.keys(): self.__deviceInformations["modelURL"] = child.text elif tagName.endswith('modelnumber'): if "modelNumber" not in self.__deviceInformations.keys(): self.__deviceInformations["modelNumber"] = child.text elif tagName.endswith('serialnumber'): if "serialNumber" not in self.__deviceInformations.keys(): self.__deviceInformations["serialNumber"] = child.text elif tagName.endswith('presentationurl'): if "presentationURL" not in self.__deviceInformations.keys(): self.__deviceInformations["presentationURL"] = child.text elif tagName.endswith('udn'): if "UDN" not in self.__deviceInformations.keys(): self.__deviceInformations["UDN"] = child.text elif tagName.endswith('upc'): if "UPC" not in self.__deviceInformations.keys(): self.__deviceInformations["UPC"] = child.text elif tagName.endswith('iconlist') or tagName.endswith('specversion'): # skip these items pass else: if not tagName.endswith('device') and not tagName.endswith('devicelist'): self.__deviceUnknownKeys[child.tag] = child.text self._iterateToFindSCPDElements(child, baseURIPath)
[ "def", "_iterateToFindSCPDElements", "(", "self", ",", "element", ",", "baseURIPath", ")", ":", "for", "child", "in", "element", ".", "getchildren", "(", ")", ":", "tagName", "=", "child", ".", "tag", ".", "lower", "(", ")", "if", "tagName", ".", "endswith", "(", "'servicelist'", ")", ":", "self", ".", "_processServiceList", "(", "child", ",", "baseURIPath", ")", "elif", "tagName", ".", "endswith", "(", "'devicetype'", ")", ":", "if", "\"deviceType\"", "not", "in", "self", ".", "__deviceInformations", ".", "keys", "(", ")", ":", "self", ".", "__deviceInformations", "[", "\"deviceType\"", "]", "=", "child", ".", "text", "elif", "tagName", ".", "endswith", "(", "'friendlyname'", ")", ":", "if", "\"friendlyName\"", "not", "in", "self", ".", "__deviceInformations", ".", "keys", "(", ")", ":", "self", ".", "__deviceInformations", "[", "\"friendlyName\"", "]", "=", "child", ".", "text", "elif", "tagName", ".", "endswith", "(", "'manufacturer'", ")", ":", "if", "\"manufacturer\"", "not", "in", "self", ".", "__deviceInformations", ".", "keys", "(", ")", ":", "self", ".", "__deviceInformations", "[", "\"manufacturer\"", "]", "=", "child", ".", "text", "elif", "tagName", ".", "endswith", "(", "'manufacturerurl'", ")", ":", "if", "\"manufacturerURL\"", "not", "in", "self", ".", "__deviceInformations", ".", "keys", "(", ")", ":", "self", ".", "__deviceInformations", "[", "\"manufacturerURL\"", "]", "=", "child", ".", "text", "elif", "tagName", ".", "endswith", "(", "'modeldescription'", ")", ":", "if", "\"modelDescription\"", "not", "in", "self", ".", "__deviceInformations", ".", "keys", "(", ")", ":", "self", ".", "__deviceInformations", "[", "\"modelDescription\"", "]", "=", "child", ".", "text", "elif", "tagName", ".", "endswith", "(", "'modelname'", ")", ":", "if", "\"modelName\"", "not", "in", "self", ".", "__deviceInformations", ".", "keys", "(", ")", ":", "self", ".", "__deviceInformations", "[", "\"modelName\"", "]", "=", "child", ".", "text", "elif", "tagName", ".", "endswith", "(", "'modelurl'", ")", ":", "if", "\"modelURL\"", "not", "in", "self", ".", "__deviceInformations", ".", "keys", "(", ")", ":", "self", ".", "__deviceInformations", "[", "\"modelURL\"", "]", "=", "child", ".", "text", "elif", "tagName", ".", "endswith", "(", "'modelnumber'", ")", ":", "if", "\"modelNumber\"", "not", "in", "self", ".", "__deviceInformations", ".", "keys", "(", ")", ":", "self", ".", "__deviceInformations", "[", "\"modelNumber\"", "]", "=", "child", ".", "text", "elif", "tagName", ".", "endswith", "(", "'serialnumber'", ")", ":", "if", "\"serialNumber\"", "not", "in", "self", ".", "__deviceInformations", ".", "keys", "(", ")", ":", "self", ".", "__deviceInformations", "[", "\"serialNumber\"", "]", "=", "child", ".", "text", "elif", "tagName", ".", "endswith", "(", "'presentationurl'", ")", ":", "if", "\"presentationURL\"", "not", "in", "self", ".", "__deviceInformations", ".", "keys", "(", ")", ":", "self", ".", "__deviceInformations", "[", "\"presentationURL\"", "]", "=", "child", ".", "text", "elif", "tagName", ".", "endswith", "(", "'udn'", ")", ":", "if", "\"UDN\"", "not", "in", "self", ".", "__deviceInformations", ".", "keys", "(", ")", ":", "self", ".", "__deviceInformations", "[", "\"UDN\"", "]", "=", "child", ".", "text", "elif", "tagName", ".", "endswith", "(", "'upc'", ")", ":", "if", "\"UPC\"", "not", "in", "self", ".", "__deviceInformations", ".", "keys", "(", ")", ":", "self", ".", "__deviceInformations", "[", "\"UPC\"", "]", "=", "child", ".", "text", "elif", "tagName", ".", "endswith", 
"(", "'iconlist'", ")", "or", "tagName", ".", "endswith", "(", "'specversion'", ")", ":", "# skip these items", "pass", "else", ":", "if", "not", "tagName", ".", "endswith", "(", "'device'", ")", "and", "not", "tagName", ".", "endswith", "(", "'devicelist'", ")", ":", "self", ".", "__deviceUnknownKeys", "[", "child", ".", "tag", "]", "=", "child", ".", "text", "self", ".", "_iterateToFindSCPDElements", "(", "child", ",", "baseURIPath", ")" ]
Internal method to iterate through device definition XML tree. :param element: the XML root node of the device definitions :type element: xml.etree.ElementTree.Element :param str baseURIPath: the base URL
[ "Internal", "method", "to", "iterate", "through", "device", "definition", "XML", "tree", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/devicetr64.py#L670-L725
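The tree walk above matches tag names case-insensitively with endswith(), which makes it independent of XML namespace prefixes; a minimal illustration with a made-up fragment (plain iteration replaces the deprecated getchildren())::

    import xml.etree.ElementTree as ET

    xml = ('<device xmlns="urn:dslforum-org:device-1-0">'
           '<friendlyName>My Gateway</friendlyName></device>')
    root = ET.fromstring(xml)
    for child in root:
        # "{urn:dslforum-org:device-1-0}friendlyName" still ends with "friendlyname"
        if child.tag.lower().endswith('friendlyname'):
            print(child.text)        # -> "My Gateway"
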
bpannier/simpletr64
simpletr64/devicetr64.py
DeviceTR64._processServiceList
def _processServiceList(self, serviceList, baseURIPath): """Internal method to iterate in the device definition XML tree through the service list. :param serviceList: the XML root node of a service list :type serviceList: xml.etree.ElementTree.Element """ # iterate through all children in serviceList XML tag for service in serviceList.getchildren(): # has to be a service if not service.tag.lower().endswith("service"): raise ValueError("Non service tag in servicelist: " + service.tag) serviceType = None controlURL = None scpdURL = None eventURL = None # go through all the tags of a service and remember the values, ignore unknowns for child in service: tag = child.tag.lower() if tag.endswith("servicetype") or (serviceType is None and tag.endswith("spectype")): serviceType = child.text elif tag.endswith("controlurl"): controlURL = str(child.text) # if the url does not start with / (relative) or with a protocol add the base path if not controlURL.startswith("/") and not controlURL.startswith("http"): controlURL = baseURIPath + controlURL elif tag.endswith("scpdurl"): scpdURL = str(child.text) # if the url does not start with / (relative) or with a protocol add the base path if not scpdURL.startswith("/") and not scpdURL.startswith("http"): scpdURL = baseURIPath + scpdURL elif tag.endswith("eventsuburl"): eventURL = str(child.text) # if the url does not start with / (relative) or with a protocol add the base path if not eventURL.startswith("/") and not eventURL.startswith("http"): eventURL = baseURIPath + eventURL # check if serviceType and the URL's have been found if serviceType is None or controlURL is None: raise ValueError("Service is not complete: " + str(serviceType) + " - " + str(controlURL) + " - " + str(scpdURL)) # no service should be defined twice otherwise the old one will be overwritten if serviceType in self.__deviceServiceDefinitions.keys(): raise ValueError("Service type '" + serviceType + "' is defined twice.") self.__deviceServiceDefinitions[serviceType] = {"controlURL": controlURL} # if the scpd url is defined add it if scpdURL is not None: self.__deviceServiceDefinitions[serviceType]["scpdURL"] = scpdURL # if event url is given we add it as well if eventURL is not None: self.__deviceServiceDefinitions[serviceType]["eventSubURL"] = eventURL
python
def _processServiceList(self, serviceList, baseURIPath): """Internal method to iterate in the device definition XML tree through the service list. :param serviceList: the XML root node of a service list :type serviceList: xml.etree.ElementTree.Element """ # iterate through all children in serviceList XML tag for service in serviceList.getchildren(): # has to be a service if not service.tag.lower().endswith("service"): raise ValueError("Non service tag in servicelist: " + service.tag) serviceType = None controlURL = None scpdURL = None eventURL = None # go through all the tags of a service and remember the values, ignore unknowns for child in service: tag = child.tag.lower() if tag.endswith("servicetype") or (serviceType is None and tag.endswith("spectype")): serviceType = child.text elif tag.endswith("controlurl"): controlURL = str(child.text) # if the url does not start with / (relative) or with a protocol add the base path if not controlURL.startswith("/") and not controlURL.startswith("http"): controlURL = baseURIPath + controlURL elif tag.endswith("scpdurl"): scpdURL = str(child.text) # if the url does not start with / (relative) or with a protocol add the base path if not scpdURL.startswith("/") and not scpdURL.startswith("http"): scpdURL = baseURIPath + scpdURL elif tag.endswith("eventsuburl"): eventURL = str(child.text) # if the url does not start with / (relative) or with a protocol add the base path if not eventURL.startswith("/") and not eventURL.startswith("http"): eventURL = baseURIPath + eventURL # check if serviceType and the URL's have been found if serviceType is None or controlURL is None: raise ValueError("Service is not complete: " + str(serviceType) + " - " + str(controlURL) + " - " + str(scpdURL)) # no service should be defined twice otherwise the old one will be overwritten if serviceType in self.__deviceServiceDefinitions.keys(): raise ValueError("Service type '" + serviceType + "' is defined twice.") self.__deviceServiceDefinitions[serviceType] = {"controlURL": controlURL} # if the scpd url is defined add it if scpdURL is not None: self.__deviceServiceDefinitions[serviceType]["scpdURL"] = scpdURL # if event url is given we add it as well if eventURL is not None: self.__deviceServiceDefinitions[serviceType]["eventSubURL"] = eventURL
[ "def", "_processServiceList", "(", "self", ",", "serviceList", ",", "baseURIPath", ")", ":", "# iterate through all children in serviceList XML tag", "for", "service", "in", "serviceList", ".", "getchildren", "(", ")", ":", "# has to be a service", "if", "not", "service", ".", "tag", ".", "lower", "(", ")", ".", "endswith", "(", "\"service\"", ")", ":", "raise", "ValueError", "(", "\"Non service tag in servicelist: \"", "+", "service", ".", "tag", ")", "serviceType", "=", "None", "controlURL", "=", "None", "scpdURL", "=", "None", "eventURL", "=", "None", "# go through all the tags of a service and remember the values, ignore unknowns", "for", "child", "in", "service", ":", "tag", "=", "child", ".", "tag", ".", "lower", "(", ")", "if", "tag", ".", "endswith", "(", "\"servicetype\"", ")", "or", "(", "serviceType", "is", "None", "and", "tag", ".", "endswith", "(", "\"spectype\"", ")", ")", ":", "serviceType", "=", "child", ".", "text", "elif", "tag", ".", "endswith", "(", "\"controlurl\"", ")", ":", "controlURL", "=", "str", "(", "child", ".", "text", ")", "# if the url does not start with / (relative) or with a protocol add the base path", "if", "not", "controlURL", ".", "startswith", "(", "\"/\"", ")", "and", "not", "controlURL", ".", "startswith", "(", "\"http\"", ")", ":", "controlURL", "=", "baseURIPath", "+", "controlURL", "elif", "tag", ".", "endswith", "(", "\"scpdurl\"", ")", ":", "scpdURL", "=", "str", "(", "child", ".", "text", ")", "# if the url does not start with / (relative) or with a protocol add the base path", "if", "not", "scpdURL", ".", "startswith", "(", "\"/\"", ")", "and", "not", "scpdURL", ".", "startswith", "(", "\"http\"", ")", ":", "scpdURL", "=", "baseURIPath", "+", "scpdURL", "elif", "tag", ".", "endswith", "(", "\"eventsuburl\"", ")", ":", "eventURL", "=", "str", "(", "child", ".", "text", ")", "# if the url does not start with / (relative) or with a protocol add the base path", "if", "not", "eventURL", ".", "startswith", "(", "\"/\"", ")", "and", "not", "eventURL", ".", "startswith", "(", "\"http\"", ")", ":", "eventURL", "=", "baseURIPath", "+", "eventURL", "# check if serviceType and the URL's have been found", "if", "serviceType", "is", "None", "or", "controlURL", "is", "None", ":", "raise", "ValueError", "(", "\"Service is not complete: \"", "+", "str", "(", "serviceType", ")", "+", "\" - \"", "+", "str", "(", "controlURL", ")", "+", "\" - \"", "+", "str", "(", "scpdURL", ")", ")", "# no service should be defined twice otherwise the old one will be overwritten", "if", "serviceType", "in", "self", ".", "__deviceServiceDefinitions", ".", "keys", "(", ")", ":", "raise", "ValueError", "(", "\"Service type '\"", "+", "serviceType", "+", "\"' is defined twice.\"", ")", "self", ".", "__deviceServiceDefinitions", "[", "serviceType", "]", "=", "{", "\"controlURL\"", ":", "controlURL", "}", "# if the scpd url is defined add it", "if", "scpdURL", "is", "not", "None", ":", "self", ".", "__deviceServiceDefinitions", "[", "serviceType", "]", "[", "\"scpdURL\"", "]", "=", "scpdURL", "# if event url is given we add it as well", "if", "eventURL", "is", "not", "None", ":", "self", ".", "__deviceServiceDefinitions", "[", "serviceType", "]", "[", "\"eventSubURL\"", "]", "=", "eventURL" ]
Internal method to iterate in the device definition XML tree through the service list. :param serviceList: the XML root node of a service list :type serviceList: xml.etree.ElementTree.Element
[ "Internal", "method", "to", "iterate", "in", "the", "device", "definition", "XML", "tree", "through", "the", "service", "list", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/devicetr64.py#L727-L792
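The record above walks a UPnP <serviceList> element and prefixes relative controlURL/SCPDURL/eventSubURL values with a base path. The following standalone sketch mirrors that idea on a hand-written fragment; the tag values and the base path are illustrative only, and it iterates over the element directly because Element.getchildren(), used above, was removed in Python 3.9.

import xml.etree.ElementTree as ET

# Illustrative serviceList fragment (hypothetical values, no XML namespace for brevity)
SERVICE_LIST_XML = """
<serviceList>
  <service>
    <serviceType>urn:dslforum-org:service:DeviceInfo:1</serviceType>
    <controlURL>/upnp/control/deviceinfo</controlURL>
    <SCPDURL>deviceinfoSCPD.xml</SCPDURL>
  </service>
</serviceList>
"""

def collect_services(service_list, base_uri_path="/tr064/"):
    services = {}
    # iterate directly over the element instead of the removed getchildren()
    for service in service_list:
        entry = {}
        for child in service:
            tag = child.tag.lower()
            text = (child.text or "").strip()
            # same relative-URL rule as above: prepend the base path unless absolute
            if tag.endswith("url") and not text.startswith(("/", "http")):
                text = base_uri_path + text
            entry[tag] = text
        services[entry.get("servicetype")] = entry
    return services

print(collect_services(ET.fromstring(SERVICE_LIST_XML)))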
bpannier/simpletr64
simpletr64/devicetr64.py
DeviceTR64.loadSCPD
def loadSCPD(self, serviceType=None, timeout=3, ignoreFailures=False): """Load action definition(s) (Service Control Protocol Document). If the device definitions have been loaded via loadDeviceDefinitions() this method loads actions definitions. The action definitions are needed if you like to execute an action on a UPnP device. The actions definition contains the name of the action, the input and output parameter. You use the definition either with execute() or with the actions module of this library which predefines a lot of actions based on the TR64 standard. :param serviceType: the serviceType for which the action definitions should be loaded or all known service types if None. :param float timeout: the timeout for downloading :param bool ignoreFailures: if set to true and serviceType is None any failure in the iteration of loading all SCPD will be ignored. :raises ValueType: if the given serviceType is not known or when the definition can not be loaded. :raises requests.exceptions.ConnectionError: when the scpd can not be downloaded :raises requests.exceptions.ConnectTimeout: when download time out .. seealso:: :meth:`~simpletr64.DeviceTR64.loadDeviceDefinitions`, :meth:`~simpletr64.DeviceTR64.deviceSCPD`, :doc:`tr64` """ if serviceType is not None: self._loadSCPD(serviceType, float(timeout)) else: self.__deviceSCPD = {} for serviceType in self.__deviceServiceDefinitions.keys(): # remove any previous error self.__deviceServiceDefinitions[serviceType].pop("error", None) try: self._loadSCPD(serviceType, float(timeout)) except ValueError as e: if not ignoreFailures: # we not ignoring this so rethrow last exception raise else: # add a message in the structure self.__deviceServiceDefinitions[serviceType]["error"] = str(e)
python
def loadSCPD(self, serviceType=None, timeout=3, ignoreFailures=False): """Load action definition(s) (Service Control Protocol Document). If the device definitions have been loaded via loadDeviceDefinitions() this method loads actions definitions. The action definitions are needed if you like to execute an action on a UPnP device. The actions definition contains the name of the action, the input and output parameter. You use the definition either with execute() or with the actions module of this library which predefines a lot of actions based on the TR64 standard. :param serviceType: the serviceType for which the action definitions should be loaded or all known service types if None. :param float timeout: the timeout for downloading :param bool ignoreFailures: if set to true and serviceType is None any failure in the iteration of loading all SCPD will be ignored. :raises ValueType: if the given serviceType is not known or when the definition can not be loaded. :raises requests.exceptions.ConnectionError: when the scpd can not be downloaded :raises requests.exceptions.ConnectTimeout: when download time out .. seealso:: :meth:`~simpletr64.DeviceTR64.loadDeviceDefinitions`, :meth:`~simpletr64.DeviceTR64.deviceSCPD`, :doc:`tr64` """ if serviceType is not None: self._loadSCPD(serviceType, float(timeout)) else: self.__deviceSCPD = {} for serviceType in self.__deviceServiceDefinitions.keys(): # remove any previous error self.__deviceServiceDefinitions[serviceType].pop("error", None) try: self._loadSCPD(serviceType, float(timeout)) except ValueError as e: if not ignoreFailures: # we not ignoring this so rethrow last exception raise else: # add a message in the structure self.__deviceServiceDefinitions[serviceType]["error"] = str(e)
[ "def", "loadSCPD", "(", "self", ",", "serviceType", "=", "None", ",", "timeout", "=", "3", ",", "ignoreFailures", "=", "False", ")", ":", "if", "serviceType", "is", "not", "None", ":", "self", ".", "_loadSCPD", "(", "serviceType", ",", "float", "(", "timeout", ")", ")", "else", ":", "self", ".", "__deviceSCPD", "=", "{", "}", "for", "serviceType", "in", "self", ".", "__deviceServiceDefinitions", ".", "keys", "(", ")", ":", "# remove any previous error", "self", ".", "__deviceServiceDefinitions", "[", "serviceType", "]", ".", "pop", "(", "\"error\"", ",", "None", ")", "try", ":", "self", ".", "_loadSCPD", "(", "serviceType", ",", "float", "(", "timeout", ")", ")", "except", "ValueError", "as", "e", ":", "if", "not", "ignoreFailures", ":", "# we not ignoring this so rethrow last exception", "raise", "else", ":", "# add a message in the structure", "self", ".", "__deviceServiceDefinitions", "[", "serviceType", "]", "[", "\"error\"", "]", "=", "str", "(", "e", ")" ]
Load action definition(s) (Service Control Protocol Document).

If the device definitions have been loaded via loadDeviceDefinitions() this method loads the action definitions. The action definitions are needed if you want to execute an action on a UPnP device. The action definition contains the name of the action, the input and output parameters. You use the definition either with execute() or with the actions module of this library which predefines a lot of actions based on the TR64 standard.

:param serviceType: the serviceType for which the action definitions should be loaded or all known service
    types if None.
:param float timeout: the timeout for downloading
:param bool ignoreFailures: if set to true and serviceType is None any failure in the iteration of loading
    all SCPD will be ignored.
:raises ValueError: if the given serviceType is not known or when the definition can not be loaded.
:raises requests.exceptions.ConnectionError: when the scpd can not be downloaded
:raises requests.exceptions.ConnectTimeout: when the download times out

.. seealso:: :meth:`~simpletr64.DeviceTR64.loadDeviceDefinitions`, :meth:`~simpletr64.DeviceTR64.deviceSCPD`,
    :doc:`tr64`
[ "Load", "action", "definition", "(", "s", ")", "(", "Service", "Control", "Protocol", "Document", ")", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/devicetr64.py#L794-L834
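A possible client-side use of loadSCPD(), sketched under assumptions: the DeviceTR64(hostname, port, protocol) constructor call mirrors the Wifi factory shown later in this file, while the host, port 49000, the /tr64desc.xml description path and the username/password attribute names are placeholders that are not confirmed by this record.

from simpletr64 import DeviceTR64

device = DeviceTR64("192.168.178.1", 49000, "http")   # placeholder host/port/protocol
device.username = "admin"                             # attribute names assumed
device.password = "secret"

# load the service table first, then the action definitions for every known service type
device.loadDeviceDefinitions("http://192.168.178.1:49000/tr64desc.xml")  # description path assumed
device.loadSCPD(ignoreFailures=True)   # keep going even if a single SCPD download fails

for service_type, actions in device.deviceSCPD.items():
    print(service_type, "->", len(actions), "actions")

With ignoreFailures=True a per-service problem is recorded under an "error" key in that service's definition instead of aborting the loop, as the loadSCPD body above shows.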
bpannier/simpletr64
simpletr64/devicetr64.py
DeviceTR64._loadSCPD
def _loadSCPD(self, serviceType, timeout): """Internal method to load the action definitions. :param str serviceType: the service type to load :param int timeout: the timeout for downloading """ if serviceType not in self.__deviceServiceDefinitions.keys(): raise ValueError("Can not load SCPD, no service type defined for: " + serviceType) if "scpdURL" not in self.__deviceServiceDefinitions[serviceType].keys(): raise ValueError("No SCPD URL defined for: " + serviceType) # remove actions for given service type self.__deviceSCPD.pop(serviceType, None) uri = self.__deviceServiceDefinitions[serviceType]["scpdURL"] # setup proxies proxies = {} if self.__httpsProxy: proxies = {"https": self.__httpsProxy} if self.__httpProxy: proxies = {"http": self.__httpProxy} # setup authentication auth = None if self.__password: auth = HTTPDigestAuth(self.__username, self.__password) # build the URL location = self.__protocol + "://" + self.__hostname + ":" + str(self.port) + uri # some devices response differently without a User-Agent headers = {"User-Agent": "Mozilla/5.0; SimpleTR64-2"} # http request request = requests.get(location, auth=auth, proxies=proxies, headers=headers, timeout=timeout, verify=self.__verify) if request.status_code != 200: errorStr = DeviceTR64._extractErrorString(request) raise ValueError('Could not load SCPD for "' + serviceType + '" from ' + location + ': ' + str(request.status_code) + ' - ' + request.reason + " -- " + errorStr) data = request.text.encode('utf-8') if len(data) == 0: return # parse XML return try: root = ET.fromstring(data) except Exception as e: raise ValueError("Can not parse SCPD content for '" + serviceType + "' from '" + location + "': " + str(e)) actions = {} variableTypes = {} variableParameterDict = {} # iterate through the full XML tree for element in root.getchildren(): tagName = element.tag.lower() # go deeper for action lists if tagName.endswith("actionlist"): # remember the actions and where a specific variable gets referenced self._parseSCPDActions(element, actions, variableParameterDict) # go deeper for the variable declarations elif tagName.endswith("servicestatetable"): self._parseSCPDVariableTypes(element, variableTypes) # everything have been parsed now merge the variable declarations into the action parameters for name in variableParameterDict.keys(): if name not in variableTypes.keys(): raise ValueError("Variable reference in action can not be resolved: " + name) # iterate through all arguments where this variable have been referenced for argument in variableParameterDict[name]: # fill in the type of this variable/argument argument["dataType"] = variableTypes[name]["dataType"] # if the variable declaration includes a default value add it to the action parameter as well if "defaultValue" in variableTypes[name].keys(): argument["defaultValue"] = variableTypes[name]["defaultValue"] self.__deviceSCPD[serviceType] = actions
python
def _loadSCPD(self, serviceType, timeout): """Internal method to load the action definitions. :param str serviceType: the service type to load :param int timeout: the timeout for downloading """ if serviceType not in self.__deviceServiceDefinitions.keys(): raise ValueError("Can not load SCPD, no service type defined for: " + serviceType) if "scpdURL" not in self.__deviceServiceDefinitions[serviceType].keys(): raise ValueError("No SCPD URL defined for: " + serviceType) # remove actions for given service type self.__deviceSCPD.pop(serviceType, None) uri = self.__deviceServiceDefinitions[serviceType]["scpdURL"] # setup proxies proxies = {} if self.__httpsProxy: proxies = {"https": self.__httpsProxy} if self.__httpProxy: proxies = {"http": self.__httpProxy} # setup authentication auth = None if self.__password: auth = HTTPDigestAuth(self.__username, self.__password) # build the URL location = self.__protocol + "://" + self.__hostname + ":" + str(self.port) + uri # some devices response differently without a User-Agent headers = {"User-Agent": "Mozilla/5.0; SimpleTR64-2"} # http request request = requests.get(location, auth=auth, proxies=proxies, headers=headers, timeout=timeout, verify=self.__verify) if request.status_code != 200: errorStr = DeviceTR64._extractErrorString(request) raise ValueError('Could not load SCPD for "' + serviceType + '" from ' + location + ': ' + str(request.status_code) + ' - ' + request.reason + " -- " + errorStr) data = request.text.encode('utf-8') if len(data) == 0: return # parse XML return try: root = ET.fromstring(data) except Exception as e: raise ValueError("Can not parse SCPD content for '" + serviceType + "' from '" + location + "': " + str(e)) actions = {} variableTypes = {} variableParameterDict = {} # iterate through the full XML tree for element in root.getchildren(): tagName = element.tag.lower() # go deeper for action lists if tagName.endswith("actionlist"): # remember the actions and where a specific variable gets referenced self._parseSCPDActions(element, actions, variableParameterDict) # go deeper for the variable declarations elif tagName.endswith("servicestatetable"): self._parseSCPDVariableTypes(element, variableTypes) # everything have been parsed now merge the variable declarations into the action parameters for name in variableParameterDict.keys(): if name not in variableTypes.keys(): raise ValueError("Variable reference in action can not be resolved: " + name) # iterate through all arguments where this variable have been referenced for argument in variableParameterDict[name]: # fill in the type of this variable/argument argument["dataType"] = variableTypes[name]["dataType"] # if the variable declaration includes a default value add it to the action parameter as well if "defaultValue" in variableTypes[name].keys(): argument["defaultValue"] = variableTypes[name]["defaultValue"] self.__deviceSCPD[serviceType] = actions
[ "def", "_loadSCPD", "(", "self", ",", "serviceType", ",", "timeout", ")", ":", "if", "serviceType", "not", "in", "self", ".", "__deviceServiceDefinitions", ".", "keys", "(", ")", ":", "raise", "ValueError", "(", "\"Can not load SCPD, no service type defined for: \"", "+", "serviceType", ")", "if", "\"scpdURL\"", "not", "in", "self", ".", "__deviceServiceDefinitions", "[", "serviceType", "]", ".", "keys", "(", ")", ":", "raise", "ValueError", "(", "\"No SCPD URL defined for: \"", "+", "serviceType", ")", "# remove actions for given service type", "self", ".", "__deviceSCPD", ".", "pop", "(", "serviceType", ",", "None", ")", "uri", "=", "self", ".", "__deviceServiceDefinitions", "[", "serviceType", "]", "[", "\"scpdURL\"", "]", "# setup proxies", "proxies", "=", "{", "}", "if", "self", ".", "__httpsProxy", ":", "proxies", "=", "{", "\"https\"", ":", "self", ".", "__httpsProxy", "}", "if", "self", ".", "__httpProxy", ":", "proxies", "=", "{", "\"http\"", ":", "self", ".", "__httpProxy", "}", "# setup authentication", "auth", "=", "None", "if", "self", ".", "__password", ":", "auth", "=", "HTTPDigestAuth", "(", "self", ".", "__username", ",", "self", ".", "__password", ")", "# build the URL", "location", "=", "self", ".", "__protocol", "+", "\"://\"", "+", "self", ".", "__hostname", "+", "\":\"", "+", "str", "(", "self", ".", "port", ")", "+", "uri", "# some devices response differently without a User-Agent", "headers", "=", "{", "\"User-Agent\"", ":", "\"Mozilla/5.0; SimpleTR64-2\"", "}", "# http request", "request", "=", "requests", ".", "get", "(", "location", ",", "auth", "=", "auth", ",", "proxies", "=", "proxies", ",", "headers", "=", "headers", ",", "timeout", "=", "timeout", ",", "verify", "=", "self", ".", "__verify", ")", "if", "request", ".", "status_code", "!=", "200", ":", "errorStr", "=", "DeviceTR64", ".", "_extractErrorString", "(", "request", ")", "raise", "ValueError", "(", "'Could not load SCPD for \"'", "+", "serviceType", "+", "'\" from '", "+", "location", "+", "': '", "+", "str", "(", "request", ".", "status_code", ")", "+", "' - '", "+", "request", ".", "reason", "+", "\" -- \"", "+", "errorStr", ")", "data", "=", "request", ".", "text", ".", "encode", "(", "'utf-8'", ")", "if", "len", "(", "data", ")", "==", "0", ":", "return", "# parse XML return", "try", ":", "root", "=", "ET", ".", "fromstring", "(", "data", ")", "except", "Exception", "as", "e", ":", "raise", "ValueError", "(", "\"Can not parse SCPD content for '\"", "+", "serviceType", "+", "\"' from '\"", "+", "location", "+", "\"': \"", "+", "str", "(", "e", ")", ")", "actions", "=", "{", "}", "variableTypes", "=", "{", "}", "variableParameterDict", "=", "{", "}", "# iterate through the full XML tree", "for", "element", "in", "root", ".", "getchildren", "(", ")", ":", "tagName", "=", "element", ".", "tag", ".", "lower", "(", ")", "# go deeper for action lists", "if", "tagName", ".", "endswith", "(", "\"actionlist\"", ")", ":", "# remember the actions and where a specific variable gets referenced", "self", ".", "_parseSCPDActions", "(", "element", ",", "actions", ",", "variableParameterDict", ")", "# go deeper for the variable declarations", "elif", "tagName", ".", "endswith", "(", "\"servicestatetable\"", ")", ":", "self", ".", "_parseSCPDVariableTypes", "(", "element", ",", "variableTypes", ")", "# everything have been parsed now merge the variable declarations into the action parameters", "for", "name", "in", "variableParameterDict", ".", "keys", "(", ")", ":", "if", "name", "not", "in", "variableTypes", 
".", "keys", "(", ")", ":", "raise", "ValueError", "(", "\"Variable reference in action can not be resolved: \"", "+", "name", ")", "# iterate through all arguments where this variable have been referenced", "for", "argument", "in", "variableParameterDict", "[", "name", "]", ":", "# fill in the type of this variable/argument", "argument", "[", "\"dataType\"", "]", "=", "variableTypes", "[", "name", "]", "[", "\"dataType\"", "]", "# if the variable declaration includes a default value add it to the action parameter as well", "if", "\"defaultValue\"", "in", "variableTypes", "[", "name", "]", ".", "keys", "(", ")", ":", "argument", "[", "\"defaultValue\"", "]", "=", "variableTypes", "[", "name", "]", "[", "\"defaultValue\"", "]", "self", ".", "__deviceSCPD", "[", "serviceType", "]", "=", "actions" ]
Internal method to load the action definitions. :param str serviceType: the service type to load :param int timeout: the timeout for downloading
[ "Internal", "method", "to", "load", "the", "action", "definitions", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/devicetr64.py#L836-L923
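For readers outside the library, the download-and-parse step of _loadSCPD can be reproduced with nothing more than requests and ElementTree. Only the overall flow (digest auth, User-Agent header, status check, XML parse) follows the method above; the URL and credentials below are placeholders.

import requests
import xml.etree.ElementTree as ET
from requests.auth import HTTPDigestAuth

SCPD_URL = "http://192.168.178.1:49000/wlanconfigSCPD.xml"   # placeholder URL

response = requests.get(
    SCPD_URL,
    auth=HTTPDigestAuth("admin", "secret"),                  # placeholder credentials
    headers={"User-Agent": "Mozilla/5.0; SimpleTR64-2"},
    timeout=3,
)
if response.status_code != 200:
    raise ValueError("SCPD download failed: %s %s" % (response.status_code, response.reason))

root = ET.fromstring(response.content)
# count the declared actions regardless of any XML namespace prefix
actions = [el for el in root.iter() if el.tag.lower().endswith("action")]
print(len(actions), "actions declared in", SCPD_URL)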
bpannier/simpletr64
simpletr64/devicetr64.py
DeviceTR64._parseSCPDActions
def _parseSCPDActions(self, actionListElement, actions, variableParameterDict): """Internal method to parse the SCPD definitions. :param actionListElement: the action xml element :type actionListElement: xml.etree.ElementTree.Element :param dict actions: a container to store all actions :param dict variableParameterDict: remember where a variable gets referenced """ # go through all action elements in this list for actionElement in actionListElement.getchildren(): action = {} # go through all elements in this action for inActionElement in actionElement.getchildren(): tagName = inActionElement.tag.lower() if tagName.endswith("name"): # action name action["name"] = inActionElement.text elif tagName.endswith("argumentlist"): # parse the arguments of this action for argumentElement in inActionElement.getchildren(): argument = {} # go through the argument definition for inArgumentElement in argumentElement.getchildren(): tagName = inArgumentElement.tag.lower() if tagName.endswith("name"): # remember the argument name argument["name"] = inArgumentElement.text elif tagName.endswith("direction"): # is it an in or out argument argument["direction"] = inArgumentElement.text elif tagName.endswith("relatedstatevariable"): # remember the argument and safe it under the variable name to dereference later argument["variable"] = inArgumentElement.text if argument["variable"] not in variableParameterDict.keys(): variableParameterDict[argument["variable"]] = [] variableParameterDict[argument["variable"]].append(argument) if "name" not in argument.keys(): raise ValueError("Parameter definition does not contain a name.") if "direction" not in argument.keys(): raise ValueError("Parameter definition does not contain a direction: " + argument["name"]) direction = argument["direction"] + "Parameter" # store the actual argument in the action if direction not in action.keys(): action[direction] = {} action[direction][argument["name"]] = argument # cleanup, we stored the argument we dont need these values in there anymore otherwise they # would be redundant del argument["name"] del argument["direction"] if "name" not in action.keys(): raise ValueError("Action has not a name assigned.") if action["name"] in actions.keys(): raise ValueError("Action name defined more than ones: " + action["name"]) # save the action under its name actions[action["name"]] = action # cleanup, as we have saved the action under its name in the container it would be redundant del action["name"]
python
def _parseSCPDActions(self, actionListElement, actions, variableParameterDict): """Internal method to parse the SCPD definitions. :param actionListElement: the action xml element :type actionListElement: xml.etree.ElementTree.Element :param dict actions: a container to store all actions :param dict variableParameterDict: remember where a variable gets referenced """ # go through all action elements in this list for actionElement in actionListElement.getchildren(): action = {} # go through all elements in this action for inActionElement in actionElement.getchildren(): tagName = inActionElement.tag.lower() if tagName.endswith("name"): # action name action["name"] = inActionElement.text elif tagName.endswith("argumentlist"): # parse the arguments of this action for argumentElement in inActionElement.getchildren(): argument = {} # go through the argument definition for inArgumentElement in argumentElement.getchildren(): tagName = inArgumentElement.tag.lower() if tagName.endswith("name"): # remember the argument name argument["name"] = inArgumentElement.text elif tagName.endswith("direction"): # is it an in or out argument argument["direction"] = inArgumentElement.text elif tagName.endswith("relatedstatevariable"): # remember the argument and safe it under the variable name to dereference later argument["variable"] = inArgumentElement.text if argument["variable"] not in variableParameterDict.keys(): variableParameterDict[argument["variable"]] = [] variableParameterDict[argument["variable"]].append(argument) if "name" not in argument.keys(): raise ValueError("Parameter definition does not contain a name.") if "direction" not in argument.keys(): raise ValueError("Parameter definition does not contain a direction: " + argument["name"]) direction = argument["direction"] + "Parameter" # store the actual argument in the action if direction not in action.keys(): action[direction] = {} action[direction][argument["name"]] = argument # cleanup, we stored the argument we dont need these values in there anymore otherwise they # would be redundant del argument["name"] del argument["direction"] if "name" not in action.keys(): raise ValueError("Action has not a name assigned.") if action["name"] in actions.keys(): raise ValueError("Action name defined more than ones: " + action["name"]) # save the action under its name actions[action["name"]] = action # cleanup, as we have saved the action under its name in the container it would be redundant del action["name"]
[ "def", "_parseSCPDActions", "(", "self", ",", "actionListElement", ",", "actions", ",", "variableParameterDict", ")", ":", "# go through all action elements in this list", "for", "actionElement", "in", "actionListElement", ".", "getchildren", "(", ")", ":", "action", "=", "{", "}", "# go through all elements in this action", "for", "inActionElement", "in", "actionElement", ".", "getchildren", "(", ")", ":", "tagName", "=", "inActionElement", ".", "tag", ".", "lower", "(", ")", "if", "tagName", ".", "endswith", "(", "\"name\"", ")", ":", "# action name", "action", "[", "\"name\"", "]", "=", "inActionElement", ".", "text", "elif", "tagName", ".", "endswith", "(", "\"argumentlist\"", ")", ":", "# parse the arguments of this action", "for", "argumentElement", "in", "inActionElement", ".", "getchildren", "(", ")", ":", "argument", "=", "{", "}", "# go through the argument definition", "for", "inArgumentElement", "in", "argumentElement", ".", "getchildren", "(", ")", ":", "tagName", "=", "inArgumentElement", ".", "tag", ".", "lower", "(", ")", "if", "tagName", ".", "endswith", "(", "\"name\"", ")", ":", "# remember the argument name", "argument", "[", "\"name\"", "]", "=", "inArgumentElement", ".", "text", "elif", "tagName", ".", "endswith", "(", "\"direction\"", ")", ":", "# is it an in or out argument", "argument", "[", "\"direction\"", "]", "=", "inArgumentElement", ".", "text", "elif", "tagName", ".", "endswith", "(", "\"relatedstatevariable\"", ")", ":", "# remember the argument and safe it under the variable name to dereference later", "argument", "[", "\"variable\"", "]", "=", "inArgumentElement", ".", "text", "if", "argument", "[", "\"variable\"", "]", "not", "in", "variableParameterDict", ".", "keys", "(", ")", ":", "variableParameterDict", "[", "argument", "[", "\"variable\"", "]", "]", "=", "[", "]", "variableParameterDict", "[", "argument", "[", "\"variable\"", "]", "]", ".", "append", "(", "argument", ")", "if", "\"name\"", "not", "in", "argument", ".", "keys", "(", ")", ":", "raise", "ValueError", "(", "\"Parameter definition does not contain a name.\"", ")", "if", "\"direction\"", "not", "in", "argument", ".", "keys", "(", ")", ":", "raise", "ValueError", "(", "\"Parameter definition does not contain a direction: \"", "+", "argument", "[", "\"name\"", "]", ")", "direction", "=", "argument", "[", "\"direction\"", "]", "+", "\"Parameter\"", "# store the actual argument in the action", "if", "direction", "not", "in", "action", ".", "keys", "(", ")", ":", "action", "[", "direction", "]", "=", "{", "}", "action", "[", "direction", "]", "[", "argument", "[", "\"name\"", "]", "]", "=", "argument", "# cleanup, we stored the argument we dont need these values in there anymore otherwise they", "# would be redundant", "del", "argument", "[", "\"name\"", "]", "del", "argument", "[", "\"direction\"", "]", "if", "\"name\"", "not", "in", "action", ".", "keys", "(", ")", ":", "raise", "ValueError", "(", "\"Action has not a name assigned.\"", ")", "if", "action", "[", "\"name\"", "]", "in", "actions", ".", "keys", "(", ")", ":", "raise", "ValueError", "(", "\"Action name defined more than ones: \"", "+", "action", "[", "\"name\"", "]", ")", "# save the action under its name", "actions", "[", "action", "[", "\"name\"", "]", "]", "=", "action", "# cleanup, as we have saved the action under its name in the container it would be redundant", "del", "action", "[", "\"name\"", "]" ]
Internal method to parse the SCPD definitions. :param actionListElement: the action xml element :type actionListElement: xml.etree.ElementTree.Element :param dict actions: a container to store all actions :param dict variableParameterDict: remember where a variable gets referenced
[ "Internal", "method", "to", "parse", "the", "SCPD", "definitions", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/devicetr64.py#L925-L1000
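The parser above flattens every <action> into a dict keyed by the action name, with inParameter/outParameter sub-dicts whose entries initially carry only the related state-variable name; the dataType is merged in later from the service state table. A hedged illustration of the resulting shape for one made-up action:

# Assumed shape of the `actions` container after _parseSCPDActions has run
# (values are illustrative; "dataType" is filled in afterwards from the state table).
actions = {
    "GetInfo": {
        "outParameter": {
            "NewSSID": {"variable": "SSID"},
            "NewChannel": {"variable": "Channel"},
        }
    }
}

# companion reverse index used to resolve the variable types afterwards
variableParameterDict = {
    "SSID": [actions["GetInfo"]["outParameter"]["NewSSID"]],
    "Channel": [actions["GetInfo"]["outParameter"]["NewChannel"]],
}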
bpannier/simpletr64
simpletr64/devicetr64.py
DeviceTR64._parseSCPDVariableTypes
def _parseSCPDVariableTypes(self, variableListElement, variableTypes): """Internal method to parse the SCPD definitions. :param variableListElement: the xml root node of the variable list :type variableListElement: xml.etree.ElementTree.Element :param dict variableTypes: a container to store the variables """ # iterate through all variables for variableElement in variableListElement.getchildren(): variable = {} # iterate through the variable definition for inVariableElement in variableElement.getchildren(): tagName = inVariableElement.tag.lower() if tagName.endswith("name"): variable["name"] = inVariableElement.text elif tagName.endswith("datatype"): variable["dataType"] = inVariableElement.text elif tagName.endswith("defaultvalue"): variable["defaultValue"] = inVariableElement.text if "name" not in variable.keys(): raise ValueError("Variable has no name defined.") if "dataType" not in variable.keys(): raise ValueError("No dataType was defined by variable: " + variable["name"]) if variable["name"] in variableTypes.keys(): raise ValueError("Variable has been defined multiple times: " + variable["name"]) variableTypes[variable["name"]] = variable
python
def _parseSCPDVariableTypes(self, variableListElement, variableTypes): """Internal method to parse the SCPD definitions. :param variableListElement: the xml root node of the variable list :type variableListElement: xml.etree.ElementTree.Element :param dict variableTypes: a container to store the variables """ # iterate through all variables for variableElement in variableListElement.getchildren(): variable = {} # iterate through the variable definition for inVariableElement in variableElement.getchildren(): tagName = inVariableElement.tag.lower() if tagName.endswith("name"): variable["name"] = inVariableElement.text elif tagName.endswith("datatype"): variable["dataType"] = inVariableElement.text elif tagName.endswith("defaultvalue"): variable["defaultValue"] = inVariableElement.text if "name" not in variable.keys(): raise ValueError("Variable has no name defined.") if "dataType" not in variable.keys(): raise ValueError("No dataType was defined by variable: " + variable["name"]) if variable["name"] in variableTypes.keys(): raise ValueError("Variable has been defined multiple times: " + variable["name"]) variableTypes[variable["name"]] = variable
[ "def", "_parseSCPDVariableTypes", "(", "self", ",", "variableListElement", ",", "variableTypes", ")", ":", "# iterate through all variables", "for", "variableElement", "in", "variableListElement", ".", "getchildren", "(", ")", ":", "variable", "=", "{", "}", "# iterate through the variable definition", "for", "inVariableElement", "in", "variableElement", ".", "getchildren", "(", ")", ":", "tagName", "=", "inVariableElement", ".", "tag", ".", "lower", "(", ")", "if", "tagName", ".", "endswith", "(", "\"name\"", ")", ":", "variable", "[", "\"name\"", "]", "=", "inVariableElement", ".", "text", "elif", "tagName", ".", "endswith", "(", "\"datatype\"", ")", ":", "variable", "[", "\"dataType\"", "]", "=", "inVariableElement", ".", "text", "elif", "tagName", ".", "endswith", "(", "\"defaultvalue\"", ")", ":", "variable", "[", "\"defaultValue\"", "]", "=", "inVariableElement", ".", "text", "if", "\"name\"", "not", "in", "variable", ".", "keys", "(", ")", ":", "raise", "ValueError", "(", "\"Variable has no name defined.\"", ")", "if", "\"dataType\"", "not", "in", "variable", ".", "keys", "(", ")", ":", "raise", "ValueError", "(", "\"No dataType was defined by variable: \"", "+", "variable", "[", "\"name\"", "]", ")", "if", "variable", "[", "\"name\"", "]", "in", "variableTypes", ".", "keys", "(", ")", ":", "raise", "ValueError", "(", "\"Variable has been defined multiple times: \"", "+", "variable", "[", "\"name\"", "]", ")", "variableTypes", "[", "variable", "[", "\"name\"", "]", "]", "=", "variable" ]
Internal method to parse the SCPD definitions. :param variableListElement: the xml root node of the variable list :type variableListElement: xml.etree.ElementTree.Element :param dict variableTypes: a container to store the variables
[ "Internal", "method", "to", "parse", "the", "SCPD", "definitions", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/devicetr64.py#L1002-L1035
bpannier/simpletr64
simpletr64/actions/wifi.py
Wifi.createFromURL
def createFromURL(urlOfXMLDefinition): """Factory method to create a DeviceTR64 from an URL to the XML device definitions. :param str urlOfXMLDefinition: :return: the new object :rtype: Wifi """ url = urlparse(urlOfXMLDefinition) if not url.port: if url.scheme.lower() == "https": port = 443 else: port = 80 else: port = url.port return Wifi(url.hostname, port, url.scheme)
python
def createFromURL(urlOfXMLDefinition): """Factory method to create a DeviceTR64 from an URL to the XML device definitions. :param str urlOfXMLDefinition: :return: the new object :rtype: Wifi """ url = urlparse(urlOfXMLDefinition) if not url.port: if url.scheme.lower() == "https": port = 443 else: port = 80 else: port = url.port return Wifi(url.hostname, port, url.scheme)
[ "def", "createFromURL", "(", "urlOfXMLDefinition", ")", ":", "url", "=", "urlparse", "(", "urlOfXMLDefinition", ")", "if", "not", "url", ".", "port", ":", "if", "url", ".", "scheme", ".", "lower", "(", ")", "==", "\"https\"", ":", "port", "=", "443", "else", ":", "port", "=", "80", "else", ":", "port", "=", "url", ".", "port", "return", "Wifi", "(", "url", ".", "hostname", ",", "port", ",", "url", ".", "scheme", ")" ]
Factory method to create a DeviceTR64 from an URL to the XML device definitions. :param str urlOfXMLDefinition: :return: the new object :rtype: Wifi
[ "Factory", "method", "to", "create", "a", "DeviceTR64", "from", "an", "URL", "to", "the", "XML", "device", "definitions", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/actions/wifi.py#L62-L79
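createFromURL() only extracts host, port and scheme from the description URL; it does not download or parse anything by itself. A short usage sketch (the URL is a placeholder and the import path is assumed):

from simpletr64.actions import Wifi   # import path assumed

# no explicit port in the URL, so the factory falls back to 443 for "https" (80 for "http")
wifi = Wifi.createFromURL("https://192.168.178.1/tr64desc.xml")
print(wifi)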
bpannier/simpletr64
simpletr64/actions/wifi.py
Wifi.getWifiInfo
def getWifiInfo(self, wifiInterfaceId=1, timeout=1): """Execute GetInfo action to get Wifi basic information's. :param int wifiInterfaceId: the id of the Wifi interface :param float timeout: the timeout to wait for the action to be executed :return: the basic informations :rtype: WifiBasicInfo """ namespace = Wifi.getServiceType("getWifiInfo") + str(wifiInterfaceId) uri = self.getControlURL(namespace) results = self.execute(uri, namespace, "GetInfo", timeout=timeout) return WifiBasicInfo(results)
python
def getWifiInfo(self, wifiInterfaceId=1, timeout=1): """Execute GetInfo action to get Wifi basic information's. :param int wifiInterfaceId: the id of the Wifi interface :param float timeout: the timeout to wait for the action to be executed :return: the basic informations :rtype: WifiBasicInfo """ namespace = Wifi.getServiceType("getWifiInfo") + str(wifiInterfaceId) uri = self.getControlURL(namespace) results = self.execute(uri, namespace, "GetInfo", timeout=timeout) return WifiBasicInfo(results)
[ "def", "getWifiInfo", "(", "self", ",", "wifiInterfaceId", "=", "1", ",", "timeout", "=", "1", ")", ":", "namespace", "=", "Wifi", ".", "getServiceType", "(", "\"getWifiInfo\"", ")", "+", "str", "(", "wifiInterfaceId", ")", "uri", "=", "self", ".", "getControlURL", "(", "namespace", ")", "results", "=", "self", ".", "execute", "(", "uri", ",", "namespace", ",", "\"GetInfo\"", ",", "timeout", "=", "timeout", ")", "return", "WifiBasicInfo", "(", "results", ")" ]
Execute GetInfo action to get basic Wifi information.

:param int wifiInterfaceId: the id of the Wifi interface
:param float timeout: the timeout to wait for the action to be executed
:return: the basic information
:rtype: WifiBasicInfo
[ "Execute", "GetInfo", "action", "to", "get", "Wifi", "basic", "information", "s", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/actions/wifi.py#L93-L106
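Assuming the device definitions have been loaded as in the loadSCPD sketch further up, querying the first Wifi interface could look like the following. The WifiBasicInfo attribute names are not visible in this record, so the result is only printed; host, credentials and the description path are placeholders.

from simpletr64.actions import Wifi   # import path assumed

wifi = Wifi("192.168.178.1", 49000, "http")                              # placeholder host/port
wifi.password = "secret"                                                  # attribute name assumed
wifi.loadDeviceDefinitions("http://192.168.178.1:49000/tr64desc.xml")     # description path assumed

info = wifi.getWifiInfo(wifiInterfaceId=1)   # executes GetInfo on the first WLAN interface
print(info)                                  # WifiBasicInfo wraps the raw action results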
bpannier/simpletr64
simpletr64/actions/wifi.py
Wifi.getTotalAssociations
def getTotalAssociations(self, wifiInterfaceId=1, timeout=1): """Execute GetTotalAssociations action to get the amount of associated Wifi clients. :param int wifiInterfaceId: the id of the Wifi interface :param float timeout: the timeout to wait for the action to be executed :return: the amount of Wifi clients :rtype: int .. seealso:: :meth:`~simpletr64.actions.Wifi.getGenericAssociatedDeviceInfo` """ namespace = Wifi.getServiceType("getTotalAssociations") + str(wifiInterfaceId) uri = self.getControlURL(namespace) results = self.execute(uri, namespace, "GetTotalAssociations", timeout=timeout) return int(results["NewTotalAssociations"])
python
def getTotalAssociations(self, wifiInterfaceId=1, timeout=1): """Execute GetTotalAssociations action to get the amount of associated Wifi clients. :param int wifiInterfaceId: the id of the Wifi interface :param float timeout: the timeout to wait for the action to be executed :return: the amount of Wifi clients :rtype: int .. seealso:: :meth:`~simpletr64.actions.Wifi.getGenericAssociatedDeviceInfo` """ namespace = Wifi.getServiceType("getTotalAssociations") + str(wifiInterfaceId) uri = self.getControlURL(namespace) results = self.execute(uri, namespace, "GetTotalAssociations", timeout=timeout) return int(results["NewTotalAssociations"])
[ "def", "getTotalAssociations", "(", "self", ",", "wifiInterfaceId", "=", "1", ",", "timeout", "=", "1", ")", ":", "namespace", "=", "Wifi", ".", "getServiceType", "(", "\"getTotalAssociations\"", ")", "+", "str", "(", "wifiInterfaceId", ")", "uri", "=", "self", ".", "getControlURL", "(", "namespace", ")", "results", "=", "self", ".", "execute", "(", "uri", ",", "namespace", ",", "\"GetTotalAssociations\"", ",", "timeout", "=", "timeout", ")", "return", "int", "(", "results", "[", "\"NewTotalAssociations\"", "]", ")" ]
Execute GetTotalAssociations action to get the amount of associated Wifi clients. :param int wifiInterfaceId: the id of the Wifi interface :param float timeout: the timeout to wait for the action to be executed :return: the amount of Wifi clients :rtype: int .. seealso:: :meth:`~simpletr64.actions.Wifi.getGenericAssociatedDeviceInfo`
[ "Execute", "GetTotalAssociations", "action", "to", "get", "the", "amount", "of", "associated", "Wifi", "clients", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/actions/wifi.py#L138-L153
bpannier/simpletr64
simpletr64/actions/wifi.py
Wifi.getGenericAssociatedDeviceInfo
def getGenericAssociatedDeviceInfo(self, index, wifiInterfaceId=1, timeout=1): """Execute GetGenericAssociatedDeviceInfo action to get detailed information about a Wifi client. :param int index: the number of the client :param int wifiInterfaceId: the id of the Wifi interface :param float timeout: the timeout to wait for the action to be executed :return: the detailed information's about a Wifi client :rtype: WifiDeviceInfo .. seealso:: :meth:`~simpletr64.actions.Wifi.getTotalAssociations` """ namespace = Wifi.getServiceType("getGenericAssociatedDeviceInfo") + str(wifiInterfaceId) uri = self.getControlURL(namespace) results = self.execute(uri, namespace, "GetGenericAssociatedDeviceInfo", timeout=timeout, NewAssociatedDeviceIndex=index) return WifiDeviceInfo(results)
python
def getGenericAssociatedDeviceInfo(self, index, wifiInterfaceId=1, timeout=1): """Execute GetGenericAssociatedDeviceInfo action to get detailed information about a Wifi client. :param int index: the number of the client :param int wifiInterfaceId: the id of the Wifi interface :param float timeout: the timeout to wait for the action to be executed :return: the detailed information's about a Wifi client :rtype: WifiDeviceInfo .. seealso:: :meth:`~simpletr64.actions.Wifi.getTotalAssociations` """ namespace = Wifi.getServiceType("getGenericAssociatedDeviceInfo") + str(wifiInterfaceId) uri = self.getControlURL(namespace) results = self.execute(uri, namespace, "GetGenericAssociatedDeviceInfo", timeout=timeout, NewAssociatedDeviceIndex=index) return WifiDeviceInfo(results)
[ "def", "getGenericAssociatedDeviceInfo", "(", "self", ",", "index", ",", "wifiInterfaceId", "=", "1", ",", "timeout", "=", "1", ")", ":", "namespace", "=", "Wifi", ".", "getServiceType", "(", "\"getGenericAssociatedDeviceInfo\"", ")", "+", "str", "(", "wifiInterfaceId", ")", "uri", "=", "self", ".", "getControlURL", "(", "namespace", ")", "results", "=", "self", ".", "execute", "(", "uri", ",", "namespace", ",", "\"GetGenericAssociatedDeviceInfo\"", ",", "timeout", "=", "timeout", ",", "NewAssociatedDeviceIndex", "=", "index", ")", "return", "WifiDeviceInfo", "(", "results", ")" ]
Execute GetGenericAssociatedDeviceInfo action to get detailed information about a Wifi client.

:param int index: the index of the client
:param int wifiInterfaceId: the id of the Wifi interface
:param float timeout: the timeout to wait for the action to be executed
:return: the detailed information about a Wifi client
:rtype: WifiDeviceInfo

.. seealso:: :meth:`~simpletr64.actions.Wifi.getTotalAssociations`
[ "Execute", "GetGenericAssociatedDeviceInfo", "action", "to", "get", "detailed", "information", "about", "a", "Wifi", "client", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/actions/wifi.py#L155-L172
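getTotalAssociations() and getGenericAssociatedDeviceInfo() are meant to be combined: first ask how many clients are associated, then fetch each one by index. This sketch continues the setup from the getWifiInfo example above; treating the index as zero-based is an assumption, and the WifiDeviceInfo attributes are not shown in this record, so each object is just printed.

total = wifi.getTotalAssociations(wifiInterfaceId=1)
print(total, "clients associated")

for index in range(total):                       # zero-based index is an assumption
    device = wifi.getGenericAssociatedDeviceInfo(index, wifiInterfaceId=1)
    print(index, device)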
bpannier/simpletr64
simpletr64/actions/wifi.py
Wifi.getSpecificAssociatedDeviceInfo
def getSpecificAssociatedDeviceInfo(self, macAddress, wifiInterfaceId=1, timeout=1): """Execute GetSpecificAssociatedDeviceInfo action to get detailed information about a Wifi client. :param str macAddress: MAC address in the form ``38:C9:86:26:7E:38``; be aware that the MAC address might be case sensitive, depending on the router :param int wifiInterfaceId: the id of the Wifi interface :param float timeout: the timeout to wait for the action to be executed :return: the detailed information's about a Wifi client :rtype: WifiDeviceInfo .. seealso:: :meth:`~simpletr64.actions.Wifi.getGenericAssociatedDeviceInfo` """ namespace = Wifi.getServiceType("getSpecificAssociatedDeviceInfo") + str(wifiInterfaceId) uri = self.getControlURL(namespace) results = self.execute(uri, namespace, "GetSpecificAssociatedDeviceInfo", timeout=timeout, NewAssociatedDeviceMACAddress=macAddress) return WifiDeviceInfo(results, macAddress=macAddress)
python
def getSpecificAssociatedDeviceInfo(self, macAddress, wifiInterfaceId=1, timeout=1): """Execute GetSpecificAssociatedDeviceInfo action to get detailed information about a Wifi client. :param str macAddress: MAC address in the form ``38:C9:86:26:7E:38``; be aware that the MAC address might be case sensitive, depending on the router :param int wifiInterfaceId: the id of the Wifi interface :param float timeout: the timeout to wait for the action to be executed :return: the detailed information's about a Wifi client :rtype: WifiDeviceInfo .. seealso:: :meth:`~simpletr64.actions.Wifi.getGenericAssociatedDeviceInfo` """ namespace = Wifi.getServiceType("getSpecificAssociatedDeviceInfo") + str(wifiInterfaceId) uri = self.getControlURL(namespace) results = self.execute(uri, namespace, "GetSpecificAssociatedDeviceInfo", timeout=timeout, NewAssociatedDeviceMACAddress=macAddress) return WifiDeviceInfo(results, macAddress=macAddress)
[ "def", "getSpecificAssociatedDeviceInfo", "(", "self", ",", "macAddress", ",", "wifiInterfaceId", "=", "1", ",", "timeout", "=", "1", ")", ":", "namespace", "=", "Wifi", ".", "getServiceType", "(", "\"getSpecificAssociatedDeviceInfo\"", ")", "+", "str", "(", "wifiInterfaceId", ")", "uri", "=", "self", ".", "getControlURL", "(", "namespace", ")", "results", "=", "self", ".", "execute", "(", "uri", ",", "namespace", ",", "\"GetSpecificAssociatedDeviceInfo\"", ",", "timeout", "=", "timeout", ",", "NewAssociatedDeviceMACAddress", "=", "macAddress", ")", "return", "WifiDeviceInfo", "(", "results", ",", "macAddress", "=", "macAddress", ")" ]
Execute GetSpecificAssociatedDeviceInfo action to get detailed information about a Wifi client.

:param str macAddress: MAC address in the form ``38:C9:86:26:7E:38``; be aware that the MAC address might
    be case sensitive, depending on the router
:param int wifiInterfaceId: the id of the Wifi interface
:param float timeout: the timeout to wait for the action to be executed
:return: the detailed information about a Wifi client
:rtype: WifiDeviceInfo

.. seealso:: :meth:`~simpletr64.actions.Wifi.getGenericAssociatedDeviceInfo`
[ "Execute", "GetSpecificAssociatedDeviceInfo", "action", "to", "get", "detailed", "information", "about", "a", "Wifi", "client", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/actions/wifi.py#L174-L192
bpannier/simpletr64
simpletr64/actions/wifi.py
Wifi.setEnable
def setEnable(self, status, wifiInterfaceId=1, timeout=1): """Set enable status for a Wifi interface, be careful you don't cut yourself off. :param bool status: enable or disable the interface :param int wifiInterfaceId: the id of the Wifi interface :param float timeout: the timeout to wait for the action to be executed """ namespace = Wifi.getServiceType("setEnable") + str(wifiInterfaceId) uri = self.getControlURL(namespace) if status: setStatus = 1 else: setStatus = 0 self.execute(uri, namespace, "SetEnable", timeout=timeout, NewEnable=setStatus)
python
def setEnable(self, status, wifiInterfaceId=1, timeout=1): """Set enable status for a Wifi interface, be careful you don't cut yourself off. :param bool status: enable or disable the interface :param int wifiInterfaceId: the id of the Wifi interface :param float timeout: the timeout to wait for the action to be executed """ namespace = Wifi.getServiceType("setEnable") + str(wifiInterfaceId) uri = self.getControlURL(namespace) if status: setStatus = 1 else: setStatus = 0 self.execute(uri, namespace, "SetEnable", timeout=timeout, NewEnable=setStatus)
[ "def", "setEnable", "(", "self", ",", "status", ",", "wifiInterfaceId", "=", "1", ",", "timeout", "=", "1", ")", ":", "namespace", "=", "Wifi", ".", "getServiceType", "(", "\"setEnable\"", ")", "+", "str", "(", "wifiInterfaceId", ")", "uri", "=", "self", ".", "getControlURL", "(", "namespace", ")", "if", "status", ":", "setStatus", "=", "1", "else", ":", "setStatus", "=", "0", "self", ".", "execute", "(", "uri", ",", "namespace", ",", "\"SetEnable\"", ",", "timeout", "=", "timeout", ",", "NewEnable", "=", "setStatus", ")" ]
Set enable status for a Wifi interface, be careful you don't cut yourself off. :param bool status: enable or disable the interface :param int wifiInterfaceId: the id of the Wifi interface :param float timeout: the timeout to wait for the action to be executed
[ "Set", "enable", "status", "for", "a", "Wifi", "interface", "be", "careful", "you", "don", "t", "cut", "yourself", "off", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/actions/wifi.py#L194-L209
bpannier/simpletr64
simpletr64/actions/wifi.py
Wifi.setChannel
def setChannel(self, channel, wifiInterfaceId=1, timeout=1): """Set the channel of this Wifi interface :param int channel: the channel number :param int wifiInterfaceId: the id of the Wifi interface :param float timeout: the timeout to wait for the action to be executed """ namespace = Wifi.getServiceType("setChannel") + str(wifiInterfaceId) uri = self.getControlURL(namespace) self.execute(uri, namespace, "SetChannel", timeout=timeout, NewChannel=channel)
python
def setChannel(self, channel, wifiInterfaceId=1, timeout=1): """Set the channel of this Wifi interface :param int channel: the channel number :param int wifiInterfaceId: the id of the Wifi interface :param float timeout: the timeout to wait for the action to be executed """ namespace = Wifi.getServiceType("setChannel") + str(wifiInterfaceId) uri = self.getControlURL(namespace) self.execute(uri, namespace, "SetChannel", timeout=timeout, NewChannel=channel)
[ "def", "setChannel", "(", "self", ",", "channel", ",", "wifiInterfaceId", "=", "1", ",", "timeout", "=", "1", ")", ":", "namespace", "=", "Wifi", ".", "getServiceType", "(", "\"setChannel\"", ")", "+", "str", "(", "wifiInterfaceId", ")", "uri", "=", "self", ".", "getControlURL", "(", "namespace", ")", "self", ".", "execute", "(", "uri", ",", "namespace", ",", "\"SetChannel\"", ",", "timeout", "=", "timeout", ",", "NewChannel", "=", "channel", ")" ]
Set the channel of this Wifi interface :param int channel: the channel number :param int wifiInterfaceId: the id of the Wifi interface :param float timeout: the timeout to wait for the action to be executed
[ "Set", "the", "channel", "of", "this", "Wifi", "interface" ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/actions/wifi.py#L211-L221
bpannier/simpletr64
simpletr64/actions/wifi.py
Wifi.setSSID
def setSSID(self, ssid, wifiInterfaceId=1, timeout=1):
        """Set the SSID (name of the Wifi network)

        :param str ssid: the SSID/wifi network name
        :param int wifiInterfaceId: the id of the Wifi interface
        :param float timeout: the timeout to wait for the action to be executed
        """
        # the SSID setter lives in the same WLANConfiguration service as setChannel; the action name is SetSSID
        namespace = Wifi.getServiceType("setChannel") + str(wifiInterfaceId)
        uri = self.getControlURL(namespace)

        self.execute(uri, namespace, "SetSSID", timeout=timeout, NewSSID=ssid)
python
def setSSID(self, ssid, wifiInterfaceId=1, timeout=1):
        """Set the SSID (name of the Wifi network)

        :param str ssid: the SSID/wifi network name
        :param int wifiInterfaceId: the id of the Wifi interface
        :param float timeout: the timeout to wait for the action to be executed
        """
        # the SSID setter lives in the same WLANConfiguration service as setChannel; the action name is SetSSID
        namespace = Wifi.getServiceType("setChannel") + str(wifiInterfaceId)
        uri = self.getControlURL(namespace)

        self.execute(uri, namespace, "SetSSID", timeout=timeout, NewSSID=ssid)
[ "def", "setSSID", "(", "self", ",", "ssid", ",", "wifiInterfaceId", "=", "1", ",", "timeout", "=", "1", ")", ":", "namespace", "=", "Wifi", ".", "getServiceType", "(", "\"setChannel\"", ")", "+", "str", "(", "wifiInterfaceId", ")", "uri", "=", "self", ".", "getControlURL", "(", "namespace", ")", "self", ".", "execute", "(", "uri", ",", "namespace", ",", "\"SetChannel\"", ",", "timeout", "=", "timeout", ",", "NewSSID", "=", "ssid", ")" ]
Set the SSID (name of the Wifi network) :param str ssid: the SSID/wifi network name :param int wifiInterfaceId: the id of the Wifi interface :param float timeout: the timeout to wait for the action to be executed
[ "Set", "the", "SSID", "(", "name", "of", "the", "Wifi", "network", ")" ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/actions/wifi.py#L223-L233
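The setEnable/setChannel/setSSID setters can be combined into a small reconfiguration script; as the setEnable docstring warns, disabling the interface you are connected through will cut you off. Continuing with the `wifi` object from the sketches above (set operations will normally require the configured credentials):

wifi.setSSID("MyNetwork", wifiInterfaceId=1)   # rename the network
wifi.setChannel(6, wifiInterfaceId=1)          # move it to channel 6
wifi.setEnable(True, wifiInterfaceId=1)        # keep the interface enabled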
DreamLab/VmShepherd
src/vmshepherd/http/rpc_api.py
RpcApi.enabled_checker
def enabled_checker(func): """ Access decorator which checks if a RPC method is enabled by our configuration """ @wraps(func) def wrap(self, *args, **kwargs): if self.allowed_methods and isinstance(self.allowed_methods, list) and func.__name__ not in self.allowed_methods: raise Exception("Method {} is disabled".format(func.__name__)) return func(self, *args, **kwargs) return wrap
python
def enabled_checker(func): """ Access decorator which checks if a RPC method is enabled by our configuration """ @wraps(func) def wrap(self, *args, **kwargs): if self.allowed_methods and isinstance(self.allowed_methods, list) and func.__name__ not in self.allowed_methods: raise Exception("Method {} is disabled".format(func.__name__)) return func(self, *args, **kwargs) return wrap
[ "def", "enabled_checker", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrap", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "allowed_methods", "and", "isinstance", "(", "self", ".", "allowed_methods", ",", "list", ")", "and", "func", ".", "__name__", "not", "in", "self", ".", "allowed_methods", ":", "raise", "Exception", "(", "\"Method {} is disabled\"", ".", "format", "(", "func", ".", "__name__", ")", ")", "return", "func", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wrap" ]
Access decorator which checks if a RPC method is enabled by our configuration
[ "Access", "decorator", "which", "checks", "if", "a", "RPC", "method", "is", "enabled", "by", "our", "configuration" ]
train
https://github.com/DreamLab/VmShepherd/blob/709a412c372b897d53808039c5c64a8b69c12c8d/src/vmshepherd/http/rpc_api.py#L18-L26
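The enabled_checker decorator is a generic allow-list pattern that is easy to reuse outside VmShepherd. A self-contained sketch of the same idea; the Api class and its methods below are illustrative and not part of VmShepherd:

from functools import wraps

def enabled_checker(func):
    """Reject the call when the method name is not on the instance's allow-list."""
    @wraps(func)
    def wrap(self, *args, **kwargs):
        allowed = getattr(self, "allowed_methods", None)
        if isinstance(allowed, list) and allowed and func.__name__ not in allowed:
            raise Exception("Method {} is disabled".format(func.__name__))
        return func(self, *args, **kwargs)
    return wrap

class Api:
    def __init__(self, allowed_methods=None):
        self.allowed_methods = allowed_methods

    @enabled_checker
    def list_presets(self):
        return ["preset1", "preset2"]

api = Api(allowed_methods=["list_vms"])
try:
    api.list_presets()
except Exception as exc:
    print(exc)            # Method list_presets is disabled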
DreamLab/VmShepherd
src/vmshepherd/http/rpc_api.py
RpcApi.list_presets
async def list_presets(self): """ Listing presets :return: (list of presets) :rtype: list Sample response: ``["preset1", "preset2"]`` """ presets = await self.request.app.vmshepherd.preset_manager.list_presets() return list(presets.keys())
python
async def list_presets(self): """ Listing presets :return: (list of presets) :rtype: list Sample response: ``["preset1", "preset2"]`` """ presets = await self.request.app.vmshepherd.preset_manager.list_presets() return list(presets.keys())
[ "async", "def", "list_presets", "(", "self", ")", ":", "presets", "=", "await", "self", ".", "request", ".", "app", ".", "vmshepherd", ".", "preset_manager", ".", "list_presets", "(", ")", "return", "list", "(", "presets", ".", "keys", "(", ")", ")" ]
Listing presets :return: (list of presets) :rtype: list Sample response: ``["preset1", "preset2"]``
[ "Listing", "presets" ]
train
https://github.com/DreamLab/VmShepherd/blob/709a412c372b897d53808039c5c64a8b69c12c8d/src/vmshepherd/http/rpc_api.py#L29-L41
DreamLab/VmShepherd
src/vmshepherd/http/rpc_api.py
RpcApi.list_vms
async def list_vms(self, preset): """ Listing virtual machines in a given preset :arg string preset: preset name :return: (Size of a preset, list of virtual machines) - first element of a tuple is a size of virtual machines in a preset - second element is a dict which contains all Virtual Machines, where every element of this dict looks like that: ``{ "VIRTUAL_MACHINE_ID": { "ip": "IP_ADDR", "state": "VM_STATE" }`` :rtype: tuple Sample response: ``( 1, {'180aa486-ee46-4628-ab1c-f4554b63231': {'ip': '172.1.1.2', 'state': 'running'}} )`` """ vmshepherd = self.request.app.vmshepherd preset = vmshepherd.preset_manager.get_preset(preset) result_vms = {vm.id: {'ip': vm.ip[0], 'state': vm.state.value} for vm in preset.vms} return preset.count, result_vms
python
async def list_vms(self, preset): """ Listing virtual machines in a given preset :arg string preset: preset name :return: (Size of a preset, list of virtual machines) - first element of a tuple is a size of virtual machines in a preset - second element is a dict which contains all Virtual Machines, where every element of this dict looks like that: ``{ "VIRTUAL_MACHINE_ID": { "ip": "IP_ADDR", "state": "VM_STATE" }`` :rtype: tuple Sample response: ``( 1, {'180aa486-ee46-4628-ab1c-f4554b63231': {'ip': '172.1.1.2', 'state': 'running'}} )`` """ vmshepherd = self.request.app.vmshepherd preset = vmshepherd.preset_manager.get_preset(preset) result_vms = {vm.id: {'ip': vm.ip[0], 'state': vm.state.value} for vm in preset.vms} return preset.count, result_vms
[ "async", "def", "list_vms", "(", "self", ",", "preset", ")", ":", "vmshepherd", "=", "self", ".", "request", ".", "app", ".", "vmshepherd", "preset", "=", "vmshepherd", ".", "preset_manager", ".", "get_preset", "(", "preset", ")", "result_vms", "=", "{", "vm", ".", "id", ":", "{", "'ip'", ":", "vm", ".", "ip", "[", "0", "]", ",", "'state'", ":", "vm", ".", "state", ".", "value", "}", "for", "vm", "in", "preset", ".", "vms", "}", "return", "preset", ".", "count", ",", "result_vms" ]
Listing virtual machines in a given preset

:arg string preset: preset name
:return: (size of the preset, list of virtual machines)

- the first element of the tuple is the size of the preset (its number of virtual machines)
- the second element is a dict which contains all Virtual Machines, where every element of this dict looks
  like this: ``{ "VIRTUAL_MACHINE_ID": { "ip": "IP_ADDR", "state": "VM_STATE" } }``

:rtype: tuple

Sample response:

``(
1,
{'180aa486-ee46-4628-ab1c-f4554b63231': {'ip': '172.1.1.2', 'state': 'running'}}
)``
[ "Listing", "virtual", "machines", "in", "a", "given", "preset" ]
train
https://github.com/DreamLab/VmShepherd/blob/709a412c372b897d53808039c5c64a8b69c12c8d/src/vmshepherd/http/rpc_api.py#L44-L64
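The documented return value of list_vms is a (size, vms) pair. The snippet below simply unpacks the sample response from the docstring to show how a caller might consume it; no VmShepherd code is involved.

# sample response taken from the docstring above
size, vms = 1, {
    "180aa486-ee46-4628-ab1c-f4554b63231": {"ip": "172.1.1.2", "state": "running"},
}

running = [vm_id for vm_id, data in vms.items() if data["state"] == "running"]
print("preset size:", size)
print("running VMs:", running)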
DreamLab/VmShepherd
src/vmshepherd/http/rpc_api.py
RpcApi.terminate_vm
async def terminate_vm(self, preset, vm_id): """ Discard vm in specified preset :arg string preset: preset name :arg int vm_id: Virtual Machine id :return: 'OK' Sample response: ``OK`` """ vmshepherd = self.request.app.vmshepherd preset = vmshepherd.preset_manager.get_preset(preset) await preset.iaas.terminate_vm(vm_id) return 'OK'
python
async def terminate_vm(self, preset, vm_id): """ Discard vm in specified preset :arg string preset: preset name :arg int vm_id: Virtual Machine id :return: 'OK' Sample response: ``OK`` """ vmshepherd = self.request.app.vmshepherd preset = vmshepherd.preset_manager.get_preset(preset) await preset.iaas.terminate_vm(vm_id) return 'OK'
[ "async", "def", "terminate_vm", "(", "self", ",", "preset", ",", "vm_id", ")", ":", "vmshepherd", "=", "self", ".", "request", ".", "app", ".", "vmshepherd", "preset", "=", "vmshepherd", ".", "preset_manager", ".", "get_preset", "(", "preset", ")", "await", "preset", ".", "iaas", ".", "terminate_vm", "(", "vm_id", ")", "return", "'OK'" ]
Discard vm in specified preset :arg string preset: preset name :arg int vm_id: Virtual Machine id :return: 'OK' Sample response: ``OK``
[ "Discard", "vm", "in", "specified", "preset" ]
train
https://github.com/DreamLab/VmShepherd/blob/709a412c372b897d53808039c5c64a8b69c12c8d/src/vmshepherd/http/rpc_api.py#L67-L80
DreamLab/VmShepherd
src/vmshepherd/http/rpc_api.py
RpcApi.get_vm_metadata
async def get_vm_metadata(self, preset, vm_id): """ Get vm metadata :arg string preset: preset name :arg int vm_id: Virtual Machine id :return: Metadata for Virtual Machine :rtype: dict Sample response: ``{ 'time_shutdown' : "12312312321' }`` """ vmshepherd = self.request.app.vmshepherd preset = vmshepherd.preset_manager.get_preset(preset) vm_info = await preset.iaas.get_vm(vm_id) ret_info = copy.deepcopy(vm_info.metadata) if vm_info.metadata else {} ret_info['tags'] = vm_info.tags ret_info['iaas_shutdown'] = vm_info.timed_shutdown_at return ret_info
python
async def get_vm_metadata(self, preset, vm_id): """ Get vm metadata :arg string preset: preset name :arg int vm_id: Virtual Machine id :return: Metadata for Virtual Machine :rtype: dict Sample response: ``{ 'time_shutdown' : "12312312321' }`` """ vmshepherd = self.request.app.vmshepherd preset = vmshepherd.preset_manager.get_preset(preset) vm_info = await preset.iaas.get_vm(vm_id) ret_info = copy.deepcopy(vm_info.metadata) if vm_info.metadata else {} ret_info['tags'] = vm_info.tags ret_info['iaas_shutdown'] = vm_info.timed_shutdown_at return ret_info
[ "async", "def", "get_vm_metadata", "(", "self", ",", "preset", ",", "vm_id", ")", ":", "vmshepherd", "=", "self", ".", "request", ".", "app", ".", "vmshepherd", "preset", "=", "vmshepherd", ".", "preset_manager", ".", "get_preset", "(", "preset", ")", "vm_info", "=", "await", "preset", ".", "iaas", ".", "get_vm", "(", "vm_id", ")", "ret_info", "=", "copy", ".", "deepcopy", "(", "vm_info", ".", "metadata", ")", "if", "vm_info", ".", "metadata", "else", "{", "}", "ret_info", "[", "'tags'", "]", "=", "vm_info", ".", "tags", "ret_info", "[", "'iaas_shutdown'", "]", "=", "vm_info", ".", "timed_shutdown_at", "return", "ret_info" ]
Get vm metadata :arg string preset: preset name :arg int vm_id: Virtual Machine id :return: Metadata for Virtual Machine :rtype: dict Sample response: ``{ 'time_shutdown' : "12312312321' }``
[ "Get", "vm", "metadata" ]
train
https://github.com/DreamLab/VmShepherd/blob/709a412c372b897d53808039c5c64a8b69c12c8d/src/vmshepherd/http/rpc_api.py#L83-L100
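For the `get_vm_metadata` record above, a minimal sketch of the metadata merge, using a hypothetical `vm_info` stand-in (the real IaaS VM object is not shown in this dump). The `deepcopy` keeps the IaaS object's own metadata dict from being mutated when the extra keys are added.

```python
import copy
from types import SimpleNamespace

# Hypothetical stand-in mirroring the attributes the method reads.
vm_info = SimpleNamespace(metadata={'owner': 'team-a'},
                          tags=['web'],
                          timed_shutdown_at=1554600000)

ret_info = copy.deepcopy(vm_info.metadata) if vm_info.metadata else {}
ret_info['tags'] = vm_info.tags
ret_info['iaas_shutdown'] = vm_info.timed_shutdown_at
print(ret_info)
# {'owner': 'team-a', 'tags': ['web'], 'iaas_shutdown': 1554600000}
```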
Suor/autolink
autolink.py
linkify
def linkify(text, attrs={}): """ Convert URL-like and email-like strings into links. """ def separate_parentheses(s): start = re_find(r'^\(*', s) end = re_find(r'\)*$', s) n = min(len(start), len(end)) if n: return s[:n], s[n:-n], s[-n:] else: return '', s, '' def link_repl(url, proto='http://'): opening, url, closing = separate_parentheses(url) punct = re_find(punct_re, url) if punct: url = url[:-len(punct)] if re.search(proto_re, url): href = url else: href = proto + url href = escape_url(href) repl = u'{0!s}<a href="{1!s}"{2!s}>{3!s}</a>{4!s}{5!s}' return repl.format(opening, href, attrs_text, url, punct, closing) def repl(match): matches = match.groupdict() if matches['url']: return link_repl(matches['url']) else: return link_repl(matches['email'], proto='mailto:') # Prepare attrs attr = ' {0!s}="{1!s}"' attrs_text = ''.join(starmap(attr.format, attrs.items())) # Make replaces return re.sub(combined_re, repl, force_unicode(text))
python
def linkify(text, attrs={}): """ Convert URL-like and email-like strings into links. """ def separate_parentheses(s): start = re_find(r'^\(*', s) end = re_find(r'\)*$', s) n = min(len(start), len(end)) if n: return s[:n], s[n:-n], s[-n:] else: return '', s, '' def link_repl(url, proto='http://'): opening, url, closing = separate_parentheses(url) punct = re_find(punct_re, url) if punct: url = url[:-len(punct)] if re.search(proto_re, url): href = url else: href = proto + url href = escape_url(href) repl = u'{0!s}<a href="{1!s}"{2!s}>{3!s}</a>{4!s}{5!s}' return repl.format(opening, href, attrs_text, url, punct, closing) def repl(match): matches = match.groupdict() if matches['url']: return link_repl(matches['url']) else: return link_repl(matches['email'], proto='mailto:') # Prepare attrs attr = ' {0!s}="{1!s}"' attrs_text = ''.join(starmap(attr.format, attrs.items())) # Make replaces return re.sub(combined_re, repl, force_unicode(text))
[ "def", "linkify", "(", "text", ",", "attrs", "=", "{", "}", ")", ":", "def", "separate_parentheses", "(", "s", ")", ":", "start", "=", "re_find", "(", "r'^\\(*'", ",", "s", ")", "end", "=", "re_find", "(", "r'\\)*$'", ",", "s", ")", "n", "=", "min", "(", "len", "(", "start", ")", ",", "len", "(", "end", ")", ")", "if", "n", ":", "return", "s", "[", ":", "n", "]", ",", "s", "[", "n", ":", "-", "n", "]", ",", "s", "[", "-", "n", ":", "]", "else", ":", "return", "''", ",", "s", ",", "''", "def", "link_repl", "(", "url", ",", "proto", "=", "'http://'", ")", ":", "opening", ",", "url", ",", "closing", "=", "separate_parentheses", "(", "url", ")", "punct", "=", "re_find", "(", "punct_re", ",", "url", ")", "if", "punct", ":", "url", "=", "url", "[", ":", "-", "len", "(", "punct", ")", "]", "if", "re", ".", "search", "(", "proto_re", ",", "url", ")", ":", "href", "=", "url", "else", ":", "href", "=", "proto", "+", "url", "href", "=", "escape_url", "(", "href", ")", "repl", "=", "u'{0!s}<a href=\"{1!s}\"{2!s}>{3!s}</a>{4!s}{5!s}'", "return", "repl", ".", "format", "(", "opening", ",", "href", ",", "attrs_text", ",", "url", ",", "punct", ",", "closing", ")", "def", "repl", "(", "match", ")", ":", "matches", "=", "match", ".", "groupdict", "(", ")", "if", "matches", "[", "'url'", "]", ":", "return", "link_repl", "(", "matches", "[", "'url'", "]", ")", "else", ":", "return", "link_repl", "(", "matches", "[", "'email'", "]", ",", "proto", "=", "'mailto:'", ")", "# Prepare attrs", "attr", "=", "' {0!s}=\"{1!s}\"'", "attrs_text", "=", "''", ".", "join", "(", "starmap", "(", "attr", ".", "format", ",", "attrs", ".", "items", "(", ")", ")", ")", "# Make replaces", "return", "re", ".", "sub", "(", "combined_re", ",", "repl", ",", "force_unicode", "(", "text", ")", ")" ]
Convert URL-like and email-like strings into links.
[ "Convert", "URL", "-", "like", "and", "email", "-", "like", "strings", "into", "links", "." ]
train
https://github.com/Suor/autolink/blob/0a101e6fb6359ae18fce1b9f8907ff9113a6086f/autolink.py#L53-L96
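A short usage sketch for the `linkify` record above, assuming the module is importable as `autolink`; the exact markup of the generated anchors depends on the module's regexes, which are not included in this dump.

```python
from autolink import linkify

# URL-like and email-like strings become <a> tags; entries in `attrs`
# are rendered as extra HTML attributes on each generated anchor.
html = linkify('Mail me@example.com or see example.com/docs',
               attrs={'target': '_blank'})
print(html)
```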
Suor/autolink
autolink.py
force_unicode
def force_unicode(s, encoding='utf-8', errors='strict'): """ Similar to smart_text, except that lazy instances are resolved to strings, rather than kept as lazy objects. """ # Handle the common case first, saves 30-40% when s is an instance of # six.text_type. This function gets called often in that setting. if isinstance(s, six.text_type): return s if not isinstance(s, six.string_types): if six.PY3: if isinstance(s, bytes): s = six.text_type(s, encoding, errors) else: s = six.text_type(s) else: s = six.text_type(bytes(s), encoding, errors) else: # Note: We use .decode() here, instead of six.text_type(s, # encoding, errors), so that if s is a SafeBytes, it ends up being # a SafeText at the end. s = s.decode(encoding, errors) return s
python
def force_unicode(s, encoding='utf-8', errors='strict'): """ Similar to smart_text, except that lazy instances are resolved to strings, rather than kept as lazy objects. """ # Handle the common case first, saves 30-40% when s is an instance of # six.text_type. This function gets called often in that setting. if isinstance(s, six.text_type): return s if not isinstance(s, six.string_types): if six.PY3: if isinstance(s, bytes): s = six.text_type(s, encoding, errors) else: s = six.text_type(s) else: s = six.text_type(bytes(s), encoding, errors) else: # Note: We use .decode() here, instead of six.text_type(s, # encoding, errors), so that if s is a SafeBytes, it ends up being # a SafeText at the end. s = s.decode(encoding, errors) return s
[ "def", "force_unicode", "(", "s", ",", "encoding", "=", "'utf-8'", ",", "errors", "=", "'strict'", ")", ":", "# Handle the common case first, saves 30-40% when s is an instance of", "# six.text_type. This function gets called often in that setting.", "if", "isinstance", "(", "s", ",", "six", ".", "text_type", ")", ":", "return", "s", "if", "not", "isinstance", "(", "s", ",", "six", ".", "string_types", ")", ":", "if", "six", ".", "PY3", ":", "if", "isinstance", "(", "s", ",", "bytes", ")", ":", "s", "=", "six", ".", "text_type", "(", "s", ",", "encoding", ",", "errors", ")", "else", ":", "s", "=", "six", ".", "text_type", "(", "s", ")", "else", ":", "s", "=", "six", ".", "text_type", "(", "bytes", "(", "s", ")", ",", "encoding", ",", "errors", ")", "else", ":", "# Note: We use .decode() here, instead of six.text_type(s,", "# encoding, errors), so that if s is a SafeBytes, it ends up being", "# a SafeText at the end.", "s", "=", "s", ".", "decode", "(", "encoding", ",", "errors", ")", "return", "s" ]
Similar to smart_text, except that lazy instances are resolved to strings, rather than kept as lazy objects.
[ "Similar", "to", "smart_text", "except", "that", "lazy", "instances", "are", "resolved", "to", "strings", "rather", "than", "kept", "as", "lazy", "objects", "." ]
train
https://github.com/Suor/autolink/blob/0a101e6fb6359ae18fce1b9f8907ff9113a6086f/autolink.py#L103-L125
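A quick sketch of `force_unicode` behaviour. It is a module-level helper in autolink.py, so the import below assumes it is reachable from the `autolink` module; the commented results follow from the code shown above when running on Python 3.

```python
from autolink import force_unicode

print(force_unicode(b'caf\xc3\xa9'))   # 'café'  -- bytes are decoded with the given encoding (UTF-8)
print(force_unicode(u'already text'))  # returned unchanged via the fast path
print(force_unicode(42))               # '42'    -- non-strings are coerced through six.text_type
```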
mjirik/io3d
io3d/hdf5_io.py
save_dict_to_hdf5
def save_dict_to_hdf5(dic, filename): """ .... """ with h5py.File(filename, 'w') as h5file: rf = recursively_save_dict_contents_to_group(h5file, '/', dic) h5_rf = h5file.create_group("_reconstruction_flags") # h5_rf = h5file.create_group("_reconstruction_key_flags") for k, v in rf.items(): h5_rf.create_dataset("/_reconstruction_flags" + k, data=v)
python
def save_dict_to_hdf5(dic, filename): """ .... """ with h5py.File(filename, 'w') as h5file: rf = recursively_save_dict_contents_to_group(h5file, '/', dic) h5_rf = h5file.create_group("_reconstruction_flags") # h5_rf = h5file.create_group("_reconstruction_key_flags") for k, v in rf.items(): h5_rf.create_dataset("/_reconstruction_flags" + k, data=v)
[ "def", "save_dict_to_hdf5", "(", "dic", ",", "filename", ")", ":", "with", "h5py", ".", "File", "(", "filename", ",", "'w'", ")", "as", "h5file", ":", "rf", "=", "recursively_save_dict_contents_to_group", "(", "h5file", ",", "'/'", ",", "dic", ")", "h5_rf", "=", "h5file", ".", "create_group", "(", "\"_reconstruction_flags\"", ")", "# h5_rf = h5file.create_group(\"_reconstruction_key_flags\")", "for", "k", ",", "v", "in", "rf", ".", "items", "(", ")", ":", "h5_rf", ".", "create_dataset", "(", "\"/_reconstruction_flags\"", "+", "k", ",", "data", "=", "v", ")" ]
....
[ "...." ]
train
https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/hdf5_io.py#L16-L25
mjirik/io3d
io3d/hdf5_io.py
recursively_save_dict_contents_to_group
def recursively_save_dict_contents_to_group(h5file, path, dic): """ .... """ reconstruction_flags = {} # reconstruction_key_flags = {} for key, item in dic.items(): if type(key) is not str: # import pickle # key = pickle.dumps(key).decode("ascii") import json key = json.dumps(key) reconstruction_flags[path + key + "_key_/"] = "json_key" if item is None: import json jitem = json.dumps(item) h5file[path + key] = jitem reconstruction_flags[path + key + "_typ_/"] = "json_value" elif isinstance(item, (np.ndarray, np.int64, np.float64, str, bytes)): h5file[path + key] = item elif isinstance(item, (float)): h5file[path + key] = item reconstruction_flags[path + key + "_typ_/"] = "float" elif isinstance(item, (int)): h5file[path + key] = item reconstruction_flags[path + key + "_typ_/"] = "int" elif isinstance(item, dict): rf = recursively_save_dict_contents_to_group(h5file, path + key + '/', item) reconstruction_flags.update(rf) # reconstruction_key_flags.update(rkf) elif isinstance(item, list): # i = iter(item) item_dict = dict(zip(range(len(item)), item)) wholekey = path + key + "_typ_/" reconstruction_flags[wholekey] = "list" rf = recursively_save_dict_contents_to_group(h5file, path + key + '/', item_dict) reconstruction_flags.update(rf) # reconstruction_key_flags.update(rkf) elif isinstance(item, tuple): # i = iter(item) item_dict = dict(zip(range(len(item)), item)) wholekey = path + key + "_typ_/" reconstruction_flags[wholekey] = "tuple" rf = recursively_save_dict_contents_to_group(h5file, path + key + '/', item_dict) reconstruction_flags.update(rf) else: logger.info("Saving type {} with json".format(type(item))) import json jitem = json.dumps(item) h5file[path + key] = jitem reconstruction_flags[path + key + "_typ_/"] = "json_value" # raise ValueError('Cannot save %s type'%type(item)) return reconstruction_flags
python
def recursively_save_dict_contents_to_group(h5file, path, dic): """ .... """ reconstruction_flags = {} # reconstruction_key_flags = {} for key, item in dic.items(): if type(key) is not str: # import pickle # key = pickle.dumps(key).decode("ascii") import json key = json.dumps(key) reconstruction_flags[path + key + "_key_/"] = "json_key" if item is None: import json jitem = json.dumps(item) h5file[path + key] = jitem reconstruction_flags[path + key + "_typ_/"] = "json_value" elif isinstance(item, (np.ndarray, np.int64, np.float64, str, bytes)): h5file[path + key] = item elif isinstance(item, (float)): h5file[path + key] = item reconstruction_flags[path + key + "_typ_/"] = "float" elif isinstance(item, (int)): h5file[path + key] = item reconstruction_flags[path + key + "_typ_/"] = "int" elif isinstance(item, dict): rf = recursively_save_dict_contents_to_group(h5file, path + key + '/', item) reconstruction_flags.update(rf) # reconstruction_key_flags.update(rkf) elif isinstance(item, list): # i = iter(item) item_dict = dict(zip(range(len(item)), item)) wholekey = path + key + "_typ_/" reconstruction_flags[wholekey] = "list" rf = recursively_save_dict_contents_to_group(h5file, path + key + '/', item_dict) reconstruction_flags.update(rf) # reconstruction_key_flags.update(rkf) elif isinstance(item, tuple): # i = iter(item) item_dict = dict(zip(range(len(item)), item)) wholekey = path + key + "_typ_/" reconstruction_flags[wholekey] = "tuple" rf = recursively_save_dict_contents_to_group(h5file, path + key + '/', item_dict) reconstruction_flags.update(rf) else: logger.info("Saving type {} with json".format(type(item))) import json jitem = json.dumps(item) h5file[path + key] = jitem reconstruction_flags[path + key + "_typ_/"] = "json_value" # raise ValueError('Cannot save %s type'%type(item)) return reconstruction_flags
[ "def", "recursively_save_dict_contents_to_group", "(", "h5file", ",", "path", ",", "dic", ")", ":", "reconstruction_flags", "=", "{", "}", "# reconstruction_key_flags = {}", "for", "key", ",", "item", "in", "dic", ".", "items", "(", ")", ":", "if", "type", "(", "key", ")", "is", "not", "str", ":", "# import pickle", "# key = pickle.dumps(key).decode(\"ascii\")", "import", "json", "key", "=", "json", ".", "dumps", "(", "key", ")", "reconstruction_flags", "[", "path", "+", "key", "+", "\"_key_/\"", "]", "=", "\"json_key\"", "if", "item", "is", "None", ":", "import", "json", "jitem", "=", "json", ".", "dumps", "(", "item", ")", "h5file", "[", "path", "+", "key", "]", "=", "jitem", "reconstruction_flags", "[", "path", "+", "key", "+", "\"_typ_/\"", "]", "=", "\"json_value\"", "elif", "isinstance", "(", "item", ",", "(", "np", ".", "ndarray", ",", "np", ".", "int64", ",", "np", ".", "float64", ",", "str", ",", "bytes", ")", ")", ":", "h5file", "[", "path", "+", "key", "]", "=", "item", "elif", "isinstance", "(", "item", ",", "(", "float", ")", ")", ":", "h5file", "[", "path", "+", "key", "]", "=", "item", "reconstruction_flags", "[", "path", "+", "key", "+", "\"_typ_/\"", "]", "=", "\"float\"", "elif", "isinstance", "(", "item", ",", "(", "int", ")", ")", ":", "h5file", "[", "path", "+", "key", "]", "=", "item", "reconstruction_flags", "[", "path", "+", "key", "+", "\"_typ_/\"", "]", "=", "\"int\"", "elif", "isinstance", "(", "item", ",", "dict", ")", ":", "rf", "=", "recursively_save_dict_contents_to_group", "(", "h5file", ",", "path", "+", "key", "+", "'/'", ",", "item", ")", "reconstruction_flags", ".", "update", "(", "rf", ")", "# reconstruction_key_flags.update(rkf)", "elif", "isinstance", "(", "item", ",", "list", ")", ":", "# i = iter(item)", "item_dict", "=", "dict", "(", "zip", "(", "range", "(", "len", "(", "item", ")", ")", ",", "item", ")", ")", "wholekey", "=", "path", "+", "key", "+", "\"_typ_/\"", "reconstruction_flags", "[", "wholekey", "]", "=", "\"list\"", "rf", "=", "recursively_save_dict_contents_to_group", "(", "h5file", ",", "path", "+", "key", "+", "'/'", ",", "item_dict", ")", "reconstruction_flags", ".", "update", "(", "rf", ")", "# reconstruction_key_flags.update(rkf)", "elif", "isinstance", "(", "item", ",", "tuple", ")", ":", "# i = iter(item)", "item_dict", "=", "dict", "(", "zip", "(", "range", "(", "len", "(", "item", ")", ")", ",", "item", ")", ")", "wholekey", "=", "path", "+", "key", "+", "\"_typ_/\"", "reconstruction_flags", "[", "wholekey", "]", "=", "\"tuple\"", "rf", "=", "recursively_save_dict_contents_to_group", "(", "h5file", ",", "path", "+", "key", "+", "'/'", ",", "item_dict", ")", "reconstruction_flags", ".", "update", "(", "rf", ")", "else", ":", "logger", ".", "info", "(", "\"Saving type {} with json\"", ".", "format", "(", "type", "(", "item", ")", ")", ")", "import", "json", "jitem", "=", "json", ".", "dumps", "(", "item", ")", "h5file", "[", "path", "+", "key", "]", "=", "jitem", "reconstruction_flags", "[", "path", "+", "key", "+", "\"_typ_/\"", "]", "=", "\"json_value\"", "# raise ValueError('Cannot save %s type'%type(item))", "return", "reconstruction_flags" ]
....
[ "...." ]
train
https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/hdf5_io.py#L30-L83
mjirik/io3d
io3d/hdf5_io.py
recursively_load_dict_contents_from_group
def recursively_load_dict_contents_from_group(h5file, path): """ .... """ rf = h5file["_reconstruction_flags"] # rkf = h5file["_reconstruction_key_flags"] ans = {} for key, item in h5file[path].items(): dest_key = key # if key in ("_reconstruction_flags", "_reconstruction_key_flags"): if key in "_reconstruction_flags": continue kkey = key + "_key_" tkey = key + "_typ_" if kkey in rf: flag = rf[kkey] if flag.value == "json_key": import json dest_key = json.loads(key) # import pickle # dest_key = pickle.loads(key.encode("ascii")) # logger.debug("unpickling key") if tkey in rf: flag = rf[tkey] if flag.value == "list": dict_to_output = recursively_load_dict_contents_from_group(h5file, path + key + '/') ans[dest_key] = list(dict_to_output.values()) continue if flag.value == "tuple": dict_to_output = recursively_load_dict_contents_from_group(h5file, path + key + '/') ans[dest_key] = tuple(dict_to_output.values()) continue elif flag.value == "json_value": import json ans[dest_key] = json.loads(item.value) continue elif flag.value == "float": ans[dest_key] = float(item.value) continue elif flag.value == "int": ans[dest_key] = int(item.value) continue if isinstance(item, h5py._hl.dataset.Dataset): ans[dest_key] = item.value elif isinstance(item, h5py._hl.group.Group): ans[dest_key] = recursively_load_dict_contents_from_group(h5file, path + key + '/') return ans
python
def recursively_load_dict_contents_from_group(h5file, path): """ .... """ rf = h5file["_reconstruction_flags"] # rkf = h5file["_reconstruction_key_flags"] ans = {} for key, item in h5file[path].items(): dest_key = key # if key in ("_reconstruction_flags", "_reconstruction_key_flags"): if key in "_reconstruction_flags": continue kkey = key + "_key_" tkey = key + "_typ_" if kkey in rf: flag = rf[kkey] if flag.value == "json_key": import json dest_key = json.loads(key) # import pickle # dest_key = pickle.loads(key.encode("ascii")) # logger.debug("unpickling key") if tkey in rf: flag = rf[tkey] if flag.value == "list": dict_to_output = recursively_load_dict_contents_from_group(h5file, path + key + '/') ans[dest_key] = list(dict_to_output.values()) continue if flag.value == "tuple": dict_to_output = recursively_load_dict_contents_from_group(h5file, path + key + '/') ans[dest_key] = tuple(dict_to_output.values()) continue elif flag.value == "json_value": import json ans[dest_key] = json.loads(item.value) continue elif flag.value == "float": ans[dest_key] = float(item.value) continue elif flag.value == "int": ans[dest_key] = int(item.value) continue if isinstance(item, h5py._hl.dataset.Dataset): ans[dest_key] = item.value elif isinstance(item, h5py._hl.group.Group): ans[dest_key] = recursively_load_dict_contents_from_group(h5file, path + key + '/') return ans
[ "def", "recursively_load_dict_contents_from_group", "(", "h5file", ",", "path", ")", ":", "rf", "=", "h5file", "[", "\"_reconstruction_flags\"", "]", "# rkf = h5file[\"_reconstruction_key_flags\"]", "ans", "=", "{", "}", "for", "key", ",", "item", "in", "h5file", "[", "path", "]", ".", "items", "(", ")", ":", "dest_key", "=", "key", "# if key in (\"_reconstruction_flags\", \"_reconstruction_key_flags\"):", "if", "key", "in", "\"_reconstruction_flags\"", ":", "continue", "kkey", "=", "key", "+", "\"_key_\"", "tkey", "=", "key", "+", "\"_typ_\"", "if", "kkey", "in", "rf", ":", "flag", "=", "rf", "[", "kkey", "]", "if", "flag", ".", "value", "==", "\"json_key\"", ":", "import", "json", "dest_key", "=", "json", ".", "loads", "(", "key", ")", "# import pickle", "# dest_key = pickle.loads(key.encode(\"ascii\"))", "# logger.debug(\"unpickling key\")", "if", "tkey", "in", "rf", ":", "flag", "=", "rf", "[", "tkey", "]", "if", "flag", ".", "value", "==", "\"list\"", ":", "dict_to_output", "=", "recursively_load_dict_contents_from_group", "(", "h5file", ",", "path", "+", "key", "+", "'/'", ")", "ans", "[", "dest_key", "]", "=", "list", "(", "dict_to_output", ".", "values", "(", ")", ")", "continue", "if", "flag", ".", "value", "==", "\"tuple\"", ":", "dict_to_output", "=", "recursively_load_dict_contents_from_group", "(", "h5file", ",", "path", "+", "key", "+", "'/'", ")", "ans", "[", "dest_key", "]", "=", "tuple", "(", "dict_to_output", ".", "values", "(", ")", ")", "continue", "elif", "flag", ".", "value", "==", "\"json_value\"", ":", "import", "json", "ans", "[", "dest_key", "]", "=", "json", ".", "loads", "(", "item", ".", "value", ")", "continue", "elif", "flag", ".", "value", "==", "\"float\"", ":", "ans", "[", "dest_key", "]", "=", "float", "(", "item", ".", "value", ")", "continue", "elif", "flag", ".", "value", "==", "\"int\"", ":", "ans", "[", "dest_key", "]", "=", "int", "(", "item", ".", "value", ")", "continue", "if", "isinstance", "(", "item", ",", "h5py", ".", "_hl", ".", "dataset", ".", "Dataset", ")", ":", "ans", "[", "dest_key", "]", "=", "item", ".", "value", "elif", "isinstance", "(", "item", ",", "h5py", ".", "_hl", ".", "group", ".", "Group", ")", ":", "ans", "[", "dest_key", "]", "=", "recursively_load_dict_contents_from_group", "(", "h5file", ",", "path", "+", "key", "+", "'/'", ")", "return", "ans" ]
....
[ "...." ]
train
https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/hdf5_io.py#L92-L140
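The three `io3d/hdf5_io.py` records above carry only placeholder docstrings, so a round-trip usage sketch may help. The import path is an assumption based on the file location; both functions used below appear in this dump, and the keys and values are illustrative (exact restored types depend on h5py).

```python
import h5py
from io3d.hdf5_io import save_dict_to_hdf5, recursively_load_dict_contents_from_group

data = {
    'voxelsize_mm': [1.0, 0.5, 0.5],     # stored via the "list" reconstruction flag
    'series_number': 7,                  # stored via the "int" flag
    'metadata': {'modality': 'CT'},      # nested dicts become HDF5 groups
}
save_dict_to_hdf5(data, 'sample.h5')

with h5py.File('sample.h5', 'r') as h5file:
    restored = recursively_load_dict_contents_from_group(h5file, '/')
print(restored)
```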
PlaidWeb/Pushl
pushl/__main__.py
parse_args
def parse_args(*args): """ Parse the arguments for the command """ parser = argparse.ArgumentParser( description="Send push notifications for a feed") parser.add_argument('--version', action='version', version="%(prog)s " + __version__.__version__) parser.add_argument('feeds', type=str, nargs='*', metavar='feed_url', help='A URL for a feed to process') parser.add_argument('--cache', '-c', type=str, dest='cache_dir', help='Cache storage directory', required=False) parser.add_argument("-v", "--verbosity", action="count", help="increase output verbosity", default=0) parser.add_argument("-e", "--entry", nargs='+', help='URLs to entries/pages to index directly', metavar='entry_url', dest='entries') parser.add_argument("-s", "--websub-only", nargs='+', help='URLs/feeds to only send WebSub notifications for', metavar='feed_url', dest='websub_only') parser.add_argument('--timeout', '-t', type=int, dest='timeout', help='Connection timeout, in seconds', default=120) parser.add_argument('--max-connections', type=int, dest='max_connections', help='Maximum number of connections to have open at once', default=100) parser.add_argument('--max-per-host', type=int, dest='max_per_host', help='Maximum number of connections per host', default=0) parser.add_argument('--rel-whitelist', '-w', dest='rel_whitelist', type=str, help="Comma-separated list of link RELs to whitelist" + " for sending webmentions") parser.add_argument('--rel-blacklist', '-b', dest='rel_blacklist', type=str, help="Comma-separated list of link RELs to blacklist" + " from sending webmentions", default="nofollow") parser.add_argument('--max-time', '-m', dest='max_time', type=float, help="Maximum time (in seconds) to spend on this", default=1800) parser.add_argument('--user-agent', dest='user_agent', type=str, help="User-agent string to send", default=__version__.USER_AGENT) feature = parser.add_mutually_exclusive_group(required=False) feature.add_argument('--keepalive', dest='keepalive', action='store_true', help="Keep TCP connections alive") feature.add_argument('--no-keepalive', dest='keepalive', action='store_false', help="Don't keep TCP connections alive") feature.set_defaults(keepalive=False) feature = parser.add_mutually_exclusive_group(required=False) feature.add_argument('--archive', '-a', dest='archive', action='store_true', help='Process archive links in the feed per RFC 5005') feature.add_argument('--no-archive', dest='archive', action='store_false', help='Do not process archive links in the feed') feature.set_defaults(archive=False) feature = parser.add_mutually_exclusive_group(required=False) feature.add_argument('--recurse', '-r', help="Recursively check other discovered feeds", action='store_true', dest='recurse') feature.add_argument('--no-recurse', dest='recurse', action='store_false', help="Do not recurse into other feeds") feature.set_defaults(recurse=False) return parser.parse_args(*args)
python
def parse_args(*args): """ Parse the arguments for the command """ parser = argparse.ArgumentParser( description="Send push notifications for a feed") parser.add_argument('--version', action='version', version="%(prog)s " + __version__.__version__) parser.add_argument('feeds', type=str, nargs='*', metavar='feed_url', help='A URL for a feed to process') parser.add_argument('--cache', '-c', type=str, dest='cache_dir', help='Cache storage directory', required=False) parser.add_argument("-v", "--verbosity", action="count", help="increase output verbosity", default=0) parser.add_argument("-e", "--entry", nargs='+', help='URLs to entries/pages to index directly', metavar='entry_url', dest='entries') parser.add_argument("-s", "--websub-only", nargs='+', help='URLs/feeds to only send WebSub notifications for', metavar='feed_url', dest='websub_only') parser.add_argument('--timeout', '-t', type=int, dest='timeout', help='Connection timeout, in seconds', default=120) parser.add_argument('--max-connections', type=int, dest='max_connections', help='Maximum number of connections to have open at once', default=100) parser.add_argument('--max-per-host', type=int, dest='max_per_host', help='Maximum number of connections per host', default=0) parser.add_argument('--rel-whitelist', '-w', dest='rel_whitelist', type=str, help="Comma-separated list of link RELs to whitelist" + " for sending webmentions") parser.add_argument('--rel-blacklist', '-b', dest='rel_blacklist', type=str, help="Comma-separated list of link RELs to blacklist" + " from sending webmentions", default="nofollow") parser.add_argument('--max-time', '-m', dest='max_time', type=float, help="Maximum time (in seconds) to spend on this", default=1800) parser.add_argument('--user-agent', dest='user_agent', type=str, help="User-agent string to send", default=__version__.USER_AGENT) feature = parser.add_mutually_exclusive_group(required=False) feature.add_argument('--keepalive', dest='keepalive', action='store_true', help="Keep TCP connections alive") feature.add_argument('--no-keepalive', dest='keepalive', action='store_false', help="Don't keep TCP connections alive") feature.set_defaults(keepalive=False) feature = parser.add_mutually_exclusive_group(required=False) feature.add_argument('--archive', '-a', dest='archive', action='store_true', help='Process archive links in the feed per RFC 5005') feature.add_argument('--no-archive', dest='archive', action='store_false', help='Do not process archive links in the feed') feature.set_defaults(archive=False) feature = parser.add_mutually_exclusive_group(required=False) feature.add_argument('--recurse', '-r', help="Recursively check other discovered feeds", action='store_true', dest='recurse') feature.add_argument('--no-recurse', dest='recurse', action='store_false', help="Do not recurse into other feeds") feature.set_defaults(recurse=False) return parser.parse_args(*args)
[ "def", "parse_args", "(", "*", "args", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "\"Send push notifications for a feed\"", ")", "parser", ".", "add_argument", "(", "'--version'", ",", "action", "=", "'version'", ",", "version", "=", "\"%(prog)s \"", "+", "__version__", ".", "__version__", ")", "parser", ".", "add_argument", "(", "'feeds'", ",", "type", "=", "str", ",", "nargs", "=", "'*'", ",", "metavar", "=", "'feed_url'", ",", "help", "=", "'A URL for a feed to process'", ")", "parser", ".", "add_argument", "(", "'--cache'", ",", "'-c'", ",", "type", "=", "str", ",", "dest", "=", "'cache_dir'", ",", "help", "=", "'Cache storage directory'", ",", "required", "=", "False", ")", "parser", ".", "add_argument", "(", "\"-v\"", ",", "\"--verbosity\"", ",", "action", "=", "\"count\"", ",", "help", "=", "\"increase output verbosity\"", ",", "default", "=", "0", ")", "parser", ".", "add_argument", "(", "\"-e\"", ",", "\"--entry\"", ",", "nargs", "=", "'+'", ",", "help", "=", "'URLs to entries/pages to index directly'", ",", "metavar", "=", "'entry_url'", ",", "dest", "=", "'entries'", ")", "parser", ".", "add_argument", "(", "\"-s\"", ",", "\"--websub-only\"", ",", "nargs", "=", "'+'", ",", "help", "=", "'URLs/feeds to only send WebSub notifications for'", ",", "metavar", "=", "'feed_url'", ",", "dest", "=", "'websub_only'", ")", "parser", ".", "add_argument", "(", "'--timeout'", ",", "'-t'", ",", "type", "=", "int", ",", "dest", "=", "'timeout'", ",", "help", "=", "'Connection timeout, in seconds'", ",", "default", "=", "120", ")", "parser", ".", "add_argument", "(", "'--max-connections'", ",", "type", "=", "int", ",", "dest", "=", "'max_connections'", ",", "help", "=", "'Maximum number of connections to have open at once'", ",", "default", "=", "100", ")", "parser", ".", "add_argument", "(", "'--max-per-host'", ",", "type", "=", "int", ",", "dest", "=", "'max_per_host'", ",", "help", "=", "'Maximum number of connections per host'", ",", "default", "=", "0", ")", "parser", ".", "add_argument", "(", "'--rel-whitelist'", ",", "'-w'", ",", "dest", "=", "'rel_whitelist'", ",", "type", "=", "str", ",", "help", "=", "\"Comma-separated list of link RELs to whitelist\"", "+", "\" for sending webmentions\"", ")", "parser", ".", "add_argument", "(", "'--rel-blacklist'", ",", "'-b'", ",", "dest", "=", "'rel_blacklist'", ",", "type", "=", "str", ",", "help", "=", "\"Comma-separated list of link RELs to blacklist\"", "+", "\" from sending webmentions\"", ",", "default", "=", "\"nofollow\"", ")", "parser", ".", "add_argument", "(", "'--max-time'", ",", "'-m'", ",", "dest", "=", "'max_time'", ",", "type", "=", "float", ",", "help", "=", "\"Maximum time (in seconds) to spend on this\"", ",", "default", "=", "1800", ")", "parser", ".", "add_argument", "(", "'--user-agent'", ",", "dest", "=", "'user_agent'", ",", "type", "=", "str", ",", "help", "=", "\"User-agent string to send\"", ",", "default", "=", "__version__", ".", "USER_AGENT", ")", "feature", "=", "parser", ".", "add_mutually_exclusive_group", "(", "required", "=", "False", ")", "feature", ".", "add_argument", "(", "'--keepalive'", ",", "dest", "=", "'keepalive'", ",", "action", "=", "'store_true'", ",", "help", "=", "\"Keep TCP connections alive\"", ")", "feature", ".", "add_argument", "(", "'--no-keepalive'", ",", "dest", "=", "'keepalive'", ",", "action", "=", "'store_false'", ",", "help", "=", "\"Don't keep TCP connections alive\"", ")", "feature", ".", "set_defaults", "(", "keepalive", "=", "False", ")", 
"feature", "=", "parser", ".", "add_mutually_exclusive_group", "(", "required", "=", "False", ")", "feature", ".", "add_argument", "(", "'--archive'", ",", "'-a'", ",", "dest", "=", "'archive'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Process archive links in the feed per RFC 5005'", ")", "feature", ".", "add_argument", "(", "'--no-archive'", ",", "dest", "=", "'archive'", ",", "action", "=", "'store_false'", ",", "help", "=", "'Do not process archive links in the feed'", ")", "feature", ".", "set_defaults", "(", "archive", "=", "False", ")", "feature", "=", "parser", ".", "add_mutually_exclusive_group", "(", "required", "=", "False", ")", "feature", ".", "add_argument", "(", "'--recurse'", ",", "'-r'", ",", "help", "=", "\"Recursively check other discovered feeds\"", ",", "action", "=", "'store_true'", ",", "dest", "=", "'recurse'", ")", "feature", ".", "add_argument", "(", "'--no-recurse'", ",", "dest", "=", "'recurse'", ",", "action", "=", "'store_false'", ",", "help", "=", "\"Do not recurse into other feeds\"", ")", "feature", ".", "set_defaults", "(", "recurse", "=", "False", ")", "return", "parser", ".", "parse_args", "(", "*", "args", ")" ]
Parse the arguments for the command
[ "Parse", "the", "arguments", "for", "the", "command" ]
train
https://github.com/PlaidWeb/Pushl/blob/5ea92275c37a6c1989e3d5f53e26c6e0ebfb9a8c/pushl/__main__.py#L17-L89
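For the `parse_args` record above, a quick sketch of calling the parser with an explicit argument list. The URLs and paths are illustrative, and the import path is assumed from the file location pushl/__main__.py.

```python
from pushl.__main__ import parse_args

args = parse_args(['https://example.com/feed.xml',
                   '--cache', '/tmp/pushl-cache',
                   '-vv',
                   '--entry', 'https://example.com/some-entry'])
print(args.feeds)      # ['https://example.com/feed.xml']
print(args.cache_dir)  # /tmp/pushl-cache
print(args.verbosity)  # 2
print(args.entries)    # ['https://example.com/some-entry']
```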
PlaidWeb/Pushl
pushl/__main__.py
main
def main(): """ main entry point """ args = parse_args() logging.basicConfig(level=LOG_LEVELS[min( args.verbosity, len(LOG_LEVELS) - 1)]) loop = asyncio.get_event_loop() loop.run_until_complete(_run(args))
python
def main(): """ main entry point """ args = parse_args() logging.basicConfig(level=LOG_LEVELS[min( args.verbosity, len(LOG_LEVELS) - 1)]) loop = asyncio.get_event_loop() loop.run_until_complete(_run(args))
[ "def", "main", "(", ")", ":", "args", "=", "parse_args", "(", ")", "logging", ".", "basicConfig", "(", "level", "=", "LOG_LEVELS", "[", "min", "(", "args", ".", "verbosity", ",", "len", "(", "LOG_LEVELS", ")", "-", "1", ")", "]", ")", "loop", "=", "asyncio", ".", "get_event_loop", "(", ")", "loop", ".", "run_until_complete", "(", "_run", "(", "args", ")", ")" ]
main entry point
[ "main", "entry", "point" ]
train
https://github.com/PlaidWeb/Pushl/blob/5ea92275c37a6c1989e3d5f53e26c6e0ebfb9a8c/pushl/__main__.py#L92-L99
launchdarkly/relayCommander
relay_commander/ld.py
LaunchDarklyApi.get_environments
def get_environments(self, project_key: str) -> dict: """ Retrieve all environments for a given project. Includes name, key, and mobile key. :param project_key: Key for project. :returns: dictionary of environments. """ try: resp = self.client.get_project(project_key) except launchdarkly_api.rest.ApiException as ex: msg = "Unable to get environments." resp = "API response was {0} {1}.".format(ex.status, ex.reason) LOG.error("%s %s", msg, resp) sys.exit(1) envs = [] for env in resp.environments: env = dict( key=env.key, api_key=env.api_key, client_id=env.id ) envs.append(env) return envs
python
def get_environments(self, project_key: str) -> dict: """ Retrieve all environments for a given project. Includes name, key, and mobile key. :param project_key: Key for project. :returns: dictionary of environments. """ try: resp = self.client.get_project(project_key) except launchdarkly_api.rest.ApiException as ex: msg = "Unable to get environments." resp = "API response was {0} {1}.".format(ex.status, ex.reason) LOG.error("%s %s", msg, resp) sys.exit(1) envs = [] for env in resp.environments: env = dict( key=env.key, api_key=env.api_key, client_id=env.id ) envs.append(env) return envs
[ "def", "get_environments", "(", "self", ",", "project_key", ":", "str", ")", "->", "dict", ":", "try", ":", "resp", "=", "self", ".", "client", ".", "get_project", "(", "project_key", ")", "except", "launchdarkly_api", ".", "rest", ".", "ApiException", "as", "ex", ":", "msg", "=", "\"Unable to get environments.\"", "resp", "=", "\"API response was {0} {1}.\"", ".", "format", "(", "ex", ".", "status", ",", "ex", ".", "reason", ")", "LOG", ".", "error", "(", "\"%s %s\"", ",", "msg", ",", "resp", ")", "sys", ".", "exit", "(", "1", ")", "envs", "=", "[", "]", "for", "env", "in", "resp", ".", "environments", ":", "env", "=", "dict", "(", "key", "=", "env", ".", "key", ",", "api_key", "=", "env", ".", "api_key", ",", "client_id", "=", "env", ".", "id", ")", "envs", ".", "append", "(", "env", ")", "return", "envs" ]
Retrieve all environments for a given project. Includes name, key, and mobile key. :param project_key: Key for project. :returns: dictionary of environments.
[ "Retrieve", "all", "environments", "for", "a", "given", "project", "." ]
train
https://github.com/launchdarkly/relayCommander/blob/eee7fa22f04edc3854dd53c3ec2db8c599ad1e89/relay_commander/ld.py#L50-L78
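Note that `get_environments` builds and returns a list of per-environment dicts (despite the ":returns: dictionary" wording in its docstring). A sketch of the returned shape, with purely illustrative values:

```python
# Each element mirrors the dict built inside get_environments().
envs = [
    {'key': 'production', 'api_key': 'sdk-xxxxxxxx', 'client_id': '5a8f1b...'},
    {'key': 'staging',    'api_key': 'sdk-yyyyyyyy', 'client_id': '5a8f1c...'},
]
for env in envs:
    print(env['key'], env['client_id'])
```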
launchdarkly/relayCommander
relay_commander/ld.py
LaunchDarklyApi.update_flag
def update_flag(self, state: str, feature_key: str) \ -> launchdarkly_api.FeatureFlag: """ Update the flag status for the specified feature flag. :param state: New feature flag state :param featureKey: Feature flag key :returns: FeatureFlag object. """ build_env = "/environments/" + self.environment_key + "/on" patch_comment = [{"op": "replace", "path": build_env, "value": state}] try: resp = self.feature.patch_feature_flag( self.project_key, feature_key, patch_comment) except launchdarkly_api.rest.ApiException as ex: msg = "Unable to update flag." resp = "API response was {0} {1}.".format(ex.status, ex.reason) LOG.error("%s %s", msg, resp) sys.exit(1) return resp
python
def update_flag(self, state: str, feature_key: str) \ -> launchdarkly_api.FeatureFlag: """ Update the flag status for the specified feature flag. :param state: New feature flag state :param featureKey: Feature flag key :returns: FeatureFlag object. """ build_env = "/environments/" + self.environment_key + "/on" patch_comment = [{"op": "replace", "path": build_env, "value": state}] try: resp = self.feature.patch_feature_flag( self.project_key, feature_key, patch_comment) except launchdarkly_api.rest.ApiException as ex: msg = "Unable to update flag." resp = "API response was {0} {1}.".format(ex.status, ex.reason) LOG.error("%s %s", msg, resp) sys.exit(1) return resp
[ "def", "update_flag", "(", "self", ",", "state", ":", "str", ",", "feature_key", ":", "str", ")", "->", "launchdarkly_api", ".", "FeatureFlag", ":", "build_env", "=", "\"/environments/\"", "+", "self", ".", "environment_key", "+", "\"/on\"", "patch_comment", "=", "[", "{", "\"op\"", ":", "\"replace\"", ",", "\"path\"", ":", "build_env", ",", "\"value\"", ":", "state", "}", "]", "try", ":", "resp", "=", "self", ".", "feature", ".", "patch_feature_flag", "(", "self", ".", "project_key", ",", "feature_key", ",", "patch_comment", ")", "except", "launchdarkly_api", ".", "rest", ".", "ApiException", "as", "ex", ":", "msg", "=", "\"Unable to update flag.\"", "resp", "=", "\"API response was {0} {1}.\"", ".", "format", "(", "ex", ".", "status", ",", "ex", ".", "reason", ")", "LOG", ".", "error", "(", "\"%s %s\"", ",", "msg", ",", "resp", ")", "sys", ".", "exit", "(", "1", ")", "return", "resp" ]
Update the flag status for the specified feature flag. :param state: New feature flag state :param featureKey: Feature flag key :returns: FeatureFlag object.
[ "Update", "the", "flag", "status", "for", "the", "specified", "feature", "flag", "." ]
train
https://github.com/launchdarkly/relayCommander/blob/eee7fa22f04edc3854dd53c3ec2db8c599ad1e89/relay_commander/ld.py#L80-L102
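A sketch of the JSON-patch payload `update_flag` builds before calling `patch_feature_flag`; the environment key and state value below are illustrative, the method simply forwards whatever state it is given.

```python
environment_key = 'production'
state = True  # illustrative value

build_env = '/environments/' + environment_key + '/on'
patch_comment = [{'op': 'replace', 'path': build_env, 'value': state}]
print(patch_comment)
# [{'op': 'replace', 'path': '/environments/production/on', 'value': True}]
```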
SetBased/py-stratum
pystratum/wrapper/RowsWithKeyWrapper.py
RowsWithKeyWrapper._write_result_handler
def _write_result_handler(self, routine): """ Generates code for calling the stored routine in the wrapper method. """ self._write_line('ret = {}') self._write_execute_rows(routine) self._write_line('for row in rows:') num_of_dict = len(routine['columns']) i = 0 while i < num_of_dict: value = "row['{0!s}']".format(routine['columns'][i]) stack = '' j = 0 while j < i: stack += "[row['{0!s}']]".format(routine['columns'][j]) j += 1 line = 'if {0!s} in ret{1!s}:'.format(value, stack) self._write_line(line) i += 1 line = "raise Exception('Duplicate key for %s.' % str(({0!s})))". \ format(", ".join(["row['{0!s}']".format(column_name) for column_name in routine['columns']])) self._write_line(line) self._indent_level_down() i = num_of_dict while i > 0: self._write_line('else:') part1 = '' j = 0 while j < i - 1: part1 += "[row['{0!s}']]".format(routine['columns'][j]) j += 1 part1 += "[row['{0!s}']]".format(routine['columns'][j]) part2 = '' j = i - 1 while j < num_of_dict: if j + 1 != i: part2 += "{{row['{0!s}']: ".format(routine['columns'][j]) j += 1 part2 += "row" + ('}' * (num_of_dict - i)) line = "ret{0!s} = {1!s}".format(part1, part2) self._write_line(line) self._indent_level_down() if i > 1: self._indent_level_down() i -= 1 self._write_line() self._write_line('return ret')
python
def _write_result_handler(self, routine): """ Generates code for calling the stored routine in the wrapper method. """ self._write_line('ret = {}') self._write_execute_rows(routine) self._write_line('for row in rows:') num_of_dict = len(routine['columns']) i = 0 while i < num_of_dict: value = "row['{0!s}']".format(routine['columns'][i]) stack = '' j = 0 while j < i: stack += "[row['{0!s}']]".format(routine['columns'][j]) j += 1 line = 'if {0!s} in ret{1!s}:'.format(value, stack) self._write_line(line) i += 1 line = "raise Exception('Duplicate key for %s.' % str(({0!s})))". \ format(", ".join(["row['{0!s}']".format(column_name) for column_name in routine['columns']])) self._write_line(line) self._indent_level_down() i = num_of_dict while i > 0: self._write_line('else:') part1 = '' j = 0 while j < i - 1: part1 += "[row['{0!s}']]".format(routine['columns'][j]) j += 1 part1 += "[row['{0!s}']]".format(routine['columns'][j]) part2 = '' j = i - 1 while j < num_of_dict: if j + 1 != i: part2 += "{{row['{0!s}']: ".format(routine['columns'][j]) j += 1 part2 += "row" + ('}' * (num_of_dict - i)) line = "ret{0!s} = {1!s}".format(part1, part2) self._write_line(line) self._indent_level_down() if i > 1: self._indent_level_down() i -= 1 self._write_line() self._write_line('return ret')
[ "def", "_write_result_handler", "(", "self", ",", "routine", ")", ":", "self", ".", "_write_line", "(", "'ret = {}'", ")", "self", ".", "_write_execute_rows", "(", "routine", ")", "self", ".", "_write_line", "(", "'for row in rows:'", ")", "num_of_dict", "=", "len", "(", "routine", "[", "'columns'", "]", ")", "i", "=", "0", "while", "i", "<", "num_of_dict", ":", "value", "=", "\"row['{0!s}']\"", ".", "format", "(", "routine", "[", "'columns'", "]", "[", "i", "]", ")", "stack", "=", "''", "j", "=", "0", "while", "j", "<", "i", ":", "stack", "+=", "\"[row['{0!s}']]\"", ".", "format", "(", "routine", "[", "'columns'", "]", "[", "j", "]", ")", "j", "+=", "1", "line", "=", "'if {0!s} in ret{1!s}:'", ".", "format", "(", "value", ",", "stack", ")", "self", ".", "_write_line", "(", "line", ")", "i", "+=", "1", "line", "=", "\"raise Exception('Duplicate key for %s.' % str(({0!s})))\"", ".", "format", "(", "\", \"", ".", "join", "(", "[", "\"row['{0!s}']\"", ".", "format", "(", "column_name", ")", "for", "column_name", "in", "routine", "[", "'columns'", "]", "]", ")", ")", "self", ".", "_write_line", "(", "line", ")", "self", ".", "_indent_level_down", "(", ")", "i", "=", "num_of_dict", "while", "i", ">", "0", ":", "self", ".", "_write_line", "(", "'else:'", ")", "part1", "=", "''", "j", "=", "0", "while", "j", "<", "i", "-", "1", ":", "part1", "+=", "\"[row['{0!s}']]\"", ".", "format", "(", "routine", "[", "'columns'", "]", "[", "j", "]", ")", "j", "+=", "1", "part1", "+=", "\"[row['{0!s}']]\"", ".", "format", "(", "routine", "[", "'columns'", "]", "[", "j", "]", ")", "part2", "=", "''", "j", "=", "i", "-", "1", "while", "j", "<", "num_of_dict", ":", "if", "j", "+", "1", "!=", "i", ":", "part2", "+=", "\"{{row['{0!s}']: \"", ".", "format", "(", "routine", "[", "'columns'", "]", "[", "j", "]", ")", "j", "+=", "1", "part2", "+=", "\"row\"", "+", "(", "'}'", "*", "(", "num_of_dict", "-", "i", ")", ")", "line", "=", "\"ret{0!s} = {1!s}\"", ".", "format", "(", "part1", ",", "part2", ")", "self", ".", "_write_line", "(", "line", ")", "self", ".", "_indent_level_down", "(", ")", "if", "i", ">", "1", ":", "self", ".", "_indent_level_down", "(", ")", "i", "-=", "1", "self", ".", "_write_line", "(", ")", "self", ".", "_write_line", "(", "'return ret'", ")" ]
Generates code for calling the stored routine in the wrapper method.
[ "Generates", "code", "for", "calling", "the", "stored", "routine", "in", "the", "wrapper", "method", "." ]
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/wrapper/RowsWithKeyWrapper.py#L30-L86
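To make the code generator above easier to follow, here is roughly the handler body it emits for two key columns 'a' and 'b', rewritten as a standalone, runnable sketch. The real wrapper fetches `rows` via `_write_execute_rows`, which is not shown in this dump, so a literal list stands in.

```python
rows = [{'a': 1, 'b': 10, 'val': 'first'},
        {'a': 1, 'b': 11, 'val': 'second'},
        {'a': 2, 'b': 10, 'val': 'third'}]

ret = {}
for row in rows:
    if row['a'] in ret:
        if row['b'] in ret[row['a']]:
            raise Exception('Duplicate key for %s.' % str((row['a'], row['b'])))
        else:
            ret[row['a']][row['b']] = row
    else:
        ret[row['a']] = {row['b']: row}

print(ret[1][11]['val'])  # second
print(ret[2][10]['val'])  # third
```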
SetBased/py-stratum
pystratum/MetadataDataLayer.py
MetadataDataLayer._log_query
def _log_query(query): """ Logs the query on the console. :param str query: The query. """ query = query.strip() if os.linesep in query: # Query is a multi line query MetadataDataLayer.io.log_very_verbose('Executing query:') MetadataDataLayer.io.log_very_verbose('<sql>{0}</sql>'.format(query)) else: # Query is a single line query. MetadataDataLayer.io.log_very_verbose('Executing query: <sql>{0}</sql>'.format(query))
python
def _log_query(query): """ Logs the query on the console. :param str query: The query. """ query = query.strip() if os.linesep in query: # Query is a multi line query MetadataDataLayer.io.log_very_verbose('Executing query:') MetadataDataLayer.io.log_very_verbose('<sql>{0}</sql>'.format(query)) else: # Query is a single line query. MetadataDataLayer.io.log_very_verbose('Executing query: <sql>{0}</sql>'.format(query))
[ "def", "_log_query", "(", "query", ")", ":", "query", "=", "query", ".", "strip", "(", ")", "if", "os", ".", "linesep", "in", "query", ":", "# Query is a multi line query", "MetadataDataLayer", ".", "io", ".", "log_very_verbose", "(", "'Executing query:'", ")", "MetadataDataLayer", ".", "io", ".", "log_very_verbose", "(", "'<sql>{0}</sql>'", ".", "format", "(", "query", ")", ")", "else", ":", "# Query is a single line query.", "MetadataDataLayer", ".", "io", ".", "log_very_verbose", "(", "'Executing query: <sql>{0}</sql>'", ".", "format", "(", "query", ")", ")" ]
Logs the query on the console. :param str query: The query.
[ "Logs", "the", "query", "on", "the", "console", "." ]
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/MetadataDataLayer.py#L20-L34
vsoch/helpme
helpme/main/discourse/utils.py
request_token
def request_token(self, board): '''send a public key to request a token. When we call this function, we already have an RSA key at self.key board: the discourse board to post to ''' nonce = str(uuid.uuid4()) data = {'scopes': 'write', 'client_id': self.client_id, 'application_name': 'HelpMe', 'public_key': self.public_key.replace("'",""), 'nonce': nonce } url = (board + "/user-api-key/new?scopes=write&application_name=HelpMe&public_key=" + self.public_key.replace("'", "") + "&client_id=" + self.client_id + "&nonce=" + nonce ) bot.newline() bot.info('Open browser to:') bot.info(url) bot.newline() # the user will open browser, get a token, and then have it saved here. bot.info('Copy paste token, press Ctrl-D to save it:') lines = [] # The message is multiple lines while True: try: line = enter_input() except EOFError: break if line: lines.append(line) message = "\n".join(lines) # Write to temporary file, we only need to get key tmpfile = mktemp() with open(tmpfile, 'w') as filey: filey.write(message) # Read in again, and get token **important** is binary with open(tmpfile, 'rb') as filey: message = filey.read() # uses pycryptodome (3.7.2) cipher = Cipher_PKCS1_v1_5.new(self.key) decrypted = json.loads(cipher.decrypt(b64decode(message), None).decode()) # Validate nonce is in response if "nonce" not in decrypted: bot.exit('Missing nonce field in response for token, invalid.') # Must return nonce that we sent if decrypted['nonce'] != nonce: bot.exit('Invalid nonce, exiting.') return decrypted['key']
python
def request_token(self, board): '''send a public key to request a token. When we call this function, we already have an RSA key at self.key board: the discourse board to post to ''' nonce = str(uuid.uuid4()) data = {'scopes': 'write', 'client_id': self.client_id, 'application_name': 'HelpMe', 'public_key': self.public_key.replace("'",""), 'nonce': nonce } url = (board + "/user-api-key/new?scopes=write&application_name=HelpMe&public_key=" + self.public_key.replace("'", "") + "&client_id=" + self.client_id + "&nonce=" + nonce ) bot.newline() bot.info('Open browser to:') bot.info(url) bot.newline() # the user will open browser, get a token, and then have it saved here. bot.info('Copy paste token, press Ctrl-D to save it:') lines = [] # The message is multiple lines while True: try: line = enter_input() except EOFError: break if line: lines.append(line) message = "\n".join(lines) # Write to temporary file, we only need to get key tmpfile = mktemp() with open(tmpfile, 'w') as filey: filey.write(message) # Read in again, and get token **important** is binary with open(tmpfile, 'rb') as filey: message = filey.read() # uses pycryptodome (3.7.2) cipher = Cipher_PKCS1_v1_5.new(self.key) decrypted = json.loads(cipher.decrypt(b64decode(message), None).decode()) # Validate nonce is in response if "nonce" not in decrypted: bot.exit('Missing nonce field in response for token, invalid.') # Must return nonce that we sent if decrypted['nonce'] != nonce: bot.exit('Invalid nonce, exiting.') return decrypted['key']
[ "def", "request_token", "(", "self", ",", "board", ")", ":", "nonce", "=", "str", "(", "uuid", ".", "uuid4", "(", ")", ")", "data", "=", "{", "'scopes'", ":", "'write'", ",", "'client_id'", ":", "self", ".", "client_id", ",", "'application_name'", ":", "'HelpMe'", ",", "'public_key'", ":", "self", ".", "public_key", ".", "replace", "(", "\"'\"", ",", "\"\"", ")", ",", "'nonce'", ":", "nonce", "}", "url", "=", "(", "board", "+", "\"/user-api-key/new?scopes=write&application_name=HelpMe&public_key=\"", "+", "self", ".", "public_key", ".", "replace", "(", "\"'\"", ",", "\"\"", ")", "+", "\"&client_id=\"", "+", "self", ".", "client_id", "+", "\"&nonce=\"", "+", "nonce", ")", "bot", ".", "newline", "(", ")", "bot", ".", "info", "(", "'Open browser to:'", ")", "bot", ".", "info", "(", "url", ")", "bot", ".", "newline", "(", ")", "# the user will open browser, get a token, and then have it saved here.", "bot", ".", "info", "(", "'Copy paste token, press Ctrl-D to save it:'", ")", "lines", "=", "[", "]", "# The message is multiple lines", "while", "True", ":", "try", ":", "line", "=", "enter_input", "(", ")", "except", "EOFError", ":", "break", "if", "line", ":", "lines", ".", "append", "(", "line", ")", "message", "=", "\"\\n\"", ".", "join", "(", "lines", ")", "# Write to temporary file, we only need to get key", "tmpfile", "=", "mktemp", "(", ")", "with", "open", "(", "tmpfile", ",", "'w'", ")", "as", "filey", ":", "filey", ".", "write", "(", "message", ")", "# Read in again, and get token **important** is binary", "with", "open", "(", "tmpfile", ",", "'rb'", ")", "as", "filey", ":", "message", "=", "filey", ".", "read", "(", ")", "# uses pycryptodome (3.7.2)", "cipher", "=", "Cipher_PKCS1_v1_5", ".", "new", "(", "self", ".", "key", ")", "decrypted", "=", "json", ".", "loads", "(", "cipher", ".", "decrypt", "(", "b64decode", "(", "message", ")", ",", "None", ")", ".", "decode", "(", ")", ")", "# Validate nonce is in response", "if", "\"nonce\"", "not", "in", "decrypted", ":", "bot", ".", "exit", "(", "'Missing nonce field in response for token, invalid.'", ")", "# Must return nonce that we sent", "if", "decrypted", "[", "'nonce'", "]", "!=", "nonce", ":", "bot", ".", "exit", "(", "'Invalid nonce, exiting.'", ")", "return", "decrypted", "[", "'key'", "]" ]
send a public key to request a token. When we call this function, we already have an RSA key at self.key board: the discourse board to post to
[ "send", "a", "public", "key", "to", "request", "a", "token", ".", "When", "we", "call", "this", "function", "we", "already", "have", "an", "RSA", "key", "at", "self", ".", "key" ]
train
https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/main/discourse/utils.py#L32-L93
vsoch/helpme
helpme/main/discourse/utils.py
create_post
def create_post(self, title, body, board, category, username): '''create a Discourse post, given a title, body, board, and token. Parameters ========== title: the issue title body: the issue body board: the discourse board to post to ''' category_url = "%s/categories.json" % board response = requests.get(category_url) if response.status_code != 200: print('Error with retrieving %s' % category_url) sys.exit(1) # Get a list of all categories categories = response.json()['category_list']['categories'] categories = {c['name']:c['id'] for c in categories} # And if not valid, warn the user if category not in categories: bot.warning('%s is not valid, will use default' % category) category_id = categories.get(category, None) headers = {"Content-Type": "application/json", "User-Api-Client-Id": self.client_id, "User-Api-Key": self.token } # First get the category ids data = {'title': title, 'raw': body, 'category': category_id} response = requests.post("%s/posts.json" % board, headers=headers, data=json.dumps(data)) if response.status_code in [200, 201, 202]: topic = response.json() url = "%s/t/%s/%s" %(board, topic['topic_slug'], topic['topic_id']) bot.info(url) return url elif response.status_code == 404: bot.error('Cannot post to board, not found. Do you have permission?') sys.exit(1) else: bot.error('Cannot post to board %s' % board) bot.error(response.content) sys.exit(1)
python
def create_post(self, title, body, board, category, username): '''create a Discourse post, given a title, body, board, and token. Parameters ========== title: the issue title body: the issue body board: the discourse board to post to ''' category_url = "%s/categories.json" % board response = requests.get(category_url) if response.status_code != 200: print('Error with retrieving %s' % category_url) sys.exit(1) # Get a list of all categories categories = response.json()['category_list']['categories'] categories = {c['name']:c['id'] for c in categories} # And if not valid, warn the user if category not in categories: bot.warning('%s is not valid, will use default' % category) category_id = categories.get(category, None) headers = {"Content-Type": "application/json", "User-Api-Client-Id": self.client_id, "User-Api-Key": self.token } # First get the category ids data = {'title': title, 'raw': body, 'category': category_id} response = requests.post("%s/posts.json" % board, headers=headers, data=json.dumps(data)) if response.status_code in [200, 201, 202]: topic = response.json() url = "%s/t/%s/%s" %(board, topic['topic_slug'], topic['topic_id']) bot.info(url) return url elif response.status_code == 404: bot.error('Cannot post to board, not found. Do you have permission?') sys.exit(1) else: bot.error('Cannot post to board %s' % board) bot.error(response.content) sys.exit(1)
[ "def", "create_post", "(", "self", ",", "title", ",", "body", ",", "board", ",", "category", ",", "username", ")", ":", "category_url", "=", "\"%s/categories.json\"", "%", "board", "response", "=", "requests", ".", "get", "(", "category_url", ")", "if", "response", ".", "status_code", "!=", "200", ":", "print", "(", "'Error with retrieving %s'", "%", "category_url", ")", "sys", ".", "exit", "(", "1", ")", "# Get a list of all categories", "categories", "=", "response", ".", "json", "(", ")", "[", "'category_list'", "]", "[", "'categories'", "]", "categories", "=", "{", "c", "[", "'name'", "]", ":", "c", "[", "'id'", "]", "for", "c", "in", "categories", "}", "# And if not valid, warn the user", "if", "category", "not", "in", "categories", ":", "bot", ".", "warning", "(", "'%s is not valid, will use default'", "%", "category", ")", "category_id", "=", "categories", ".", "get", "(", "category", ",", "None", ")", "headers", "=", "{", "\"Content-Type\"", ":", "\"application/json\"", ",", "\"User-Api-Client-Id\"", ":", "self", ".", "client_id", ",", "\"User-Api-Key\"", ":", "self", ".", "token", "}", "# First get the category ids", "data", "=", "{", "'title'", ":", "title", ",", "'raw'", ":", "body", ",", "'category'", ":", "category_id", "}", "response", "=", "requests", ".", "post", "(", "\"%s/posts.json\"", "%", "board", ",", "headers", "=", "headers", ",", "data", "=", "json", ".", "dumps", "(", "data", ")", ")", "if", "response", ".", "status_code", "in", "[", "200", ",", "201", ",", "202", "]", ":", "topic", "=", "response", ".", "json", "(", ")", "url", "=", "\"%s/t/%s/%s\"", "%", "(", "board", ",", "topic", "[", "'topic_slug'", "]", ",", "topic", "[", "'topic_id'", "]", ")", "bot", ".", "info", "(", "url", ")", "return", "url", "elif", "response", ".", "status_code", "==", "404", ":", "bot", ".", "error", "(", "'Cannot post to board, not found. Do you have permission?'", ")", "sys", ".", "exit", "(", "1", ")", "else", ":", "bot", ".", "error", "(", "'Cannot post to board %s'", "%", "board", ")", "bot", ".", "error", "(", "response", ".", "content", ")", "sys", ".", "exit", "(", "1", ")" ]
create a Discourse post, given a title, body, board, and token. Parameters ========== title: the issue title body: the issue body board: the discourse board to post to
[ "create", "a", "Discourse", "post", "given", "a", "title", "body", "board", "and", "token", "." ]
train
https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/main/discourse/utils.py#L97-L152
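For the `create_post` record above, a sketch of the request the helper ends up sending to `<board>/posts.json`. The header and payload values are illustrative; the user-api key and client id come from the token flow documented in the `request_token` record.

```python
import json

headers = {
    'Content-Type': 'application/json',
    'User-Api-Client-Id': 'hypothetical-client-id',
    'User-Api-Key': 'hypothetical-user-api-key',
}
data = {'title': 'HelpMe: example issue', 'raw': 'Issue body text', 'category': 5}
body = json.dumps(data)
print(body)  # this JSON string is what gets POSTed to <board>/posts.json
```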
SetBased/py-stratum
pystratum/RoutineLoader.py
RoutineLoader.main
def main(self, config_filename, file_names=None): """ Loads stored routines into the current schema. :param str config_filename: The name of the configuration file of the current project :param list[str] file_names: The sources that must be loaded. If empty all sources (if required) will be loaded. :rtype: int The exit status. """ self._io.title('Loader') if file_names: self.__load_list(config_filename, file_names) else: self.__load_all(config_filename) if self.error_file_names: self.__log_overview_errors() return 1 else: return 0
python
def main(self, config_filename, file_names=None): """ Loads stored routines into the current schema. :param str config_filename: The name of the configuration file of the current project :param list[str] file_names: The sources that must be loaded. If empty all sources (if required) will be loaded. :rtype: int The exit status. """ self._io.title('Loader') if file_names: self.__load_list(config_filename, file_names) else: self.__load_all(config_filename) if self.error_file_names: self.__log_overview_errors() return 1 else: return 0
[ "def", "main", "(", "self", ",", "config_filename", ",", "file_names", "=", "None", ")", ":", "self", ".", "_io", ".", "title", "(", "'Loader'", ")", "if", "file_names", ":", "self", ".", "__load_list", "(", "config_filename", ",", "file_names", ")", "else", ":", "self", ".", "__load_all", "(", "config_filename", ")", "if", "self", ".", "error_file_names", ":", "self", ".", "__log_overview_errors", "(", ")", "return", "1", "else", ":", "return", "0" ]
Loads stored routines into the current schema. :param str config_filename: The name of the configuration file of the current project :param list[str] file_names: The sources that must be loaded. If empty all sources (if required) will be loaded. :rtype: int The exit status.
[ "Loads", "stored", "routines", "into", "the", "current", "schema", "." ]
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/RoutineLoader.py#L109-L129
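A hedged sketch of how the exit status returned above is typically consumed; the loader instance and config path are placeholders, since only the base RoutineLoader appears here:

import sys

def run_loader(loader, config_filename, sources=None):
    # main() returns 0 when every routine loaded and 1 when error_file_names is non-empty
    return loader.main(config_filename, sources)

# sys.exit(run_loader(my_loader, 'etc/stratum.cfg'))                      # load everything
# sys.exit(run_loader(my_loader, 'etc/stratum.cfg', ['psql/foo.psql']))   # load a subset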
SetBased/py-stratum
pystratum/RoutineLoader.py
RoutineLoader.__log_overview_errors
def __log_overview_errors(self): """ Show info about source files of stored routines that were not loaded successfully. """ if self.error_file_names: self._io.warning('Routines in the files below are not loaded:') self._io.listing(sorted(self.error_file_names))
python
def __log_overview_errors(self): """ Show info about source files of stored routines that were not loaded successfully. """ if self.error_file_names: self._io.warning('Routines in the files below are not loaded:') self._io.listing(sorted(self.error_file_names))
[ "def", "__log_overview_errors", "(", "self", ")", ":", "if", "self", ".", "error_file_names", ":", "self", ".", "_io", ".", "warning", "(", "'Routines in the files below are not loaded:'", ")", "self", ".", "_io", ".", "listing", "(", "sorted", "(", "self", ".", "error_file_names", ")", ")" ]
Show info about source files of stored routines that were not loaded successfully.
[ "Show", "info", "about", "source", "files", "of", "stored", "routines", "that", "were", "not", "loaded", "successfully", "." ]
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/RoutineLoader.py#L132-L138
SetBased/py-stratum
pystratum/RoutineLoader.py
RoutineLoader._add_replace_pair
def _add_replace_pair(self, name, value, quote): """ Adds a replace part to the map of replace pairs. :param name: The name of the replace pair. :param value: The value of the replace pair. """ key = '@' + name + '@' key = key.lower() class_name = value.__class__.__name__ if class_name in ['int', 'float']: value = str(value) elif class_name in ['bool']: value = '1' if value else '0' elif class_name in ['str']: if quote: value = "'" + value + "'" else: self._io.log_verbose("Ignoring constant {} which is an instance of {}".format(name, class_name)) self._replace_pairs[key] = value
python
def _add_replace_pair(self, name, value, quote): """ Adds a replace part to the map of replace pairs. :param name: The name of the replace pair. :param value: The value of the replace pair. """ key = '@' + name + '@' key = key.lower() class_name = value.__class__.__name__ if class_name in ['int', 'float']: value = str(value) elif class_name in ['bool']: value = '1' if value else '0' elif class_name in ['str']: if quote: value = "'" + value + "'" else: self._io.log_verbose("Ignoring constant {} which is an instance of {}".format(name, class_name)) self._replace_pairs[key] = value
[ "def", "_add_replace_pair", "(", "self", ",", "name", ",", "value", ",", "quote", ")", ":", "key", "=", "'@'", "+", "name", "+", "'@'", "key", "=", "key", ".", "lower", "(", ")", "class_name", "=", "value", ".", "__class__", ".", "__name__", "if", "class_name", "in", "[", "'int'", ",", "'float'", "]", ":", "value", "=", "str", "(", "value", ")", "elif", "class_name", "in", "[", "'bool'", "]", ":", "value", "=", "'1'", "if", "value", "else", "'0'", "elif", "class_name", "in", "[", "'str'", "]", ":", "if", "quote", ":", "value", "=", "\"'\"", "+", "value", "+", "\"'\"", "else", ":", "self", ".", "_io", ".", "log_verbose", "(", "\"Ignoring constant {} which is an instance of {}\"", ".", "format", "(", "name", ",", "class_name", ")", ")", "self", ".", "_replace_pairs", "[", "key", "]", "=", "value" ]
Adds a replace part to the map of replace pairs. :param name: The name of the replace pair. :param value: The value of the replace pair.
[ "Adds", "a", "replace", "part", "to", "the", "map", "of", "replace", "pairs", "." ]
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/RoutineLoader.py#L157-L179
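A standalone sketch mirroring the value conversion rules applied above (numbers stringified, booleans mapped to '1'/'0', strings optionally single-quoted, other types rejected); it is an illustration of the same idea, not the original helper:

def to_replace_value(value, quote=True):
    # bool is checked before int because bool is an int subclass in Python
    if isinstance(value, bool):
        return '1' if value else '0'
    if isinstance(value, (int, float)):
        return str(value)
    if isinstance(value, str):
        return "'" + value + "'" if quote else value
    # _add_replace_pair logs and skips other types; None signals the same here
    return None

# to_replace_value(42) -> '42'; to_replace_value(True) -> '1'; to_replace_value('abc') -> "'abc'"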
SetBased/py-stratum
pystratum/RoutineLoader.py
RoutineLoader.__load_list
def __load_list(self, config_filename, file_names): """ Loads all stored routines in a list into the RDBMS instance. :param str config_filename: The filename of the configuration file. :param list[str] file_names: The list of files to be loaded. """ self._read_configuration_file(config_filename) self.connect() self.find_source_files_from_list(file_names) self._get_column_type() self.__read_stored_routine_metadata() self.__get_constants() self._get_old_stored_routine_info() self._get_correct_sql_mode() self.__load_stored_routines() self.__write_stored_routine_metadata() self.disconnect()
python
def __load_list(self, config_filename, file_names): """ Loads all stored routines in a list into the RDBMS instance. :param str config_filename: The filename of the configuration file. :param list[str] file_names: The list of files to be loaded. """ self._read_configuration_file(config_filename) self.connect() self.find_source_files_from_list(file_names) self._get_column_type() self.__read_stored_routine_metadata() self.__get_constants() self._get_old_stored_routine_info() self._get_correct_sql_mode() self.__load_stored_routines() self.__write_stored_routine_metadata() self.disconnect()
[ "def", "__load_list", "(", "self", ",", "config_filename", ",", "file_names", ")", ":", "self", ".", "_read_configuration_file", "(", "config_filename", ")", "self", ".", "connect", "(", ")", "self", ".", "find_source_files_from_list", "(", "file_names", ")", "self", ".", "_get_column_type", "(", ")", "self", ".", "__read_stored_routine_metadata", "(", ")", "self", ".", "__get_constants", "(", ")", "self", ".", "_get_old_stored_routine_info", "(", ")", "self", ".", "_get_correct_sql_mode", "(", ")", "self", ".", "__load_stored_routines", "(", ")", "self", ".", "__write_stored_routine_metadata", "(", ")", "self", ".", "disconnect", "(", ")" ]
Loads all stored routines in a list into the RDBMS instance. :param str config_filename: The filename of the configuration file. :param list[str] file_names: The list of files to be loaded.
[ "Loads", "all", "stored", "routines", "in", "a", "list", "into", "the", "RDBMS", "instance", "." ]
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/RoutineLoader.py#L182-L199
SetBased/py-stratum
pystratum/RoutineLoader.py
RoutineLoader.__load_all
def __load_all(self, config_filename): """ Loads all stored routines into the RDBMS instance. :param str config_filename: string The filename of the configuration file. """ self._read_configuration_file(config_filename) self.connect() self.__find_source_files() self._get_column_type() self.__read_stored_routine_metadata() self.__get_constants() self._get_old_stored_routine_info() self._get_correct_sql_mode() self.__load_stored_routines() self._drop_obsolete_routines() self.__remove_obsolete_metadata() self.__write_stored_routine_metadata() self.disconnect()
python
def __load_all(self, config_filename): """ Loads all stored routines into the RDBMS instance. :param str config_filename: string The filename of the configuration file. """ self._read_configuration_file(config_filename) self.connect() self.__find_source_files() self._get_column_type() self.__read_stored_routine_metadata() self.__get_constants() self._get_old_stored_routine_info() self._get_correct_sql_mode() self.__load_stored_routines() self._drop_obsolete_routines() self.__remove_obsolete_metadata() self.__write_stored_routine_metadata() self.disconnect()
[ "def", "__load_all", "(", "self", ",", "config_filename", ")", ":", "self", ".", "_read_configuration_file", "(", "config_filename", ")", "self", ".", "connect", "(", ")", "self", ".", "__find_source_files", "(", ")", "self", ".", "_get_column_type", "(", ")", "self", ".", "__read_stored_routine_metadata", "(", ")", "self", ".", "__get_constants", "(", ")", "self", ".", "_get_old_stored_routine_info", "(", ")", "self", ".", "_get_correct_sql_mode", "(", ")", "self", ".", "__load_stored_routines", "(", ")", "self", ".", "_drop_obsolete_routines", "(", ")", "self", ".", "__remove_obsolete_metadata", "(", ")", "self", ".", "__write_stored_routine_metadata", "(", ")", "self", ".", "disconnect", "(", ")" ]
Loads all stored routines into the RDBMS instance. :param str config_filename: string The filename of the configuration file.
[ "Loads", "all", "stored", "routines", "into", "the", "RDBMS", "instance", "." ]
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/RoutineLoader.py#L202-L220
SetBased/py-stratum
pystratum/RoutineLoader.py
RoutineLoader._read_configuration_file
def _read_configuration_file(self, config_filename): """ Reads parameters from the configuration file. :param str config_filename: The name of the configuration file. """ config = configparser.ConfigParser() config.read(config_filename) self._source_directory = config.get('loader', 'source_directory') self._source_file_extension = config.get('loader', 'extension') self._source_file_encoding = config.get('loader', 'encoding') self.__shadow_directory = config.get('loader', 'shadow_directory', fallback=None) self._pystratum_metadata_filename = config.get('wrapper', 'metadata') self._constants_class_name = config.get('constants', 'class')
python
def _read_configuration_file(self, config_filename): """ Reads parameters from the configuration file. :param str config_filename: The name of the configuration file. """ config = configparser.ConfigParser() config.read(config_filename) self._source_directory = config.get('loader', 'source_directory') self._source_file_extension = config.get('loader', 'extension') self._source_file_encoding = config.get('loader', 'encoding') self.__shadow_directory = config.get('loader', 'shadow_directory', fallback=None) self._pystratum_metadata_filename = config.get('wrapper', 'metadata') self._constants_class_name = config.get('constants', 'class')
[ "def", "_read_configuration_file", "(", "self", ",", "config_filename", ")", ":", "config", "=", "configparser", ".", "ConfigParser", "(", ")", "config", ".", "read", "(", "config_filename", ")", "self", ".", "_source_directory", "=", "config", ".", "get", "(", "'loader'", ",", "'source_directory'", ")", "self", ".", "_source_file_extension", "=", "config", ".", "get", "(", "'loader'", ",", "'extension'", ")", "self", ".", "_source_file_encoding", "=", "config", ".", "get", "(", "'loader'", ",", "'encoding'", ")", "self", ".", "__shadow_directory", "=", "config", ".", "get", "(", "'loader'", ",", "'shadow_directory'", ",", "fallback", "=", "None", ")", "self", ".", "_pystratum_metadata_filename", "=", "config", ".", "get", "(", "'wrapper'", ",", "'metadata'", ")", "self", ".", "_constants_class_name", "=", "config", ".", "get", "(", "'constants'", ",", "'class'", ")" ]
Reads parameters from the configuration file. :param str config_filename: The name of the configuration file.
[ "Reads", "parameters", "from", "the", "configuration", "file", "." ]
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/RoutineLoader.py#L223-L239
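A hedged sketch of a configuration file that satisfies the reads above; the section and option names come from the code, while the values are made-up examples:

import configparser

EXAMPLE_CONFIG = """
[loader]
source_directory = lib/psql
extension = .psql
encoding = utf-8
; shadow_directory is optional (read with fallback=None above)

[wrapper]
metadata = etc/stratum_metadata.json

[constants]
class = my_app.C
"""

config = configparser.ConfigParser()
config.read_string(EXAMPLE_CONFIG)
assert config.get('loader', 'extension') == '.psql'
assert config.get('loader', 'shadow_directory', fallback=None) is None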
SetBased/py-stratum
pystratum/RoutineLoader.py
RoutineLoader.__find_source_files
def __find_source_files(self): """ Searches recursively for all source files in a directory. """ for dir_path, _, files in os.walk(self._source_directory): for name in files: if name.lower().endswith(self._source_file_extension): basename = os.path.splitext(os.path.basename(name))[0] relative_path = os.path.relpath(os.path.join(dir_path, name)) if basename in self._source_file_names: self._io.error("Files '{0}' and '{1}' have the same basename.". format(self._source_file_names[basename], relative_path)) self.error_file_names.add(relative_path) else: self._source_file_names[basename] = relative_path
python
def __find_source_files(self): """ Searches recursively for all source files in a directory. """ for dir_path, _, files in os.walk(self._source_directory): for name in files: if name.lower().endswith(self._source_file_extension): basename = os.path.splitext(os.path.basename(name))[0] relative_path = os.path.relpath(os.path.join(dir_path, name)) if basename in self._source_file_names: self._io.error("Files '{0}' and '{1}' have the same basename.". format(self._source_file_names[basename], relative_path)) self.error_file_names.add(relative_path) else: self._source_file_names[basename] = relative_path
[ "def", "__find_source_files", "(", "self", ")", ":", "for", "dir_path", ",", "_", ",", "files", "in", "os", ".", "walk", "(", "self", ".", "_source_directory", ")", ":", "for", "name", "in", "files", ":", "if", "name", ".", "lower", "(", ")", ".", "endswith", "(", "self", ".", "_source_file_extension", ")", ":", "basename", "=", "os", ".", "path", ".", "splitext", "(", "os", ".", "path", ".", "basename", "(", "name", ")", ")", "[", "0", "]", "relative_path", "=", "os", ".", "path", ".", "relpath", "(", "os", ".", "path", ".", "join", "(", "dir_path", ",", "name", ")", ")", "if", "basename", "in", "self", ".", "_source_file_names", ":", "self", ".", "_io", ".", "error", "(", "\"Files '{0}' and '{1}' have the same basename.\"", ".", "format", "(", "self", ".", "_source_file_names", "[", "basename", "]", ",", "relative_path", ")", ")", "self", ".", "error_file_names", ".", "add", "(", "relative_path", ")", "else", ":", "self", ".", "_source_file_names", "[", "basename", "]", "=", "relative_path" ]
Searches recursively for all source files in a directory.
[ "Searches", "recursively", "for", "all", "source", "files", "in", "a", "directory", "." ]
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/RoutineLoader.py#L242-L257
SetBased/py-stratum
pystratum/RoutineLoader.py
RoutineLoader.__read_stored_routine_metadata
def __read_stored_routine_metadata(self): """ Reads the metadata of stored routines from the metadata file. """ if os.path.isfile(self._pystratum_metadata_filename): with open(self._pystratum_metadata_filename, 'r') as file: self._pystratum_metadata = json.load(file)
python
def __read_stored_routine_metadata(self): """ Reads the metadata of stored routines from the metadata file. """ if os.path.isfile(self._pystratum_metadata_filename): with open(self._pystratum_metadata_filename, 'r') as file: self._pystratum_metadata = json.load(file)
[ "def", "__read_stored_routine_metadata", "(", "self", ")", ":", "if", "os", ".", "path", ".", "isfile", "(", "self", ".", "_pystratum_metadata_filename", ")", ":", "with", "open", "(", "self", ".", "_pystratum_metadata_filename", ",", "'r'", ")", "as", "file", ":", "self", ".", "_pystratum_metadata", "=", "json", ".", "load", "(", "file", ")" ]
Reads the metadata of stored routines from the metadata file.
[ "Reads", "the", "metadata", "of", "stored", "routines", "from", "the", "metadata", "file", "." ]
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/RoutineLoader.py#L260-L266
SetBased/py-stratum
pystratum/RoutineLoader.py
RoutineLoader.__load_stored_routines
def __load_stored_routines(self): """ Loads all stored routines into the RDBMS instance. """ self._io.writeln('') for routine_name in sorted(self._source_file_names): if routine_name in self._pystratum_metadata: old_metadata = self._pystratum_metadata[routine_name] else: old_metadata = None if routine_name in self._rdbms_old_metadata: old_routine_info = self._rdbms_old_metadata[routine_name] else: old_routine_info = None routine_loader_helper = self.create_routine_loader_helper(routine_name, old_metadata, old_routine_info) routine_loader_helper.shadow_directory = self.__shadow_directory metadata = routine_loader_helper.load_stored_routine() if not metadata: self.error_file_names.add(self._source_file_names[routine_name]) if routine_name in self._pystratum_metadata: del self._pystratum_metadata[routine_name] else: self._pystratum_metadata[routine_name] = metadata
python
def __load_stored_routines(self): """ Loads all stored routines into the RDBMS instance. """ self._io.writeln('') for routine_name in sorted(self._source_file_names): if routine_name in self._pystratum_metadata: old_metadata = self._pystratum_metadata[routine_name] else: old_metadata = None if routine_name in self._rdbms_old_metadata: old_routine_info = self._rdbms_old_metadata[routine_name] else: old_routine_info = None routine_loader_helper = self.create_routine_loader_helper(routine_name, old_metadata, old_routine_info) routine_loader_helper.shadow_directory = self.__shadow_directory metadata = routine_loader_helper.load_stored_routine() if not metadata: self.error_file_names.add(self._source_file_names[routine_name]) if routine_name in self._pystratum_metadata: del self._pystratum_metadata[routine_name] else: self._pystratum_metadata[routine_name] = metadata
[ "def", "__load_stored_routines", "(", "self", ")", ":", "self", ".", "_io", ".", "writeln", "(", "''", ")", "for", "routine_name", "in", "sorted", "(", "self", ".", "_source_file_names", ")", ":", "if", "routine_name", "in", "self", ".", "_pystratum_metadata", ":", "old_metadata", "=", "self", ".", "_pystratum_metadata", "[", "routine_name", "]", "else", ":", "old_metadata", "=", "None", "if", "routine_name", "in", "self", ".", "_rdbms_old_metadata", ":", "old_routine_info", "=", "self", ".", "_rdbms_old_metadata", "[", "routine_name", "]", "else", ":", "old_routine_info", "=", "None", "routine_loader_helper", "=", "self", ".", "create_routine_loader_helper", "(", "routine_name", ",", "old_metadata", ",", "old_routine_info", ")", "routine_loader_helper", ".", "shadow_directory", "=", "self", ".", "__shadow_directory", "metadata", "=", "routine_loader_helper", ".", "load_stored_routine", "(", ")", "if", "not", "metadata", ":", "self", ".", "error_file_names", ".", "add", "(", "self", ".", "_source_file_names", "[", "routine_name", "]", ")", "if", "routine_name", "in", "self", ".", "_pystratum_metadata", ":", "del", "self", ".", "_pystratum_metadata", "[", "routine_name", "]", "else", ":", "self", ".", "_pystratum_metadata", "[", "routine_name", "]", "=", "metadata" ]
Loads all stored routines into the RDBMS instance.
[ "Loads", "all", "stored", "routines", "into", "the", "RDBMS", "instance", "." ]
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/RoutineLoader.py#L291-L318
SetBased/py-stratum
pystratum/RoutineLoader.py
RoutineLoader.__remove_obsolete_metadata
def __remove_obsolete_metadata(self): """ Removes obsolete entries from the metadata of all stored routines. """ clean = {} for key, _ in self._source_file_names.items(): if key in self._pystratum_metadata: clean[key] = self._pystratum_metadata[key] self._pystratum_metadata = clean
python
def __remove_obsolete_metadata(self): """ Removes obsolete entries from the metadata of all stored routines. """ clean = {} for key, _ in self._source_file_names.items(): if key in self._pystratum_metadata: clean[key] = self._pystratum_metadata[key] self._pystratum_metadata = clean
[ "def", "__remove_obsolete_metadata", "(", "self", ")", ":", "clean", "=", "{", "}", "for", "key", ",", "_", "in", "self", ".", "_source_file_names", ".", "items", "(", ")", ":", "if", "key", "in", "self", ".", "_pystratum_metadata", ":", "clean", "[", "key", "]", "=", "self", ".", "_pystratum_metadata", "[", "key", "]", "self", ".", "_pystratum_metadata", "=", "clean" ]
Removes obsolete entries from the metadata of all stored routines.
[ "Removes", "obsolete", "entries", "from", "the", "metadata", "of", "all", "stored", "routines", "." ]
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/RoutineLoader.py#L345-L354
SetBased/py-stratum
pystratum/RoutineLoader.py
RoutineLoader.__write_stored_routine_metadata
def __write_stored_routine_metadata(self): """ Writes the metadata of all stored routines to the metadata file. """ with open(self._pystratum_metadata_filename, 'w') as stream: json.dump(self._pystratum_metadata, stream, indent=4, sort_keys=True)
python
def __write_stored_routine_metadata(self): """ Writes the metadata of all stored routines to the metadata file. """ with open(self._pystratum_metadata_filename, 'w') as stream: json.dump(self._pystratum_metadata, stream, indent=4, sort_keys=True)
[ "def", "__write_stored_routine_metadata", "(", "self", ")", ":", "with", "open", "(", "self", ".", "_pystratum_metadata_filename", ",", "'w'", ")", "as", "stream", ":", "json", ".", "dump", "(", "self", ".", "_pystratum_metadata", ",", "stream", ",", "indent", "=", "4", ",", "sort_keys", "=", "True", ")" ]
Writes the metadata of all stored routines to the metadata file.
[ "Writes", "the", "metadata", "of", "all", "stored", "routines", "to", "the", "metadata", "file", "." ]
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/RoutineLoader.py#L357-L362
SetBased/py-stratum
pystratum/RoutineLoader.py
RoutineLoader.find_source_files_from_list
def find_source_files_from_list(self, file_names): """ Finds all source files that actually exist from a list of file names. :param list[str] file_names: The list of file names. """ for file_name in file_names: if os.path.exists(file_name): routine_name = os.path.splitext(os.path.basename(file_name))[0] if routine_name not in self._source_file_names: self._source_file_names[routine_name] = file_name else: self._io.error("Files '{0}' and '{1}' have the same basename.". format(self._source_file_names[routine_name], file_name)) self.error_file_names.add(file_name) else: self._io.error("File not exists: '{0}'".format(file_name)) self.error_file_names.add(file_name)
python
def find_source_files_from_list(self, file_names): """ Finds all source files that actually exist from a list of file names. :param list[str] file_names: The list of file names. """ for file_name in file_names: if os.path.exists(file_name): routine_name = os.path.splitext(os.path.basename(file_name))[0] if routine_name not in self._source_file_names: self._source_file_names[routine_name] = file_name else: self._io.error("Files '{0}' and '{1}' have the same basename.". format(self._source_file_names[routine_name], file_name)) self.error_file_names.add(file_name) else: self._io.error("File not exists: '{0}'".format(file_name)) self.error_file_names.add(file_name)
[ "def", "find_source_files_from_list", "(", "self", ",", "file_names", ")", ":", "for", "file_name", "in", "file_names", ":", "if", "os", ".", "path", ".", "exists", "(", "file_name", ")", ":", "routine_name", "=", "os", ".", "path", ".", "splitext", "(", "os", ".", "path", ".", "basename", "(", "file_name", ")", ")", "[", "0", "]", "if", "routine_name", "not", "in", "self", ".", "_source_file_names", ":", "self", ".", "_source_file_names", "[", "routine_name", "]", "=", "file_name", "else", ":", "self", ".", "_io", ".", "error", "(", "\"Files '{0}' and '{1}' have the same basename.\"", ".", "format", "(", "self", ".", "_source_file_names", "[", "routine_name", "]", ",", "file_name", ")", ")", "self", ".", "error_file_names", ".", "add", "(", "file_name", ")", "else", ":", "self", ".", "_io", ".", "error", "(", "\"File not exists: '{0}'\"", ".", "format", "(", "file_name", ")", ")", "self", ".", "error_file_names", ".", "add", "(", "file_name", ")" ]
Finds all source files that actually exist from a list of file names. :param list[str] file_names: The list of file names.
[ "Finds", "all", "source", "files", "that", "actually", "exist", "from", "a", "list", "of", "file", "names", "." ]
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/RoutineLoader.py#L365-L382
SetBased/py-stratum
pystratum/RoutineLoader.py
RoutineLoader.__get_constants
def __get_constants(self): """ Gets the constants from the class that acts like a namespace for constants and adds them to the replace pairs. """ helper = ConstantClass(self._constants_class_name, self._io) helper.reload() constants = helper.constants() for name, value in constants.items(): self._add_replace_pair(name, value, True) self._io.text('Read {0} constants for substitution from <fso>{1}</fso>'. format(len(constants), helper.file_name()))
python
def __get_constants(self): """ Gets the constants from the class that acts like a namespace for constants and adds them to the replace pairs. """ helper = ConstantClass(self._constants_class_name, self._io) helper.reload() constants = helper.constants() for name, value in constants.items(): self._add_replace_pair(name, value, True) self._io.text('Read {0} constants for substitution from <fso>{1}</fso>'. format(len(constants), helper.file_name()))
[ "def", "__get_constants", "(", "self", ")", ":", "helper", "=", "ConstantClass", "(", "self", ".", "_constants_class_name", ",", "self", ".", "_io", ")", "helper", ".", "reload", "(", ")", "constants", "=", "helper", ".", "constants", "(", ")", "for", "name", ",", "value", "in", "constants", ".", "items", "(", ")", ":", "self", ".", "_add_replace_pair", "(", "name", ",", "value", ",", "True", ")", "self", ".", "_io", ".", "text", "(", "'Read {0} constants for substitution from <fso>{1}</fso>'", ".", "format", "(", "len", "(", "constants", ")", ",", "helper", ".", "file_name", "(", ")", ")", ")" ]
Gets the constants from the class that acts like a namespace for constants and adds them to the replace pairs.
[ "Gets", "the", "constants", "from", "the", "class", "that", "acts", "like", "a", "namespace", "for", "constants", "and", "adds", "them", "to", "the", "replace", "pairs", "." ]
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/RoutineLoader.py#L385-L397
DreamLab/VmShepherd
src/vmshepherd/iaas/vm.py
Vm.terminate
async def terminate(self): """ Terminate vm. """ logging.debug('Terminate: %s', self) self.state = VmState.TERMINATED return await self.manager.terminate_vm(self.id)
python
async def terminate(self): """ Terminate vm. """ logging.debug('Terminate: %s', self) self.state = VmState.TERMINATED return await self.manager.terminate_vm(self.id)
[ "async", "def", "terminate", "(", "self", ")", ":", "logging", ".", "debug", "(", "'Terminate: %s'", ",", "self", ")", "self", ".", "state", "=", "VmState", ".", "TERMINATED", "return", "await", "self", ".", "manager", ".", "terminate_vm", "(", "self", ".", "id", ")" ]
Terminate vm.
[ "Terminate", "vm", "." ]
train
https://github.com/DreamLab/VmShepherd/blob/709a412c372b897d53808039c5c64a8b69c12c8d/src/vmshepherd/iaas/vm.py#L86-L91
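A self-contained sketch of the delegation pattern shown above (flip the state, then await the IaaS manager); DummyManager and FakeVm are stand-ins for illustration, not VmShepherd classes:

import asyncio

class DummyManager:
    # Stand-in for an IaaS driver exposing an async terminate_vm(vm_id) call
    async def terminate_vm(self, vm_id):
        await asyncio.sleep(0)  # pretend to talk to a cloud API
        return True

class FakeVm:
    def __init__(self, vm_id, manager):
        self.id = vm_id
        self.manager = manager
        self.state = 'running'

    async def terminate(self):
        # Same shape as Vm.terminate: mark the state, then delegate to the manager
        self.state = 'terminated'
        return await self.manager.terminate_vm(self.id)

asyncio.run(FakeVm('vm-123', DummyManager()).terminate())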
ClericPy/torequests
torequests/logs.py
init_logger
def init_logger( name="", handler_path_levels=None, level=logging.INFO, formatter=None, formatter_str=None, datefmt="%Y-%m-%d %H:%M:%S", ): """Add a default handler for logger. Args: name = '' or logger obj. handler_path_levels = [['loggerfile.log',13],['','DEBUG'],['','info'],['','notSet']] # [[path,level]] level = the least level for the logger. formatter = logging.Formatter( '%(levelname)-7s %(asctime)s %(name)s (%(filename)s: %(lineno)s): %(message)s', "%Y-%m-%d %H:%M:%S") formatter_str = '%(levelname)-7s %(asctime)s %(name)s (%(funcName)s: %(lineno)s): %(message)s' custom formatter: %(asctime)s %(created)f %(filename)s %(funcName)s %(levelname)s %(levelno)s %(lineno)s %(message)s %(module)s %(name)s %(pathname)s %(process)s %(relativeCreated)s %(thread)s %(threadName)s """ levels = { "NOTSET": logging.NOTSET, "DEBUG": logging.DEBUG, "INFO": logging.INFO, "WARNING": logging.WARNING, "ERROR": logging.ERROR, "CRITICAL": logging.CRITICAL, } if not formatter: if formatter_str: formatter_str = formatter_str else: formatter_str = "%(asctime)s %(levelname)-5s [%(name)s] %(filename)s(%(lineno)s): %(message)s" formatter = logging.Formatter(formatter_str, datefmt=datefmt) logger = name if isinstance(name, logging.Logger) else logging.getLogger(str(name)) logger.setLevel(level) handler_path_levels = handler_path_levels or [["", "INFO"]] # --------------------------------------- for each_handler in handler_path_levels: path, handler_level = each_handler handler = logging.FileHandler(path) if path else logging.StreamHandler() handler.setLevel( levels.get(handler_level.upper(), 1) if isinstance(handler_level, str) else handler_level ) handler.setFormatter(formatter) logger.addHandler(handler) return logger
python
def init_logger( name="", handler_path_levels=None, level=logging.INFO, formatter=None, formatter_str=None, datefmt="%Y-%m-%d %H:%M:%S", ): """Add a default handler for logger. Args: name = '' or logger obj. handler_path_levels = [['loggerfile.log',13],['','DEBUG'],['','info'],['','notSet']] # [[path,level]] level = the least level for the logger. formatter = logging.Formatter( '%(levelname)-7s %(asctime)s %(name)s (%(filename)s: %(lineno)s): %(message)s', "%Y-%m-%d %H:%M:%S") formatter_str = '%(levelname)-7s %(asctime)s %(name)s (%(funcName)s: %(lineno)s): %(message)s' custom formatter: %(asctime)s %(created)f %(filename)s %(funcName)s %(levelname)s %(levelno)s %(lineno)s %(message)s %(module)s %(name)s %(pathname)s %(process)s %(relativeCreated)s %(thread)s %(threadName)s """ levels = { "NOTSET": logging.NOTSET, "DEBUG": logging.DEBUG, "INFO": logging.INFO, "WARNING": logging.WARNING, "ERROR": logging.ERROR, "CRITICAL": logging.CRITICAL, } if not formatter: if formatter_str: formatter_str = formatter_str else: formatter_str = "%(asctime)s %(levelname)-5s [%(name)s] %(filename)s(%(lineno)s): %(message)s" formatter = logging.Formatter(formatter_str, datefmt=datefmt) logger = name if isinstance(name, logging.Logger) else logging.getLogger(str(name)) logger.setLevel(level) handler_path_levels = handler_path_levels or [["", "INFO"]] # --------------------------------------- for each_handler in handler_path_levels: path, handler_level = each_handler handler = logging.FileHandler(path) if path else logging.StreamHandler() handler.setLevel( levels.get(handler_level.upper(), 1) if isinstance(handler_level, str) else handler_level ) handler.setFormatter(formatter) logger.addHandler(handler) return logger
[ "def", "init_logger", "(", "name", "=", "\"\"", ",", "handler_path_levels", "=", "None", ",", "level", "=", "logging", ".", "INFO", ",", "formatter", "=", "None", ",", "formatter_str", "=", "None", ",", "datefmt", "=", "\"%Y-%m-%d %H:%M:%S\"", ",", ")", ":", "levels", "=", "{", "\"NOTSET\"", ":", "logging", ".", "NOTSET", ",", "\"DEBUG\"", ":", "logging", ".", "DEBUG", ",", "\"INFO\"", ":", "logging", ".", "INFO", ",", "\"WARNING\"", ":", "logging", ".", "WARNING", ",", "\"ERROR\"", ":", "logging", ".", "ERROR", ",", "\"CRITICAL\"", ":", "logging", ".", "CRITICAL", ",", "}", "if", "not", "formatter", ":", "if", "formatter_str", ":", "formatter_str", "=", "formatter_str", "else", ":", "formatter_str", "=", "\"%(asctime)s %(levelname)-5s [%(name)s] %(filename)s(%(lineno)s): %(message)s\"", "formatter", "=", "logging", ".", "Formatter", "(", "formatter_str", ",", "datefmt", "=", "datefmt", ")", "logger", "=", "name", "if", "isinstance", "(", "name", ",", "logging", ".", "Logger", ")", "else", "logging", ".", "getLogger", "(", "str", "(", "name", ")", ")", "logger", ".", "setLevel", "(", "level", ")", "handler_path_levels", "=", "handler_path_levels", "or", "[", "[", "\"\"", ",", "\"INFO\"", "]", "]", "# ---------------------------------------", "for", "each_handler", "in", "handler_path_levels", ":", "path", ",", "handler_level", "=", "each_handler", "handler", "=", "logging", ".", "FileHandler", "(", "path", ")", "if", "path", "else", "logging", ".", "StreamHandler", "(", ")", "handler", ".", "setLevel", "(", "levels", ".", "get", "(", "handler_level", ".", "upper", "(", ")", ",", "1", ")", "if", "isinstance", "(", "handler_level", ",", "str", ")", "else", "handler_level", ")", "handler", ".", "setFormatter", "(", "formatter", ")", "logger", ".", "addHandler", "(", "handler", ")", "return", "logger" ]
Add a default handler for logger. Args: name = '' or logger obj. handler_path_levels = [['loggerfile.log',13],['','DEBUG'],['','info'],['','notSet']] # [[path,level]] level = the least level for the logger. formatter = logging.Formatter( '%(levelname)-7s %(asctime)s %(name)s (%(filename)s: %(lineno)s): %(message)s', "%Y-%m-%d %H:%M:%S") formatter_str = '%(levelname)-7s %(asctime)s %(name)s (%(funcName)s: %(lineno)s): %(message)s' custom formatter: %(asctime)s %(created)f %(filename)s %(funcName)s %(levelname)s %(levelno)s %(lineno)s %(message)s %(module)s %(name)s %(pathname)s %(process)s %(relativeCreated)s %(thread)s %(threadName)s
[ "Add", "a", "default", "handler", "for", "logger", "." ]
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/logs.py#L12-L67
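A usage sketch built directly from the docstring above: one file handler at DEBUG plus a console handler at INFO (the log file name is an example; the import path follows the module path shown above):

import logging
from torequests.logs import init_logger

logger = init_logger(
    "app",
    handler_path_levels=[["app.log", "DEBUG"], ["", "INFO"]],  # [[path, level]]; an empty path means a StreamHandler
    level=logging.DEBUG,
)
logger.debug("written to app.log only")
logger.info("written to app.log and the console")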
ClericPy/torequests
torequests/logs.py
print_info
def print_info(*messages, **kwargs): """Simple print use logger, print with time / file / line_no. :param sep: sep of messages, " " by default. Basic Usage:: print_info(1, 2, 3) print_info(1, 2, 3) print_info(1, 2, 3) # [2018-10-24 19:12:16] temp_code.py(7): 1 2 3 # [2018-10-24 19:12:16] temp_code.py(8): 1 2 3 # [2018-10-24 19:12:16] temp_code.py(9): 1 2 3 """ sep = kwargs.pop("sep", " ") frame = sys._getframe(1) ln = frame.f_lineno _file = frame.f_globals.get("__file__", "") fn = os.path.split(_file)[-1] return print_logger.info( sep.join(map(unicode, messages)), extra={"ln": ln, "fn": fn} )
python
def print_info(*messages, **kwargs): """Simple print use logger, print with time / file / line_no. :param sep: sep of messages, " " by default. Basic Usage:: print_info(1, 2, 3) print_info(1, 2, 3) print_info(1, 2, 3) # [2018-10-24 19:12:16] temp_code.py(7): 1 2 3 # [2018-10-24 19:12:16] temp_code.py(8): 1 2 3 # [2018-10-24 19:12:16] temp_code.py(9): 1 2 3 """ sep = kwargs.pop("sep", " ") frame = sys._getframe(1) ln = frame.f_lineno _file = frame.f_globals.get("__file__", "") fn = os.path.split(_file)[-1] return print_logger.info( sep.join(map(unicode, messages)), extra={"ln": ln, "fn": fn} )
[ "def", "print_info", "(", "*", "messages", ",", "*", "*", "kwargs", ")", ":", "sep", "=", "kwargs", ".", "pop", "(", "\"sep\"", ",", "\" \"", ")", "frame", "=", "sys", ".", "_getframe", "(", "1", ")", "ln", "=", "frame", ".", "f_lineno", "_file", "=", "frame", ".", "f_globals", ".", "get", "(", "\"__file__\"", ",", "\"\"", ")", "fn", "=", "os", ".", "path", ".", "split", "(", "_file", ")", "[", "-", "1", "]", "return", "print_logger", ".", "info", "(", "sep", ".", "join", "(", "map", "(", "unicode", ",", "messages", ")", ")", ",", "extra", "=", "{", "\"ln\"", ":", "ln", ",", "\"fn\"", ":", "fn", "}", ")" ]
Simple print use logger, print with time / file / line_no. :param sep: sep of messages, " " by default. Basic Usage:: print_info(1, 2, 3) print_info(1, 2, 3) print_info(1, 2, 3) # [2018-10-24 19:12:16] temp_code.py(7): 1 2 3 # [2018-10-24 19:12:16] temp_code.py(8): 1 2 3 # [2018-10-24 19:12:16] temp_code.py(9): 1 2 3
[ "Simple", "print", "use", "logger", "print", "with", "time", "/", "file", "/", "line_no", ".", ":", "param", "sep", ":", "sep", "of", "messages", "by", "default", "." ]
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/logs.py#L86-L107
Miserlou/django-knockout-modeler
knockout_modeler/ko.py
get_fields
def get_fields(model): """ Returns a Model's knockout_fields, or the default set of field names. """ try: if hasattr(model, "knockout_fields"): fields = model.knockout_fields() else: try: fields = model_to_dict(model).keys() except Exception as e: fields = model._meta.get_fields() return fields # Crash proofing except Exception as e: logger.exception(e) return []
python
def get_fields(model): """ Returns a Model's knockout_fields, or the default set of field names. """ try: if hasattr(model, "knockout_fields"): fields = model.knockout_fields() else: try: fields = model_to_dict(model).keys() except Exception as e: fields = model._meta.get_fields() return fields # Crash proofing except Exception as e: logger.exception(e) return []
[ "def", "get_fields", "(", "model", ")", ":", "try", ":", "if", "hasattr", "(", "model", ",", "\"knockout_fields\"", ")", ":", "fields", "=", "model", ".", "knockout_fields", "(", ")", "else", ":", "try", ":", "fields", "=", "model_to_dict", "(", "model", ")", ".", "keys", "(", ")", "except", "Exception", "as", "e", ":", "fields", "=", "model", ".", "_meta", ".", "get_fields", "(", ")", "return", "fields", "# Crash proofing", "except", "Exception", "as", "e", ":", "logger", ".", "exception", "(", "e", ")", "return", "[", "]" ]
Returns a Model's knockout_fields, or the default set of field names.
[ "Returns", "a", "Model", "s", "knockout_fields", "or", "the", "default", "set", "of", "field", "names", "." ]
train
https://github.com/Miserlou/django-knockout-modeler/blob/714d21cc5ed008f132cea01dbae9f214c2bf1b76/knockout_modeler/ko.py#L18-L37
Miserlou/django-knockout-modeler
knockout_modeler/ko.py
get_object_data
def get_object_data(obj, fields, safe): """ Given an object and a list of fields, recursively build an object for serialization. Returns a dictionary. """ temp_dict = dict() for field in fields: try: attribute = getattr(obj, str(field)) if isinstance(attribute, list) and all([isinstance(item, models.Model) for item in attribute]): temp_dict[field] = [] for item in attribute: temp_dict[field].append(get_object_data(item, get_fields(item), safe)) # Recur elif isinstance(attribute, models.Model): attribute_fields = get_fields(attribute) object_data = get_object_data(attribute, attribute_fields, safe) # Recur temp_dict[field] = object_data else: if not safe: if isinstance(attribute, basestring): attribute = cgi.escape(attribute) temp_dict[field] = attribute except Exception as e: logger.info("Unable to get attribute.") logger.error(e) continue return temp_dict
python
def get_object_data(obj, fields, safe): """ Given an object and a list of fields, recursively build an object for serialization. Returns a dictionary. """ temp_dict = dict() for field in fields: try: attribute = getattr(obj, str(field)) if isinstance(attribute, list) and all([isinstance(item, models.Model) for item in attribute]): temp_dict[field] = [] for item in attribute: temp_dict[field].append(get_object_data(item, get_fields(item), safe)) # Recur elif isinstance(attribute, models.Model): attribute_fields = get_fields(attribute) object_data = get_object_data(attribute, attribute_fields, safe) # Recur temp_dict[field] = object_data else: if not safe: if isinstance(attribute, basestring): attribute = cgi.escape(attribute) temp_dict[field] = attribute except Exception as e: logger.info("Unable to get attribute.") logger.error(e) continue return temp_dict
[ "def", "get_object_data", "(", "obj", ",", "fields", ",", "safe", ")", ":", "temp_dict", "=", "dict", "(", ")", "for", "field", "in", "fields", ":", "try", ":", "attribute", "=", "getattr", "(", "obj", ",", "str", "(", "field", ")", ")", "if", "isinstance", "(", "attribute", ",", "list", ")", "and", "all", "(", "[", "isinstance", "(", "item", ",", "models", ".", "Model", ")", "for", "item", "in", "attribute", "]", ")", ":", "temp_dict", "[", "field", "]", "=", "[", "]", "for", "item", "in", "attribute", ":", "temp_dict", "[", "field", "]", ".", "append", "(", "get_object_data", "(", "item", ",", "get_fields", "(", "item", ")", ",", "safe", ")", ")", "# Recur", "elif", "isinstance", "(", "attribute", ",", "models", ".", "Model", ")", ":", "attribute_fields", "=", "get_fields", "(", "attribute", ")", "object_data", "=", "get_object_data", "(", "attribute", ",", "attribute_fields", ",", "safe", ")", "# Recur", "temp_dict", "[", "field", "]", "=", "object_data", "else", ":", "if", "not", "safe", ":", "if", "isinstance", "(", "attribute", ",", "basestring", ")", ":", "attribute", "=", "cgi", ".", "escape", "(", "attribute", ")", "temp_dict", "[", "field", "]", "=", "attribute", "except", "Exception", "as", "e", ":", "logger", ".", "info", "(", "\"Unable to get attribute.\"", ")", "logger", ".", "error", "(", "e", ")", "continue", "return", "temp_dict" ]
Given an object and a list of fields, recursively build an object for serialization. Returns a dictionary.
[ "Given", "an", "object", "and", "a", "list", "of", "fields", "recursively", "build", "an", "object", "for", "serialization", "." ]
train
https://github.com/Miserlou/django-knockout-modeler/blob/714d21cc5ed008f132cea01dbae9f214c2bf1b76/knockout_modeler/ko.py#L40-L71
Miserlou/django-knockout-modeler
knockout_modeler/ko.py
ko_model
def ko_model(model, field_names=None, data=None): """ Given a model, returns the Knockout Model and the Knockout ViewModel. Takes optional field names and data. """ try: if isinstance(model, str): modelName = model else: modelName = model.__class__.__name__ if field_names: fields = field_names else: fields = get_fields(model) if hasattr(model, "comparator"): comparator = str(model.comparator()) else: comparator = 'id' modelViewString = render_to_string( "knockout_modeler/model.js", {'modelName': modelName, 'fields': fields, 'data': data, 'comparator': comparator} ) return modelViewString except Exception as e: logger.exception(e) return ''
python
def ko_model(model, field_names=None, data=None): """ Given a model, returns the Knockout Model and the Knockout ViewModel. Takes optional field names and data. """ try: if isinstance(model, str): modelName = model else: modelName = model.__class__.__name__ if field_names: fields = field_names else: fields = get_fields(model) if hasattr(model, "comparator"): comparator = str(model.comparator()) else: comparator = 'id' modelViewString = render_to_string( "knockout_modeler/model.js", {'modelName': modelName, 'fields': fields, 'data': data, 'comparator': comparator} ) return modelViewString except Exception as e: logger.exception(e) return ''
[ "def", "ko_model", "(", "model", ",", "field_names", "=", "None", ",", "data", "=", "None", ")", ":", "try", ":", "if", "isinstance", "(", "model", ",", "str", ")", ":", "modelName", "=", "model", "else", ":", "modelName", "=", "model", ".", "__class__", ".", "__name__", "if", "field_names", ":", "fields", "=", "field_names", "else", ":", "fields", "=", "get_fields", "(", "model", ")", "if", "hasattr", "(", "model", ",", "\"comparator\"", ")", ":", "comparator", "=", "str", "(", "model", ".", "comparator", "(", ")", ")", "else", ":", "comparator", "=", "'id'", "modelViewString", "=", "render_to_string", "(", "\"knockout_modeler/model.js\"", ",", "{", "'modelName'", ":", "modelName", ",", "'fields'", ":", "fields", ",", "'data'", ":", "data", ",", "'comparator'", ":", "comparator", "}", ")", "return", "modelViewString", "except", "Exception", "as", "e", ":", "logger", ".", "exception", "(", "e", ")", "return", "''" ]
Given a model, returns the Knockout Model and the Knockout ViewModel. Takes optional field names and data.
[ "Given", "a", "model", "returns", "the", "Knockout", "Model", "and", "the", "Knockout", "ViewModel", ".", "Takes", "optional", "field", "names", "and", "data", "." ]
train
https://github.com/Miserlou/django-knockout-modeler/blob/714d21cc5ed008f132cea01dbae9f214c2bf1b76/knockout_modeler/ko.py#L74-L104
Miserlou/django-knockout-modeler
knockout_modeler/ko.py
ko_bindings
def ko_bindings(model): """ Given a model, returns the Knockout data bindings. """ try: if isinstance(model, str): modelName = model else: modelName = model.__class__.__name__ modelBindingsString = "ko.applyBindings(new " + modelName + "ViewModel(), $('#" + modelName.lower() + "s')[0]);" return modelBindingsString except Exception as e: logger.error(e) return ''
python
def ko_bindings(model): """ Given a model, returns the Knockout data bindings. """ try: if isinstance(model, str): modelName = model else: modelName = model.__class__.__name__ modelBindingsString = "ko.applyBindings(new " + modelName + "ViewModel(), $('#" + modelName.lower() + "s')[0]);" return modelBindingsString except Exception as e: logger.error(e) return ''
[ "def", "ko_bindings", "(", "model", ")", ":", "try", ":", "if", "isinstance", "(", "model", ",", "str", ")", ":", "modelName", "=", "model", "else", ":", "modelName", "=", "model", ".", "__class__", ".", "__name__", "modelBindingsString", "=", "\"ko.applyBindings(new \"", "+", "modelName", "+", "\"ViewModel(), $('#\"", "+", "modelName", ".", "lower", "(", ")", "+", "\"s')[0]);\"", "return", "modelBindingsString", "except", "Exception", "as", "e", ":", "logger", ".", "error", "(", "e", ")", "return", "''" ]
Given a model, returns the Knockout data bindings.
[ "Given", "a", "model", "returns", "the", "Knockout", "data", "bindings", "." ]
train
https://github.com/Miserlou/django-knockout-modeler/blob/714d21cc5ed008f132cea01dbae9f214c2bf1b76/knockout_modeler/ko.py#L107-L123
Miserlou/django-knockout-modeler
knockout_modeler/ko.py
ko_json
def ko_json(queryset, field_names=None, name=None, safe=False): """ Given a QuerySet, return just the serialized representation based on the knockout_fields. Useful for middleware/APIs. Convenience method around ko_data. """ return ko_data(queryset, field_names, name, safe, return_json=True)
python
def ko_json(queryset, field_names=None, name=None, safe=False): """ Given a QuerySet, return just the serialized representation based on the knockout_fields. Useful for middleware/APIs. Convenience method around ko_data. """ return ko_data(queryset, field_names, name, safe, return_json=True)
[ "def", "ko_json", "(", "queryset", ",", "field_names", "=", "None", ",", "name", "=", "None", ",", "safe", "=", "False", ")", ":", "return", "ko_data", "(", "queryset", ",", "field_names", ",", "name", ",", "safe", ",", "return_json", "=", "True", ")" ]
Given a QuerySet, return just the serialized representation based on the knockout_fields. Useful for middleware/APIs. Convenience method around ko_data.
[ "Given", "a", "QuerySet", "return", "just", "the", "serialized", "representation", "based", "on", "the", "knockout_fields", ".", "Useful", "for", "middleware", "/", "APIs", "." ]
train
https://github.com/Miserlou/django-knockout-modeler/blob/714d21cc5ed008f132cea01dbae9f214c2bf1b76/knockout_modeler/ko.py#L126-L134
Miserlou/django-knockout-modeler
knockout_modeler/ko.py
ko_data
def ko_data(queryset, field_names=None, name=None, safe=False, return_json=False): """ Given a QuerySet, return just the serialized representation based on the knockout_fields as JavaScript. """ try: try: # Get an inital instance of the QS. queryset_instance = queryset[0] except TypeError as e: # We are being passed an object rather than a QuerySet. # That's naughty, but we'll survive. queryset_instance = queryset queryset = [queryset] except IndexError as e: if not isinstance(queryset, list): # This is an empty QS - get the model directly. queryset_instance = queryset.model else: # We have been given an empty list. # Return nothing. return '[]' modelName = queryset_instance.__class__.__name__ modelNameData = [] if field_names is not None: fields = field_names else: fields = get_fields(queryset_instance) for obj in queryset: object_data = get_object_data(obj, fields, safe) modelNameData.append(object_data) if name: modelNameString = name else: modelNameString = modelName + "Data" dthandler = lambda obj: obj.isoformat() if isinstance(obj, (datetime.date, datetime.datetime)) else None dumped_json = json.dumps(modelNameData, default=dthandler) if return_json: return dumped_json return "var " + modelNameString + " = " + dumped_json + ';' except Exception as e: logger.exception(e) return '[]'
python
def ko_data(queryset, field_names=None, name=None, safe=False, return_json=False): """ Given a QuerySet, return just the serialized representation based on the knockout_fields as JavaScript. """ try: try: # Get an inital instance of the QS. queryset_instance = queryset[0] except TypeError as e: # We are being passed an object rather than a QuerySet. # That's naughty, but we'll survive. queryset_instance = queryset queryset = [queryset] except IndexError as e: if not isinstance(queryset, list): # This is an empty QS - get the model directly. queryset_instance = queryset.model else: # We have been given an empty list. # Return nothing. return '[]' modelName = queryset_instance.__class__.__name__ modelNameData = [] if field_names is not None: fields = field_names else: fields = get_fields(queryset_instance) for obj in queryset: object_data = get_object_data(obj, fields, safe) modelNameData.append(object_data) if name: modelNameString = name else: modelNameString = modelName + "Data" dthandler = lambda obj: obj.isoformat() if isinstance(obj, (datetime.date, datetime.datetime)) else None dumped_json = json.dumps(modelNameData, default=dthandler) if return_json: return dumped_json return "var " + modelNameString + " = " + dumped_json + ';' except Exception as e: logger.exception(e) return '[]'
[ "def", "ko_data", "(", "queryset", ",", "field_names", "=", "None", ",", "name", "=", "None", ",", "safe", "=", "False", ",", "return_json", "=", "False", ")", ":", "try", ":", "try", ":", "# Get an inital instance of the QS.", "queryset_instance", "=", "queryset", "[", "0", "]", "except", "TypeError", "as", "e", ":", "# We are being passed an object rather than a QuerySet.", "# That's naughty, but we'll survive.", "queryset_instance", "=", "queryset", "queryset", "=", "[", "queryset", "]", "except", "IndexError", "as", "e", ":", "if", "not", "isinstance", "(", "queryset", ",", "list", ")", ":", "# This is an empty QS - get the model directly.", "queryset_instance", "=", "queryset", ".", "model", "else", ":", "# We have been given an empty list.", "# Return nothing.", "return", "'[]'", "modelName", "=", "queryset_instance", ".", "__class__", ".", "__name__", "modelNameData", "=", "[", "]", "if", "field_names", "is", "not", "None", ":", "fields", "=", "field_names", "else", ":", "fields", "=", "get_fields", "(", "queryset_instance", ")", "for", "obj", "in", "queryset", ":", "object_data", "=", "get_object_data", "(", "obj", ",", "fields", ",", "safe", ")", "modelNameData", ".", "append", "(", "object_data", ")", "if", "name", ":", "modelNameString", "=", "name", "else", ":", "modelNameString", "=", "modelName", "+", "\"Data\"", "dthandler", "=", "lambda", "obj", ":", "obj", ".", "isoformat", "(", ")", "if", "isinstance", "(", "obj", ",", "(", "datetime", ".", "date", ",", "datetime", ".", "datetime", ")", ")", "else", "None", "dumped_json", "=", "json", ".", "dumps", "(", "modelNameData", ",", "default", "=", "dthandler", ")", "if", "return_json", ":", "return", "dumped_json", "return", "\"var \"", "+", "modelNameString", "+", "\" = \"", "+", "dumped_json", "+", "';'", "except", "Exception", "as", "e", ":", "logger", ".", "exception", "(", "e", ")", "return", "'[]'" ]
Given a QuerySet, return just the serialized representation based on the knockout_fields as JavaScript.
[ "Given", "a", "QuerySet", "return", "just", "the", "serialized", "representation", "based", "on", "the", "knockout_fields", "as", "JavaScript", "." ]
train
https://github.com/Miserlou/django-knockout-modeler/blob/714d21cc5ed008f132cea01dbae9f214c2bf1b76/knockout_modeler/ko.py#L137-L187
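The date handling above relies on the default hook of json.dumps; a standalone sketch of that technique (ko_data returns None for unsupported types, while this version raises to make them explicit):

import datetime
import json

def dthandler(obj):
    # ISO-format dates and datetimes so they survive JSON serialization
    if isinstance(obj, (datetime.date, datetime.datetime)):
        return obj.isoformat()
    raise TypeError("not JSON serializable: %r" % (obj,))

print(json.dumps({'created': datetime.datetime(2019, 1, 1, 12, 30)}, default=dthandler))
# -> {"created": "2019-01-01T12:30:00"}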
Miserlou/django-knockout-modeler
knockout_modeler/ko.py
ko
def ko(queryset, field_names=None): """ Converts a Django QuerySet into a complete Knockout implementation. """ try: koDataString = ko_data(queryset, field_names) koModelString = ko_model(queryset[0].__class__.__name__, field_names, data=True) koBindingsString = ko_bindings(queryset[0]) koString = koDataString + '\n' + koModelString + '\n' + koBindingsString return koString except Exception as e: logger.error(e) return ''
python
def ko(queryset, field_names=None): """ Converts a Django QuerySet into a complete Knockout implementation. """ try: koDataString = ko_data(queryset, field_names) koModelString = ko_model(queryset[0].__class__.__name__, field_names, data=True) koBindingsString = ko_bindings(queryset[0]) koString = koDataString + '\n' + koModelString + '\n' + koBindingsString return koString except Exception as e: logger.error(e) return ''
[ "def", "ko", "(", "queryset", ",", "field_names", "=", "None", ")", ":", "try", ":", "koDataString", "=", "ko_data", "(", "queryset", ",", "field_names", ")", "koModelString", "=", "ko_model", "(", "queryset", "[", "0", "]", ".", "__class__", ".", "__name__", ",", "field_names", ",", "data", "=", "True", ")", "koBindingsString", "=", "ko_bindings", "(", "queryset", "[", "0", "]", ")", "koString", "=", "koDataString", "+", "'\\n'", "+", "koModelString", "+", "'\\n'", "+", "koBindingsString", "return", "koString", "except", "Exception", "as", "e", ":", "logger", ".", "error", "(", "e", ")", "return", "''" ]
Converts a Django QuerySet into a complete Knockout implementation.
[ "Converts", "a", "Django", "QuerySet", "into", "a", "complete", "Knockout", "implementation", "." ]
train
https://github.com/Miserlou/django-knockout-modeler/blob/714d21cc5ed008f132cea01dbae9f214c2bf1b76/knockout_modeler/ko.py#L190-L205
vsoch/helpme
helpme/utils/settings.py
load_keypair
def load_keypair(keypair_file): '''load a keypair from a keypair file. We add attributes key (the raw key) and public_key (the url prepared public key) to the client. Parameters ========== keypair_file: the pem file to load. ''' from Crypto.PublicKey import RSA # Load key with open(keypair_file, 'rb') as filey: key = RSA.import_key(filey.read()) return quote_plus(key.publickey().exportKey().decode('utf-8'))
python
def load_keypair(keypair_file): '''load a keypair from a keypair file. We add attributes key (the raw key) and public_key (the url prepared public key) to the client. Parameters ========== keypair_file: the pem file to load. ''' from Crypto.PublicKey import RSA # Load key with open(keypair_file, 'rb') as filey: key = RSA.import_key(filey.read()) return quote_plus(key.publickey().exportKey().decode('utf-8'))
[ "def", "load_keypair", "(", "keypair_file", ")", ":", "from", "Crypto", ".", "PublicKey", "import", "RSA", "# Load key", "with", "open", "(", "keypair_file", ",", "'rb'", ")", "as", "filey", ":", "key", "=", "RSA", ".", "import_key", "(", "filey", ".", "read", "(", ")", ")", "return", "quote_plus", "(", "key", ".", "publickey", "(", ")", ".", "exportKey", "(", ")", ".", "decode", "(", "'utf-8'", ")", ")" ]
load a keypair from a keypair file. We add attributes key (the raw key) and public_key (the url prepared public key) to the client. Parameters ========== keypair_file: the pem file to load.
[ "load", "a", "keypair", "from", "a", "keypair", "file", ".", "We", "add", "attributes", "key", "(", "the", "raw", "key", ")", "and", "public_key", "(", "the", "url", "prepared", "public", "key", ")", "to", "the", "client", "." ]
train
https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/utils/settings.py#L39-L53
vsoch/helpme
helpme/utils/settings.py
generate_keypair
def generate_keypair(keypair_file): '''generate_keypair is used by some of the helpers that need a keypair. The function should be used if the client doesn't have the attribute self.key. We generate the key and return it. We use pycryptodome (3.7.2) Parameters ========= keypair_file: fullpath to where to save keypair ''' from Crypto.PublicKey import RSA key = RSA.generate(2048) # Ensure helper directory exists keypair_dir = os.path.dirname(keypair_file) if not os.path.exists(keypair_dir): os.makedirs(keypair_dir) # Save key with open(keypair_file, 'wb') as filey: filey.write(key.exportKey('PEM')) return key
python
def generate_keypair(keypair_file):
    '''generate_keypair is used by some of the helpers that need a keypair.
       The function should be used if the client doesn't have the attribute
       self.key. We generate the key and return it. We use pycryptodome (3.7.2)

       Parameters
       =========
       keypair_file: fullpath to where to save keypair
    '''
    from Crypto.PublicKey import RSA

    key = RSA.generate(2048)

    # Ensure helper directory exists
    keypair_dir = os.path.dirname(keypair_file)
    if not os.path.exists(keypair_dir):
        os.makedirs(keypair_dir)

    # Save key
    with open(keypair_file, 'wb') as filey:
        filey.write(key.exportKey('PEM'))

    return key
[ "def", "generate_keypair", "(", "keypair_file", ")", ":", "from", "Crypto", ".", "PublicKey", "import", "RSA", "key", "=", "RSA", ".", "generate", "(", "2048", ")", "# Ensure helper directory exists", "keypair_dir", "=", "os", ".", "path", ".", "dirname", "(", "keypair_file", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "keypair_dir", ")", ":", "os", ".", "makedirs", "(", "keypair_dir", ")", "# Save key", "with", "open", "(", "keypair_file", ",", "'wb'", ")", "as", "filey", ":", "filey", ".", "write", "(", "key", ".", "exportKey", "(", "'PEM'", ")", ")", "return", "key" ]
generate_keypair is used by some of the helpers that need a keypair.
The function should be used if the client doesn't have the attribute
self.key. We generate the key and return it. We use pycryptodome (3.7.2)

Parameters
=========
keypair_file: fullpath to where to save keypair
[ "generate_keypair", "is", "used", "by", "some", "of", "the", "helpers", "that", "need", "a", "keypair", ".", "The", "function", "should", "be", "used", "if", "the", "client", "doesn", "t", "have", "the", "attribute", "self", ".", "key", ".", "We", "generate", "the", "key", "and", "return", "it", "." ]
train
https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/utils/settings.py#L56-L80
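A hedged round-trip sketch combining generate_keypair with load_keypair from the previous record; both live in helpme/utils/settings.py, and the destination path below is only an example.

# Hypothetical round trip: generate a keypair, then read the public part back.
import os
from helpme.utils.settings import generate_keypair, load_keypair  # path assumed from the records

pem = os.path.expanduser('~/.helpme/helpme.pem')   # example location
key = generate_keypair(pem)      # writes the PEM file and returns the RSA key object
public_key = load_keypair(pem)   # URL-quoted public key read back from the same file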
SetBased/py-stratum
pystratum/exception/ResultException.py
ResultException.__message
def __message(expected_row_count, actual_row_count, query):
    """
    Composes the exception message.

    :param str expected_row_count: The expected row count.
    :param int actual_row_count: The actual row count.
    :param str query: The query.

    :rtype: str
    """
    query = query.strip()

    message = 'Wrong number of rows selected'
    message += os.linesep
    message += 'Expected number of rows: {}'.format(expected_row_count)
    message += os.linesep
    message += 'Actual number of rows: {}'.format(actual_row_count)
    message += os.linesep
    message += 'Query:'
    message += os.linesep if os.linesep in query else ' '
    message += query

    return message
python
def __message(expected_row_count, actual_row_count, query):
    """
    Composes the exception message.

    :param str expected_row_count: The expected row count.
    :param int actual_row_count: The actual row count.
    :param str query: The query.

    :rtype: str
    """
    query = query.strip()

    message = 'Wrong number of rows selected'
    message += os.linesep
    message += 'Expected number of rows: {}'.format(expected_row_count)
    message += os.linesep
    message += 'Actual number of rows: {}'.format(actual_row_count)
    message += os.linesep
    message += 'Query:'
    message += os.linesep if os.linesep in query else ' '
    message += query

    return message
[ "def", "__message", "(", "expected_row_count", ",", "actual_row_count", ",", "query", ")", ":", "query", "=", "query", ".", "strip", "(", ")", "message", "=", "'Wrong number of rows selected'", "message", "+=", "os", ".", "linesep", "message", "+=", "'Expected number of rows: {}'", ".", "format", "(", "expected_row_count", ")", "message", "+=", "os", ".", "linesep", "message", "+=", "'Actual number of rows: {}'", ".", "format", "(", "actual_row_count", ")", "message", "+=", "os", ".", "linesep", "message", "+=", "'Query:'", "message", "+=", "os", ".", "linesep", "if", "os", ".", "linesep", "in", "query", "else", "' '", "message", "+=", "query", "return", "message" ]
Composes the exception message.

:param str expected_row_count: The expected row count.
:param int actual_row_count: The actual row count.
:param str query: The query.

:rtype: str
[ "Composes", "the", "exception", "message", "." ]
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/exception/ResultException.py#L76-L98
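Because __message is a private helper, the sketch below just mirrors its string-building logic to show what the composed message looks like; the sample row counts and query are made up.

# Standalone illustration of the message layout composed by __message above.
import os

expected_row_count, actual_row_count = 1, 0   # example values
query = 'SELECT * FROM foo WHERE id = 1'      # example single-line query
message = os.linesep.join(['Wrong number of rows selected',
                           'Expected number of rows: {}'.format(expected_row_count),
                           'Actual number of rows: {}'.format(actual_row_count),
                           'Query: ' + query.strip()])
print(message)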
vsoch/helpme
helpme/main/github/__init__.py
Helper._submit
def _submit(self):
    '''submit the issue to github. When we get here we should have:

       {'user_prompt_issue': 'I want to do the thing.',
        'user_prompt_repo': 'vsoch/hello-world',
        'user_prompt_title': 'Error with this thing',
        'record_asciinema': '/tmp/helpme.93o__nt5.json',
        'record_environment': ((1,1),(2,2)...(N,N))}

       self.token should be propagated with the personal access token
    '''
    body = self.data['user_prompt_issue']
    title = self.data['user_prompt_title']
    repo = self.data['user_prompt_repo']

    # Step 1: Environment
    envars = self.data.get('record_environment')
    body = body + envars_to_markdown(envars)

    # Step 2: Asciinema
    asciinema = self.data.get('record_asciinema')
    if asciinema not in [None, '']:
        url = upload_asciinema(asciinema)

        # If the upload is successful, add a link to it.
        if url is not None:
            body += "\n[View Asciinema Recording](%s)" % url

    # Add other metadata about client
    body += "\n\ngenerated by [HelpMe](https://vsoch.github.io/helpme/)"
    body += "\nHelpMe Github Issue: %s" % (self.run_id)

    # Submit the issue
    issue = create_issue(title, body, repo, self.token)

    return issue
python
def _submit(self):
    '''submit the issue to github. When we get here we should have:

       {'user_prompt_issue': 'I want to do the thing.',
        'user_prompt_repo': 'vsoch/hello-world',
        'user_prompt_title': 'Error with this thing',
        'record_asciinema': '/tmp/helpme.93o__nt5.json',
        'record_environment': ((1,1),(2,2)...(N,N))}

       self.token should be propagated with the personal access token
    '''
    body = self.data['user_prompt_issue']
    title = self.data['user_prompt_title']
    repo = self.data['user_prompt_repo']

    # Step 1: Environment
    envars = self.data.get('record_environment')
    body = body + envars_to_markdown(envars)

    # Step 2: Asciinema
    asciinema = self.data.get('record_asciinema')
    if asciinema not in [None, '']:
        url = upload_asciinema(asciinema)

        # If the upload is successful, add a link to it.
        if url is not None:
            body += "\n[View Asciinema Recording](%s)" % url

    # Add other metadata about client
    body += "\n\ngenerated by [HelpMe](https://vsoch.github.io/helpme/)"
    body += "\nHelpMe Github Issue: %s" % (self.run_id)

    # Submit the issue
    issue = create_issue(title, body, repo, self.token)

    return issue
[ "def", "_submit", "(", "self", ")", ":", "body", "=", "self", ".", "data", "[", "'user_prompt_issue'", "]", "title", "=", "self", ".", "data", "[", "'user_prompt_title'", "]", "repo", "=", "self", ".", "data", "[", "'user_prompt_repo'", "]", "# Step 1: Environment", "envars", "=", "self", ".", "data", ".", "get", "(", "'record_environment'", ")", "body", "=", "body", "+", "envars_to_markdown", "(", "envars", ")", "# Step 2: Asciinema", "asciinema", "=", "self", ".", "data", ".", "get", "(", "'record_asciinema'", ")", "if", "asciinema", "not", "in", "[", "None", ",", "''", "]", ":", "url", "=", "upload_asciinema", "(", "asciinema", ")", "# If the upload is successful, add a link to it.", "if", "url", "is", "not", "None", ":", "body", "+=", "\"\\n[View Asciinema Recording](%s)\"", "%", "url", "# Add other metadata about client", "body", "+=", "\"\\n\\ngenerated by [HelpMe](https://vsoch.github.io/helpme/)\"", "body", "+=", "\"\\nHelpMe Github Issue: %s\"", "%", "(", "self", ".", "run_id", ")", "# Submit the issue", "issue", "=", "create_issue", "(", "title", ",", "body", ",", "repo", ",", "self", ".", "token", ")", "return", "issue" ]
submit the issue to github. When we get here we should have:

{'user_prompt_issue': 'I want to do the thing.',
 'user_prompt_repo': 'vsoch/hello-world',
 'user_prompt_title': 'Error with this thing',
 'record_asciinema': '/tmp/helpme.93o__nt5.json',
 'record_environment': ((1,1),(2,2)...(N,N))}

self.token should be propagated with the personal access token
[ "submit", "the", "issue", "to", "github", ".", "When", "we", "get", "here", "we", "should", "have", ":", "{", "user_prompt_issue", ":", "I", "want", "to", "do", "the", "thing", ".", "user_prompt_repo", ":", "vsoch", "/", "hello", "-", "world", "user_prompt_title", ":", "Error", "with", "this", "thing", "record_asciinema", ":", "/", "tmp", "/", "helpme", ".", "93o__nt5", ".", "json", "record_environment", ":", "((", "1", "1", ")", "(", "2", "2", ")", "...", "(", "N", "N", "))", "}" ]
train
https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/main/github/__init__.py#L64-L103
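A hedged sketch of the flow that feeds _submit; the helper name, the prompt values and the direct _submit call are illustrative assumptions, and self.token must already hold a personal access token.

# Hypothetical driver; in normal use helper.run() collects these values interactively.
from helpme.main import get_helper

helper = get_helper(name='github')            # 'github' is assumed to be a registered helper
helper.data.update({
    'user_prompt_title': 'Error with this thing',   # example values mirroring the docstring
    'user_prompt_issue': 'I want to do the thing.',
    'user_prompt_repo': 'vsoch/hello-world',
})
issue = helper._submit()   # assumes helper.token was already propagated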
julot/sphinxcontrib-dd
sphinxcontrib/dd/yaml.py
resolve_refs
def resolve_refs(uri, spec):
    """Resolve JSON references in a given dictionary.

    OpenAPI spec may contain JSON references to its nodes or external
    sources, so any attempt to rely that there's some expected attribute
    in the spec may fail. So we need to resolve JSON references before
    we use it (i.e. replace with referenced object).

    For details see:
        https://tools.ietf.org/html/draft-pbryan-zyp-json-ref-02

    The input spec is modified in-place despite being returned from
    the function.
    """
    resolver = jsonschema.RefResolver(uri, spec)

    def _do_resolve(node):
        if isinstance(node, collections.Mapping) and '$ref' in node:
            with resolver.resolving(node['$ref']) as resolved:
                result = deepcopy(resolved)
                for key in resolved:
                    if key in node:
                        merge(result[key], node[key])
                return result
        elif isinstance(node, collections.Mapping):
            for k, v in node.items():
                node[k] = _do_resolve(v)
        elif isinstance(node, (list, tuple)):
            for i in range(len(node)):
                node[i] = _do_resolve(node[i])
        return node

    return _do_resolve(spec)
python
def resolve_refs(uri, spec):
    """Resolve JSON references in a given dictionary.

    OpenAPI spec may contain JSON references to its nodes or external
    sources, so any attempt to rely that there's some expected attribute
    in the spec may fail. So we need to resolve JSON references before
    we use it (i.e. replace with referenced object).

    For details see:
        https://tools.ietf.org/html/draft-pbryan-zyp-json-ref-02

    The input spec is modified in-place despite being returned from
    the function.
    """
    resolver = jsonschema.RefResolver(uri, spec)

    def _do_resolve(node):
        if isinstance(node, collections.Mapping) and '$ref' in node:
            with resolver.resolving(node['$ref']) as resolved:
                result = deepcopy(resolved)
                for key in resolved:
                    if key in node:
                        merge(result[key], node[key])
                return result
        elif isinstance(node, collections.Mapping):
            for k, v in node.items():
                node[k] = _do_resolve(v)
        elif isinstance(node, (list, tuple)):
            for i in range(len(node)):
                node[i] = _do_resolve(node[i])
        return node

    return _do_resolve(spec)
[ "def", "resolve_refs", "(", "uri", ",", "spec", ")", ":", "resolver", "=", "jsonschema", ".", "RefResolver", "(", "uri", ",", "spec", ")", "def", "_do_resolve", "(", "node", ")", ":", "if", "isinstance", "(", "node", ",", "collections", ".", "Mapping", ")", "and", "'$ref'", "in", "node", ":", "with", "resolver", ".", "resolving", "(", "node", "[", "'$ref'", "]", ")", "as", "resolved", ":", "result", "=", "deepcopy", "(", "resolved", ")", "for", "key", "in", "resolved", ":", "if", "key", "in", "node", ":", "merge", "(", "result", "[", "key", "]", ",", "node", "[", "key", "]", ")", "return", "result", "elif", "isinstance", "(", "node", ",", "collections", ".", "Mapping", ")", ":", "for", "k", ",", "v", "in", "node", ".", "items", "(", ")", ":", "node", "[", "k", "]", "=", "_do_resolve", "(", "v", ")", "elif", "isinstance", "(", "node", ",", "(", "list", ",", "tuple", ")", ")", ":", "for", "i", "in", "range", "(", "len", "(", "node", ")", ")", ":", "node", "[", "i", "]", "=", "_do_resolve", "(", "node", "[", "i", "]", ")", "return", "node", "return", "_do_resolve", "(", "spec", ")" ]
Resolve JSON references in a given dictionary.

OpenAPI spec may contain JSON references to its nodes or external
sources, so any attempt to rely that there's some expected attribute
in the spec may fail. So we need to resolve JSON references before
we use it (i.e. replace with referenced object).

For details see:
https://tools.ietf.org/html/draft-pbryan-zyp-json-ref-02

The input spec is modified in-place despite being returned from
the function.
[ "Resolve", "JSON", "references", "in", "a", "given", "dictionary", "." ]
train
https://github.com/julot/sphinxcontrib-dd/blob/18619b356508b9a99cc329eeae53cbf299a5d1de/sphinxcontrib/dd/yaml.py#L32-L63
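A small, hedged example of resolve_refs on a spec containing a local $ref; the import path is assumed to mirror the file path given in the record.

# Hypothetical call: a local JSON reference is replaced by the referenced object.
from sphinxcontrib.dd.yaml import resolve_refs  # module path assumed from the record

spec = {
    'definitions': {'Name': {'type': 'string'}},
    'properties': {'name': {'$ref': '#/definitions/Name'}},
}
resolved = resolve_refs('', spec)
print(resolved['properties']['name'])  # {'type': 'string'}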
vsoch/helpme
helpme/client/help.py
main
def main(args, extras):
    '''This is the actual driver for the helper.
    '''
    from helpme.main import get_helper
    name = args.command

    if name in HELPME_HELPERS:

        # Get the helper, do the recording, submit
        helper = get_helper(name=name)

        if args.asciinema is not None:
            if os.path.exists(args.asciinema):
                helper.data['record_asciinema'] = args.asciinema

        helper.run(positionals=extras)
python
def main(args, extras):
    '''This is the actual driver for the helper.
    '''
    from helpme.main import get_helper
    name = args.command

    if name in HELPME_HELPERS:

        # Get the helper, do the recording, submit
        helper = get_helper(name=name)

        if args.asciinema is not None:
            if os.path.exists(args.asciinema):
                helper.data['record_asciinema'] = args.asciinema

        helper.run(positionals=extras)
[ "def", "main", "(", "args", ",", "extras", ")", ":", "from", "helpme", ".", "main", "import", "get_helper", "name", "=", "args", ".", "command", "if", "name", "in", "HELPME_HELPERS", ":", "# Get the helper, do the recording, submit", "helper", "=", "get_helper", "(", "name", "=", "name", ")", "if", "args", ".", "asciinema", "is", "not", "None", ":", "if", "os", ".", "path", ".", "exists", "(", "args", ".", "asciinema", ")", ":", "helper", ".", "data", "[", "'record_asciinema'", "]", "=", "args", ".", "asciinema", "helper", ".", "run", "(", "positionals", "=", "extras", ")" ]
This is the actual driver for the helper.
[ "This", "is", "the", "actual", "driver", "for", "the", "helper", "." ]
train
https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/client/help.py#L27-L44
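A hedged sketch of calling main directly; in the real CLI argparse builds args, so the Namespace below only mimics the two attributes the function reads (command and asciinema).

# Hypothetical direct invocation of the helper driver.
from argparse import Namespace
from helpme.client.help import main  # module path assumed from the record

args = Namespace(command='github', asciinema=None)
main(args, extras=['please help with this error'])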
jurismarches/chopper
chopper/extractor.py
Extractor.extract
def extract(self, html_contents, css_contents=None, base_url=None):
    """
    Extracts the cleaned html tree as a string and only
    css rules matching the cleaned html tree

    :param html_contents: The HTML contents to parse
    :type html_contents: str
    :param css_contents: The CSS contents to parse
    :type css_contents: str
    :param base_url: The base page URL to use for relative to absolute links
    :type base_url: str

    :returns: cleaned HTML contents, cleaned CSS contents
    :rtype: str or tuple
    """
    # Clean HTML
    html_extractor = self.html_extractor(
        html_contents, self._xpaths_to_keep, self._xpaths_to_discard)
    has_matches = html_extractor.parse()

    if has_matches:
        # Relative to absolute URLs
        if base_url is not None:
            html_extractor.rel_to_abs(base_url)

        # Convert ElementTree to string
        cleaned_html = html_extractor.to_string()
    else:
        cleaned_html = None

    # Clean CSS
    if css_contents is not None:
        if cleaned_html is not None:
            css_extractor = self.css_extractor(css_contents, cleaned_html)
            css_extractor.parse()

            # Relative to absolute URLs
            if base_url is not None:
                css_extractor.rel_to_abs(base_url)

            cleaned_css = css_extractor.to_string()
        else:
            cleaned_css = None
    else:
        return cleaned_html

    return (cleaned_html, cleaned_css)
python
def extract(self, html_contents, css_contents=None, base_url=None):
    """
    Extracts the cleaned html tree as a string and only
    css rules matching the cleaned html tree

    :param html_contents: The HTML contents to parse
    :type html_contents: str
    :param css_contents: The CSS contents to parse
    :type css_contents: str
    :param base_url: The base page URL to use for relative to absolute links
    :type base_url: str

    :returns: cleaned HTML contents, cleaned CSS contents
    :rtype: str or tuple
    """
    # Clean HTML
    html_extractor = self.html_extractor(
        html_contents, self._xpaths_to_keep, self._xpaths_to_discard)
    has_matches = html_extractor.parse()

    if has_matches:
        # Relative to absolute URLs
        if base_url is not None:
            html_extractor.rel_to_abs(base_url)

        # Convert ElementTree to string
        cleaned_html = html_extractor.to_string()
    else:
        cleaned_html = None

    # Clean CSS
    if css_contents is not None:
        if cleaned_html is not None:
            css_extractor = self.css_extractor(css_contents, cleaned_html)
            css_extractor.parse()

            # Relative to absolute URLs
            if base_url is not None:
                css_extractor.rel_to_abs(base_url)

            cleaned_css = css_extractor.to_string()
        else:
            cleaned_css = None
    else:
        return cleaned_html

    return (cleaned_html, cleaned_css)
[ "def", "extract", "(", "self", ",", "html_contents", ",", "css_contents", "=", "None", ",", "base_url", "=", "None", ")", ":", "# Clean HTML", "html_extractor", "=", "self", ".", "html_extractor", "(", "html_contents", ",", "self", ".", "_xpaths_to_keep", ",", "self", ".", "_xpaths_to_discard", ")", "has_matches", "=", "html_extractor", ".", "parse", "(", ")", "if", "has_matches", ":", "# Relative to absolute URLs", "if", "base_url", "is", "not", "None", ":", "html_extractor", ".", "rel_to_abs", "(", "base_url", ")", "# Convert ElementTree to string", "cleaned_html", "=", "html_extractor", ".", "to_string", "(", ")", "else", ":", "cleaned_html", "=", "None", "# Clean CSS", "if", "css_contents", "is", "not", "None", ":", "if", "cleaned_html", "is", "not", "None", ":", "css_extractor", "=", "self", ".", "css_extractor", "(", "css_contents", ",", "cleaned_html", ")", "css_extractor", ".", "parse", "(", ")", "# Relative to absolute URLs", "if", "base_url", "is", "not", "None", ":", "css_extractor", ".", "rel_to_abs", "(", "base_url", ")", "cleaned_css", "=", "css_extractor", ".", "to_string", "(", ")", "else", ":", "cleaned_css", "=", "None", "else", ":", "return", "cleaned_html", "return", "(", "cleaned_html", ",", "cleaned_css", ")" ]
Extracts the cleaned html tree as a string and only
css rules matching the cleaned html tree

:param html_contents: The HTML contents to parse
:type html_contents: str
:param css_contents: The CSS contents to parse
:type css_contents: str
:param base_url: The base page URL to use for relative to absolute links
:type base_url: str

:returns: cleaned HTML contents, cleaned CSS contents
:rtype: str or tuple
[ "Extracts", "the", "cleaned", "html", "tree", "as", "a", "string", "and", "only", "css", "rules", "matching", "the", "cleaned", "html", "tree" ]
train
https://github.com/jurismarches/chopper/blob/53c5489a53e3a5d205a5cb207df751c09633e7ce/chopper/extractor.py#L58-L110
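A hedged usage sketch for Extractor.extract; the keep() constructor and the sample HTML/CSS strings are assumptions based on the xpath lists this class maintains, not taken verbatim from the record.

# Hypothetical usage: keep only #main and the CSS rules that still match it.
from chopper.extractor import Extractor

extractor = Extractor.keep('//div[@id="main"]')  # keep() assumed from the class's xpath lists
html, css = extractor.extract(
    '<html><body><div id="main">Hello</div><p>bye</p></body></html>',
    css_contents='div#main { color: red; } p { color: blue; }',
    base_url='http://example.com/')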
jurismarches/chopper
chopper/extractor.py
Extractor.__add
def __add(self, dest, xpath):
    """
    Adds a Xpath expression to the dest list

    :param dest: The destination list to add the Xpath
    :type dest: list
    :param xpath: The Xpath expression to add
    :type xpath: str
    """
    assert isinstance(xpath, string_types)
    dest.append(xpath)
python
def __add(self, dest, xpath):
    """
    Adds a Xpath expression to the dest list

    :param dest: The destination list to add the Xpath
    :type dest: list
    :param xpath: The Xpath expression to add
    :type xpath: str
    """
    assert isinstance(xpath, string_types)
    dest.append(xpath)
[ "def", "__add", "(", "self", ",", "dest", ",", "xpath", ")", ":", "assert", "isinstance", "(", "xpath", ",", "string_types", ")", "dest", ".", "append", "(", "xpath", ")" ]
Adds a Xpath expression to the dest list

:param dest: The destination list to add the Xpath
:type dest: list
:param xpath: The Xpath expression to add
:type xpath: str
[ "Adds", "a", "Xpath", "expression", "to", "the", "dest", "list" ]
train
https://github.com/jurismarches/chopper/blob/53c5489a53e3a5d205a5cb207df751c09633e7ce/chopper/extractor.py#L142-L152
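Since __add is private and name-mangled, the sketch below reproduces its validate-then-append pattern standalone; string_types comes from six (plain str on Python 3), and the list name is an example.

# Standalone mimic of the __add helper's behaviour.
from six import string_types

xpaths_to_keep = []

def add(dest, xpath):
    assert isinstance(xpath, string_types)
    dest.append(xpath)

add(xpaths_to_keep, '//div[@id="main"]')
# add(xpaths_to_keep, 42)  # would raise AssertionError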
SetBased/py-stratum
pystratum/wrapper/Wrapper.py
Wrapper._write_line
def _write_line(self, line=None):
    """
    Appends a line of code to the generated code and adjusts the indent level of the generated code.

    :param line: The line of code (without LF) that must be appended.
    """
    if line is None:
        self._write("\n")
        if self.__indent_level > 1:
            self.__indent_level -= 1
    elif line == '':
        self._write("\n")
    else:
        line = (' ' * 4 * self.__indent_level) + line
        if line[-1:] == ':':
            self.__indent_level += 1
        self._write(line + "\n")
python
def _write_line(self, line=None):
    """
    Appends a line of code to the generated code and adjusts the indent level of the generated code.

    :param line: The line of code (without LF) that must be appended.
    """
    if line is None:
        self._write("\n")
        if self.__indent_level > 1:
            self.__indent_level -= 1
    elif line == '':
        self._write("\n")
    else:
        line = (' ' * 4 * self.__indent_level) + line
        if line[-1:] == ':':
            self.__indent_level += 1
        self._write(line + "\n")
[ "def", "_write_line", "(", "self", ",", "line", "=", "None", ")", ":", "if", "line", "is", "None", ":", "self", ".", "_write", "(", "\"\\n\"", ")", "if", "self", ".", "__indent_level", ">", "1", ":", "self", ".", "__indent_level", "-=", "1", "elif", "line", "==", "''", ":", "self", ".", "_write", "(", "\"\\n\"", ")", "else", ":", "line", "=", "(", "' '", "*", "4", "*", "self", ".", "__indent_level", ")", "+", "line", "if", "line", "[", "-", "1", ":", "]", "==", "':'", ":", "self", ".", "__indent_level", "+=", "1", "self", ".", "_write", "(", "line", "+", "\"\\n\"", ")" ]
Appends a line of code to the generated code and adjusts the indent level of the generated code.

:param line: The line of code (without LF) that must be appended.
[ "Appends", "a", "line", "of", "code", "to", "the", "generated", "code", "and", "adjust", "the", "indent", "level", "of", "the", "generated", "code", "." ]
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/wrapper/Wrapper.py#L66-L82
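A standalone mimic of _write_line's indentation bookkeeping, assuming an initial indent level of 0 (the wrapper's real starting level is not shown in this record).

# Minimal reproduction of the indent-level rules used by _write_line.
lines = []
level = 0  # assumed starting level

def write_line(line=None):
    global level
    if line is None:
        lines.append('')
        if level > 1:
            level -= 1
    elif line == '':
        lines.append('')
    else:
        text = ' ' * 4 * level + line
        if text[-1:] == ':':
            level += 1
        lines.append(text)

write_line('def answer():')  # ends with ':' so the level is bumped to 1
write_line('return 42')      # written with 4 leading spaces
print('\n'.join(lines))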