{"signature": "def push_tag(tag):", "body": "_call('' + str(tag))", "docstring": "Pushes a tag into the upstream", "id": "f1:m7"} {"signature": "def is_committed():", "body": "return '' in _call('')", "docstring": "Returns True if repository is committed, otherwise False", "id": "f1:m3"} {"signature": "def confirm(tag):", "body": "click.echo()if click.confirm(''.format(tag=click.style(str(tag), fg='')),default=True, abort=True):git.create_tag(tag)if click.confirm(''.format(tag=click.style(str(tag), fg='')),default=True):git.push_tag(tag)click.echo('')else:git.delete_tag(tag)click.echo('')", "docstring": "Prompts user before proceeding", "id": "f2:m3"} {"signature": "def bump(self, target):", "body": "if target == '':return Version(self.major, self.minor, self.patch + )if target == '':return Version(self.major, self.minor + , )if target == '':return Version(self.major + , , )return self.clone()", "docstring": "Bumps the Version given a target\n\nThe target can be either MAJOR, MINOR or PATCH", "id": "f3:c0:m5"} {"signature": "def clone(self):", "body": "t = Tag(self.version.major, self.version.minor, self.version.patch)if self.revision is not None:t.revision = self.revision.clone()return t", "docstring": "Returns a copy of this object", "id": "f3:c2:m4"} {"signature": "@staticmethoddef parse(s):", "body": "try:m = _regex.match(s)t = Tag(int(m.group('')),int(m.group('')),int(m.group('')))return tif m.group('') is Noneelse t.with_revision(m.group(''), int(m.group('')))except AttributeError:return None", "docstring": "Parses a string into a Tag", "id": "f3:c2:m8"} {"signature": "def clone(self):", "body": "return Revision(self.label, self.number)", "docstring": "Returns a copy of this object", "id": "f3:c1:m4"} {"signature": "def bump(self):", "body": "return Revision(self.label, self.number + )", "docstring": "Bumps the Revision's number", "id": "f3:c1:m5"} {"signature": "def clone(self):", "body": "return Version(self.major, self.minor, self.patch)", "docstring": "Returns a copy of this object", "id": "f3:c0:m4"} {"signature": "@invoke.task()def build(ctx):", "body": "ctx.run(f'')", "docstring": "Build the package into distributables.\n\n This will create two distributables: source and wheel.", "id": "f5:m0"} {"signature": "def lock(self):", "body": "data = self.datadata[''][''] = {\"\": self.hash}data[''][''] = return json.dumps(data, indent=, separators=('', ''))", "docstring": "Returns a JSON representation of the Pipfile.", "id": "f10:c1:m5"} {"signature": "def walk_up(bottom):", "body": "bottom = os.path.realpath(bottom)try:names = os.listdir(bottom)except Exception:returndirs, nondirs = [], []for name in names:if os.path.isdir(os.path.join(bottom, name)):dirs.append(name)else:nondirs.append(name)yield bottom, dirs, nondirsnew_path = os.path.realpath(os.path.join(bottom, ''))if new_path == bottom:returnfor x in walk_up(new_path):yield x", "docstring": "mimic os.walk, but walk 'up' instead of down the directory tree.\n From: https://gist.github.com/zdavkeos/1098474", "id": "f10:m1"} {"signature": "def assert_requirements(self):", "body": "if hasattr(sys, ''):implementation_version = format_full_version(sys.implementation.version)else:implementation_version = \"\"if hasattr(sys, ''):implementation_name = sys.implementation.nameelse:implementation_name = ''lookup = {'': os.name,'': sys.platform,'': platform.machine(),'': platform.python_implementation(),'': platform.release(),'': platform.system(),'': platform.version(),'': platform.python_version()[:],'': platform.python_version(),'': 
implementation_name,'': implementation_version}for marker, specifier in self.data[''][''].items():if marker in lookup:try:assert lookup[marker] == specifierexcept AssertionError:raise AssertionError(''.format(marker, specifier))", "docstring": "Asserts PEP 508 specifiers.", "id": "f10:c1:m6"} {"signature": "@propertydef contents(self):", "body": "with codecs.open(self.filename, '', '') as f:return f.read()", "docstring": "Returns the contents of the pipfile.", "id": "f10:c1:m4"} {"signature": "@classmethoddef load(klass, filename, inject_env=True):", "body": "p = PipfileParser(filename=filename)pipfile = klass(filename=filename)pipfile.data = p.parse(inject_env=inject_env)return pipfile", "docstring": "Load a Pipfile from a given filename.", "id": "f10:c1:m2"} {"signature": "def add_templates_to_message(self):", "body": "super(TemplatedHTMLEmailMessageViewTestCase, self).add_templates_to_message()self.message.html_body_template = self.html_body_template", "docstring": "Adds templates to the fixture message, ensuring it can be rendered.", "id": "f20:c2:m1"} {"signature": "def send(self, extra_context=None, **kwargs):", "body": "message = self.render_to_message(extra_context=extra_context, **kwargs)return message.send()", "docstring": "Renders and sends an email message.\n\nAll keyword arguments other than ``extra_context`` are passed through\nas keyword arguments when constructing a new :attr:`message_class`\ninstance for this message.\n\nThis method exists primarily for convenience, and the proper\nrendering of your message should not depend on the behavior of this\nmethod. To alter how a message is created, override\n:meth:``render_to_message`` instead, since that should always be\ncalled, even if a message is not sent.\n\n:param extra_context: Any additional context data that will be used\n when rendering this message.\n:type extra_context: :class:`dict`", "id": "f23:c0:m5"} {"signature": "@propertydef headers(self):", "body": "if not hasattr(self, ''):self._headers = {}return self._headers", "docstring": "A dictionary containing the headers for this message.", "id": "f23:c0:m0"} {"signature": "def render_to_message(self, extra_context=None, **kwargs):", "body": "if extra_context is None:extra_context = {}kwargs.setdefault('', {}).update(self.headers)context = self.get_context_data(**extra_context)return self.message_class(subject=self.render_subject(context),body=self.render_body(context),**kwargs)", "docstring": "Renders and returns an unsent message with the provided context.\n\nAny extra keyword arguments passed will be passed through as keyword\narguments to the message constructor.\n\n:param extra_context: Any additional context to use when rendering the\n templated content.\n:type extra_context: :class:`dict`\n:returns: A message instance.\n:rtype: :attr:`.message_class`", "id": "f23:c0:m4"} {"signature": "def render_subject(self, context):", "body": "rendered = self.subject_template.render(unescape(context))return rendered.strip()", "docstring": "Renders the message subject for the given context.\n\nThe context data is automatically unescaped to avoid rendering HTML\nentities in ``text/plain`` content.\n\n:param context: The context to use when rendering the subject template.\n:type context: :class:`~django.template.Context`\n:returns: A rendered subject.\n:rtype: :class:`str`", "id": "f23:c1:m5"} {"signature": "def register(self, cls):", "body": "preview = cls(site=self)logger.debug('', preview, self)index = self.__previews.setdefault(preview.module, {})index[cls.__name__] = 
preview", "docstring": "Adds a preview to the index.", "id": "f25:c0:m2"} {"signature": "@propertydef url(self):", "body": "return reverse('' % URL_NAMESPACE, kwargs={'': self.module,'': type(self).__name__,})", "docstring": "The URL to access this preview.", "id": "f25:c1:m4"} {"signature": "def __iter__(self):", "body": "for module in sorted(self.__previews.keys()):previews = ModulePreviews(module, sorted(self.__previews[module].values(), key=str))yield previews", "docstring": "Returns an iterator of :class:`ModulePreviews` tuples, sorted by module nae.", "id": "f25:c0:m1"} {"signature": "def list_view(self, request):", "body": "return render(request, '', {'': self,})", "docstring": "Returns a list view response containing all of the registered previews.", "id": "f25:c0:m4"} {"signature": "def maybe_decode_header(header):", "body": "value, encoding = decode_header(header)[]if encoding:return value.decode(encoding)else:return value", "docstring": "Decodes an encoded 7-bit ASCII header value into it's actual value.", "id": "f25:m0"} {"signature": "def logout(self, revoke_oauth=False):", "body": "return self.api.logout(revoke_oauth=revoke_oauth)", "docstring": "Log out the gmusicapi Musicmanager instance.\n\n Parameters:\n revoke_oauth (bool): If ``True``, oauth credentials will be revoked and the corresponding oauth file will be deleted.\n\n Returns:\n ``True`` on success.", "id": "f33:c0:m2"} {"signature": "def login(self, username=None, password=None, android_id=None):", "body": "cls_name = type(self).__name__if username is None:username = input(\"\")if password is None:password = getpass.getpass(\"\")if android_id is None:android_id = Mobileclient.FROM_MAC_ADDRESStry:self.api.login(username, password, android_id)except OSError:logger.exception(\"\".format(cls_name))if not self.is_authenticated:logger.warning(\"\".format(cls_name))return Falselogger.info(\"\".format(cls_name))return True", "docstring": "Authenticate the gmusicapi Mobileclient instance.\n\n Parameters:\n username (Optional[str]): Your Google Music username. Will be prompted if not given.\n\n password (Optional[str]): Your Google Music password. Will be prompted if not given.\n\n android_id (Optional[str]): The 16 hex digits from an Android device ID.\n Default: Use gmusicapi.Mobileclient.FROM_MAC_ADDRESS to create ID from computer's MAC address.\n\n Returns:\n ``True`` on successful login or ``False`` on unsuccessful login.", "id": "f35:c0:m1"} {"signature": "def get_google_playlist(self, playlist):", "body": "logger.info(\"\".format(playlist))for google_playlist in self.api.get_all_user_playlist_contents():if google_playlist[''] == playlist or google_playlist[''] == playlist:return google_playlistelse:logger.warning(\"\".format(playlist))return {}", "docstring": "Get playlist information of a user-generated Google Music playlist.\n\n Parameters:\n playlist (str): Name or ID of Google Music playlist. 
Names are case-sensitive.\n Google allows multiple playlists with the same name.\n If multiple playlists have the same name, the first one encountered is used.\n\n Returns:\n dict: The playlist dict as returned by Mobileclient.get_all_user_playlist_contents.", "id": "f35:c0:m5"} {"signature": "@propertydef is_authenticated(self):", "body": "return self.api.is_authenticated()", "docstring": "Check the authentication status of the gmusicapi client instance.\n\n Returns:\n ``True`` if authenticated, ``False`` if not.", "id": "f36:c0:m1"} {"signature": "@staticmethod@cast_to_list()def get_local_playlists(filepaths, exclude_patterns=None, max_depth=float('')):", "body": "logger.info(\"\")included_playlists = []excluded_playlists = []supported_filepaths = get_supported_filepaths(filepaths, SUPPORTED_PLAYLIST_FORMATS, max_depth=max_depth)included_playlists, excluded_playlists = exclude_filepaths(supported_filepaths, exclude_patterns=exclude_patterns)logger.info(\"\".format(len(excluded_playlists)))logger.info(\"\".format(len(included_playlists)))return included_playlists, excluded_playlists", "docstring": "Load playlists from local filepaths.\n\n Parameters:\n filepaths (list or str): Filepath(s) to search for music files.\n\n exclude_patterns (list or str): Pattern(s) to exclude.\n Patterns are Python regex patterns.\n Filepaths are excluded if they match any of the exclude patterns.\n\n max_depth (int): The depth in the directory tree to walk.\n A depth of '0' limits the walk to the top directory.\n Default: No limit.\n\n Returns:\n A list of local playlist filepaths matching criteria\n and a list of local playlist filepaths excluded using exclusion criteria.", "id": "f36:c0:m3"} {"signature": "def _check_field_value(field_value, pattern):", "body": "if isinstance(field_value, list):return any(re.search(pattern, str(value), re.I) for value in field_value)else:return re.search(pattern, str(field_value), re.I)", "docstring": "Check a song metadata field value for a pattern.", "id": "f38:m10"} {"signature": "def _split_field_to_single_value(field):", "body": "split_field = re.match(r'', field)return split_field.group() or field", "docstring": "Convert number field values split by a '/' to a single number value.", "id": "f38:m3"} {"signature": "def _normalize_metadata(metadata):", "body": "metadata = str(metadata)metadata = metadata.lower()metadata = re.sub(r'', '', metadata) metadata = re.sub(r'', r'', metadata) metadata = re.sub(r'', '', metadata) metadata = re.sub(r'', '', metadata) metadata = re.sub(r'', '', metadata) metadata = re.sub(r'', '', metadata) metadata = re.sub(r'', '', metadata) metadata = re.sub(r'', '', metadata, re.I) return metadata", "docstring": "Normalize metadata to improve match accuracy.", "id": "f38:m5"} {"signature": "def compare_song_collections(src_songs, dst_songs):", "body": "def gather_field_values(song):return tuple((_normalize_metadata(song[field]) for field in _filter_comparison_fields(song)))dst_songs_criteria = {gather_field_values(_normalize_song(dst_song)) for dst_song in dst_songs}return [src_song for src_song in src_songs if gather_field_values(_normalize_song(src_song)) not in dst_songs_criteria]", "docstring": "Compare two song collections to find missing songs.\n\n Parameters:\n src_songs (list): Google Music song dicts or filepaths of local songs.\n\n dst_songs (list): Google Music song dicts or filepaths of local songs.\n\n Returns:\n A list of Google Music song dicts or local song filepaths from source missing in destination.", "id": "f38:m7"} 
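
The two records above describe a normalize-then-compare strategy whose concrete regex rules are redacted. A minimal, runnable sketch of the same idea, with hypothetical field names and a deliberately simple normalizer:

    import re

    def normalize(value):
        # Lowercase and drop punctuation so near-identical tags compare equal.
        return re.sub(r'[^\w\s]', '', str(value).lower())

    def missing_songs(src_songs, dst_songs, fields=('title', 'artist', 'album')):
        # Build a set of normalized keys for the destination, then keep any
        # source song whose key is absent -- mirroring compare_song_collections.
        key = lambda song: tuple(normalize(song.get(f, '')) for f in fields)
        dst_keys = {key(s) for s in dst_songs}
        return [s for s in src_songs if key(s) not in dst_keys]

    src = [{'title': 'Hey, Jude!', 'artist': 'The Beatles'}]
    dst = [{'title': 'Hey Jude', 'artist': 'the beatles'}]
    assert missing_songs(src, dst) == []
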
{"signature": "@cast_to_list()def get_supported_filepaths(filepaths, supported_extensions, max_depth=float('')):", "body": "supported_filepaths = []for path in filepaths:if os.name == '' and CYGPATH_RE.match(path):path = convert_cygwin_path(path)if os.path.isdir(path):for root, __, files in walk_depth(path, max_depth):for f in files:if f.lower().endswith(supported_extensions):supported_filepaths.append(os.path.join(root, f))elif os.path.isfile(path) and path.lower().endswith(supported_extensions):supported_filepaths.append(path)return supported_filepaths", "docstring": "Get filepaths with supported extensions from given filepaths.\n\n Parameters:\n filepaths (list or str): Filepath(s) to check.\n\n supported_extensions (tuple or str): Supported file extensions or a single file extension.\n\n max_depth (int): The depth in the directory tree to walk.\n A depth of '0' limits the walk to the top directory.\n Default: No limit.\n\n Returns:\n A list of supported filepaths.", "id": "f38:m8"} {"signature": "def convert_cygwin_path(path):", "body": "try:win_path = subprocess.check_output([\"\", \"\", path], universal_newlines=True).strip()except (FileNotFoundError, subprocess.CalledProcessError):logger.exception(\"\")raisereturn win_path", "docstring": "Convert Unix path from Cygwin to Windows path.", "id": "f38:m0"} {"signature": "def _mutagen_fields_to_single_value(metadata):", "body": "return dict((k, v[]) for k, v in metadata.items() if v)", "docstring": "Replace mutagen metadata field list values in mutagen tags with the first list value.", "id": "f38:m2"} {"signature": "def cast_to_list(position):", "body": "@wrapt.decoratordef wrapper(function, instance, args, kwargs):if not isinstance(args[position], list):args = list(args)args[position] = [args[position]]args = tuple(args)return function(*args, **kwargs)return wrapper", "docstring": "Cast the positional argument at given position into a list if not already a list.", "id": "f39:m0"} {"signature": "@classmethod@coroutine@nt_cursordef insert(cls, cur, table: str, values: dict):", "body": "keys = cls._COMMA.join(values.keys())value_place_holder = cls._PLACEHOLDER * len(values)query = cls._insert_string.format(table, keys, value_place_holder[:-])yield from cur.execute(query, tuple(values.values()))return (yield from cur.fetchone())", "docstring": "Creates an insert statement with only chosen fields\n\nArgs:\n table: a string indicating the name of the table\n values: a dict of fields and values to be inserted\n\nReturns:\n A 'Record' object with table columns as properties", "id": "f41:c0:m5"} {"signature": "@classmethod@coroutinedef get_cursor(cls, cursor_type=_CursorType.PLAIN) -> Cursor:", "body": "_cur = Noneif cls._use_pool:_connection_source = yield from cls.get_pool()else:_connection_source = yield from aiopg.connect(echo=False, **cls._connection_params)if cursor_type == _CursorType.PLAIN:_cur = yield from _connection_source.cursor()if cursor_type == _CursorType.NAMEDTUPLE:_cur = yield from _connection_source.cursor(cursor_factory=psycopg2.extras.NamedTupleCursor)if cursor_type == _CursorType.DICT:_cur = yield from _connection_source.cursor(cursor_factory=psycopg2.extras.DictCursor)if not cls._use_pool:_cur = cursor_context_manager(_connection_source, _cur)return _cur", "docstring": "Yields:\n new client-side cursor from existing db connection pool", "id": "f41:c0:m3"} {"signature": "def nt_cursor(func):", "body": "@wraps(func)def wrapper(cls, *args, **kwargs):with (yield from cls.get_cursor(_CursorType.NAMEDTUPLE)) as c:return (yield 
from func(cls, c, *args, **kwargs))return wrapper", "docstring": "Decorator that provides a namedtuple cursor to the calling function\n\nAdds the cursor as the second argument to the calling functions\n\nRequires that the function being decorated is an instance of a class or object\nthat yields a cursor from a get_cursor(cursor_type=CursorType.NAMEDTUPLE) coroutine or provides such an object\nas the first argument in its signature\n\nYields:\n A client-side namedtuple cursor", "id": "f41:m2"} {"signature": "@classmethod@coroutine@cursordef delete(cls, cur, table: str, where_keys: list):", "body": "where_clause, values = cls._get_where_clause_with_values(where_keys)query = cls._delete_query.format(table, where_clause)yield from cur.execute(query, values)return cur.rowcount", "docstring": "Creates a delete query with where keys\nSupports multiple where clauses combined with AND, OR, or both\n\nArgs:\n table: a string indicating the name of the table\n where_keys: list of dictionary\n example of where keys: [{'name':('>', 'cip'),'url':('=', 'cip.com'},{'type':{'<=', 'manufacturer'}}]\n where_clause will look like ((name>%s and url=%s) or (type <= %s))\n items within each dictionary get 'AND'-ed and dictionaries themselves get 'OR'-ed\n\nReturns:\n an integer indicating count of rows deleted", "id": "f41:c0:m8"} {"signature": "@classmethod@coroutine@nt_cursordef raw_sql(cls, cur, query: str, values: tuple):", "body": "yield from cur.execute(query, values)return (yield from cur.fetchall())", "docstring": "Run a raw sql query\n\nArgs:\n query : query string to execute\n values : tuple of values to be used with the query\n\nReturns:\n result of query as list of named tuple", "id": "f41:c0:m10"} {"signature": "def cursor(func):", "body": "@wraps(func)def wrapper(cls, *args, **kwargs):with (yield from cls.get_cursor()) as c:return (yield from func(cls, c, *args, **kwargs))return wrapper", "docstring": "Decorator that provides a cursor to the calling function\n\nAdds the cursor as the second argument to the calling functions\n\nRequires that the function being decorated is an instance of a class or object\nthat yields a cursor from a get_cursor() coroutine or provides such an object\nas the first argument in its signature\n\nYields:\n A client-side cursor", "id": "f41:m1"} {"signature": "def transaction(func):", "body": "@wraps(func)def wrapper(cls, *args, **kwargs):with (yield from cls.get_cursor(_CursorType.NAMEDTUPLE)) as c:try:yield from c.execute('')result = (yield from func(cls, c, *args, **kwargs))except Exception:yield from c.execute('')else:yield from c.execute('')return resultreturn wrapper", "docstring": "Provides a transacted cursor which will run in autocommit=false mode\n\nFor any exception the transaction will be rolled back.\nRequires that the function being decorated is an instance of a class or object\nthat yields a cursor from a get_cursor(cursor_type=CursorType.NAMEDTUPLE) coroutine or provides such an object\nas the first argument in its signature\n\nYields:\n A client-side transacted named cursor", "id": "f41:m3"}
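
The transaction decorator above wraps a coroutine in explicit transaction control; the three redacted execute() strings are almost certainly 'BEGIN', 'ROLLBACK', and 'COMMIT'. A synchronous sketch of the same pattern, assuming a get_cursor that returns a context-manager cursor (the async plumbing is omitted for brevity):

    from functools import wraps

    def transaction(func):
        # Run the wrapped call inside BEGIN/COMMIT; roll back on any exception.
        @wraps(func)
        def wrapper(cls, *args, **kwargs):
            with cls.get_cursor() as cur:
                try:
                    cur.execute('BEGIN')
                    result = func(cls, cur, *args, **kwargs)
                except Exception:
                    cur.execute('ROLLBACK')
                    raise
                else:
                    cur.execute('COMMIT')
                return result
        return wrapper
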
{"signature": "@classmethod@coroutine@cursordef count(cls, cur, table:str, where_keys: list=None):", "body": "if where_keys:where_clause, values = cls._get_where_clause_with_values(where_keys)query = cls._count_query_where.format(table, where_clause)q, t = query, valueselse:query = cls._count_query.format(table)q, t = query, ()yield from cur.execute(q, t)result = yield from cur.fetchone()return int(result[0])", "docstring": "gives the number of records in the table\n\nArgs:\n table: a string indicating the name of the table\n\nReturns:\n an integer indicating the number of records in the table", "id": "f41:c0:m4"} {"signature": "def set_key(self, key, value, namespace=None, expire=0):", "body": "with (yield from self._pool) as redis:if namespace is not None:key = self._get_key(namespace, key)yield from redis.set(key, value, expire=expire)", "docstring": "Set a key in a cache.\n:param key: Key name\n:param value: Value\n:param namespace : Namespace to associate the key with\n:param expire: expiration\n:return:", "id": "f42:c0:m2"} {"signature": "def indexesOptional(f):", "body": "stack = inspect.stack()_NO_INDEX_CHECK_NEEDED.add('' % (f.__module__, stack[][], f.__name__))del stackreturn f", "docstring": "Decorate test methods with this if you don't require strict index checking", "id": "f44:m1"} {"signature": "def on_map_long_clicked(self, pos):", "body": "d = self.declarationd.clicked({'': '','': tuple(pos)})", "docstring": "Called when the map is long-clicked", "id": "f70:c24:m9"} {"signature": "def on_marker(self, marker):", "body": "mid, pos = markerself.marker = Marker(__id__=mid)mapview = self.parent()mapview.markers[mid] = selfself.marker.setTag(mid)for w in self.child_widgets():mapview.init_info_window_adapter()breakd = self.declarationif d.show_info:self.set_show_info(d.show_info)del self.options", "docstring": "Convert our options into the actual marker object", "id": "f70:c26:m4"} {"signature": "def handle_change(self, change):", "body": "op = change['']if op in '':self.add(len(change['']), LatLng(*change['']))elif op == '':self.add(change[''], LatLng(*change['']))elif op == '':points = [LatLng(*p) for p in change['']]self.addAll([bridge.encode(c) for c in points])elif op == '':self.set(change[''], LatLng(*change['']))elif op == '':self.remove(change[''])else:raise NotImplementedError(\"\".format(op))", "docstring": "Handle changes from atom ContainerLists", "id": "f70:c1:m1"} {"signature": "def add_to_map(self):", "body": "raise NotImplementedError", "docstring": "Add this item to the map", "id": "f70:c25:m1"} {"signature": "def create_widget(self):", "body": "self.options = PolygonOptions()self.points = LatLngList()", "docstring": "Create the MarkerOptions for this map marker\n this later gets converted into a \"Marker\" instance when addMarker \n is called", "id": "f70:c29:m0"}
{"signature": "def init_info_window_adapter(self):", "body": "adapter = self.adapterif adapter:return adapter = GoogleMap.InfoWindowAdapter()adapter.getInfoContents.connect(self.on_info_window_contents_requested)adapter.getInfoWindow.connect(self.on_info_window_requested)self.map.setInfoWindowAdapter(adapter)", "docstring": "Initialize the info window adapter. Should only be done if one of \n the markers defines a custom view.", "id": "f70:c24:m3"} {"signature": "def on_marker(self, mid):", "body": "self.marker = Circle(__id__=mid)self.parent().markers[mid] = selfself.marker.setTag(mid)d = self.declarationif d.clickable:self.set_clickable(d.clickable)del self.options", "docstring": "Convert our options into the actual circle object", "id": "f70:c27:m3"} {"signature": "@observe('', '', '','', '', '', '','', '', '','', '', '', '','','', '','', '','', '')def _update_proxy(self, change):", "body": "super(MapView, self)._update_proxy(change)", "docstring": "An observer which sends the state change to the proxy.", "id": "f72:c7:m0"} {"signature": "def decode_html_entities(html):", "body": "if not html:return htmlfor entity, char in six.iteritems(html_entity_map):html = html.replace(entity, char)return html", "docstring": "Decodes a limited set of HTML entities.", "id": "f94:m9"} {"signature": "def x10_command(self, house_code, unit_number, state):", "body": "house_code = normalize_housecode(house_code)if unit_number is not None:unit_number = normalize_unitnumber(unit_number)return self._x10_command(house_code, unit_number, state)", "docstring": "Send X10 command to ??? unit.\n\n @param house_code (A-P) - example='A'\n @param unit_number (1-16)- example=1 (or None to impact entire house code)\n @param state - Mochad command/state, See\n https://sourceforge.net/p/mochad/code/ci/master/tree/README\n examples=OFF, 'OFF', 'ON', ALL_OFF, 'all_units_off', 'xdim 128', etc.\n\n Examples:\n x10_command('A', '1', ON)\n x10_command('A', '1', OFF)\n x10_command('A', '1', 'ON')\n x10_command('A', '1', 'OFF')\n x10_command('A', None, ON)\n x10_command('A', None, OFF)\n x10_command('A', None, 'all_lights_off')\n x10_command('A', None, 'all_units_off')\n x10_command('A', None, ALL_OFF)\n x10_command('A', None, 'all_lights_on')\n x10_command('A', 1, 'xdim 128')", "id": "f101:c3:m3"} {"signature": "def normalize_housecode(house_code):", "body": "if house_code is None:raise X10InvalidHouseCode('' % house_code)if not isinstance(house_code, basestring):raise X10InvalidHouseCode('' % house_code)if len(house_code) != 1:raise X10InvalidHouseCode('' % house_code)house_code = house_code.upper()if not ('' <= house_code <= ''):raise X10InvalidHouseCode('' % house_code)return house_code", "docstring": "Returns a normalized house code, i.e. upper case.\n Raises exception X10InvalidHouseCode if house code appears to be invalid", "id": "f101:m0"} {"signature": "def _x10_command(self, house_code, unit_number, state):", "body": "print('' % ((house_code, unit_number, state), ))raise NotImplementedError()", "docstring": "Real implementation", "id": "f101:c3:m4"} {"signature": "def main(argv=None):", "body": "if len(argv):commands = ''.join(argv)comPort, commands = commands.split(None, 1)sendCommands(comPort, commands)return ", "docstring": "Send X10 commands when module is used from the command line.\n\n This uses syntax similar to sendCommands, for example:\n\n x10.py com2 A1 On, A2 Off, B All Off", "id": "f102:m7"}
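
normalize_housecode above validates a single-letter house code; per its own docstring the redacted range check is 'A' through 'P'. A self-contained sketch under that assumption:

    class X10InvalidHouseCode(Exception):
        pass

    def normalize_housecode(house_code):
        # Upper-case the code and reject anything that is not a single A-P letter.
        if not isinstance(house_code, str) or len(house_code) != 1:
            raise X10InvalidHouseCode('invalid house code: %r' % house_code)
        house_code = house_code.upper()
        if not ('A' <= house_code <= 'P'):
            raise X10InvalidHouseCode('house code out of range: %r' % house_code)
        return house_code

    assert normalize_housecode('c') == 'C'
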
{"signature": "def _sendBinaryData(port, data):", "body": "_reset(port)time.sleep(leadInOutDelay)for digit in data:_sendBit(port, digit)time.sleep(leadInOutDelay)", "docstring": "Send a string of binary data to the FireCracker with proper timing.\n\n See the diagram in the spec referenced above for timing information.\n The module level variables leadInOutDelay and bitDelay represent how\n long each type of delay should be in seconds. They may require tweaking\n on some setups.", "id": "f102:m2"} {"signature": "def sendCommands(comPort, commands):", "body": "mutex.acquire()try:try:port = serial.Serial(port=comPort)header = ''footer = ''for command in _translateCommands(commands):_sendBinaryData(port, header + command + footer)except serial.SerialException:print('' % comPort)print('')raisefinally:mutex.release()", "docstring": "Send X10 commands using the FireCracker on comPort\n\n comPort should be the name of a serial port on the host platform. On\n Windows, for example, 'com1'.\n\n commands should be a string consisting of X10 commands separated by\n commas. For example. 'A1 On, A Dim, A Dim, A Dim, A Lamps Off'. The\n letter is a house code (A-P) and the number is the device number (1-16).\n Possible commands for a house code / device number combination are\n 'On' and 'Off'. The commands 'Bright' and 'Dim' should be used with a\n house code alone after sending an On command to a specific device. The\n 'All On', 'All Off', 'Lamps On', and 'Lamps Off' commands should also\n be used with a house code alone.\n\n # Turn on module A1\n >>> sendCommands('com1', 'A1 On')\n\n # Turn all modules with house code A off\n >>> sendCommands('com1', 'A All Off')\n\n # Turn all lamp modules with house code B on\n >>> sendCommands('com1', 'B Lamps On')\n\n # Turn on module A1 and dim it 3 steps, then brighten it 1 step\n >>> sendCommands('com1', 'A1 On, A Dim, A Dim, A Dim, A Bright')", "id": "f102:m6"} {"signature": "def __init__(self, visible=False, timeout=, app=None, args=None):", "body": "self.app = app or self.create_app(visible, args)self.is_terminated = Falseself.status = Status(None)self.timeout = timeoutself.last_host = None", "docstring": "Create an emulator instance\n\n`visible` controls which executable will be used.\n`timeout` controls the timeout paramater to any Wait() command sent\n to x3270.\n`args` allows sending parameters to the emulator executable", "id": "f108:c14:m0"} {"signature": "def terminate(self):", "body": "if not self.is_terminated:log.debug(\"\")try:self.exec_command(b\"\")except BrokenPipeError: passexcept socket.error as e:if e.errno != errno.ECONNRESET:raiseself.app.close()self.is_terminated = True", "docstring": "terminates the underlying x3270 subprocess. Once called, this\nEmulator instance must no longer be used.", "id": "f108:c14:m4"} {"signature": "def is_connected(self):", "body": "try:self.exec_command(b\"\")return self.status.connection_state.startswith(b\"\")except NotConnectedException:return False", "docstring": "Return bool indicating connection state", "id": "f108:c14:m5"} {"signature": "def connect(self, host):", "body": "return False", "docstring": "this is a no-op for all but wc3270", "id": "f108:c7:m2"}
{"signature": "def __del__(self):", "body": "self.terminate()", "docstring": "Since an emulator creates a process (and sometimes a socket handle), it is good practice\nto clean these up when done. Note, not terminating at this point will usually have no\nill effect - only Python 3+ on Windows had problems in this regard.", "id": "f108:c14:m1"} {"signature": "def string_get(self, ypos, xpos, length):", "body": "xpos -= 1ypos -= 1cmd = self.exec_command(\"\".format(ypos, xpos, length).encode(\"\"))assert len(cmd.data) == 1, cmd.datareturn cmd.data[0].decode(\"\")", "docstring": "Get a string of `length` at screen co-ordinates `ypos`/`xpos`\n\nCo-ordinates are 1 based, as listed in the status area of the\nterminal.", "id": "f108:c14:m19"} {"signature": "def string_found(self, ypos, xpos, string):", "body": "found = self.string_get(ypos, xpos, len(string))log.debug(''.format(found))return found == string", "docstring": "Return True if `string` is found at screen co-ordinates\n`ypos`/`xpos`, False otherwise.\n\nCo-ordinates are 1 based, as listed in the status area of the\nterminal.", "id": "f108:c14:m20"} {"signature": "def move_to(self, ypos, xpos):", "body": "xpos -= 1ypos -= 1self.exec_command(\"\".format(ypos, xpos).encode(\"\"))", "docstring": "move the cursor to the given co-ordinates. Co-ordinates are 1\nbased, as listed in the status area of the terminal.", "id": "f108:c14:m9"} {"signature": "def reconnect(self):", "body": "self.exec_command(b\"\")self.connect(self.last_host)", "docstring": "Disconnect from the host and re-connect to the same host", "id": "f108:c14:m7"} {"signature": "def get_object(self):", "body": "dataframe = self.filter_dataframe(self.get_dataframe())assert self.lookup_url_kwarg in self.kwargs, ('''''' %(self.__class__.__name__, self.lookup_url_kwarg))try:obj = self.index_row(dataframe)except (IndexError, KeyError, ValueError):raise Http404self.check_object_permissions(self.request, obj)return obj", "docstring": "Returns the row the view is displaying.\n\nYou may want to override this if you need to provide non-standard\nqueryset lookups. Eg if objects are referenced using multiple\nkeyword arguments in the url conf.", "id": "f112:c0:m3"} {"signature": "def paginate_dataframe(self, dataframe):", "body": "if self.paginator is None:return Nonereturn self.paginator.paginate_dataframe(dataframe, self.request, view=self)", "docstring": "Return a single page of results, or `None` if pagination is disabled.", "id": "f112:c0:m9"} {"signature": "def get_serializer(self, *args, **kwargs):", "body": "serializer_class = self.get_serializer_class()kwargs[''] = self.get_serializer_context()return serializer_class(*args, **kwargs)", "docstring": "Return the serializer instance that should be used for validating and\ndeserializing input, and for serializing output.", "id": "f112:c0:m4"}
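
A hedged usage sketch for the Emulator screen-scraping methods above (py3270-style); the host name is hypothetical, and coordinates are 1-based per the docstrings:

    from py3270 import Emulator

    em = Emulator(visible=False, timeout=30)
    em.connect('mainframe.example.com')        # hypothetical host
    if em.string_found(1, 2, 'WELCOME'):       # row 1, column 2
        em.move_to(5, 10)
    banner = em.string_get(1, 2, 7)            # read 7 characters
    em.terminate()
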
{"signature": "def update_dataframe(self, dataframe):", "body": "return dataframe", "docstring": "Indicates that the dataframe needs to be updated. The default implementation\njust returns the argument. This method has to be overwritten to make changing\noperations stick.", "id": "f112:c0:m1"} {"signature": "def _wrap_color(self, code, text, format=None, style=None):", "body": "color = Noneif code[:] == self.bg.PREFIX:color = self.bg.COLORS.get(code, None)if not color:color = self.fg.COLORS.get(code, None)if not color:raise Exception('')if format and format not in self.formats:raise Exception('')fmt = \"\"if format == '':fmt = \"\"elif format == '':fmt = \"\"parts = color.split('')color = ''.format(parts[], fmt, parts[])if self.has_colors and self.colors_enabled:st = ''if style:st = self.st.COLORS.get(style, '')return \"\".format(st, color, text, self.st.COLORS[''])else:return text", "docstring": "Colors text with code and given format", "id": "f119:c4:m4"} {"signature": "def __init__(self):", "body": "try:p = subprocess.Popen(['', ''],stdout=subprocess.PIPE,stderr=subprocess.PIPE)num_colors = int(p.stdout.read())except (OSError, ValueError):num_colors = self.has_colors = Falseif num_colors > :self.has_colors = Trueself.enable_colors()self.COLORS = self.enumerate_colors()", "docstring": "Checks if the shell supports colors", "id": "f119:c4:m0"} {"signature": "def enable_colors(self):", "body": "self.colors_enabled = True", "docstring": "Method to enable colors", "id": "f119:c4:m2"} {"signature": "def get(self,key: str,default: typing.Any = UNSET,type_: typing.Type[typing.Any] = str,subtype: typing.Type[typing.Any] = str,mapper: typing.Optional[typing.Callable[[object], object]] = None,) -> typing.Any:", "body": "value = self.environ.get(key, UNSET)if value is UNSET and default is UNSET:raise ConfigError(\"\".format(key))if value is UNSET:value = defaultelse:value = self.parse(typing.cast(str, value), type_, subtype)if mapper:value = mapper(value)return value", "docstring": "Parse a value from an environment variable.\n\n.. 
code-block:: python\n\n >>> os.environ['FOO']\n <<< '12345'\n >>>\n >>> os.environ['BAR']\n <<< '1,2,3,4'\n >>>\n >>> 'BAZ' in os.environ\n <<< False\n >>>\n >>> parser = Config()\n >>> parser.get('FOO', type_=int)\n <<< 12345\n >>>\n >>> parser.get('BAR', type_=list, subtype=int)\n <<< [1, 2, 3, 4]\n >>>\n >>> parser.get('BAZ', default='abc123')\n <<< 'abc123'\n >>>\n >>> parser.get('FOO', type_=int, mapper=lambda x: x*10)\n <<< 123450\n\n:param key: the key to look up the value under\n:param default: default value to return when when no value is present\n:param type\\\\_: the type to return\n:param subtype: subtype for iterator types\n:param mapper: a function to post-process the value with\n:return: the parsed config value", "id": "f127:c1:m3"} {"signature": "def unsign(self, signed_value, ttl=None):", "body": "h_size, d_size = struct.calcsize(''), self.digest.digest_sizefmt = '' % (len(signed_value) - h_size - d_size, d_size)try:version, timestamp, value, sig = struct.unpack(fmt, signed_value)except struct.error:raise BadSignature('')if version != self.version:raise BadSignature('')if ttl is not None:if isinstance(ttl, datetime.timedelta):ttl = ttl.total_seconds()age = abs(time.time() - timestamp)if age > ttl + _MAX_CLOCK_SKEW:raise SignatureExpired('' % (age,ttl))try:self.signature(signed_value[:-d_size]).verify(sig)except InvalidSignature:raise BadSignature('' % binascii.b2a_base64(sig))return value", "docstring": "Retrieve original value and check it wasn't signed more\nthan max_age seconds ago.\n\n:type signed_value: bytes\n:type ttl: int | datetime.timedelta", "id": "f144:c3:m3"} {"signature": "def signature(self, value):", "body": "h = HMAC(self.key, self.digest, backend=settings.CRYPTOGRAPHY_BACKEND)h.update(force_bytes(value))return h", "docstring": ":type value: any\n:rtype: HMAC", "id": "f144:c3:m1"} {"signature": "def get_main_version(version=None):", "body": "version = get_complete_version(version)parts = 2 if version[2] == 0 else 3return ''.join(str(x) for x in version[:parts])", "docstring": "Returns main version (X.Y[.Z]) from VERSION.", "id": "f145:m1"} {"signature": "def get_git_changeset():", "body": "repo_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))git_log = subprocess.Popen('',stdout=subprocess.PIPE,stderr=subprocess.PIPE,shell=True,cwd=repo_dir,universal_newlines=True)timestamp = git_log.communicate()[]try:timestamp = datetime.datetime.utcfromtimestamp(int(timestamp))except ValueError:return Nonereturn timestamp.strftime('')", "docstring": "Returns a numeric identifier of the latest git changeset.\n\nThe result is the UTC timestamp of the changeset in YYYYMMDDHHMMSS format.\nThis value isn't guaranteed to be unique, but collisions are very unlikely,\nso it's sufficient for generating the development version numbers.", "id": "f145:m4"} {"signature": "def decrypt(self, data, ttl=None):", "body": "data = self._signer.unsign(data, ttl)iv = data[:16]ciphertext = data[16:]decryptor = Cipher(algorithms.AES(self._encryption_key), modes.CBC(iv),self._backend).decryptor()plaintext_padded = decryptor.update(ciphertext)try:plaintext_padded += decryptor.finalize()except ValueError:raise InvalidTokenunpadder = padding.PKCS7(algorithms.AES.block_size).unpadder()unpadded = unpadder.update(plaintext_padded)try:unpadded += unpadder.finalize()except ValueError:raise InvalidTokenreturn unpadded", "docstring": ":type data: bytes\n:type ttl: int\n:rtype: bytes", "id": "f146:c1:m3"} {"signature": "def salted_hmac(key_salt, value, secret=None):", "body": "if secret is 
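
The decrypt record above follows the standard AES-CBC-plus-PKCS7 recipe from the cryptography package. A runnable round-trip sketch of that recipe (key and message are placeholders; the Django-specific signing layer is omitted):

    import os
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import padding
    from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

    key = os.urandom(32)   # hypothetical 256-bit key
    iv = os.urandom(16)    # CBC IV is one AES block (16 bytes)

    padder = padding.PKCS7(algorithms.AES.block_size).padder()
    padded = padder.update(b'attack at dawn') + padder.finalize()
    enc = Cipher(algorithms.AES(key), modes.CBC(iv), default_backend()).encryptor()
    ciphertext = enc.update(padded) + enc.finalize()

    dec = Cipher(algorithms.AES(key), modes.CBC(iv), default_backend()).decryptor()
    unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder()
    plain = unpadder.update(dec.update(ciphertext) + dec.finalize()) + unpadder.finalize()
    assert plain == b'attack at dawn'
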
None:secret = settings.SECRET_KEYkey_salt = force_bytes(key_salt)secret = force_bytes(secret)digest = hashes.Hash(settings.CRYPTOGRAPHY_DIGEST, backend=settings.CRYPTOGRAPHY_BACKEND)digest.update(key_salt + secret)key = digest.finalize()h = HMAC(key,settings.CRYPTOGRAPHY_DIGEST,backend=settings.CRYPTOGRAPHY_BACKEND)h.update(force_bytes(value))return h", "docstring": "Returns the HMAC-HASH of 'value', using a key generated from key_salt and a\nsecret (which defaults to settings.SECRET_KEY).\n\nA different key_salt should be passed in for every application of HMAC.\n\n:type key_salt: any\n:type value: any\n:type secret: any\n:rtype: HMAC", "id": "f146:m0"} {"signature": "def constant_time_compare(val1, val2):", "body": "return constant_time.bytes_eq(force_bytes(val1), force_bytes(val2))", "docstring": ":type val1: any\n:type val2: any\n:rtype: bool", "id": "f146:m1"} {"signature": "def pbkdf2(password, salt, iterations, dklen=0, digest=None):", "body": "if digest is None:digest = settings.CRYPTOGRAPHY_DIGESTif not dklen:dklen = digest.digest_sizepassword = force_bytes(password)salt = force_bytes(salt)kdf = PBKDF2HMAC(algorithm=digest,length=dklen,salt=salt,iterations=iterations,backend=settings.CRYPTOGRAPHY_BACKEND)return kdf.derive(password)", "docstring": "Implements PBKDF2 with the same API as Django's existing\nimplementation, using cryptography.\n\n:type password: any\n:type salt: any\n:type iterations: int\n:type dklen: int\n:type digest: cryptography.hazmat.primitives.hashes.HashAlgorithm", "id": "f146:m2"} {"signature": "def encrypt(self, data):", "body": "data = force_bytes(data)iv = os.urandom(16)return self._encrypt_from_parts(data, iv)", "docstring": ":type data: any\n:rtype: any", "id": "f146:c1:m1"} {"signature": "def log_file(self, url=None):", "body": "if url is None:url = self.urlf = re.sub(\"\", \"\", url)try:with open(f, \"\") as of:of.write(str(self.store.get_json_tuples(True)))except IOError as e:print(e)print(\"\")", "docstring": "Write to a local log file", "id": "f155:c0:m3"} {"signature": "def register_credentials(self, credentials=None, user=None, user_file=None, password=None, password_file=None):", "body": "if credentials is not None:self.credentials = credentialselse:self.credentials = {}if user:self.credentials[\"\"] = userelif user_file:with open(user_file, \"\") as of:pattern = re.compile(\"\")for l in of:if re.match(pattern, l):l = l[:-1]self.credentials[\"\"] = re.sub(pattern, \"\", l)if self.credentials[\"\"][:] == '' andself.credentials[\"\"][-:] == '':self.credentials[\"\"] = self.credentials[\"\"][:-]if password:self.credentials[\"\"] = passwordelif password_file:with open(password_file, \"\") as of:pattern = re.compile(\"\")for l in of:if re.match(pattern, l):l = l[:-1]self.credentials[\"\"] =re.sub(pattern, \"\", l)if self.credentials[\"\"][:] == '' andself.credentials[\"\"][-:] == '':self.credentials[\"\"] =self.credentials[\"\"][:-]if \"\" in self.credentials and \"\" in self.credentials:c = self.credentials[\"\"] + \"\" + self.credentials[\"\"]self.credentials[\"\"] = b64encode(c.encode()).decode(\"\")", "docstring": "Helper method to store username and password", "id": "f155:c0:m6"}
\"\"}try:request = requests.post(url, headers=headers,data=self.store.get_json(), verify=do_verify_certificate)except httplib.IncompleteRead as e:request = e.partial", "docstring": "Write to a remote host via HTTP POST", "id": "f155:c0:m4"} {"signature": "def halt(self):", "body": "self.do_run = False", "docstring": "Tell the this object to stop working after the next round", "id": "f157:c0:m3"} {"signature": "def age(self):", "body": "if self.rounds == :self.do_run = Falseelif self.rounds > :self.rounds -= ", "docstring": "Get closer to your EOL", "id": "f157:c0:m1"} {"signature": "def __init__(self, device, baudrate, store, rounds=, timeout=):", "body": "threading.Thread.__init__(self)self.baudrate = baudrateself.store = storeself.rounds = roundsself.do_run = Trueself.device_name = devicetry:if device:self.device = serial.Serial(device, self.baudrate, timeout=timeout);except serial.serialutil.SerialException:print(\"\" + self.device_name)", "docstring": "Initialize the serial reader class\n device device name to connect to\n baudrate the baud rate for the serial line\n store the data store object to send the data to\n rounds number of rounds to run / listen for input", "id": "f157:c0:m0"} {"signature": "def sort_by_modified(files_or_folders: list) -> list:", "body": "return sorted(files_or_folders, key=os.path.getmtime, reverse=True)", "docstring": "Sort files or folders by modified time\n\nArgs:\n files_or_folders: list of files or folders\n\nReturns:\n list", "id": "f161:m5"} {"signature": "def abspath(cur_file, parent=) -> str:", "body": "file_path = os.path.abspath(cur_file).replace('', '')if os.path.isdir(file_path) and parent == : return file_pathadj = - os.path.isdir(file_path)return ''.join(file_path.split('')[:-(parent + adj)])", "docstring": "Absolute path\n\nArgs:\n cur_file: __file__ or file or path str\n parent: level of parent to look for\n\nReturns:\n str", "id": "f161:m1"} {"signature": "def all_files(path_name, keyword='', ext='', full_path=True,has_date=False, date_fmt=DATE_FMT) -> list:", "body": "if not os.path.exists(path=path_name): return []path_name = path_name.replace('', '')if keyword or ext:keyword = f'' if keyword else ''if not ext: ext = ''files = sort_by_modified([f.replace('', '') for f in glob.iglob(f'')if os.path.isfile(f) and (f.replace('', '').split('')[-][] != '')])else:files = sort_by_modified([f'' for f in os.listdir(path=path_name)if os.path.isfile(f'') and (f[] != '')])if has_date:files = filter_by_dates(files, date_fmt=date_fmt)return files if full_path else [f.split('')[-] for f in files]", "docstring": "Search all files with criteria\nReturned list will be sorted by last modified\n\nArgs:\n path_name: full path name\n keyword: keyword to search\n ext: file extensions, split by ','\n full_path: whether return full path (default True)\n has_date: whether has date in file name (default False)\n date_fmt: date format to check for has_date parameter\n\nReturns:\n list: all file names with criteria fulfilled", "id": "f161:m3"} {"signature": "def exists(path) -> bool:", "body": "return os.path.exists(path=path)", "docstring": "Check path or file exists (use os.path.exists)\n\nArgs:\n path: path or file", "id": "f161:m0"} {"signature": "def filter_by_dates(files_or_folders: list, date_fmt=DATE_FMT) -> list:", "body": "r = re.compile(f'')return list(filter(lambda vv: r.match(vv.replace('', '').split('')[-]) is not None,files_or_folders,))", "docstring": "Filter files or dates by date patterns\n\nArgs:\n files_or_folders: list of files or folders\n 
{"signature": "def filter_by_dates(files_or_folders: list, date_fmt=DATE_FMT) -> list:", "body": "r = re.compile(f'')return list(filter(lambda vv: r.match(vv.replace('', '').split('')[-1]) is not None,files_or_folders,))", "docstring": "Filter files or dates by date patterns\n\nArgs:\n files_or_folders: list of files or folders\n date_fmt: date format\n\nReturns:\n list", "id": "f161:m6"} {"signature": "def file_modified_time(file_name) -> pd.Timestamp:", "body": "return pd.to_datetime(time.ctime(os.path.getmtime(filename=file_name)))", "docstring": "File modified time in python\n\nArgs:\n file_name: file name\n\nReturns:\n pd.Timestamp", "id": "f161:m8"} {"signature": "def to_hour(num) -> str:", "body": "to_str = str(int(num))return pd.Timestamp(f'').strftime('')", "docstring": "Convert YAML input to hours\n\nArgs:\n num: number in YAML file, e.g., 900, 1700, etc.\n\nReturns:\n str\n\nExamples:\n >>> to_hour(900)\n '09:00'\n >>> to_hour(1700)\n '17:00'", "id": "f162:m2"} {"signature": "def hist_file(ticker: str, dt, typ='') -> str:", "body": "data_path = os.environ.get(assist.BBG_ROOT, '').replace('', '')if not data_path: return ''asset = ticker.split()[-]proper_ticker = ticker.replace('', '')cur_dt = pd.Timestamp(dt).strftime('')return f''", "docstring": "Data file location for Bloomberg historical data\n\nArgs:\n ticker: ticker name\n dt: date\n typ: [TRADE, BID, ASK, BID_BEST, ASK_BEST, BEST_BID, BEST_ASK]\n\nReturns:\n file location\n\nExamples:\n >>> os.environ['BBG_ROOT'] = ''\n >>> hist_file(ticker='ES1 Index', dt='2018-08-01') == ''\n True\n >>> os.environ['BBG_ROOT'] = '/data/bbg'\n >>> hist_file(ticker='ES1 Index', dt='2018-08-01')\n '/data/bbg/Index/ES1 Index/TRADE/2018-08-01.parq'", "id": "f163:m0"} {"signature": "def get_logger(name_or_func, level=LOG_LEVEL, types='', **kwargs):", "body": "if isinstance(level, str): level = getattr(logging, level.upper())log_name = utils.func_scope(name_or_func) if callable(name_or_func) else name_or_funclogger = logging.getLogger(name=log_name)logger.setLevel(level=level)if not len(logger.handlers):formatter = logging.Formatter(fmt=kwargs.get('', LOG_FMT))if '' in types and '' in kwargs:file_handler = logging.FileHandler(kwargs[''])file_handler.setFormatter(fmt=formatter)logger.addHandler(file_handler)if '' in types:stream_handler = logging.StreamHandler()stream_handler.setFormatter(fmt=formatter)logger.addHandler(stream_handler)return logger", "docstring": "Generate logger\n\nArgs:\n name_or_func: logger name or current running function\n level: level of logs - debug, info, error\n types: file or stream, or both\n\nReturns:\n logger\n\nExamples:\n >>> get_logger(name_or_func='download_data', level='debug', types='stream')\n \n >>> get_logger(name_or_func='preprocess', log_file='pre.log', types='file|stream')\n ", "id": "f164:m0"}
{"signature": "def format_output(data: pd.DataFrame, source, col_maps=None) -> pd.DataFrame:", "body": "if data.empty: return pd.DataFrame()if source == '': req_cols = ['', '', '']else: req_cols = ['', '', '', '', '']if any(col not in data for col in req_cols): return pd.DataFrame()if data.dropna(subset=['']).empty: return pd.DataFrame()if source == '':res = pd.DataFrame(pd.concat([pd.Series({**{'': t}, **grp.set_index('').value.to_dict()})for t, grp in data.groupby('')], axis=1, sort=False)).transpose().set_index('')else:res = pd.DataFrame(pd.concat([grp.loc[:, ['', '']].set_index('').transpose().reset_index(drop=True).assign(ticker=t)for (t, _), grp in data.groupby(['', ''])], sort=False)).reset_index(drop=True).set_index('')res.columns.name = Noneif col_maps is None: col_maps = dict()return res.rename(columns=lambda vv: col_maps.get(vv, vv.lower().replace('', '').replace('', ''))).apply(pd.to_numeric, errors='', downcast='')", "docstring": "Format `pdblp` outputs to column-based results\n\nArgs:\n data: `pdblp` result\n source: `bdp` or `bds`\n col_maps: rename columns with these mappings\n\nReturns:\n pd.DataFrame\n\nExamples:\n >>> format_output(\n ... data=pd.read_pickle('xbbg/tests/data/sample_bdp.pkl'),\n ... source='bdp'\n ... ).reset_index()\n ticker name\n 0 QQQ US Equity INVESCO QQQ TRUST SERIES 1\n 1 SPY US Equity SPDR S&P 500 ETF TRUST\n >>> format_output(\n ... data=pd.read_pickle('xbbg/tests/data/sample_dvd.pkl'),\n ... source='bds', col_maps={'Dividend Frequency': 'dvd_freq'}\n ... ).loc[:, ['ex_date', 'dividend_amount', 'dvd_freq']].reset_index()\n ticker ex_date dividend_amount dvd_freq\n 0 C US Equity 2018-02-02 0.32 Quarter", "id": "f166:m3"} {"signature": "def proc_ovrds(**kwargs):", "body": "return [(k, v) for k, v in kwargs.items()if k not in list(ELEM_KEYS.keys()) + list(ELEM_KEYS.values()) + PRSV_COLS]", "docstring": "Bloomberg overrides\n\nArgs:\n **kwargs: overrides\n\nReturns:\n list of tuples\n\nExamples:\n >>> proc_ovrds(DVD_Start_Dt='20180101')\n [('DVD_Start_Dt', '20180101')]\n >>> proc_ovrds(DVD_Start_Dt='20180101', cache=True, has_date=True)\n [('DVD_Start_Dt', '20180101')]", "id": "f166:m0"} {"signature": "def proc_elms(**kwargs) -> list:", "body": "return [(ELEM_KEYS.get(k, k), ELEM_VALS.get(ELEM_KEYS.get(k, k), dict()).get(v, v))for k, v in kwargs.items()if (k in list(ELEM_KEYS.keys()) + list(ELEM_KEYS.values()))and (k not in PRSV_COLS)]", "docstring": "Bloomberg overrides for elements\n\nArgs:\n **kwargs: overrides\n\nReturns:\n list of tuples\n\nExamples:\n >>> proc_elms(PerAdj='A', Per='W')\n [('periodicityAdjustment', 'ACTUAL'), ('periodicitySelection', 'WEEKLY')]\n >>> proc_elms(Days='A', Fill='B')\n [('nonTradingDayFillOption', 'ALL_CALENDAR_DAYS'), ('nonTradingDayFillMethod', 'NIL_VALUE')]\n >>> proc_elms(CshAdjNormal=False, CshAdjAbnormal=True)\n [('adjustmentNormal', False), ('adjustmentAbnormal', True)]\n >>> proc_elms(Per='W', Quote='Average', start_date='2018-01-10')\n [('periodicitySelection', 'WEEKLY'), ('overrideOption', 'OVERRIDE_OPTION_GPA')]\n >>> proc_elms(QuoteType='Y')\n [('pricingOption', 'PRICING_OPTION_YIELD')]\n >>> proc_elms(QuoteType='Y', cache=True)\n [('pricingOption', 'PRICING_OPTION_YIELD')]", "id": "f166:m1"} {"signature": "def update_missing(**kwargs):", "body": "data_path = os.environ.get(BBG_ROOT, '').replace('', '')if not data_path: returnif len(kwargs) == : returnlog_path = f''cnt = len(files.all_files(log_path)) + files.create_folder(log_path)open(f'', '').close()", "docstring": "Update number of trials for missing values", "id": "f167:m2"}
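
The get_logger record (one entry up) guards against duplicate handlers by checking logger.handlers before attaching one. A minimal sketch of that pattern; the format string below is an assumption, since the original LOG_FMT constant is redacted:

    import logging

    def get_logger(name, level=logging.INFO):
        logger = logging.getLogger(name)
        logger.setLevel(level)
        if not logger.handlers:   # same guard as in the record above
            handler = logging.StreamHandler()
            handler.setFormatter(logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s'))
            logger.addHandler(handler)
        return logger

    log = get_logger('download_data', logging.DEBUG)
    log.debug('ready')
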
{"signature": "def with_bloomberg(func):", "body": "@wraps(func)def wrapper(*args, **kwargs):scope = utils.func_scope(func=func)param = inspect.signature(func).parametersport = kwargs.pop('', _PORT_)timeout = kwargs.pop('', _TIMEOUT_)restart = kwargs.pop('', False)all_kw = {k: args[n] if n < len(args) else v.defaultfor n, (k, v) in enumerate(param.items()) if k != ''}all_kw.update(kwargs)log_level = kwargs.get('', logs.LOG_LEVEL)for to_list in ['', '']:conv = all_kw.get(to_list, None)if hasattr(conv, ''):all_kw[to_list] = getattr(conv, '')()if isinstance(conv, str):all_kw[to_list] = [conv]cached_data = []if scope in ['', '']:to_qry = cached.bdp_bds_cache(func=func.__name__, **all_kw)cached_data += to_qry.cached_dataif not (to_qry.tickers and to_qry.flds):if not cached_data: return pd.DataFrame()res = pd.concat(cached_data, sort=False).reset_index(drop=True)if not all_kw.get('', False):res = assist.format_output(data=res, source=func.__name__,col_maps=all_kw.get('', dict()))return resall_kw[''] = to_qry.tickersall_kw[''] = to_qry.fldsif scope in ['']:data_file = storage.hist_file(ticker=all_kw[''], dt=all_kw[''], typ=all_kw[''],)if files.exists(data_file):logger = logs.get_logger(func, level=log_level)if all_kw.get('', False): returnlogger.debug(f'')return assist.format_intraday(data=pd.read_parquet(data_file), **all_kw)_, new = create_connection(port=port, timeout=timeout, restart=restart)res = func(**{k: v for k, v in all_kw.items() if k not in ['', '']})if new: delete_connection()if scope.startswith('') and isinstance(res, list):final = cached_data + resif not final: return pd.DataFrame()res = pd.DataFrame(pd.concat(final, sort=False))if (scope in ['', ''])and (not all_kw.get('', False)):res = assist.format_output(data=res.reset_index(drop=True), source=func.__name__,col_maps=all_kw.get('', dict()),)return resreturn wrapper", "docstring": "Wrapper function for Bloomberg connection\n\nArgs:\n func: function to wrap", "id": "f169:m0"} {"signature": "def load_module(full_path):", "body": "from importlib import utilfile_name = full_path.replace('', '').split('')[-]if file_name[-:] != '':raise ImportError(f'')module_name = file_name[:-]spec = util.spec_from_file_location(name=module_name, location=full_path)module = util.module_from_spec(spec=spec)spec.loader.exec_module(module=module)return module", "docstring": "Load module from full path\nArgs:\n full_path: module full path name\nReturns:\n python module\nReferences:\n https://stackoverflow.com/a/67692/1332656\nExamples:\n >>> import os\n >>>\n >>> cur_file = os.path.abspath(__file__).replace('\\\\\\\\', '/')\n >>> cur_path = '/'.join(cur_file.split('/')[:-1])\n >>> load_module(f'{cur_path}/timezone.py').__name__\n 'timezone'\n >>> load_module(f'{cur_path}/timezone.pyc')\n Traceback (most recent call last):\n ImportError: not a python file: timezone.pyc", "id": "f170:m7"} {"signature": "def func_scope(func) -> str:", "body": "cur_mod = sys.modules[func.__module__]return f''", "docstring": "Function scope name\n\nArgs:\n func: python function\n\nReturns:\n str: module_name.func_name\n\nExamples:\n >>> func_scope(flatten)\n 'xbbg.core.utils.flatten'\n >>> func_scope(time.strftime)\n 'time.strftime'", "id": "f170:m6"} {"signature": "def cur_time(typ='', tz=DEFAULT_TZ) -> (datetime.date, str):", "body": "dt = pd.Timestamp('', tz=tz)if typ == '': return dt.strftime('')if typ == '': return dt.strftime('')if typ == '': return dt.strftime('')if typ == '': return dtreturn dt.date()", "docstring": "Current time\n\nArgs:\n typ: one of ['date', 'time', 'time_path', 'raw', '']\n tz: timezone\n\nReturns:\n relevant current time or date\n\nExamples:\n >>> cur_dt = pd.Timestamp('now')\n >>> cur_time(typ='date') == cur_dt.strftime('%Y-%m-%d')\n True\n >>> cur_time(typ='time') == cur_dt.strftime('%Y-%m-%d %H:%M:%S')\n True\n >>> cur_time(typ='time_path') == cur_dt.strftime('%Y-%m-%d/%H-%M-%S')\n True\n >>> isinstance(cur_time(typ='raw', tz='Europe/London'), pd.Timestamp)\n True\n >>> cur_time(typ='') == cur_dt.date()\n True", "id": "f170:m3"} {"signature": "def _to_gen_(iterable):", "body": "from collections import Iterablefor elm in iterable:if isinstance(elm, Iterable) and not isinstance(elm, (str, bytes)):yield from flatten(elm)else: yield elm", "docstring": "Recursively iterate lists and tuples", "id": "f170:m1"}
{"signature": "def market_exact(self, session, start_time: str, end_time: str) -> Session:", "body": "if session not in self.exch: return SessNAss = self.exch[session]same_day = ss[0] < ss[-1]if not start_time: s_time = ss[0]else:s_time = param.to_hour(start_time)if same_day: s_time = max(s_time, ss[0])if not end_time: e_time = ss[-1]else:e_time = param.to_hour(end_time)if same_day: e_time = min(e_time, ss[-1])if same_day and (s_time > e_time): return SessNAreturn Session(start_time=s_time, end_time=e_time)", "docstring": "Explicitly specify start time and end time\n\nArgs:\n session: predefined session\n start_time: start time in terms of HHMM string\n end_time: end time in terms of HHMM string\n\nReturns:\n Session of start_time and end_time", "id": "f171:c0:m4"} {"signature": "def __init__(self, ticker):", "body": "self.ticker = tickerself.exch = const.exch_info(ticker=ticker)", "docstring": "Args:\n ticker: ticker", "id": "f171:c0:m0"} {"signature": "def market_timing(ticker, dt, timing='', tz='') -> str:", "body": "logger = logs.get_logger(market_timing)exch = pd.Series(exch_info(ticker=ticker))if any(req not in exch.index for req in ['', '', '']):logger.error(f'')return ''mkt_time = {'': exch.day[0], '': exch.allday[-1]}.get(timing, exch.day[-1])cur_dt = pd.Timestamp(str(dt)).strftime('')if tz == '':return f''return timezone.tz_convert(f'', to_tz=tz, from_tz=exch.tz)", "docstring": "Market close time for ticker\n\nArgs:\n ticker: ticker name\n dt: date\n timing: [EOD (default), BOD]\n tz: conversion to timezone\n\nReturns:\n str: date & time\n\nExamples:\n >>> market_timing('7267 JT Equity', dt='2018-09-10')\n '2018-09-10 14:58'\n >>> market_timing('7267 JT Equity', dt='2018-09-10', tz=timezone.TimeZone.NY)\n '2018-09-10 01:58:00-04:00'\n >>> market_timing('7267 JT Equity', dt='2018-01-10', tz='NY')\n '2018-01-10 00:58:00-05:00'\n >>> market_timing('7267 JT Equity', dt='2018-09-10', tz='SPX Index')\n '2018-09-10 01:58:00-04:00'\n >>> market_timing('8035 JT Equity', dt='2018-09-10', timing='BOD')\n '2018-09-10 09:01'\n >>> market_timing('Z 1 Index', dt='2018-09-10', timing='FINISHED')\n '2018-09-10 21:00'\n >>> market_timing('TESTTICKER Corp', dt='2018-09-10')\n ''", "id": "f172:m3"} {"signature": "def ccy_pair(local, base='') -> CurrencyPair:", "body": "ccy_param = param.load_info(cat='')if f'' in ccy_param:info = ccy_param[f'']elif f'' in ccy_param:info = ccy_param[f'']info[''] = 1 / info.get('', 1)info[''] = -info.get('', 1)elif base.lower() == local.lower():info = dict(ticker='')info[''] = 1if base[-1].lower() == base[-1]:info[''] /= 100if local[-1].lower() == local[-1]:info[''] *= 100else:logger = logs.get_logger(ccy_pair)logger.error(f'')return CurrencyPair(ticker='', factor=1.0, power=1)if '' not in info: info[''] = 1if '' not in info: info[''] = 1return CurrencyPair(**info)", "docstring": "Currency pair info\n\nArgs:\n local: local currency\n base: base currency\n\nReturns:\n CurrencyPair\n\nExamples:\n >>> ccy_pair(local='HKD', base='USD')\n CurrencyPair(ticker='HKD Curncy', factor=1.0, power=1)\n >>> ccy_pair(local='GBp')\n CurrencyPair(ticker='GBP Curncy', factor=100, power=-1)\n >>> ccy_pair(local='USD', base='GBp')\n CurrencyPair(ticker='GBP Curncy', factor=0.01, power=1)\n >>> ccy_pair(local='XYZ', base='USD')\n CurrencyPair(ticker='', factor=1.0, power=1)\n >>> ccy_pair(local='GBP', base='GBp')\n CurrencyPair(ticker='', factor=0.01, power=1)\n >>> ccy_pair(local='GBp', base='GBP')\n CurrencyPair(ticker='', factor=100.0, power=1)", "id": "f172:m2"}
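
ccy_pair above returns a (ticker, factor, power) triple; its doctests pin the convention (e.g. GBp gives factor=100, power=-1). One plausible way such a triple is applied, shown with made-up numbers -- the actual conversion direction depends on the Bloomberg quote convention:

    from collections import namedtuple

    CurrencyPair = namedtuple('CurrencyPair', ['ticker', 'factor', 'power'])

    def convert(price, rate, pair):
        # Scale the price by the pair's factor and rate**power.
        return price * pair.factor * (rate ** pair.power)

    pair = CurrencyPair(ticker='GBP Curncy', factor=0.01, power=1)  # values from the USD/GBp doctest
    print(convert(price=150.0, rate=1.25, pair=pair))               # 1.875
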
dt=dt, freq=info[''])fut_tk = bdp(tickers=[fut_1, fut_2], flds='', cache=True)if pd.Timestamp(dt).month < pd.Timestamp(fut_tk.last_tradeable_dt[]).month: return fut_1d1 = bdib(ticker=f1, dt=dt)d2 = bdib(ticker=f2, dt=dt)return fut_1 if d1[f1].volume.sum() > d2[f2].volume.sum() else fut_2", "docstring": "Active futures contract\n\nArgs:\n ticker: futures ticker, i.e., ESA Index, Z A Index, CLA Comdty, etc.\n dt: date\n\nReturns:\n str: ticker name", "id": "f174:m7"} {"signature": "@with_bloombergdef bdib(ticker, dt, typ='', **kwargs) -> pd.DataFrame:", "body": "from xbbg.core import missinglogger = logs.get_logger(bdib, level=kwargs.pop('', logs.LOG_LEVEL))t_1 = pd.Timestamp('').date() - pd.Timedelta('')whole_day = pd.Timestamp(dt).date() < t_1batch = kwargs.pop('', False)if (not whole_day) and batch:logger.warning(f'')return pd.DataFrame()cur_dt = pd.Timestamp(dt).strftime('')asset = ticker.split()[-]info_log = f''if asset in ['', '', '', '']:exch = const.exch_info(ticker=ticker)if exch.empty: return pd.DataFrame()else:logger.error(f'')return pd.DataFrame()time_fmt = ''time_idx = pd.DatetimeIndex([f'', f'']).tz_localize(exch.tz).tz_convert(DEFAULT_TZ).tz_convert('')if time_idx[] > time_idx[]: time_idx -= pd.TimedeltaIndex(['', ''])q_tckr = tickerif exch.get('', False):if '' not in exch:logger.error(f'')is_sprd = exch.get('', False) and (len(ticker[:-]) != exch[''][])if not is_sprd:q_tckr = fut_ticker(gen_ticker=ticker, dt=dt, freq=exch[''])if q_tckr == '':logger.error(f'')return pd.DataFrame()info_log = f''miss_kw = dict(ticker=ticker, dt=dt, typ=typ, func='')cur_miss = missing.current_missing(**miss_kw)if cur_miss >= :if batch: return pd.DataFrame()logger.info(f'')return pd.DataFrame()logger.info(f'')con, _ = create_connection()try:data = con.bdib(ticker=q_tckr, event_type=typ, interval=,start_datetime=time_idx[].strftime(time_fmt),end_datetime=time_idx[].strftime(time_fmt),)except KeyError:data = pd.DataFrame()if not isinstance(data, pd.DataFrame):raise ValueError(f'')if data.empty:logger.warning(f'')missing.update_missing(**miss_kw)return pd.DataFrame()data = data.tz_localize('').tz_convert(exch.tz)storage.save_intraday(data=data, ticker=ticker, dt=dt, typ=typ)return pd.DataFrame() if batch else assist.format_intraday(data=data, ticker=ticker)", "docstring": "Bloomberg intraday bar data\n\nArgs:\n ticker: ticker name\n dt: date to download\n typ: [TRADE, BID, ASK, BID_BEST, ASK_BEST, BEST_BID, BEST_ASK]\n **kwargs:\n batch: whether is batch process to download data\n log: level of logs\n\nReturns:\n pd.DataFrame", "id": "f174:m3"} {"signature": "@with_bloombergdef check_hours(tickers, tz_exch, tz_loc=DEFAULT_TZ) -> pd.DataFrame:", "body": "cols = ['', '']con, _ = create_connection()hours = con.ref(tickers=tickers, flds=cols)cur_dt = pd.Timestamp('').strftime('')hours.loc[:, ''] = hours.value.astype(str).str[:-]hours.loc[:, ''] = pd.DatetimeIndex(cur_dt + hours.value.astype(str)).tz_localize(tz_loc).tz_convert(tz_exch).strftime('')hours = pd.concat([hours.set_index(['', '']).exch.unstack().loc[:, cols],hours.set_index(['', '']).local.unstack().loc[:, cols],], axis=)hours.columns = ['', '', '', '']return hours", "docstring": "Check exchange hours vs local hours\n\nArgs:\n tickers: list of tickers\n tz_exch: exchange timezone\n tz_loc: local timezone\n\nReturns:\n Local and exchange hours", "id": "f174:m9"} {"signature": "@with_bloombergdef bds(tickers, flds, **kwargs):", "body": "logger = logs.get_logger(bds, level=kwargs.pop('', logs.LOG_LEVEL))con, _ = create_connection()ovrds = 
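
The body of `active_futures` encodes a simple roll rule: stay in the front contract until its last tradeable month, then let intraday volume decide between the front and second contracts. A standalone sketch of that decision, with hypothetical tickers and volumes standing in for the `bdp`/`bdib` lookups:

```python
import pandas as pd

def pick_active(dt, fut_1, fut_2, last_tradeable_dt, vol_1, vol_2):
    """Mirror the roll rule inside active_futures:

    - before the front contract's last tradeable month, hold fut_1;
    - within that month, hold whichever contract traded more volume.
    """
    if pd.Timestamp(dt).month < pd.Timestamp(last_tradeable_dt).month:
        return fut_1
    return fut_1 if vol_1 > vol_2 else fut_2

# Hypothetical inputs: front contract's last tradeable date in September
assert pick_active('2018-08-10', 'ESU8', 'ESZ8', '2018-09-21', 10, 1) == 'ESU8'
assert pick_active('2018-09-10', 'ESU8', 'ESZ8', '2018-09-21', 10, 90) == 'ESZ8'
```
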
assist.proc_ovrds(**kwargs)logger.info(f''f'')data = con.bulkref(tickers=tickers, flds=flds, ovrds=ovrds)if not kwargs.get('', False): return [data]qry_data = []for (ticker, fld), grp in data.groupby(['', '']):data_file = storage.ref_file(ticker=ticker, fld=fld, ext='',has_date=kwargs.get('', True), **kwargs)if data_file:if not files.exists(data_file): qry_data.append(grp)files.create_folder(data_file, is_file=True)grp.reset_index(drop=True).to_pickle(data_file)return qry_data", "docstring": "Bloomberg block data\n\nArgs:\n tickers: ticker(s)\n flds: field(s)\n **kwargs: other overrides for query\n -> raw: raw output from `pdblp` library, default False\n\nReturns:\n pd.DataFrame: block data\n\nExamples:\n >>> import os\n >>>\n >>> pd.options.display.width = 120\n >>> s_dt, e_dt = '20180301', '20181031'\n >>> dvd = bds(\n ... 'NVDA US Equity', 'DVD_Hist_All',\n ... DVD_Start_Dt=s_dt, DVD_End_Dt=e_dt, raw=True,\n ... )\n >>> dvd.loc[:, ['ticker', 'name', 'value']].head(8)\n ticker name value\n 0 NVDA US Equity Declared Date 2018-08-16\n 1 NVDA US Equity Ex-Date 2018-08-29\n 2 NVDA US Equity Record Date 2018-08-30\n 3 NVDA US Equity Payable Date 2018-09-21\n 4 NVDA US Equity Dividend Amount 0.15\n 5 NVDA US Equity Dividend Frequency Quarter\n 6 NVDA US Equity Dividend Type Regular Cash\n 7 NVDA US Equity Declared Date 2018-05-10\n >>> dvd = bds(\n ... 'NVDA US Equity', 'DVD_Hist_All',\n ... DVD_Start_Dt=s_dt, DVD_End_Dt=e_dt,\n ... )\n >>> dvd.reset_index().loc[:, ['ticker', 'ex_date', 'dividend_amount']]\n ticker ex_date dividend_amount\n 0 NVDA US Equity 2018-08-29 0.15\n 1 NVDA US Equity 2018-05-23 0.15\n >>> if not os.environ.get('BBG_ROOT', ''):\n ... os.environ['BBG_ROOT'] = f'{files.abspath(__file__, 1)}/tests/data'\n >>> idx_kw = dict(End_Dt='20181220', cache=True)\n >>> idx_wt = bds('DJI Index', 'Indx_MWeight_Hist', **idx_kw)\n >>> idx_wt.round(2).tail().reset_index(drop=True)\n index_member percent_weight\n 0 V UN 3.82\n 1 VZ UN 1.63\n 2 WBA UW 2.06\n 3 WMT UN 2.59\n 4 XOM UN 2.04\n >>> idx_wt = bds('DJI Index', 'Indx_MWeight_Hist', **idx_kw)\n >>> idx_wt.round(2).head().reset_index(drop=True)\n index_member percent_weight\n 0 AAPL UW 4.65\n 1 AXP UN 2.84\n 2 BA UN 9.29\n 3 CAT UN 3.61\n 4 CSCO UW 1.26", "id": "f174:m1"} {"signature": "def parse_version(package):", "body": "init_file = f''with open(init_file, '', encoding='') as f:for line in f.readlines():if '' in line:return line.split('')[].strip()[:-]return ''", "docstring": "Parse versions", "id": "f175:m0"} {"signature": "def parse_markdown():", "body": "readme_file = f''if path.exists(readme_file):with open(readme_file, '', encoding='') as f:long_description = f.read()return long_description", "docstring": "Parse markdown as description", "id": "f175:m1"} {"signature": "def import_submodules(package, recursive=True):", "body": "if isinstance(package, str):package = importlib.import_module(package)results = {}for loader, name, is_pkg in pkgutil.walk_packages(package.__path__):full_name = package.__name__ + '' + nameresults[full_name] = importlib.import_module(full_name)if recursive and is_pkg:results.update(import_submodules(full_name))return results", "docstring": "Import all submodules of a module, recursively, including subpackages\n\n :param package: package (name or actual module)\n :type package: str | module\n :rtype: dict[str, types.ModuleType]", "id": "f183:m1"} {"signature": "@staticmethoddef parse(config):", "body": "if not isinstance(config, basestring):raise TypeError(\"\")validator = 
ContainsValidator()validator.contains_string = configreturn validator", "docstring": "Parse a contains validator, which takes as the config a simple string to find", "id": "f213:c0:m1"} {"signature": "def bind_variable(self, variable_name, variable_value):", "body": "str_name = str(variable_name)prev = self.variables.get(str_name)if prev != variable_value:self.variables[str(variable_name)] = variable_valueself.mod_count = self.mod_count + ", "docstring": "Bind a named variable to a value within the context\n This allows for passing in variables in testing", "id": "f215:c0:m0"} {"signature": "def tearDown(self):", "body": "self.server_process.terminate()self.server_process = None", "docstring": "Stop the server process", "id": "f219:c0:m1"} {"signature": "def setUp(self):", "body": "config_args = ('', os.path.join(djangopath, ''))proc = Process(target=call_command, args=config_args)proc.start()self.server_process = proctime.sleep()", "docstring": "Start a mini Django-tastypie REST webapp with test data for testing REST tests", "id": "f219:c0:m0"} {"signature": "def realize_partial(self, context=None):", "body": "if not self.is_dynamic():return selfif self.is_context_modifier():return selfelse:copyout = copy.coppass", "docstring": "Attempt to template out what is possible for this benchmark", "id": "f223:m2"} {"signature": "def main(args):", "body": "if '' in args and args[''] is not None:logger.setLevel(LOGGING_LEVELS.get(args[''].lower(), logging.NOTSET))if '' in args and args['']:extensions = args[''].split('')working_folder = args['']if working_folder not in sys.path:sys.path.insert(, working_folder)register_extensions(extensions)test_file = args['']test_structure = read_test_file(test_file)my_vars = Noneif '' in args and args[''] is not None:my_vars = yaml.safe_load(args[''])if my_vars and not isinstance(my_vars, dict):raise Exception(\"\")base_url = args['']if '' in args and args['']:base_url = ''tests = parse_testsets(base_url, test_structure,working_directory=os.path.dirname(test_file), vars=my_vars)for t in tests:if '' in args and args[''] is not None and bool(args['']):t.config.print_bodies = safe_to_bool(args[''])if '' in args and args[''] is not None and bool(args['']):t.config.print_headers = safe_to_bool(args[''])if '' in args and args[''] is not None:t.config.interactive = safe_to_bool(args[''])if '' in args and args[''] is not None:t.config.verbose = safe_to_bool(args[''])if '' in args and args[''] is not None:t.config.ssl_insecure = safe_to_bool(args[''])if '' in args and args[''] is not None:t.config.skip_term_colors = safe_to_bool(args[''])failures = run_testsets(tests)sys.exit(failures)", "docstring": "Execute a test against the given base url.\n\nKeys allowed for args:\n url - REQUIRED - Base URL\n test - REQUIRED - Test file (yaml)\n print_bodies - OPTIONAL - print response body\n print_headers - OPTIONAL - print response headers\n log - OPTIONAL - set logging level {debug,info,warning,error,critical} (default=warning)\n interactive - OPTIONAL - mode that prints info before and after test execution and pauses for user input for each test\n absolute_urls - OPTIONAL - mode that treats URLs in tests as absolute/full URLs instead of relative URLs\n skip_term_colors - OPTIONAL - mode that turns off the output term colors", "id": "f231:m14"} {"signature": "def analyze_benchmark_results(benchmark_result, benchmark):", "body": "output = BenchmarkResult()output.name = benchmark_result.nameoutput.group = benchmark_result.groupoutput.failures = benchmark_result.failuresraw_results 
= benchmark_result.resultstemp = dict()for metric in benchmark.raw_metrics:temp[metric] = raw_results[metric]output.results = tempaggregate_results = list()for metricname, aggregate_list in benchmark.aggregated_metrics.items():numbers = raw_results[metricname]for aggregate_name in aggregate_list:if numbers: aggregate_function = AGGREGATES[aggregate_name]aggregate_results.append((metricname, aggregate_name, aggregate_function(numbers)))else:aggregate_results.append((metricname, aggregate_name, None))output.aggregates = aggregate_resultsreturn output", "docstring": "Take a benchmark result containing raw benchmark results, and do aggregation by\n applying functions\n\n Aggregates come out in format of metricname, aggregate_name, result", "id": "f231:m7"} {"signature": "def parse_configuration(node, base_config=None):", "body": "test_config = base_configif not test_config:test_config = TestConfig()node = lowercase_keys(flatten_dictionaries(node)) for key, value in node.items():if key == u'':test_config.timeout = int(value)elif key == u'':test_config.print_bodies = safe_to_bool(value)elif key == u'':test_config.retries = int(value)elif key == u'':if not test_config.variable_binds:test_config.variable_binds = dict()test_config.variable_binds.update(flatten_dictionaries(value))elif key == u'':flat = flatten_dictionaries(value)gen_map = dict()for generator_name, generator_config in flat.items():gen = parse_generator(generator_config)gen_map[str(generator_name)] = gentest_config.generators = gen_mapreturn test_config", "docstring": "Parse input config to configuration information", "id": "f231:m3"} {"signature": "def read_file(path):", "body": "with open(path, \"\") as f:string = f.read()f.close()return string", "docstring": "Read an input into a file, doing necessary conversions around relative path handling", "id": "f231:m4"} {"signature": "def parse_headers(header_string):", "body": "if not header_string:return list()request, headers = header_string.split('', )if not headers:return list()if sys.version_info < (,):header_msg = message_from_string(headers.encode(HEADER_ENCODING))return [(text_type(k.lower(), HEADER_ENCODING), text_type(v, HEADER_ENCODING))for k, v in header_msg.items()]else:header_msg = message_from_string(headers)return [(k.lower(), v) for k, v in header_msg.items()]", "docstring": "Parse a header-string into individual headers\n Implementation based on: http://stackoverflow.com/a/5955949/95122\n Note that headers are a list of (key, value) since duplicate headers are allowed\n\n NEW NOTE: keys & values are unicode strings, but can only contain ISO-8859-1 characters", "id": "f231:m1"} {"signature": "def get_readable_config(self, context=None):", "body": "string_frags = list()string_frags.append(\"\" + self.extractor.get_readable_config(context=context))if isinstance(self.expected, AbstractExtractor):string_frags.append(\"\" +self.expected.get_readable_config(context=context))elif self.isTemplateExpected:string_frags.append(''.format(self.expected))return os.linesep.join(string_frags)", "docstring": "Get a human-readable config string", "id": "f232:c6:m0"} {"signature": "def safe_length(var):", "body": "output = -try:output = len(var)except:passreturn output", "docstring": "Exception-safe length check, returns -1 if no length on type or error", "id": "f232:m1"} {"signature": "@staticmethoddef parse(config):", "body": "output = ComparatorValidator()config = parsing.lowercase_keys(parsing.flatten_dictionaries(config))output.config = configoutput.extractor = _get_extractor(config)if 
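
`analyze_benchmark_results` reduces each raw metric series through the functions registered in `AGGREGATES`, emitting `(metricname, aggregate_name, result)` tuples. A self-contained sketch of that reduction, with a hypothetical `AGGREGATES` table standing in for the real registry defined elsewhere in the benchmarks module:

```python
import statistics

# Hypothetical aggregate registry; the real module defines its own table.
AGGREGATES = {
    'mean': statistics.mean,
    'median': statistics.median,
    'total': sum,
}

def aggregate_metrics(raw_results, aggregated_metrics):
    """Yield (metricname, aggregate_name, result) tuples, None when no samples."""
    for metricname, aggregate_list in aggregated_metrics.items():
        numbers = raw_results.get(metricname, [])
        for aggregate_name in aggregate_list:
            value = AGGREGATES[aggregate_name](numbers) if numbers else None
            yield (metricname, aggregate_name, value)

raw = {'total_time': [0.12, 0.18, 0.15], 'size_download': []}
wanted = {'total_time': ['mean', 'median'], 'size_download': ['total']}
print(list(aggregate_metrics(raw, wanted)))
# [('total_time', 'mean', 0.15), ('total_time', 'median', 0.15),
#  ('size_download', 'total', None)]
```
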
output.extractor is None:raise ValueError(\"\")if '' not in config: output.comparator_name = ''else:output.comparator_name = config[''].lower()output.comparator = COMPARATORS[output.comparator_name]if not output.comparator:raise ValueError(\"\")try:expected = config['']except KeyError:raise ValueError(\"\")if isinstance(expected, basestring) or isinstance(expected, (int, long, float, complex)):output.expected = expectedelif isinstance(expected, dict):expected = parsing.lowercase_keys(expected)template = expected.get('')if template: if not isinstance(template, basestring):raise ValueError(\"\")output.isTemplateExpected = Trueoutput.expected = templateelse: output.expected = _get_extractor(expected)if not output.expected:raise ValueError(\"\")return output", "docstring": "Create a validator that does an extract from body and applies a comparator,\n Then does comparison vs expected value\n Syntax sample:\n { jsonpath_mini: 'node.child',\n operator: 'eq',\n expected: 'myValue'\n }", "id": "f232:c6:m2"} {"signature": "def register_validator(name, parse_function):", "body": "name = name.lower()if name in VALIDATORS:raise Exception(\"\".format(name))VALIDATORS[name] = parse_function", "docstring": "Registers a validator for use by this library\n Name is the string name for validator\n\n Parse function does parse(config_node) and returns a Validator object\n Validator functions have signature:\n validate(response_body, context=None) - context is a bindings.Context object\n\n Validators return true or false and optionally can return a Failure instead of false\n This allows for passing more details", "id": "f232:m6"} {"signature": "def _get_extractor(config_dict):", "body": "extractor = Noneextract_config = Nonefor key, value in config_dict.items():if key in EXTRACTORS:return parse_extractor(key, value)else: raise Exception(''.format(config_dict))", "docstring": "Utility function, get an extract function for a single valid extractor name in config\n and error if more than one or none", "id": "f232:m3"} {"signature": "def add_move(move):", "body": "setattr(_MovedItems, move.name, move)", "docstring": "Add an item to six.moves.", "id": "f235:m2"} {"signature": "def python_2_unicode_compatible(klass):", "body": "if PY2:if '' not in klass.__dict__:raise ValueError(\"\"\"\" %klass.__name__)klass.__unicode__ = klass.__str__klass.__str__ = lambda self: self.__unicode__().encode('')return klass", "docstring": "A decorator that defines __unicode__ and __str__ methods under Python 2.\nUnder Python 3 it does nothing.\n\nTo support Python 2 and 3 with a single code base, define a __str__ method\nreturning text and apply this decorator to the class.", "id": "f235:m9"} {"signature": "def get_code(self, fullname):", "body": "self.__get_module(fullname) return None", "docstring": "Return None\n\n Required, if is_package is implemented", "id": "f235:c4:m7"} {"signature": "def remove_move(name):", "body": "try:delattr(_MovedItems, name)except AttributeError:try:del moves.__dict__[name]except KeyError:raise AttributeError(\"\" % (name,))", "docstring": "Remove item from six.moves.", "id": "f235:m3"} {"signature": "def with_metaclass(meta, *bases):", "body": "class metaclass(meta):def __new__(cls, name, this_bases, d):return meta(name, bases, d)return type.__new__(metaclass, '', (), {})", "docstring": "Create a base class with a metaclass.", "id": "f235:m7"} {"signature": "def setup(self, input, is_file=False, is_template_path=False, is_template_content=False):", "body": "if not isinstance(input, basestring):raise 
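
`register_validator` spells out the extension contract: the parse function receives a config node and returns an object whose `validate(response_body, context=None)` returns a truth value (or a `Failure`). A sketch of wiring a trivial length-based validator against that contract; the validator class and the `'min_length'` name are illustrative, not part of the library:

```python
class MinLengthValidator(object):
    """Passes when the response body is at least `minimum` characters long."""

    def __init__(self, minimum):
        self.minimum = minimum

    def validate(self, response_body, context=None):
        return response_body is not None and len(response_body) >= self.minimum

def parse_min_length(config):
    """parse(config_node) -> validator object, per the register_validator contract."""
    return MinLengthValidator(int(config))

# register_validator('min_length', parse_min_length)  # as described above
validator = parse_min_length('5')
assert validator.validate('hello world')
assert not validator.validate('hi')
```
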
TypeError(\"\")if is_file:input = os.path.abspath(input)self.content = inputself.is_file = is_fileself.is_template_path = is_template_pathself.is_template_content = is_template_content", "docstring": "Self explanatory, input is inline content or file path.", "id": "f239:c0:m3"} {"signature": "def get_content(self, context=None):", "body": "if self.is_file:path = self.contentif self.is_template_path and context:path = string.Template(path).safe_substitute(context.get_values())data = Nonewith open(path, '') as f:data = f.read()if self.is_template_content and context:return string.Template(data).safe_substitute(context.get_values())else:return dataelse:if self.is_template_content and context:return safe_substitute_unicode_template(self.content, context.get_values())else:return self.content", "docstring": "Does all context binding and pathing to get content, templated out", "id": "f239:c0:m1"} {"signature": "def create_noread_version(self):", "body": "if not self.is_file or self.is_template_path:return selfoutput = ContentHandler()output.is_template_content = self.is_template_contentwith open(self.content, '') as f:output.content = f.read()return output", "docstring": "Read file content if it is static and return content handler with no I/O", "id": "f239:c0:m2"} {"signature": "def factory_fixed_sequence(values):", "body": "def seq_generator():my_list = list(values)i = while(True):yield my_list[i]if i == len(my_list):i = return seq_generator", "docstring": "Return a generator that runs through a list of values in order, looping after end", "id": "f240:m4"} {"signature": "def generator_random_int32():", "body": "rand = random.Random()while (True):yield random.randint(, INT32_MAX_VALUE)", "docstring": "Random integer generator for up to 32-bit signed ints", "id": "f240:m2"} {"signature": "def parse_generator(configuration):", "body": "configuration = lowercase_keys(flatten_dictionaries(configuration))gen_type = str(configuration.get(u'')).lower()if gen_type not in GENERATOR_TYPES:raise ValueError(''.format(gen_type))if gen_type == u'':return factory_env_variable(configuration[u''])()elif gen_type == u'':return factory_env_string(configuration[u''])()elif gen_type == u'':start = configuration.get('')increment = configuration.get('')if not start:start = else:start = int(start)if not increment:increment = else:increment = int(increment)return factory_generate_ids(start, increment)()elif gen_type == u'':return generator_random_int32()elif gen_type == u'':return parse_random_text_generator(configuration)elif gen_type in GENERATOR_TYPES:return GENERATOR_PARSING[gen_type](configuration)else:raise Exception(\"\".format(''))", "docstring": "Parses a configuration built from yaml and returns a generator\n Configuration should be a map", "id": "f240:m12"} {"signature": "def parse_fixed_sequence(config):", "body": "vals = config['']if not vals:raise ValueError('')if not isinstance(vals, list):raise ValueError('')return factory_fixed_sequence(vals)()", "docstring": "Parse fixed sequence string", "id": "f240:m5"} {"signature": "def factory_generate_ids(starting_id=, increment=):", "body": "def generate_started_ids():val = starting_idlocal_increment = incrementwhile(True):yield valval += local_incrementreturn generate_started_ids", "docstring": "Return function generator for ids starting at starting_id\n Note: needs to be called with () to make generator", "id": "f240:m0"} {"signature": "def factory_choice_generator(values):", "body": "def choice_generator():my_list = list(values)rand = 
random.Random()while(True):yield random.choice(my_list)return choice_generator", "docstring": "Return a generator that picks values from a list randomly", "id": "f240:m6"} {"signature": "def run_configure(self, key, value, configurable, validator_func=None, converter_func=None, store_func=None, *args, **kwargs):", "body": "if validator_func and not validator_func(value):raise TypeError(\"\".format(value))storeable = valueif converter_func:storeable = converter_func(value)if store_func:store_func(configurable, key, storeable)else:setattr(configurable, key, storeable)", "docstring": "Run a single configuration element\n Run a validator on the value, if supplied\n Run a converter_func to turn the value into something storeable:\n converter_func takes params (value) at least and throws exception if failed\n If a store_func is supplied, use that to store the option\n store_func needs to take params (object, key, value, args, kwargs)\n If store_func NOT supplied we do a setattr on object", "id": "f241:c0:m0"} {"signature": "def encode_unicode_bytes(my_string):", "body": "if not isinstance(my_string, basestring):my_string = repr(my_string)if PYTHON_MAJOR_VERSION == :if isinstance(my_string, str):return my_stringelif isinstance(my_string, unicode):return my_string.encode('')else:if isinstance(my_string, str):return my_string.encode('')elif isinstance(my_string, bytes):return my_string", "docstring": "Shim function, converts Unicode to UTF-8 encoded bytes regardless of the source format\n Intended for python 3 compatibility mode, and b/c PyCurl only takes raw bytes", "id": "f241:m0"} {"signature": "def safe_to_json(in_obj):", "body": "if isinstance(in_obj, bytearray):return str(in_obj)if hasattr(in_obj, ''):return in_obj.__dict__try:return str(in_obj)except:return repr(in_obj)", "docstring": "Safely get dict from object if present for json dumping", "id": "f241:m2"} {"signature": "def configure(self, configs, configurable, handler, *args, **kwargs):", "body": "for key, value in configs.items():handler[key] = config_optionsself.run_configure(value, configurable)", "docstring": "Use the configs and configurable to parse", "id": "f241:c0:m1"} {"signature": "def is_context_modifier(self):", "body": "return self.variable_binds or self.generator_binds or self.extract_binds", "docstring": "Returns true if context can be modified by this test\n (disallows caching of templated test bodies)", "id": "f242:c0:m13"} {"signature": "def configure_curl(self, timeout=DEFAULT_TIMEOUT, context=None, curl_handle=None):", "body": "if curl_handle:curl = curl_handletry: curl.getinfo(curl.HTTP_CODE) curl.reset()curl.setopt(curl.COOKIELIST, \"\")except pycurl.error:curl = pycurl.Curl()else:curl = pycurl.Curl()curl.setopt(curl.URL, str(self.url))curl.setopt(curl.TIMEOUT, timeout)is_unicoded = Falsebod = self.bodyif isinstance(bod, text_type): bod = bod.encode('')is_unicoded = Trueif bod and len(bod) > :curl.setopt(curl.READFUNCTION, MyIO(bod).read)if self.auth_username and self.auth_password:curl.setopt(pycurl.USERPWD, parsing.encode_unicode_bytes(self.auth_username) + b'' + parsing.encode_unicode_bytes(self.auth_password))if self.auth_type:curl.setopt(pycurl.HTTPAUTH, self.auth_type)if self.method == u'':curl.setopt(HTTP_METHODS[u''], )if bod is not None:curl.setopt(pycurl.POSTFIELDSIZE, len(bod))else:curl.setopt(pycurl.POSTFIELDSIZE, )elif self.method == u'':curl.setopt(HTTP_METHODS[u''], )if bod is not None:curl.setopt(pycurl.INFILESIZE, len(bod))else:curl.setopt(pycurl.INFILESIZE, )elif self.method == 
u'':curl.setopt(curl.POSTFIELDS, bod)curl.setopt(curl.CUSTOMREQUEST, '')if bod is not None:curl.setopt(pycurl.INFILESIZE, len(bod))else:curl.setopt(pycurl.INFILESIZE, )elif self.method == u'':curl.setopt(curl.CUSTOMREQUEST, '')if bod is not None:curl.setopt(pycurl.POSTFIELDS, bod)curl.setopt(pycurl.POSTFIELDSIZE, len(bod))elif self.method == u'':curl.setopt(curl.NOBODY, )curl.setopt(curl.CUSTOMREQUEST, '')elif self.method and self.method.upper() != '': curl.setopt(curl.CUSTOMREQUEST, self.method.upper())if bod is not None:curl.setopt(pycurl.POSTFIELDS, bod)curl.setopt(pycurl.POSTFIELDSIZE, len(bod))head = self.get_headers(context=context)head = copy.copy(head) if is_unicoded and u'' in head.keys():content = head[u'']if u'' not in content:head[u''] = content + u''if head:headers = [str(headername) + '' + str(headervalue)for headername, headervalue in head.items()]else:headers = list()headers.append(\"\")headers.append(\"\")curl.setopt(curl.HTTPHEADER, headers)if self.curl_options:filterfunc = lambda x: x[] is not None and x[] is not None for (key, value) in ifilter(filterfunc, self.curl_options.items()):curl.setopt(getattr(curl, key), value)return curl", "docstring": "Create and mostly configure a curl object for test, reusing existing if possible", "id": "f242:c0:m19"} {"signature": "def realize(self, context=None):", "body": "if not self.is_dynamic() or context is None:return selfelse:selfcopy = self.ninja_copy()selfcopy.templates = Noneif isinstance(self._body, ContentHandler):selfcopy._body = self._body.get_content(context)selfcopy._url = self.get_url(context=context)selfcopy._headers = self.get_headers(context=context)return selfcopy", "docstring": "Return a fully-templated test object, for configuring curl\n Warning: this is a SHALLOW copy, mutation of fields will cause problems!\n Can accept a None context", "id": "f242:c0:m15"} {"signature": "def set_body(self, value):", "body": "self._body = value", "docstring": "Set body, directly", "id": "f242:c0:m5"} {"signature": "def set_headers(self, value, isTemplate=False):", "body": "if isTemplate:self.set_template(self.NAME_HEADERS, '')else:self.del_template(self.NAME_HEADERS)self._headers = value", "docstring": "Set headers, passing flag if using a template", "id": "f242:c0:m9"} {"signature": "def set_url(self, value, isTemplate=False):", "body": "if isTemplate:self.set_template(self.NAME_URL, value)else:self.del_template(self.NAME_URL)self._url = value", "docstring": "Set URL, passing flag if using a template", "id": "f242:c0:m7"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(MailChimp, self).__init__(*args, **kwargs)self.root = self.api_root = Root(self)self.authorized_apps = AuthorizedApps(self)self.automations = Automations(self)self.automations.actions = AutomationActions(self)self.automations.emails = AutomationEmails(self)self.automations.emails.actions = AutomationEmailActions(self)self.automations.emails.queues = AutomationEmailQueues(self)self.automations.removed_subscribers = AutomationRemovedSubscribers(self)self.batches = self.batch_operations = BatchOperations(self)self.batch_webhooks = BatchWebhooks(self)self.campaign_folders = CampaignFolders(self)self.campaigns = Campaigns(self)self.campaigns.actions = CampaignActions(self)self.campaigns.content = CampaignContent(self)self.campaigns.feedback = CampaignFeedback(self)self.campaigns.send_checklist = CampaignSendChecklist(self)self.conversations = Conversations(self)self.conversations.messages = ConversationMessages(self)self.stores = self.ecommerce = 
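
`configure_curl` is essentially a long mapping from a test's method, body, and headers onto `pycurl.setopt` calls. For orientation, a minimal standalone pycurl request in the same option-driven style; the URL is a placeholder and the snippet needs a reachable server to actually run:

```python
from io import BytesIO
import pycurl

buffer = BytesIO()
curl = pycurl.Curl()
curl.setopt(curl.URL, 'http://localhost:8000/api/person/')  # placeholder URL
curl.setopt(curl.TIMEOUT, 10)
curl.setopt(curl.HTTPHEADER, ['Accept: application/json'])
curl.setopt(curl.WRITEDATA, buffer)  # collect the response body
curl.perform()
status = curl.getinfo(curl.RESPONSE_CODE)
curl.close()
print(status, len(buffer.getvalue()))
```
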
Stores(self)self.stores.carts = StoreCarts(self)self.stores.carts.lines = StoreCartLines(self)self.stores.customers = StoreCustomers(self)self.stores.orders = StoreOrders(self)self.stores.orders.lines = StoreOrderLines(self)self.stores.products = StoreProducts(self)self.stores.products.images = StoreProductImages(self)self.stores.products.variants = StoreProductVariants(self)self.stores.promo_rules = StorePromoRules(self)self.stores.promo_codes = StorePromoCodes(self)self.files = FileManagerFiles(self)self.folders = FileManagerFolders(self)self.lists = Lists(self)self.lists.abuse_reports = ListAbuseReports(self)self.lists.activity = ListActivity(self)self.lists.clients = ListClients(self)self.lists.growth_history = ListGrowthHistory(self)self.lists.interest_categories = ListInterestCategories(self)self.lists.interest_categories.interests = ListInterestCategoryInterest(self)self.lists.members = ListMembers(self)self.lists.members.activity = ListMemberActivity(self)self.lists.members.goals = ListMemberGoals(self)self.lists.members.notes = ListMemberNotes(self)self.lists.members.tags = ListMemberTags(self)self.lists.merge_fields = ListMergeFields(self)self.lists.segments = ListSegments(self)self.lists.segments.members = ListSegmentMembers(self)self.lists.signup_forms = ListSignupForms(self)self.lists.webhooks = ListWebhooks(self)self.ping = Ping(self)self.reports = Reports(self)self.reports.abuse_reports = ReportCampaignAbuseReports(self)self.reports.advice = ReportCampaignAdvice(self)self.reports.click_details = ReportClickDetailReports(self)self.reports.click_details.members = ReportClickDetailMembers(self)self.reports.domain_performance = ReportDomainPerformance(self)self.reports.eepurl = ReportEepURL(self)self.reports.email_activity = ReportEmailActivity(self)self.reports.locations = ReportLocations(self)self.reports.sent_to = ReportSentTo(self)self.reports.subreports = ReportSubReports(self)self.reports.unsubscribes = ReportUnsubscribes(self)self.reports.open_details = ReportOpenDetails(self)self.reports.google_analytics = ReportGoogleAnalytics(self)self.search_campaigns = SearchCampaigns(self)self.search_members = SearchMembers(self)self.template_folders = TemplateFolders(self)self.templates = Templates(self)self.templates.default_content = TemplateDefaultContent(self)", "docstring": "Initialize the class with your api_key and user_id and attach all of\nthe endpoints", "id": "f245:c0:m0"} {"signature": "def check_subscriber_hash(potential_hash):", "body": "if re.match(r\"\", potential_hash):return potential_hashelse:return get_subscriber_hash(potential_hash)", "docstring": "Check the passed value to see if it matches a 32 character hex number that\nMD5 generates as output, or compute that value assuming that the input is\nan email address.\n\n:param potential_hash: A value to be passed to any of the endpoints that\nexpect an MD5 of an email address\n:type potential_hash: :py:class:`str`\n:returns: A valid MD5 hash in hex\n:rtype: :py:class:`str`", "id": "f246:m1"} {"signature": "def check_email(email):", "body": "if not re.match(r\"\", email):raise ValueError('')return", "docstring": "Function that verifies that the string passed is a valid email address.\n\nRegex for email validation based on MailChimp limits:\nhttp://kb.mailchimp.com/accounts/management/international-characters-in-mailchimp\n\n:param email: The potential email address\n:type email: :py:class:`str`\n:return: Nothing", "id": "f246:m2"} {"signature": "def _iterate(self, url, **queryparams):", "body": "if '' in 
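
`check_subscriber_hash` accepts either a ready-made hash or an email address, deferring to `get_subscriber_hash` for the latter. MailChimp's member id is the MD5 of the lowercased address, so the pair presumably reduces to something like the sketch below; the regex literal was stripped above, and a 32-hex-digit pattern is the obvious reconstruction:

```python
import hashlib
import re

def get_subscriber_hash(member_email):
    """MD5 of the lowercased email address -- MailChimp's member id."""
    return hashlib.md5(member_email.lower().encode('utf-8')).hexdigest()

def check_subscriber_hash(potential_hash):
    # 32 hex characters -> already a hash; otherwise compute it from the email
    if re.match(r'^[0-9a-f]{32}$', potential_hash):
        return potential_hash
    return get_subscriber_hash(potential_hash)

assert check_subscriber_hash('Foo@Example.COM') == get_subscriber_hash('foo@example.com')
```
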
queryparams:if '' not in queryparams[''].split(''):queryparams[''] += ''queryparams.pop(\"\", None)queryparams.pop(\"\", None)result = self._mc_client._get(url=url, offset=, count=, **queryparams)total = result['']if total > :for offset in range(, int(total / ) + ):result = merge_results(result, self._mc_client._get(url=url,offset=int(offset * ),count=,**queryparams))return resultelse: return result", "docstring": "Iterate over all pages for the given url. Feed in the result of self._build_path as the url.\n\n:param url: The url of the endpoint\n:type url: :py:class:`str`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer", "id": "f247:c0:m2"} {"signature": "def get(self, folder_id, **queryparams):", "body": "self.folder_id = folder_idreturn self._mc_client._get(url=self._build_path(folder_id), **queryparams)", "docstring": "Get information about a specific folder used to organize templates.\n\n:param folder_id: The unique id for the File Manager folder.\n:type folder_id: :py:class:`str`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []", "id": "f248:c0:m3"} {"signature": "def delete(self, folder_id):", "body": "self.folder_id = folder_idreturn self._mc_client._delete(url=self._build_path(folder_id))", "docstring": "Delete a specific template folder, and mark all the templates in the\nfolder as \u2018unfiled\u2019.\n\n:param folder_id: The unique id for the File Manager folder.\n:type folder_id: :py:class:`str`", "id": "f248:c0:m5"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(TemplateFolders, self).__init__(*args, **kwargs)self.endpoint = ''self.folder_id = None", "docstring": "Initialize the endpoint", "id": "f248:c0:m0"} {"signature": "def all(self, get_all=False, **queryparams):", "body": "self.folder_id = Noneif get_all:return self._iterate(url=self._build_path(), **queryparams)else:return self._mc_client._get(url=self._build_path(), **queryparams)", "docstring": "Get all folders used to organize templates.\n\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer", "id": "f248:c0:m2"} {"signature": "def create(self, data):", "body": "if '' not in data:raise KeyError('')response = self._mc_client._post(url=self._build_path(), data=data)if response is not None:self.folder_id = response['']else:self.folder_id = Nonereturn response", "docstring": "Create a new template folder.\n\n:param data: The request body parameters\n:type data: :py:class:`dict`\ndata = {\n \"name\": string*\n}", "id": "f248:c0:m1"} {"signature": "def get(self, workflow_id, **queryparams):", "body": "self.workflow_id = workflow_idreturn self._mc_client._get(url=self._build_path(workflow_id), **queryparams)", "docstring": "Get a summary of an individual Automation workflow\u2019s settings and\ncontent. 
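
`_iterate` pages through an endpoint by issuing a first request, reading the total item count, then merging fixed-size offset windows. The page-size constant and the `merge_results` internals were stripped above; a generic sketch of the same offset/count loop, with a hypothetical `fetch(offset, count)` standing in for `self._mc_client._get` and illustrative key names:

```python
def iterate_all(fetch, page_size=500):
    """Collect every item from an offset/count-paginated endpoint.

    `fetch(offset, count)` must return {'items': [...], 'total_items': N};
    both key names are illustrative, not the MailChimp schema.
    """
    first = fetch(offset=0, count=page_size)
    items = list(first['items'])
    total = first['total_items']
    offset = page_size
    while offset < total:
        items.extend(fetch(offset=offset, count=page_size)['items'])
        offset += page_size
    return items

# Fake endpoint with 1234 items, to exercise the loop
data = list(range(1234))
fake = lambda offset, count: {'items': data[offset:offset + count],
                              'total_items': len(data)}
assert iterate_all(fake) == data
```
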
The trigger_settings object returns information for the first\nemail in the workflow.\n\n:param workflow_id: The unique id for the Automation workflow\n:type workflow_id: :py:class:`str`\n:param queryparams: the query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []", "id": "f249:c0:m2"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(Automations, self).__init__(*args, **kwargs)self.endpoint = ''self.workflow_id = Noneself.actions = AutomationActions(self)self.emails = AutomationEmails(self)self.removed_subscribers = AutomationRemovedSubscribers(self)", "docstring": "Initialize the endpoint", "id": "f249:c0:m0"} {"signature": "def all(self, get_all=False, **queryparams):", "body": "self.workflow_id = Noneif get_all:return self._iterate(url=self._build_path(), **queryparams)else:return self._mc_client._get(url=self._build_path(), **queryparams)", "docstring": "Get a summary of an account\u2019s Automations.\n\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: the query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []", "id": "f249:c0:m1"} {"signature": "def get(self, workflow_id, email_id):", "body": "self.workflow_id = workflow_idself.email_id = email_idreturn self._mc_client._get(url=self._build_path(workflow_id, '', email_id))", "docstring": "Get information about an individual Automation workflow email.\n\n:param workflow_id: The unique id for the Automation workflow.\n:type workflow_id: :py:class:`str`\n:param email_id: The unique id for the Automation workflow email.\n:type email_id: :py:class:`str`", "id": "f250:c0:m2"} {"signature": "def all(self, workflow_id, get_all=False, **queryparams):", "body": "self.workflow_id = workflow_idself.email_id = Noneif get_all:return self._iterate(url=self._build_path(workflow_id, ''), **queryparams)else:return self._mc_client._get(url=self._build_path(workflow_id, ''), **queryparams)", "docstring": "Get a summary of the emails in an Automation workflow.\n\n:param workflow_id: The unique id for the Automation workflow.\n:type workflow_id: :py:class:`str`\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: the query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []", "id": "f250:c0:m1"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(AutomationEmails, self).__init__(*args, **kwargs)self.endpoint = ''self.workflow_id = Noneself.email_id = Noneself.actions = AutomationEmailActions(self)self.queues = AutomationEmailQueues(self)", "docstring": "Initialize the endpoint", "id": "f250:c0:m0"} {"signature": "def all(self, workflow_id, email_id):", "body": "self.workflow_id = workflow_idself.email_id = email_idself.subscriber_hash = Nonereturn self._mc_client._get(url=self._build_path(workflow_id, '', email_id, ''))", "docstring": "Get information about an Automation email queue.\n\n:param workflow_id: The unique id for the Automation workflow.\n:type workflow_id: :py:class:`str`\n:param email_id: The unique id for the Automation workflow email.\n:type email_id: :py:class:`str`", "id": "f251:c0:m2"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(AutomationEmailQueues, self).__init__(*args, **kwargs)self.endpoint = ''self.workflow_id = Noneself.email_id = Noneself.subscriber_hash = None", "docstring": "Initialize the endpoint", "id": "f251:c0:m0"} {"signature": "def get(self, **queryparams):", 
"body": "return self._mc_client._get(url=self._build_path(), **queryparams)", "docstring": "Search all campaigns for the specified query terms.\n\n:param queryparams: The query string parameters\nqueryparams['fields'] = array\nqueryparams['exclude_fields'] = array\nqueryparams['query'] = string\nqueryparams['snip_start'] = string\nqueryparams['snip_end'] = string\nqueryparams['offset'] = integer", "id": "f252:c0:m1"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(TemplateDefaultContent, self).__init__(*args, **kwargs)self.endpoint = ''self.template_id = None", "docstring": "Initialize the endpoint", "id": "f253:c0:m0"} {"signature": "def delete(self, template_id):", "body": "self.template_id = template_idreturn self._mc_client._delete(url=self._build_path(template_id))", "docstring": "Delete a specific template.\n\n:param template_id: The unique id for the template.\n:type template_id: :py:class:`str`", "id": "f254:c0:m5"} {"signature": "def all(self, get_all=False, **queryparams):", "body": "self.template_id = Noneif get_all:return self._iterate(url=self._build_path(), **queryparams)else:return self._mc_client._get(url=self._build_path(), **queryparams)", "docstring": "Get a list of an account\u2019s available templates.\n\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer\nqueryparams['created_by'] = string\nqueryparams['before_created_at'] = string\nqueryparams['since_created_at'] = string\nqueryparams['type'] = string\nqueryparams['folder_id'] = string", "id": "f254:c0:m2"} {"signature": "def get(self, list_id, segment_id):", "body": "return self._mc_client._get(url=self._build_path(list_id, '', segment_id))", "docstring": "returns the specified list segment.", "id": "f256:c0:m2"} {"signature": "def update(self, list_id, segment_id, data):", "body": "return self._mc_client._patch(url=self._build_path(list_id, '', segment_id), data=data)", "docstring": "updates an existing list segment.", "id": "f256:c0:m3"} {"signature": "def create(self, list_id, data):", "body": "return self._mc_client._post(url=self._build_path(list_id, ''), data=data)", "docstring": "adds a new segment to the list.", "id": "f256:c0:m5"} {"signature": "def all(self, get_all=False, **queryparams):", "body": "self.file_id = Noneif get_all:return self._iterate(url=self._build_path(), **queryparams)else:return self._mc_client._get(url=self._build_path(), **queryparams)", "docstring": "Get a list of available images and files stored in the File Manager for the account.\n\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer\nqueryparams['type'] = string\nqueryparams['created_by'] = string\nqueryparams['before_created_at'] = string\nqueryparams['since_created_at'] = string\nqueryparams['sort_field'] = string\nqueryparams['sort_dir'] = string", "id": "f257:c0:m2"} {"signature": "def delete(self, file_id):", "body": "self.file_id = file_idreturn self._mc_client._delete(url=self._build_path(file_id))", "docstring": "Remove a specific file from the File Manager.\n\n:param file_id: The unique id for the File Manager file.\n:type file_id: :py:class:`str`", "id": "f257:c0:m5"} {"signature": "def 
update(self, file_id, data):", "body": "self.file_id = file_idif '' not in data:raise KeyError('')if '' not in data:raise KeyError('')return self._mc_client._patch(url=self._build_path(file_id), data=data)", "docstring": "Update a file in the File Manager.\n\n:param file_id: The unique id for the File Manager file.\n:type file_id: :py:class:`str`\n:param data: The request body parameters\n:type data: :py:class:`dict`\ndata = {\n \"name\": string*,\n \"file_data\": string*\n}", "id": "f257:c0:m4"} {"signature": "def get(self, file_id, **queryparams):", "body": "self.file_id = file_idreturn self._mc_client._get(url=self._build_path(file_id), **queryparams)", "docstring": "Get information about a specific file in the File Manager.\n\n:param file_id: The unique id for the File Manager file.\n:type file_id: :py:class:`str`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []", "id": "f257:c0:m3"} {"signature": "def all(self, list_id, segment_id, get_all=False, **queryparams):", "body": "self.list_id = list_idself.segment_id = segment_idself.subscriber_hash = Noneif get_all:return self._iterate(url=self._build_path(list_id, '', segment_id, ''), **queryparams)else:return self._mc_client._get(url=self._build_path(list_id, '', segment_id, ''), **queryparams)", "docstring": "Get information about members in a saved segment.\n\n:param list_id: The unique id for the list.\n:type list_id: :py:class:`str`\n:param segment_id: The unique id for the segment.\n:type segment_id: :py:class:`str`\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer", "id": "f258:c0:m2"} {"signature": "def all(self, campaign_id, get_all=False, **queryparams):", "body": "self.campaign_id = campaign_idself.subscriber_hash = Noneif get_all:return self._iterate(url=self._build_path(campaign_id, ''), **queryparams)else:return self._mc_client._get(url=self._build_path(campaign_id, ''), **queryparams)", "docstring": "Get a list of member\u2019s subscriber activity in a specific campaign.\n\n:param campaign_id: The unique id for the campaign.\n:type campaign_id: :py:class:`str`\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer", "id": "f259:c0:m1"} {"signature": "def create(self, data):", "body": "if '' not in data:raise KeyError('')response = self._mc_client._post(url=self._build_path(), data=data)if response is not None:self.batch_webhook_id = response['']else:self.batch_webhook_id = Nonereturn response", "docstring": "Configure a webhook that will fire whenever any batch request\ncompletes processing.\n\n:param data: The request body parameters\n:type data: :py:class:`dict`\ndata = {\n \"url\": string*\n}", "id": "f260:c0:m1"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(ReportSubReports, self).__init__(*args, **kwargs)self.endpoint = ''self.campaign_id = None", "docstring": "Initialize the endpoint", "id": "f261:c0:m0"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(ReportCampaignAbuseReports, self).__init__(*args, **kwargs)self.endpoint = ''self.campaign_id = Noneself.report_id = None", "docstring": "Initialize the 
endpoint", "id": "f263:c0:m0"} {"signature": "def all(self, campaign_id, **queryparams):", "body": "self.campaign_id = campaign_idself.report_id = Nonereturn self._mc_client._get(url=self._build_path(campaign_id, ''), **queryparams)", "docstring": "Get a list of abuse complaints for a specific campaign.\n\n:param campaign_id: The unique id for the campaign.\n:type campaign_id: :py:class:`str`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []", "id": "f263:c0:m1"} {"signature": "def get(self, app_id, **queryparams):", "body": "self.app_id = app_idreturn self._mc_client._get(url=self._build_path(app_id), **queryparams)", "docstring": "Get information about a specific authorized application\n\n:param app_id: The unique id for the connected authorized application\n:type app_id: :py:class:`str`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []", "id": "f264:c0:m3"} {"signature": "def all(self, get_all=False, **queryparams):", "body": "self.app_id = Noneif get_all:return self._iterate(url=self._build_path(), **queryparams)else:return self._mc_client._get(url=self._build_path(), **queryparams)", "docstring": "Get a list of an account\u2019s registered, connected applications.\n\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer", "id": "f264:c0:m2"} {"signature": "def create(self, data):", "body": "if '' not in data:raise KeyError('')if '' not in data:raise KeyError('')if '' not in data:raise KeyError('')if '' not in data:raise KeyError('')if not re.match(r\"\", data['']):raise ValueError('')response = self._mc_client._post(url=self._build_path(), data=data)if response is not None:self.store_id = response['']else:self.store_id = Nonereturn response", "docstring": "Add a new store to your MailChimp account.\n\nError checking on the currency code verifies that it is in the correct\nthree-letter, all-caps format as specified by ISO 4217 but does not\ncheck that it is a valid code as the list of valid codes changes over\ntime.\n\n:param data: The request body parameters\n:type data: :py:class:`dict`\ndata = {\n \"id\": string*,\n \"list_id\": string*,\n \"name\": string*,\n \"currency_code\": string*\n}", "id": "f266:c0:m1"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(Stores, self).__init__(*args, **kwargs)self.endpoint = ''self.store_id = Noneself.carts = StoreCarts(self)self.customers = StoreCustomers(self)self.orders = StoreOrders(self)self.products = StoreProducts(self)", "docstring": "Initialize the endpoint", "id": "f266:c0:m0"} {"signature": "def delete(self, store_id):", "body": "self.store_id = store_idreturn self._mc_client._delete(url=self._build_path(store_id))", "docstring": "Delete a store. 
Deleting a store will also delete any associated\nsubresources, including Customers, Orders, Products, and Carts.\n\n:param store_id: The store id.\n:type store_id: :py:class:`str`", "id": "f266:c0:m5"} {"signature": "def all(self, get_all=False, **queryparams):", "body": "self.store_id = Noneif get_all:return self._iterate(url=self._build_path(), **queryparams)else:return self._mc_client._get(url=self._build_path(), **queryparams)", "docstring": "Get information about all stores in the account.\n\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer", "id": "f266:c0:m2"} {"signature": "def all(self, campaign_id, link_id, get_all=False, **queryparams):", "body": "self.campaign_id = campaign_idself.link_id = link_idself.subscriber_hash = Noneif get_all:return self._iterate(url=self._build_path(campaign_id, '', link_id, ''), **queryparams)else:return self._mc_client._get(url=self._build_path(campaign_id, '', link_id, ''),**queryparams)", "docstring": "Get information about list members who clicked on a specific link in a\ncampaign.\n\n:param campaign_id: The unique id for the campaign.\n:type campaign_id: :py:class:`str`\n:param link_id: The id for the link.\n:type link_id: :py:class:`str`\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer", "id": "f267:c0:m1"} {"signature": "def update(self, store_id, cart_id, data):", "body": "self.store_id = store_idself.cart_id = cart_idreturn self._mc_client._patch(url=self._build_path(store_id, '', cart_id), data=data)", "docstring": "Update a specific cart.\n\n:param store_id: The store id.\n:type store_id: :py:class:`str`\n:param cart_id: The id for the cart.\n:type cart_id: :py:class:`str`\n:param data: The request body parameters\n:type data: :py:class:`dict`", "id": "f268:c0:m4"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(StoreCarts, self).__init__(*args, **kwargs)self.endpoint = ''self.store_id = Noneself.cart_id = Noneself.lines = StoreCartLines(self)", "docstring": "Initialize the endpoint", "id": "f268:c0:m0"} {"signature": "def all(self, store_id, get_all=False, **queryparams):", "body": "self.store_id = store_idself.cart_id = Noneif get_all:return self._iterate(url=self._build_path(store_id, ''), **queryparams)else:return self._mc_client._get(url=self._build_path(store_id, ''), **queryparams)", "docstring": "Get information about a store\u2019s carts.\n\n:param store_id: The store id.\n:type store_id: :py:class:`str`\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer", "id": "f268:c0:m2"} {"signature": "def get(self, store_id, cart_id, **queryparams):", "body": "self.store_id = store_idself.cart_id = cart_idreturn self._mc_client._get(url=self._build_path(store_id, '', cart_id), **queryparams)", "docstring": "Get information about a specific cart.\n\n:param store_id: The store id.\n:type store_id: :py:class:`str`\n:param cart_id: The id for the cart.\n:type cart_id: :py:class:`str`\n:param 
queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []", "id": "f268:c0:m3"} {"signature": "def update(self, store_id, product_id, data):", "body": "self.store_id = store_idself.product_id = product_idreturn self._mc_client._patch(url=self._build_path(store_id, '', product_id),data=data)", "docstring": "Update a product.\n\n:param store_id: The store id.\n:type store_id: :py:class:`str`\n:param product_id: The id for the product of a store.\n:type product_id: :py:class:`str`\n:param data: The request body parameters\n:type data: :py:class:`dict`", "id": "f270:c0:m4"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(StoreProducts, self).__init__(*args, **kwargs)self.endpoint = ''self.store_id = Noneself.product_id = Noneself.images = StoreProductImages(self)self.variants = StoreProductVariants(self)", "docstring": "Initialize the endpoint", "id": "f270:c0:m0"} {"signature": "def delete(self, store_id, product_id):", "body": "self.store_id = store_idself.product_id = product_idreturn self._mc_client._delete(url=self._build_path(store_id, '', product_id))", "docstring": "Delete a product.\n\n:param store_id: The store id.\n:type store_id: :py:class:`str`\n:param product_id: The id for the product of a store.\n:type product_id: :py:class:`str`", "id": "f270:c0:m5"} {"signature": "def all(self, list_id, get_all=False, **queryparams):", "body": "self.list_id = list_idself.month = Noneif get_all:return self._iterate(url=self._build_path(list_id, ''), **queryparams)else:return self._mc_client._get(url=self._build_path(list_id, ''), **queryparams)", "docstring": "Get a month-by-month summary of a specific list\u2019s growth activity.\n\n:param list_id: The unique id for the list.\n:type list_id: :py:class:`str`\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer", "id": "f271:c0:m1"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(Conversations, self).__init__(*args, **kwargs)self.endpoint = ''self.conversation_id = Noneself.messages = ConversationMessages(self)", "docstring": "Initialize the endpoint", "id": "f272:c0:m0"} {"signature": "def get(self, conversation_id, **queryparams):", "body": "self.conversation_id = conversation_idreturn self._mc_client._get(url=self._build_path(conversation_id), **queryparams)", "docstring": "Get details about an individual conversation.\n\n:param conversation_id: The unique id for the conversation.\n:type conversation_id: :py:class:`str`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []", "id": "f272:c0:m2"} {"signature": "def pause(self, workflow_id):", "body": "self.workflow_id = workflow_idreturn self._mc_client._post(url=self._build_path(workflow_id, ''))", "docstring": "Pause all emails in a specific Automation workflow.\n\n:param workflow_id: The unique id for the Automation workflow.\n:type workflow_id: :py:class:`str`", "id": "f274:c0:m1"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(ReportLocations, self).__init__(*args, **kwargs)self.endpoint = ''self.campaign_id = None", "docstring": "Initialize the endpoint", "id": "f277:c0:m0"} {"signature": "def all(self, campaign_id, get_all=False, **queryparams):", "body": "self.campaign_id = campaign_idif get_all:return 
self._iterate(url=self._build_path(campaign_id, ''), **queryparams)else:return self._mc_client._get(url=self._build_path(campaign_id, ''), **queryparams)", "docstring": "Get top open locations for a specific campaign.\n\n:param campaign_id: The unique id for the campaign.\n:type campaign_id: :py:class:`str`\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer", "id": "f277:c0:m1"} {"signature": "def all(self, campaign_id, **queryparams):", "body": "self.campaign_id = campaign_idreturn self._mc_client._get(url=self._build_path(campaign_id, ''), **queryparams)", "docstring": "Get statistics for the top-performing email domains in a campaign.\n\n:param campaign_id: The unique id for the campaign.\n:type campaign_id: :py:class:`str`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []", "id": "f278:c0:m1"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(StoreOrderLines, self).__init__(*args, **kwargs)self.endpoint = ''self.store_id = Noneself.order_id = Noneself.line_id = None", "docstring": "Initialize the endpoint", "id": "f279:c0:m0"} {"signature": "def all(self, store_id, order_id, get_all=False, **queryparams):", "body": "self.store_id = store_idself.order_id = order_idself.line_id = Noneif get_all:return self._iterate(url=self._build_path(store_id, '', order_id, ''), **queryparams)else:return self._mc_client._get(url=self._build_path(store_id, '', order_id, ''), **queryparams)", "docstring": "Get information about an order\u2019s line items.\n\n:param store_id: The store id.\n:type store_id: :py:class:`str`\n:param order_id: The id for the order in a store.\n:type order_id: :py:class:`str`\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer", "id": "f279:c0:m2"} {"signature": "def get(self, store_id, order_id, line_id, **queryparams):", "body": "self.store_id = store_idself.order_id = order_idself.line_id = line_idreturn self._mc_client._get(url=self._build_path(store_id, '', order_id, '', line_id), **queryparams)", "docstring": "Get information about a specific order line item.\n\n:param store_id: The store id.\n:type store_id: :py:class:`str`\n:param order_id: The id for the order in a store.\n:type order_id: :py:class:`str`\n:param line_id: The id for the line item of a cart.\n:type line_id: :py:class:`str`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []", "id": "f279:c0:m3"} {"signature": "def update(self, campaign_id, feedback_id, data):", "body": "self.campaign_id = campaign_idself.feedback_id = feedback_idif '' not in data:raise KeyError('')return self._mc_client._patch(url=self._build_path(campaign_id, '', feedback_id), data=data)", "docstring": "Update a specific feedback message for a campaign.\n\n:param campaign_id: The unique id for the campaign.\n:type campaign_id: :py:class:`str`\n:param feedback_id: The unique id for the feedback message.\n:type feedback_id: :py:class:`str`\n:param data: The request body parameters\n:type data: :py:class:`dict`\ndata = {\n \"message\": string*\n}", "id": "f280:c0:m4"} {"signature": "def 
all(self, campaign_id, get_all=False, **queryparams):", "body": "self.campaign_id = campaign_idself.feedback_id = Noneif get_all:return self._iterate(url=self._build_path(campaign_id, ''), **queryparams)else:return self._mc_client._get(url=self._build_path(campaign_id, ''), **queryparams)", "docstring": "Get team feedback while you\u2019re working together on a MailChimp\ncampaign.\n\n:param campaign_id: The unique id for the campaign.\n:type campaign_id: :py:class:`str`\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []", "id": "f280:c0:m2"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(CampaignFeedback, self).__init__(*args, **kwargs)self.endpoint = ''self.campaign_id = Noneself.feedback_id = None", "docstring": "Initialize the endpoint", "id": "f280:c0:m0"} {"signature": "def delete(self, campaign_id, feedback_id):", "body": "self.campaign_id = campaign_idself.feedback_id = feedback_idreturn self._mc_client._delete(url=self._build_path(campaign_id, '', feedback_id))", "docstring": "Remove a specific feedback message for a campaign.\n\n:param campaign_id: The unique id for the campaign.\n:type campaign_id: :py:class:`str`\n:param feedback_id: The unique id for the feedback message.\n:type feedback_id: :py:class:`str`", "id": "f280:c0:m5"} {"signature": "def all(self, campaign_id, get_all=False, **queryparams):", "body": "self.campaign_id = campaign_idif get_all:return self._iterate(url=self._build_path(campaign_id, ''), **queryparams)else:return self._mc_client._get(url=self._build_path(campaign_id, ''), **queryparams)", "docstring": "Get detailed information about any campaign emails that were opened by a list member.\n\n:param campaign_id: The unique id for the campaign.\n:type campaign_id: :py:class:`str`\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer\nqueryparams['since'] = str", "id": "f281:c0:m1"} {"signature": "def all(self, conversation_id, **queryparams):", "body": "self.conversation_id = conversation_idself.message_id = Nonereturn self._mc_client._get(url=self._build_path(conversation_id, ''), **queryparams)", "docstring": "Get messages from a specific conversation.\n\nThis endpoint does not currently support count and offset, preventing\nit from having the get_all parameter that most all() methods have.\n\n:param conversation_id: The unique id for the conversation.\n:type conversation_id: :py:class:`str`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['is_read'] = string\nqueryparams['before_timestamp'] = string\nqueryparams['since_timestamp'] = string", "id": "f282:c0:m2"} {"signature": "def create(self, conversation_id, data):", "body": "self.conversation_id = conversation_idif '' not in data:raise KeyError('')check_email(data[''])if '' not in data:raise KeyError('')if data[''] not in [True, False]:raise TypeError('')response = self._mc_client._post(url=self._build_path(conversation_id, ''), data=data)if response is not None:self.message_id = response['']else:self.message_id = Nonereturn response", "docstring": "Post a new message to a conversation.\n\n:param conversation_id: The unique id for the conversation.\n:type 
conversation_id: :py:class:`str`\n:param data: The request body parameters\n:type data: :py:class:`dict`\ndata = {\n \"from_email\": string*,\n \"read\": boolean*\n}", "id": "f282:c0:m1"} {"signature": "def delete(self, store_id, customer_id):", "body": "self.store_id = store_idself.customer_id = customer_idreturn self._mc_client._delete(url=self._build_path(store_id, '', customer_id))", "docstring": "Delete a customer from a store.\n\n:param store_id: The store id.\n:type store_id: :py:class:`str`\n:param customer_id: The id for the customer of a store.\n:type customer_id: :py:class:`str`", "id": "f283:c0:m6"} {"signature": "def create(self, store_id, data):", "body": "self.store_id = store_idif '' not in data:raise KeyError('')if '' not in data:raise KeyError('')check_email(data[''])if '' not in data:raise KeyError('')if data[''] not in [True, False]:raise TypeError('')response = self._mc_client._post(url=self._build_path(store_id, ''), data=data)if response is not None:self.customer_id = response['']else:self.customer_id = Nonereturn response", "docstring": "Add a new customer to a store.\n\n:param store_id: The store id.\n:type store_id: :py:class:`str`\n:param data: The request body parameters\n:type data: :py:class:`dict`\ndata = {\n \"id\": string*,\n \"email_address\": string*,\n \"opt_in_status\": boolean*\n}", "id": "f283:c0:m1"} {"signature": "def update(self, store_id, customer_id, data):", "body": "self.store_id = store_idself.customer_id = customer_idreturn self._mc_client._patch(url=self._build_path(store_id, '', customer_id), data=data)", "docstring": "Update a customer.\n\n:param store_id: The store id.\n:type store_id: :py:class:`str`\n:param customer_id: The id for the customer of a store.\n:type customer_id: :py:class:`str`\n:param data: The request body parameters\n:type data: :py:class:`dict`", "id": "f283:c0:m4"} {"signature": "def delete(self, list_id, category_id, interest_id):", "body": "self.list_id = list_idself.category_id = category_idself.interest_id = interest_idreturn self._mc_client._delete(url=self._build_path(list_id, '', category_id, '', interest_id))", "docstring": "Delete interests or group names in a specific category.\n\n:param list_id: The unique id for the list.\n:type list_id: :py:class:`str`\n:param category_id: The unique id for the interest category.\n:type category_id: :py:class:`str`\n:param interest_id: The specific interest or \u2018group name\u2019.\n:type interest_id: :py:class:`str`", "id": "f284:c0:m5"} {"signature": "def update(self, list_id, category_id, interest_id, data):", "body": "self.list_id = list_idself.category_id = category_idself.interest_id = interest_idif '' not in data:raise KeyError('')return self._mc_client._patch(url=self._build_path(list_id, '', category_id, '', interest_id),data=data)", "docstring": "Update interests or \u2018group names\u2019 for a specific category.\n\n:param list_id: The unique id for the list.\n:type list_id: :py:class:`str`\n:param category_id: The unique id for the interest category.\n:type category_id: :py:class:`str`\n:param interest_id: The specific interest or \u2018group name\u2019.\n:type interest_id: :py:class:`str`\n:param data: The request body parameters\n:type data: :py:class:`dict`\ndata = {\n \"name\": string*\n}", "id": "f284:c0:m4"} {"signature": "def get(self, list_id, category_id, interest_id, **queryparams):", "body": "self.list_id = list_idself.category_id = category_idself.interest_id = interest_idreturn self._mc_client._get(url=self._build_path(list_id, '', category_id, 
'', interest_id),**queryparams)", "docstring": "Get interests or \u2018group names\u2019 for a specific category.\n\n:param list_id: The unique id for the list.\n:type list_id: :py:class:`str`\n:param category_id: The unique id for the interest category.\n:type category_id: :py:class:`str`\n:param interest_id: The specific interest or \u2018group name\u2019.\n:type interest_id: :py:class:`str`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []", "id": "f284:c0:m3"} {"signature": "def all(self, list_id, category_id, get_all=False, **queryparams):", "body": "self.list_id = list_idself.category_id = category_idself.interest_id = Noneif get_all:return self._iterate(url=self._build_path(list_id, '', category_id, ''), **queryparams)else:return self._mc_client._get(url=self._build_path(list_id, '', category_id, ''),**queryparams)", "docstring": "Get a list of this category\u2019s interests.\n\n:param list_id: The unique id for the list.\n:type list_id: :py:class:`str`\n:param category_id: The unique id for the interest category.\n:type category_id: :py:class:`str`\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer", "id": "f284:c0:m2"} {"signature": "def create(self, list_id, category_id, data):", "body": "self.list_id = list_idself.category_id = category_idif '' not in data:raise KeyError('')response = self._mc_client._post(url=self._build_path(list_id, '', category_id, ''),data=data)if response is not None:self.interest_id = response['']else:self.interest_id = Nonereturn response", "docstring": "Create a new interest or \u2018group name\u2019 for a specific category.\n\nThe documentation lists only the name request body parameter so it is\nbeing documented and error-checked as if it were required based on the\ndescription of the method.\n\n:param list_id: The unique id for the list.\n:type list_id: :py:class:`str`\n:param category_id: The unique id for the interest category.\n:type category_id: :py:class:`str`\n:param data: The request body parameters\n:type data: :py:class:`dict`\ndata = {\n \"name\": string*\n}", "id": "f284:c0:m1"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(ListInterestCategoryInterest, self).__init__(*args, **kwargs)self.endpoint = ''self.list_id = Noneself.category_id = Noneself.interest_id = None", "docstring": "Initialize the endpoint", "id": "f284:c0:m0"} {"signature": "def get(self, folder_id, **queryparams):", "body": "self.folder_id = folder_idreturn self._mc_client._get(url=self._build_path(folder_id), **queryparams)", "docstring": "Get information about a specific folder used to organize campaigns.\n\n:param folder_id: The unique id for the campaign folder.\n:type folder_id: :py:class:`str`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []", "id": "f285:c0:m3"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(CampaignFolders, self).__init__(*args, **kwargs)self.endpoint = ''self.folder_id = None", "docstring": "Initialize the endpoint", "id": "f285:c0:m0"} {"signature": "def all(self, get_all=False, **queryparams):", "body": "self.folder_id = Noneif get_all:return self._iterate(url=self._build_path(), **queryparams)else:return self._mc_client._get(url=self._build_path(), **queryparams)", 
"docstring": "Get all folders used to organize campaigns.\n\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer", "id": "f285:c0:m2"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(ListActivity, self).__init__(*args, **kwargs)self.endpoint = ''self.list_id = None", "docstring": "Initialize the endpoint", "id": "f286:c0:m0"} {"signature": "def all(self, campaign_id, **queryparams):", "body": "self.campaign_id = campaign_idreturn self._mc_client._get(url=self._build_path(campaign_id, ''), **queryparams)", "docstring": "Get feedback based on a campaign\u2019s statistics. Advice feedback is\nbased on campaign stats like opens, clicks, unsubscribes, bounces, and\nmore.\n\n:param campaign_id: The unique id for the campaign.\n:type campaign_id: :py:class:`str`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []", "id": "f288:c0:m1"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(ReportCampaignAdvice, self).__init__(*args, **kwargs)self.endpoint = ''self.campaign_id = None", "docstring": "Initialize the endpoint", "id": "f288:c0:m0"} {"signature": "def pause(self, campaign_id):", "body": "self.campaign_id = campaign_idreturn self._mc_client._post(url=self._build_path(campaign_id, ''))", "docstring": "Pause an RSS-Driven campaign.\n\n:param campaign_id: The unique id for the campaign.\n:type campaign_id: :py:class:`str`", "id": "f289:c0:m2"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(CampaignActions, self).__init__(*args, **kwargs)self.endpoint = ''self.campaign_id = None", "docstring": "Initialize the endpoint", "id": "f289:c0:m0"} {"signature": "def send(self, campaign_id):", "body": "self.campaign_id = campaign_idreturn self._mc_client._post(url=self._build_path(campaign_id, ''))", "docstring": "Send a MailChimp campaign. For RSS Campaigns, the campaign will send\naccording to its schedule. 
All other campaigns will send immediately.\n\n:param campaign_id: The unique id for the campaign.\n:type campaign_id: :py:class:`str`", "id": "f289:c0:m6"} {"signature": "def update(self, campaign_id, data):", "body": "self.campaign_id = campaign_idreturn self._mc_client._put(url=self._build_path(campaign_id, ''), data=data)", "docstring": "Set the content for a campaign.\n\n:param campaign_id: The unique id for the campaign.\n:type campaign_id: :py:class:`str`\n:param data: The request body parameters\n:type data: :py:class:`dict`", "id": "f290:c0:m2"} {"signature": "def get(self, campaign_id, **queryparams):", "body": "self.campaign_id = campaign_idreturn self._mc_client._get(url=self._build_path(campaign_id, ''), **queryparams)", "docstring": "Get the HTML and plain-text content for a campaign.\n\n:param campaign_id: The unique id for the campaign.\n:type campaign_id: :py:class:`str`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []", "id": "f290:c0:m1"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(ReportClickDetailReports, self).__init__(*args, **kwargs)self.endpoint = ''self.campaign_id = Noneself.link_id = Noneself.members = ReportClickDetailMembers(self)", "docstring": "Initialize the endpoint", "id": "f291:c0:m0"} {"signature": "def get(self, campaign_id, link_id, **queryparams):", "body": "self.campaign_id = campaign_idself.link_id = link_idreturn self._mc_client._get(url=self._build_path(campaign_id, '', link_id), **queryparams)", "docstring": "Get click details for a specific link in a campaign.\n\n:param campaign_id: The unique id for the campaign.\n:type campaign_id: :py:class:`str`\n:param link_id: The id for the link.\n:type link_id: :py:class:`str`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []", "id": "f291:c0:m2"} {"signature": "def all(self, store_id, cart_id, get_all=False, **queryparams):", "body": "self.store_id = store_idself.cart_id = cart_idself.line_id = Noneif get_all:return self._iterate(url=self._build_path(store_id, '', cart_id, ''), **queryparams)else:return self._mc_client._get(url=self._build_path(store_id, '', cart_id, ''), **queryparams)", "docstring": "Get information about a cart\u2019s line items.\n\n:param store_id: The store id.\n:type store_id: :py:class:`str`\n:param cart_id: The id for the cart.\n:type cart_id: :py:class:`str`\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer", "id": "f292:c0:m2"} {"signature": "def delete(self, store_id, cart_id, line_id):", "body": "self.store_id = store_idself.cart_id = cart_idself.line_id = line_idreturn self._mc_client._delete(url=self._build_path(store_id, '', cart_id, '', line_id))", "docstring": "Delete a specific cart line item.\n\n:param store_id: The store id.\n:type store_id: :py:class:`str`\n:param cart_id: The id for the cart.\n:type cart_id: :py:class:`str`\n:param line_id: The id for the line item of a cart.\n:type line_id: :py:class:`str`", "id": "f292:c0:m5"} {"signature": "def get(self, **queryparams):", "body": "if '' in queryparams:self.list_id = queryparams['']else:self.list_id = Nonereturn self._mc_client._get(url=self._build_path(), **queryparams)", "docstring": "Search for list members. 
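Unlike the path-based endpoints above, the search endpoint whose docstring continues below carries its list restriction in the query string. A sketch, assuming the mailchimp3 client, a placeholder key and list id, and the exact_matches shape MailChimp's search-members resource returns:

```python
from mailchimp3 import MailChimp

client = MailChimp(mc_api='0123456789abcdef0123456789abcdef-us1')  # placeholder key

# Search across every list in the account, or pin the search to one
# list with the list_id queryparam.
hits = client.search_members.get(query='ada@example.com', list_id='a1b2c3d4')
for member in hits['exact_matches']['members']:
    print(member['email_address'], member['status'])
```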
This search can be restricted to a specific\nlist, or can be used to search across all lists in an account.\n\n:param queryparams: The query string parameters\nqueryparams['fields'] = array\nqueryparams['exclude_fields'] = array\nqueryparams['query'] = string\nqueryparams['list_id'] = string\nqueryparams['offset'] = integer", "id": "f293:c0:m1"} {"signature": "def get(self, list_id, segment_id, **queryparams):", "body": "self.list_id = list_idself.segment_id = segment_idreturn self._mc_client._get(url=self._build_path(list_id, '', segment_id), **queryparams)", "docstring": "Get information about a specific segment.\n\n:param list_id: The unique id for the list.\n:type list_id: :py:class:`str`\n:param segment_id: The unique id for the segment.\n:type segment_id: :py:class:`str`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []", "id": "f294:c0:m3"} {"signature": "def create(self, store_id, data):", "body": "self.store_id = store_idif '' not in data:raise KeyError('')if '' not in data:raise KeyError('')if '' not in data['']:raise KeyError('')if '' not in data:raise KeyError('')if not re.match(r\"\", data['']):raise ValueError('')if '' not in data:raise KeyError('')if '' not in data:raise KeyError('')for line in data['']:if '' not in line:raise KeyError('')if '' not in line:raise KeyError('')if '' not in line:raise KeyError('')if '' not in line:raise KeyError('')if '' not in line:raise KeyError('')response = self._mc_client._post(url=self._build_path(store_id, ''), data=data)if response is not None:self.order_id = response['']else:self.order_id = Nonereturn response", "docstring": "Add a new order to a store.\n\n:param store_id: The store id.\n:type store_id: :py:class:`str`\n:param data: The request body parameters\n:type data: :py:class:`dict`\ndata = {\n \"id\": string*,\n \"customer\": object*\n {\n \"id\": string*\n },\n \"currency_code\": string*,\n \"order_total\": number*,\n \"lines\": array*\n [\n {\n \"id\": string*,\n \"product_id\": string*,\n \"product_variant_id\": string*,\n \"quantity\": integer*,\n \"price\": number*\n }\n ]\n}", "id": "f295:c0:m1"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(StoreOrders, self).__init__(*args, **kwargs)self.endpoint = ''self.store_id = Noneself.order_id = Noneself.lines = StoreOrderLines(self)", "docstring": "Initialize the endpoint", "id": "f295:c0:m0"} {"signature": "def all(self, store_id, get_all=False, **queryparams):", "body": "self.store_id = store_idself.order_id = Noneif get_all:return self._iterate(url=self._build_path(store_id, ''), **queryparams)else:return self._mc_client._get(url=self._build_path(store_id, ''), **queryparams)", "docstring": "Get information about a store\u2019s orders.\n\n:param store_id: The store id.\n:type store_id: :py:class:`str`\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer\nqueryparams['customer_id'] = string", "id": "f295:c0:m2"} {"signature": "def start(self, workflow_id, email_id):", "body": "self.workflow_id = workflow_idself.email_id = email_idreturn self._mc_client._post(url=self._build_path(workflow_id, '', email_id, ''))", "docstring": "Start an automated email.\n\n:param workflow_id: The unique id for the Automation workflow.\n:type workflow_id: :py:class:`str`\n:param email_id: The unique id for 
the Automation workflow email.\n:type email_id: :py:class:`str`", "id": "f296:c0:m2"} {"signature": "def create(self, store_id, promo_rule_id, data):", "body": "self.store_id = store_idif '' not in data:raise KeyError('')if '' not in data:raise KeyError('')if '' not in data:raise KeyError('')response = self._mc_client._post(url=self._build_path(store_id, '', promo_rule_id, ''), data=data)if response is not None:return response", "docstring": "Add a new promo code to a store.\n\n:param store_id: The store id\n:type store_id: :py:class:`str`\n:param data: The request body parameters\n:type data: :py:class:`dict`\ndata = {\n \"id\": string*,\n \"code\": string*,\n \"redemption_url\": string*,\n \"usage_count\": string,\n \"enabled\": boolean,\n \"created_at_foreign\": string,\n \"updated_at_foreign\": string,\n}", "id": "f297:c0:m1"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(StorePromoCodes, self).__init__(*args, **kwargs)self.endpoint = ''self.store_id = None", "docstring": "Initialize the endpoint\n:param args:\n:param kwargs:", "id": "f297:c0:m0"} {"signature": "def all(self, store_id, promo_rule_id, get_all=False, **queryparams):", "body": "self.store_id = store_idself.promo_rule_id = promo_rule_idif get_all:return self._iterate(url=self._build_path(store_id, '', promo_rule_id, ''), **queryparams)else:return self._mc_client._get(url=self._build_path(store_id, '', promo_rule_id, ''), **queryparams)", "docstring": "Get information about a store\u2019s promo codes.\n\n:param store_id: The store's id\n:type store_id: `str`\n:param promo_rule_id: The store promo rule id\n:type promo_rule_id: `str`\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer", "id": "f297:c0:m2"} {"signature": "def get(self):", "body": "return self._mc_client._get(url=self._build_path())", "docstring": "A health check for the API that won\u2019t return any account-specific information.", "id": "f298:c0:m1"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(Ping, self).__init__(*args, **kwargs)self.endpoint = ''", "docstring": "Initialize the endpoint", "id": "f298:c0:m0"} {"signature": "def delete(self, list_id, category_id):", "body": "self.list_id = list_idself.category_id = category_idreturn self._mc_client._delete(url=self._build_path(list_id, '', category_id))", "docstring": "Delete a specific interest category.\n\n:param list_id: The unique id for the list.\n:type list_id: :py:class:`str`\n:param category_id: The unique id for the interest category.\n:type category_id: :py:class:`str`", "id": "f299:c0:m5"} {"signature": "def all(self, list_id, get_all=False, **queryparams):", "body": "self.list_id = list_idself.category_id = Noneif get_all:return self._iterate(url=self._build_path(list_id, ''), **queryparams)else:return self._mc_client._get(url=self._build_path(list_id, ''), **queryparams)", "docstring": "Get information about a list\u2019s interest categories.\n\n:param list_id: The unique id for the list.\n:type list_id: :py:class:`str`\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer\nqueryparams['type'] = string", "id": "f299:c0:m2"} {"signature": "def create(self, list_id, data):", "body": 
"self.list_id = list_idif '' not in data:raise KeyError('')if '' not in data:raise KeyError('')if data[''] not in ['', '', '', '']:raise ValueError('''')response = self._mc_client._post(url=self._build_path(list_id, ''), data=data)if response is not None:self.category_id = response['']else:self.category_id = Nonereturn response", "docstring": "Create a new interest category.\n\n:param list_id: The unique id for the list.\n:type list_id: :py:class:`str`\n:param data: The request body parameters\n:type data: :py:class:`dict`\ndata = {\n \"title\": string*,\n \"type\": string* (Must be one of 'checkboxes', 'dropdown', 'radio', or 'hidden')\n}", "id": "f299:c0:m1"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(ListInterestCategories, self).__init__(*args, **kwargs)self.endpoint = ''self.list_id = Noneself.category_id = Noneself.interests = ListInterestCategoryInterest(self)", "docstring": "Initialize the endpoint", "id": "f299:c0:m0"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(ReportGoogleAnalytics, self).__init__(*args, **kwargs)self.endpoint = ''self.campaign_id = Noneself.profile_id = None", "docstring": "Initialize the endpoint", "id": "f300:c0:m0"} {"signature": "def all(self, campaign_id, get_all=False, **queryparams):", "body": "self.campaign_id = campaign_idif get_all:return self._iterate(url=self._build_path(campaign_id, ''), **queryparams)else:return self._mc_client._get(url=self._build_path(campaign_id, ''), **queryparams)", "docstring": "Get a summary of Google Analytics reports for a specific campaign.\n\n:param campaign_id: The unique id for the campaign.\n:type campaign_id: :py:class:`str`\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []", "id": "f300:c0:m1"} {"signature": "def delete(self, list_id, merge_id):", "body": "self.list_id = list_idself.merge_id = merge_idreturn self._mc_client._delete(url=self._build_path(list_id, '', merge_id))", "docstring": "Delete a specific merge field in a list.\n\n:param list_id: The unique id for the list.\n:type list_id: :py:class:`str`\n:param merge_id: The id for the merge field.\n:type merge_id: :py:class:`str`", "id": "f301:c0:m5"} {"signature": "def all(self, list_id, get_all=False, **queryparams):", "body": "self.list_id = list_idself.merge_id = Noneif get_all:return self._iterate(url=self._build_path(list_id, ''), **queryparams)else:return self._mc_client._get(url=self._build_path(list_id, ''), **queryparams)", "docstring": "Get a list of all merge fields (formerly merge vars) for a list.\n\n:param list_id: The unique id for the list.\n:type list_id: :py:class:`str`\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer\nqueryparams['type'] = string\nqueryparams['required'] = boolean", "id": "f301:c0:m2"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(ListMergeFields, self).__init__(*args, **kwargs)self.endpoint = ''self.list_id = Noneself.merge_id = None", "docstring": "Initialize the endpoint", "id": "f301:c0:m0"} {"signature": "def create(self, list_id, data):", "body": "self.list_id = list_idif '' not in data:raise KeyError('')if '' not in data:raise KeyError('')response = 
self._mc_client._post(url=self._build_path(list_id, ''), data=data)if response is not None:self.merge_id = response['']else:self.merge_id = Nonereturn response", "docstring": "Add a new merge field for a specific list.\n\n:param list_id: The unique id for the list.\n:type list_id: :py:class:`str`\n:param data: The request body parameters\n:type data: :py:class:`dict`\ndata = {\n \"name\": string*,\n \"type\": string*\n}", "id": "f301:c0:m1"} {"signature": "def all(self, campaign_id, get_all=False, **queryparams):", "body": "self.campaign_id = campaign_idself.subscriber_hash = Noneif get_all:return self._iterate(url=self._build_path(campaign_id, ''), **queryparams)else:return self._mc_client._get(url=self._build_path(campaign_id, ''), **queryparams)", "docstring": "Get information about members who have unsubscribed from a specific\ncampaign.\n\n:param campaign_id: The unique id for the campaign.\n:type campaign_id: :py:class:`str`\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer", "id": "f302:c0:m1"} {"signature": "def get(self, campaign_id, subscriber_hash, **queryparams):", "body": "subscriber_hash = check_subscriber_hash(subscriber_hash)self.campaign_id = campaign_idself.subscriber_hash = subscriber_hashreturn self._mc_client._get(url=self._build_path(campaign_id, '', subscriber_hash), **queryparams)", "docstring": "Get information about a specific list member who unsubscribed from a\ncampaign.\n\n:param campaign_id: The unique id for the campaign.\n:type campaign_id: :py:class:`str`\n:param subscriber_hash: The MD5 hash of the lowercase version of the\n list member\u2019s email address.\n:type subscriber_hash: :py:class:`str`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []", "id": "f302:c0:m2"} {"signature": "def update(self, list_id, subscriber_hash, data):", "body": "subscriber_hash = check_subscriber_hash(subscriber_hash)self.list_id = list_idself.subscriber_hash = subscriber_hashreturn self._mc_client._patch(url=self._build_path(list_id, '', subscriber_hash), data=data)", "docstring": "Update information for a specific list member.\n\n:param list_id: The unique id for the list.\n:type list_id: :py:class:`str`\n:param subscriber_hash: The MD5 hash of the lowercase version of the\n list member\u2019s email address.\n:type subscriber_hash: :py:class:`str`\n:param data: The request body parameters\n:type data: :py:class:`dict`", "id": "f303:c0:m4"} {"signature": "def delete(self, list_id, subscriber_hash):", "body": "subscriber_hash = check_subscriber_hash(subscriber_hash)self.list_id = list_idself.subscriber_hash = subscriber_hashreturn self._mc_client._delete(url=self._build_path(list_id, '', subscriber_hash))", "docstring": "Delete a member from a list.\n\n:param list_id: The unique id for the list.\n:type list_id: :py:class:`str`\n:param subscriber_hash: The MD5 hash of the lowercase version of the\n list member\u2019s email address.\n:type subscriber_hash: :py:class:`str`", "id": "f303:c0:m6"} {"signature": "def create(self, list_id, data):", "body": "self.list_id = list_idif '' not in data:raise KeyError('')if data[''] not in ['', '', '', '', '']:raise ValueError('''')if '' not in data:raise KeyError('')check_email(data[''])response = self._mc_client._post(url=self._build_path(list_id, ''), data=data)if 
response is not None:self.subscriber_hash = response['']else:self.subscriber_hash = Nonereturn response", "docstring": "Add a new member to the list.\n\n:param list_id: The unique id for the list.\n:type list_id: :py:class:`str`\n:param data: The request body parameters\n:type data: :py:class:`dict`\ndata = {\n \"status\": string*, (Must be one of 'subscribed', 'unsubscribed', 'cleaned',\n 'pending', or 'transactional')\n \"email_address\": string*\n}", "id": "f303:c0:m1"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(ListMembers, self).__init__(*args, **kwargs)self.endpoint = ''self.list_id = Noneself.subscriber_hash = Noneself.activity = ListMemberActivity(self)self.goals = ListMemberGoals(self)self.notes = ListMemberNotes(self)", "docstring": "Initialize the endpoint", "id": "f303:c0:m0"} {"signature": "def all(self, get_all=False, **queryparams):", "body": "self.batch_id = Noneself.operation_status = Noneif get_all:return self._iterate(url=self._build_path(), **queryparams)else:return self._mc_client._get(url=self._build_path(), **queryparams)", "docstring": "Get a summary of batch requests that have been made.\n\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer", "id": "f304:c0:m2"} {"signature": "def create(self, data):", "body": "if '' not in data:raise KeyError('')for op in data['']:if '' not in op:raise KeyError('')if op[''] not in ['', '', '', '', '']:raise ValueError(''''.format(op['']))if '' not in op:raise KeyError('')return self._mc_client._post(url=self._build_path(), data=data)", "docstring": "Begin processing a batch operations request.\n\n:param data: The request body parameters\n:type data: :py:class:`dict`\ndata = {\n \"operations\": array*\n [\n {\n \"method\": string* (Must be one of \"GET\", \"POST\", \"PUT\", \"PATCH\", or \"DELETE\")\n \"path\": string*,\n }\n ]\n}", "id": "f304:c0:m1"} {"signature": "def get(self, batch_id, **queryparams):", "body": "self.batch_id = batch_idself.operation_status = Nonereturn self._mc_client._get(url=self._build_path(batch_id), **queryparams)", "docstring": "Get the status of a batch request.\n\n:param batch_id: The unique id for the batch operation.\n:type batch_id: :py:class:`str`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []", "id": "f304:c0:m3"} {"signature": "def delete(self, batch_id):", "body": "self.batch_id = batch_idself.operation_status = Nonereturn self._mc_client._delete(url=self._build_path(batch_id))", "docstring": "Stops a batch request from running. Since only one batch request is\nrun at a time, this can be used to cancel a long running request. 
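The batch endpoints above (create, all, get, and the delete whose docstring continues below) support a queue-then-poll workflow. A sketch, assuming the mailchimp3 client and a hypothetical list id; per MailChimp's v3 batch API, each operation's nested body is a JSON-encoded string:

```python
import json

from mailchimp3 import MailChimp

client = MailChimp(mc_api='0123456789abcdef0123456789abcdef-us1')  # placeholder key

# Queue two operations in a single batch request.
batch = client.batches.create(data={
    'operations': [
        {'method': 'GET', 'path': '/lists'},
        {'method': 'POST', 'path': '/lists/a1b2c3d4/members',
         'body': json.dumps({'email_address': 'ada@example.com',
                             'status': 'subscribed'})},
    ]
})

# Poll until MailChimp reports the batch finished; cancelling instead
# would be client.batches.delete(batch_id=batch['id']).
status = client.batches.get(batch_id=batch['id'])
print(status['status'], status['finished_operations'])
```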
The\nresults of any completed operations will not be available after this\ncall.\n\n:param batch_id: The unique id for the batch operation.\n:type batch_id: :py:class:`str`", "id": "f304:c0:m4"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(BatchOperations, self).__init__(*args, **kwargs)self.endpoint = ''self.batch_id = Noneself.operation_status = None", "docstring": "Initialize the endpoint", "id": "f304:c0:m0"} {"signature": "def create(self, data):", "body": "if '' not in data:raise KeyError('')response = self._mc_client._post(url=self._build_path(), data=data)if response is not None:self.folder_id = response['']else:self.folder_id = Nonereturn response", "docstring": "Create a new folder in the File Manager.\n\n:param data: The request body parameters\n:type data: :py:class:`dict`\ndata = {\n \"name\": string*\n}", "id": "f305:c0:m1"} {"signature": "def update(self, list_id, subscriber_hash, note_id, data):", "body": "subscriber_hash = check_subscriber_hash(subscriber_hash)self.list_id = list_idself.subscriber_hash = subscriber_hashself.note_id = note_idif '' not in data:raise KeyError('')return self._mc_client._patch(url=self._build_path(list_id, '', subscriber_hash, '', note_id),data=data)", "docstring": "Update a specific note for a specific list member.\n\nThe documentation lists only the note request body parameter so it is\nbeing documented and error-checked as if it were required based on the\ndescription of the method.\n\n:param list_id: The unique id for the list.\n:type list_id: :py:class:`str`\n:param subscriber_hash: The MD5 hash of the lowercase version of the\n list member\u2019s email address.\n:type subscriber_hash: :py:class:`str`\n:param note_id: The id for the note.\n:type note_id: :py:class:`str`\n:param data: The request body parameters\n:type data: :py:class:`dict`\ndata = {\n \"note\": string*\n}", "id": "f306:c0:m4"} {"signature": "def get(self, list_id, subscriber_hash, note_id, **queryparams):", "body": "subscriber_hash = check_subscriber_hash(subscriber_hash)self.list_id = list_idself.subscriber_hash = subscriber_hashself.note_id = note_idreturn self._mc_client._get(url=self._build_path(list_id, '', subscriber_hash, '', note_id),**queryparams)", "docstring": "Get a specific note for a specific list member.\n\n:param list_id: The unique id for the list.\n:type list_id: :py:class:`str`\n:param subscriber_hash: The MD5 hash of the lowercase version of the\n list member\u2019s email address.\n:type subscriber_hash: :py:class:`str`\n:param note_id: The id for the note.\n:type note_id: :py:class:`str`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []", "id": "f306:c0:m3"} {"signature": "def all(self, list_id, subscriber_hash, get_all=False, **queryparams):", "body": "subscriber_hash = check_subscriber_hash(subscriber_hash)self.list_id = list_idself.subscriber_hash = subscriber_hashself.note_id = Noneif get_all:return self._iterate(url=self._build_path(list_id, '', subscriber_hash, ''), **queryparams)else:return self._mc_client._get(url=self._build_path(list_id, '', subscriber_hash, ''), **queryparams)", "docstring": "Get recent notes for a specific list member.\n\n:param list_id: The unique id for the list.\n:type list_id: :py:class:`str`\n:param subscriber_hash: The MD5 hash of the lowercase version of the\n list member\u2019s email address.\n:type subscriber_hash: :py:class:`str`\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param 
queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer", "id": "f306:c0:m2"} {"signature": "def all(self, store_id, get_all=False, **queryparams):", "body": "self.store_id = store_idif get_all:return self._iterate(url=self._build_path(store_id, ''), **queryparams)else:return self._mc_client._get(url=self._build_path(store_id, ''), **queryparams)", "docstring": "Get information about a store\u2019s promo rules.\n\n:param store_id: The store's id\n:type store_id: `str`\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer", "id": "f307:c0:m2"} {"signature": "def get(self, store_id, promo_rule_id, **queryparams):", "body": "self.store_id = store_idself.promo_rule_id = promo_rule_idreturn self._mc_client._get(url=self._build_path(store_id, '', promo_rule_id), **queryparams)", "docstring": "Get information about a specific promo rule.\n\n:param store_id: The store's id\n:type store_id: `string`\n:param promo_rule_id: The id for the promo rule of a store.\n:type promo_rule_id: `string`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer", "id": "f307:c0:m3"} {"signature": "def update(self, store_id, promo_rule_id, data):", "body": "self.store_id = store_idself.promo_rule_id = promo_rule_idreturn self._mc_client._patch(url=self._build_path(store_id, '', promo_rule_id), data=data)", "docstring": "Update a promo rule.\n\n:param store_id: The store id\n:type store_id: :py:class:`str`\n:param promo_rule_id: The id for the promo rule of a store.\n:type promo_rule_id: :py:class:`str`\n:param data: The request body parameters\n:type data: :py:class:`dict`\ndata = {\n \"id\": string,\n \"title\": string,\n \"description\": string,\n \"starts_at\": string,\n \"ends_at\": string,\n \"amount\": number,\n \"type\": string,\n \"target\": string,\n \"enabled\": boolean,\n \"created_at_foreign\": string,\n \"updated_at_foreign\": string,\n}", "id": "f307:c0:m4"} {"signature": "def all(self, store_id, product_id, get_all=False, **queryparams):", "body": "self.store_id = store_idself.product_id = product_idself.image_id = Noneif get_all:return self._iterate(url=self._build_path(store_id, '', product_id, ''), **queryparams)else:return self._mc_client._get(url=self._build_path(store_id, '', product_id, ''), **queryparams)", "docstring": "Get information about a product\u2019s images.\n\n:param store_id: The store id.\n:type store_id: :py:class:`str`\n:param product_id: The id for the product of a store.\n:type product_id: :py:class:`str`\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer", "id": "f308:c0:m2"} {"signature": "def create(self, store_id, product_id, data):", "body": "self.store_id = store_idself.product_id = product_idif '' not in data:raise KeyError('')if '' not in data:raise KeyError('')response = self._mc_client._post(url=self._build_path(store_id, '', product_id, ''), data=data)if response is not None:self.image_id = response['']else:self.image_id = Nonereturn response", "docstring": "Add a new image to the product.\n\n:param store_id: The store id.\n:type store_id: 
:py:class:`str`\n:param product_id: The id for the product of a store.\n:type product_id: :py:class:`str`\n:param data: The request body parameters\n:type data: :py:class:`dict`\ndata = {\n \"id\": string*,\n \"url\": string*\n}", "id": "f308:c0:m1"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(ListMemberGoals, self).__init__(*args, **kwargs)self.endpoint = ''self.list_id = Noneself.subscriber_hash = None", "docstring": "Initialize the endpoint", "id": "f309:c0:m0"} {"signature": "def all(self, list_id, subscriber_hash, **queryparams):", "body": "subscriber_hash = check_subscriber_hash(subscriber_hash)self.list_id = list_idself.subscriber_hash = subscriber_hashreturn self._mc_client._get(url=self._build_path(list_id, '', subscriber_hash, ''), **queryparams)", "docstring": "Get the last 50 Goal events for a member on a specific list.\n\n:param list_id: The unique id for the list.\n:type list_id: :py:class:`str`\n:param subscriber_hash: The MD5 hash of the lowercase version of the\n list member\u2019s email address.\n:type subscriber_hash: :py:class:`str`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []", "id": "f309:c0:m1"} {"signature": "def all(self, store_id, product_id, get_all=False, **queryparams):", "body": "self.store_id = store_idself.product_id = product_idself.variant_id = Noneif get_all:return self._iterate(url=self._build_path(store_id, '', product_id, ''), **queryparams)else:return self._mc_client._get(url=self._build_path(store_id, '', product_id, ''),**queryparams)", "docstring": "Get information about a product\u2019s variants.\n\n:param store_id: The store id.\n:type store_id: :py:class:`str`\n:param product_id: The id for the product of a store.\n:type product_id: :py:class:`str`\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer", "id": "f310:c0:m2"} {"signature": "def delete(self, store_id, product_id, variant_id):", "body": "self.store_id = store_idself.product_id = product_idself.variant_id = variant_idreturn self._mc_client._delete(url=self._build_path(store_id, '', product_id, '', variant_id))", "docstring": "Delete a product variant.\n\n:param store_id: The store id.\n:type store_id: :py:class:`str`\n:param product_id: The id for the product of a store.\n:type product_id: :py:class:`str`\n:param variant_id: The id for the product variant.\n:type variant_id: :py:class:`str`", "id": "f310:c0:m6"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(StoreProductVariants, self).__init__(*args, **kwargs)self.endpoint = ''self.store_id = Noneself.product_id = Noneself.variant_id = None", "docstring": "Initialize the endpoint", "id": "f310:c0:m0"} {"signature": "def update(self, store_id, product_id, variant_id, data):", "body": "self.store_id = store_idself.product_id = product_idself.variant_id = variant_idreturn self._mc_client._patch(url=self._build_path(store_id, '', product_id, '', variant_id),data=data)", "docstring": "Update a product variant.\n\n:param store_id: The store id.\n:type store_id: :py:class:`str`\n:param product_id: The id for the product of a store.\n:type product_id: :py:class:`str`\n:param variant_id: The id for the product variant.\n:type variant_id: :py:class:`str`\n:param data: The request body parameters\n:type data: :py:class:`dict`", 
"id": "f310:c0:m4"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(Root, self).__init__(*args, **kwargs)self.endpoint = ''", "docstring": "Initialize the endpoint", "id": "f311:c0:m0"} {"signature": "def all(self, campaign_id, get_all=False, **queryparams):", "body": "self.campaign_id = campaign_idself.subscriber_hash = Noneif get_all:return self._iterate(url=self._build_path(campaign_id, ''), **queryparams)else:return self._mc_client._get(url=self._build_path(campaign_id, ''), **queryparams)", "docstring": "Get information about campaign recipients.\n\n:param campaign_id: The unique id for the campaign.\n:type campaign_id: :py:class:`str`\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer", "id": "f312:c0:m1"} {"signature": "def get(self, campaign_id, subscriber_hash, **queryparams):", "body": "subscriber_hash = check_subscriber_hash(subscriber_hash)self.campaign_id = campaign_idself.subscriber_hash = subscriber_hashreturn self._mc_client._get(url=self._build_path(campaign_id, '', subscriber_hash), **queryparams)", "docstring": "Get information about a specific campaign recipient.\n\n:param campaign_id: The unique id for the campaign.\n:type campaign_id: :py:class:`str`\n:param subscriber_hash: The MD5 hash of the lowercase version of the\n list member\u2019s email address.\n:type subscriber_hash: :py:class:`str`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []", "id": "f312:c0:m2"} {"signature": "def get(self, list_id, **queryparams):", "body": "self.list_id = list_idreturn self._mc_client._get(url=self._build_path(list_id), **queryparams)", "docstring": "Get information about a specific list in your MailChimp account.\nResults include list members who have signed up but haven\u2019t confirmed\ntheir subscription yet and unsubscribed or cleaned.\n\n:param list_id: The unique id for the list.\n:type list_id: :py:class:`str`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []", "id": "f313:c0:m4"} {"signature": "def all(self, get_all=False, **queryparams):", "body": "self.list_id = Noneif get_all:return self._iterate(url=self._build_path(), **queryparams)else:return self._mc_client._get(url=self._build_path(), **queryparams)", "docstring": "Get information about all lists in the account.\n\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer\nqueryparams['before_date_created'] = string\nqueryparams['since_date_created'] = string\nqueryparams['before_campaign_last_sent'] = string\nqueryparams['since_campaign_last_sent'] = string\nqueryparams['email'] = string\nqueryparams['sort_field'] = string (Must be 'date_created')\nqueryparams['sort_dir'] = string (Must be one of 'ASC' or 'DESC')", "id": "f313:c0:m3"} {"signature": "def __init__(self, *args, **kwargs):", "body": "super(CampaignSendChecklist, self).__init__(*args, **kwargs)self.endpoint = ''self.campaign_id = None", "docstring": "Initialize the endpoint", "id": "f314:c0:m0"} {"signature": "def get(self, campaign_id, **queryparams):", "body": "self.campaign_id = campaign_idreturn 
self._mc_client._get(url=self._build_path(campaign_id, ''), **queryparams)", "docstring": "Review the send checklist for a campaign, and resolve any issues\nbefore sending.\n\n:param campaign_id: The unique id for the campaign.\n:type campaign_id: :py:class:`str`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []", "id": "f314:c0:m1"} {"signature": "def create(self, data):", "body": "if '' not in data:raise KeyError('')if '' not in data['']:raise KeyError('')if '' not in data:raise KeyError('')if '' not in data['']:raise KeyError('')if '' not in data['']:raise KeyError('')if '' not in data['']:raise KeyError('')check_email(data[''][''])if '' not in data:raise KeyError('')if not data[''] in ['', '', '', '', '']:raise ValueError('')if data[''] == '':if '' not in data:raise KeyError('')if '' not in data['']:raise KeyError('')if data[''][''] not in ['', '', '', '']:raise ValueError('''')if data[''] == '':if '' not in data:raise KeyError('')if '' not in data['']:raise KeyError('')if not data[''][''] in ['', '', '']:raise ValueError('')response = self._mc_client._post(url=self._build_path(), data=data)if response is not None:self.campaign_id = response['']else:self.campaign_id = Nonereturn response", "docstring": "Create a new MailChimp campaign.\n\nThe ValueError raised by an invalid type in data does not mention\n'absplit' as a potential value because the documentation indicates\nthat the absplit type has been deprecated.\n\n:param data: The request body parameters\n:type data: :py:class:`dict`\ndata = {\n \"recipients\": object*\n {\n \"list_id\": string*\n },\n \"settings\": object*\n {\n \"subject_line\": string*,\n \"from_name\": string*,\n \"reply_to\": string*\n },\n \"variate_settings\": object* (Required if type is \"variate\")\n {\n \"winner_criteria\": string* (Must be one of \"opens\", \"clicks\", \"total_revenue\", or \"manual\")\n },\n \"rss_opts\": object* (Required if type is \"rss\")\n {\n \"feed_url\": string*,\n \"frequency\": string* (Must be one of \"daily\", \"weekly\", or \"monthly\")\n },\n \"type\": string* (Must be one of \"regular\", \"plaintext\", \"rss\", \"variate\", or \"absplit\")\n}", "id": "f315:c0:m1"} {"signature": "def update(self, campaign_id, data):", "body": "self.campaign_id = campaign_idif '' not in data:raise KeyError('')if '' not in data['']:raise KeyError('')if '' not in data['']:raise KeyError('')if '' not in data['']:raise KeyError('')check_email(data[''][''])return self._mc_client._patch(url=self._build_path(campaign_id), data=data)", "docstring": "Update some or all of the settings for a specific campaign.\n\n:param campaign_id: The unique id for the campaign.\n:type campaign_id: :py:class:`str`\n:param data: The request body parameters\n:type data: :py:class:`dict`\ndata = {\n \"settings\": object*\n {\n \"subject_line\": string*,\n \"from_name\": string*,\n \"reply_to\": string*\n },\n}", "id": "f315:c0:m4"} {"signature": "def all(self, get_all=False, **queryparams):", "body": "self.campaign_id = Noneif get_all:return self._iterate(url=self._build_path(), **queryparams)else:return self._mc_client._get(url=self._build_path(), **queryparams)", "docstring": "Get all campaigns in an account.\n\n.. note::\n The before_create_time, since_create_time, before_send_time, and\n since_send_time queryparams expect times to be listed in the ISO\n 8601 format in UTC (ex. 
2015-10-21T15:41:36+00:00).\n\n:param get_all: Should the query get all results\n:type get_all: :py:class:`bool`\n:param queryparams: The query string parameters\nqueryparams['fields'] = []\nqueryparams['exclude_fields'] = []\nqueryparams['count'] = integer\nqueryparams['offset'] = integer\nqueryparams['type'] = []\nqueryparams['status'] = []\nqueryparams['before_send_time'] = string\nqueryparams['since_send_time'] = string\nqueryparams['before_create_time'] = string\nqueryparams['since_create_time'] = string\nqueryparams['list_id'] = string\nqueryparams['folder_id'] = string\nqueryparams['sort_field'] = string\nqueryparams['sort_dir'] = string", "id": "f315:c0:m2"} {"signature": "def all(self, workflow_id):", "body": "self.workflow_id = workflow_idreturn self._mc_client._get(url=self._build_path(workflow_id, ''))", "docstring": "Get information about subscribers who were removed from an Automation\nworkflow.\n\n:param workflow_id: The unique id for the Automation workflow.\n:type workflow_id: :py:class:`str`", "id": "f316:c0:m2"} {"signature": "def __call__(self, r):", "body": "r.headers[''] = '' + self._access_tokenreturn r", "docstring": "Authorize with the access token provided in __init__", "id": "f317:c2:m1"} {"signature": "@_enabled_or_noopdef _post(self, url, data=None):", "body": "url = urljoin(self.base_url, url)try:r = self._make_request(**dict(method='',url=url,json=data,auth=self.auth,timeout=self.timeout,hooks=self.request_hooks,headers=self.request_headers))except requests.exceptions.RequestException as e:raise eelse:if r.status_code >= :try:error_data = r.json()except ValueError:error_data = { \"\": r }raise MailChimpError(error_data)if r.status_code == :return Nonereturn r.json()", "docstring": "Handle authenticated POST requests\n\n:param url: The url for the endpoint including path parameters\n:type url: :py:class:`str`\n:param data: The request body parameters\n:type data: :py:data:`none` or :py:class:`dict`\n:returns: The JSON output from the API or an error message", "id": "f317:c1:m2"} {"signature": "@_enabled_or_noopdef _get(self, url, **queryparams):", "body": "url = urljoin(self.base_url, url)if len(queryparams):url += '' + urlencode(queryparams)try:r = self._make_request(**dict(method='',url=url,auth=self.auth,timeout=self.timeout,hooks=self.request_hooks,headers=self.request_headers))except requests.exceptions.RequestException as e:raise eelse:if r.status_code >= :raise MailChimpError(r.json())return r.json()", "docstring": "Handle authenticated GET requests\n\n:param url: The url for the endpoint including path parameters\n:type url: :py:class:`str`\n:param queryparams: The query string parameters\n:returns: The JSON output from the API", "id": "f317:c1:m3"} {"signature": "@_enabled_or_noopdef _patch(self, url, data=None):", "body": "url = urljoin(self.base_url, url)try:r = self._make_request(**dict(method='',url=url,json=data,auth=self.auth,timeout=self.timeout,hooks=self.request_hooks,headers=self.request_headers))except requests.exceptions.RequestException as e:raise eelse:if r.status_code >= :raise MailChimpError(r.json())return r.json()", "docstring": "Handle authenticated PATCH requests\n\n:param url: The url for the endpoint including path parameters\n:type url: :py:class:`str`\n:param data: The request body parameters\n:type data: :py:data:`none` or :py:class:`dict`\n:returns: The JSON output from the API", "id": "f317:c1:m5"} {"signature": "@_enabled_or_noopdef _delete(self, url):", "body": "url = urljoin(self.base_url, url)try:r = 
self._make_request(**dict(method='',url=url,auth=self.auth,timeout=self.timeout,hooks=self.request_hooks,headers=self.request_headers))except requests.exceptions.RequestException as e:raise eelse:if r.status_code >= :raise MailChimpError(r.json())if r.status_code == :returnreturn r.json()", "docstring": "Handle authenticated DELETE requests\n\n:param url: The url for the endpoint including path parameters\n:type url: :py:class:`str`\n:returns: The JSON output from the API", "id": "f317:c1:m4"} {"signature": "def get_base_url(self):", "body": "try:return self.get_metadata()['']except requests.exceptions.RequestException:raise", "docstring": "Get the base_url from the authentication metadata", "id": "f317:c2:m3"} {"signature": "def get_metadata(self):", "body": "try:r = requests.get('', auth=self)except requests.exceptions.RequestException as e:raise eelse:r.raise_for_status()output = r.json()if '' in output:raise requests.exceptions.RequestException(output[''])return output", "docstring": "Get the metadata returned after authentication", "id": "f317:c2:m2"} {"signature": "@_enabled_or_noopdef _put(self, url, data=None):", "body": "url = urljoin(self.base_url, url)try:r = self._make_request(**dict(method='',url=url,json=data,auth=self.auth,timeout=self.timeout,hooks=self.request_hooks,headers=self.request_headers))except requests.exceptions.RequestException as e:raise eelse:if r.status_code >= :try:error_data = r.json()except ValueError:error_data = { \"\": r }raise MailChimpError(error_data)return r.json()", "docstring": "Handle authenticated PUT requests\n\n:param url: The url for the endpoint including path parameters\n:type url: :py:class:`str`\n:param data: The request body parameters\n:type data: :py:data:`none` or :py:class:`dict`\n:returns: The JSON output from the API", "id": "f317:c1:m6"} {"signature": "def __init__(self, mc_api=None, mc_user='', access_token=None, enabled=True, timeout=None,request_hooks=None, request_headers=None):", "body": "super(MailChimpClient, self).__init__()self.enabled = enabledself.timeout = timeoutif access_token:self.auth = MailChimpOAuth(access_token)self.base_url = self.auth.get_base_url() + ''elif mc_api:if not re.match(r\"\", mc_api.split('')[]):raise ValueError('''')self.auth = HTTPBasicAuth(mc_user, mc_api)datacenter = mc_api.split('').pop()self.base_url = ''.format(datacenter)else:raise Exception('')self.request_headers = request_headers or requests.utils.default_headers()self.request_hooks = request_hooks or requests.hooks.default_hooks()", "docstring": "Initialize the class with your optional user_id and required api_key.\n\nIf `enabled` is not True, these methods become no-ops. 
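The two authentication branches in this constructor line up with two ways of building a client. A sketch with placeholder credentials, assuming the mailchimp3 package's MailChimp wrapper around this client class:

```python
from mailchimp3 import MailChimp

# API-key auth: the key's datacenter suffix (e.g. '-us1') determines base_url.
client = MailChimp(mc_api='0123456789abcdef0123456789abcdef-us1')

# OAuth auth: base_url is resolved through the token metadata lookup in
# get_metadata()/get_base_url() above. The token is a placeholder.
oauth_client = MailChimp(access_token='an-oauth-access-token')

# enabled=False turns every request method into a no-op via the
# _enabled_or_noop decorator, which is handy for tests.
disabled = MailChimp(mc_api='0123456789abcdef0123456789abcdef-us1', enabled=False)
print(disabled.ping.get())  # None: the request never happens
```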
This is\nparticularly useful for testing or disabling with configuration.\n\n:param mc_api: Mailchimp API key\n:type mc_api: :py:class:`str`\n:param mc_user: Mailchimp user id\n:type mc_user: :py:class:`str`\n:param access_token: The OAuth access token\n:type access_token: :py:class:`str`\n:param enabled: Whether the API should execute any requests\n:type enabled: :py:class:`bool`\n:param timeout: (optional) How long to wait for the server to send\n data before giving up, as a float, or a :ref:`(connect timeout,\n read timeout) ` tuple.\n:type timeout: float or tuple\n:param request_hooks: (optional) Hooks for :py:func:`requests.requests`.\n:type request_hooks: :py:class:`dict`\n:param request_headers: (optional) Headers for\n :py:func:`requests.requests`.\n:type request_headers: :py:class:`dict`", "id": "f317:c1:m0"} {"signature": "def load(self):", "body": "con = sqlite3.connect(self.tmp_cookie_file)cur = con.cursor()try:cur.execute(''''.format(self.domain_name))except sqlite3.OperationalError:cur.execute(''''.format(self.domain_name))cj = http.cookiejar.CookieJar()for item in cur.fetchall():host, path, secure, expires, name = item[:]value = self._decrypt(item[], item[])c = create_cookie(host, path, secure, expires, name, value)cj.set_cookie(c)con.close()return cj", "docstring": "Load sqlite cookies into a cookiejar", "id": "f320:c1:m3"} {"signature": "def load(domain_name=\"\"):", "body": "cj = http.cookiejar.CookieJar()for cookie_fn in [chrome, firefox]:try:for cookie in cookie_fn(domain_name=domain_name):cj.set_cookie(cookie)except BrowserCookieError:passreturn cj", "docstring": "Try to load cookies from all supported browsers and return combined cookiejar\n Optionally pass in a domain name to only load cookies from the specified domain", "id": "f320:m6"} {"signature": "def chrome(cookie_file=None, domain_name=\"\"):", "body": "return Chrome(cookie_file, domain_name).load()", "docstring": "Returns a cookiejar of the cookies used by Chrome. 
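The Chrome `load` record above (f320:c1:m3) reads cookies out of a sqlite database; its query string is elided, so the sketch below assumes Chrome's documented `cookies` table schema and skips the value-decryption step handled by `_decrypt`.

```python
# Sketch of sqlite-backed cookie loading in the spirit of f320:c1:m3.
# Column names assume Chrome's Cookies schema; decryption is omitted.
import http.cookiejar
import sqlite3

def load_cookies(db_path, domain):
    con = sqlite3.connect(db_path)
    cur = con.cursor()
    cur.execute(
        "SELECT host_key, path, is_secure, expires_utc, name, value "
        "FROM cookies WHERE host_key LIKE ?", ('%' + domain + '%',))
    cj = http.cookiejar.CookieJar()
    for host, path, secure, expires, name, value in cur.fetchall():
        cj.set_cookie(http.cookiejar.Cookie(
            0, name, value, None, False, host, host.startswith('.'),
            host.startswith('.'), path, True, secure, expires,
            False, None, None, {}))
    con.close()
    return cj
```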
Optionally pass in a\n domain name to only load cookies from the specified domain", "id": "f320:m4"} {"signature": "def create_cookie(host, path, secure, expires, name, value):", "body": "return http.cookiejar.Cookie(, name, value, None, False, host, host.startswith(''), host.startswith(''), path,True, secure, expires, False, None, None, {})", "docstring": "Shortcut function to create a cookie", "id": "f320:m3"} {"signature": "def _decrypt(self, value, encrypted_value):", "body": "if sys.platform == '':return self._decrypt_windows_chrome(value, encrypted_value)if value or (encrypted_value[:] != b''):return valueencrypted_value = encrypted_value[:]encrypted_value_half_len = int(len(encrypted_value) / )cipher = pyaes.Decrypter(pyaes.AESModeOfOperationCBC(self.key, self.iv))decrypted = cipher.feed(encrypted_value[:encrypted_value_half_len])decrypted += cipher.feed(encrypted_value[encrypted_value_half_len:])decrypted += cipher.feed()return decrypted.decode(\"\")", "docstring": "Decrypt encoded cookies", "id": "f320:c1:m5"} {"signature": "@staticmethoddef from_dict(name, values):", "body": "count = count_value = values.get('', )if isinstance(count_value, int):count = max(count_value, )def with_index(name, idx):if name and idx:return '' % (name, idx)return namedef get_instance(n, idx=None):return BlockadeContainerConfig(with_index(n, idx),values[''],command=values.get(''),links=values.get(''),volumes=values.get(''),publish_ports=values.get(''),expose_ports=values.get(''),environment=values.get(''),hostname=values.get(''),dns=values.get(''),start_delay=values.get('', ),neutral=values.get('', False),holy=values.get('', False),container_name=with_index(values.get(''), idx),cap_add=values.get(''))if count == :yield get_instance(name)else:for idx in range(, count+):yield get_instance(name, idx)", "docstring": "Convert a dictionary of configuration values\ninto a sequence of BlockadeContainerConfig instances", "id": "f325:c0:m0"} {"signature": "def dependency_sorted(containers):", "body": "if not isinstance(containers, collections.Mapping):containers = dict((c.name, c) for c in containers)container_links = dict((name, set(c.links.keys()))for name, c in containers.items())sorted_names = _resolve(container_links)return [containers[name] for name in sorted_names]", "docstring": "Sort a dictionary or list of containers into dependency order\n\n Returns a sequence", "id": "f325:m2"} {"signature": "def wait_for_children():", "body": "wait(lambda: len(multiprocessing.active_children()) == )", "docstring": "Wait for child processes to exit\n\n The testing system launches and terminates child processes, but\n doesn't wait for them to actually die. 
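`dependency_sorted` (f325:m2) defers to a `_resolve` helper over a `{name: set(linked names)}` mapping. A minimal sketch of such a topological resolver follows; the function name and the alphabetical tie-breaking are illustrative, not the project's actual helper.

```python
# Minimal topological-sort sketch over {name: set(dependency names)},
# in the spirit of dependency_sorted (f325:m2).
def resolve_order(link_map):
    resolved, pending = [], dict(link_map)
    while pending:
        ready = [n for n, deps in pending.items()
                 if not (deps - set(resolved))]
        if not ready:
            raise ValueError("circular dependency among %s" % list(pending))
        for name in sorted(ready):  # sorted() just makes output stable
            resolved.append(name)
            del pending[name]
    return resolved

print(resolve_order({'app': {'db', 'cache'}, 'db': set(), 'cache': set()}))
# -> ['cache', 'db', 'app']
```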
So in a few places we need\n this extra call", "id": "f338:m0"} {"signature": "def cmd_restart(opts):", "body": "__with_containers(opts, Blockade.restart)", "docstring": "Restart some or all containers", "id": "f342:m14"} {"signature": "def cmd_status(opts):", "body": "config = load_config(opts.config)b = get_blockade(config, opts)containers = b.status()print_containers(containers, opts.json)", "docstring": "Print status of containers and networks", "id": "f342:m9"} {"signature": "def cmd_join(opts):", "body": "config = load_config(opts.config)b = get_blockade(config, opts)b.join()", "docstring": "Restore full networking between containers", "id": "f342:m21"} {"signature": "def cmd_flaky(opts):", "body": "__with_containers(opts, Blockade.flaky)", "docstring": "Make the network flaky for some or all containers", "id": "f342:m15"} {"signature": "def cmd_logs(opts):", "body": "config = load_config(opts.config)b = get_blockade(config, opts)puts(b.logs(opts.container).decode(encoding=''))", "docstring": "Fetch the logs of a container", "id": "f342:m22"} {"signature": "def cmd_add(opts):", "body": "config = load_config(opts.config)b = get_blockade(config, opts)b.add_container(opts.containers)", "docstring": "Add one or more existing Docker containers to a Blockade group", "id": "f342:m24"} {"signature": "def cmd_partition(opts):", "body": "config = load_config(opts.config)b = get_blockade(config, opts)if opts.random:if opts.partitions:raise BlockadeError(\"\"\"\")b.random_partition()else:partitions = []for partition in opts.partitions:names = []for name in partition.split(\"\"):name = name.strip()if name:names.append(name)partitions.append(names)if not partitions:raise BlockadeError(\"\"\"\")b.partition(partitions)", "docstring": "Partition the network between containers\n\n Replaces any existing partitions outright. Any containers NOT specified\n in arguments will be globbed into a single implicit partition. 
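The comma-splitting loop inside `cmd_partition` (f342:m20, whose docstring continues just below) reduces to this small standalone helper; the function name is illustrative.

```python
# The argument handling of cmd_partition (f342:m20): each positional
# argument is a comma-separated group of container names.
def parse_partitions(args):
    partitions = []
    for partition in args:
        names = [n.strip() for n in partition.split(",") if n.strip()]
        if names:
            partitions.append(names)
    return partitions

print(parse_partitions(["c1", "c2, c3"]))  # -> [['c1'], ['c2', 'c3']]
```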
For\n example if you have three containers: c1, c2, and c3 and you run:\n\n blockade partition c1\n\n The result will be a partition with just c1 and another partition with\n c2 and c3.\n\n Alternatively, --random may be specified, and zero or more random\n partitions will be generated by blockade.", "id": "f342:m20"} {"signature": "def cmd_up(opts):", "body": "config = load_config(opts.config)b = get_blockade(config, opts)containers = b.create(verbose=opts.verbose, force=opts.force)print_containers(containers, opts.json)", "docstring": "Start the containers and link them together", "id": "f342:m7"} {"signature": "def cmd_version(opts):", "body": "import blockade.versionputs(\"\" + blockade.version.__version__)", "docstring": "Show the Blockade version information", "id": "f342:m25"} {"signature": "def cmd_daemon(opts):", "body": "if opts.data_dir is None:raise BlockadeError(\"\")rest.start(data_dir=opts.data_dir, port=opts.port, debug=opts.debug,host_exec=get_host_exec())", "docstring": "Start the Blockade REST API", "id": "f342:m23"} {"signature": "def cmd_kill(opts):", "body": "kill_signal = opts.signal if hasattr(opts, '') else \"\"__with_containers(opts, Blockade.kill, signal=kill_signal)", "docstring": "Kill some or all containers", "id": "f342:m12"} {"signature": "def cmd_destroy(opts):", "body": "config = load_config(opts.config)b = get_blockade(config, opts)b.destroy()", "docstring": "Destroy all containers and restore networks", "id": "f342:m8"} {"signature": "def create_chain(self, chain):", "body": "if not chain:raise ValueError(\"\")self.call(\"\", chain)", "docstring": "Create a new chain", "id": "f343:c2:m9"} {"signature": "def _sm_start(self, *args, **kwargs):", "body": "millisec = random.randint(self._start_min_delay, self._start_max_delay)self._timer = threading.Timer(millisec / , self.event_timeout)self._timer.start()", "docstring": "Start the timer waiting for pain", "id": "f345:c2:m11"} {"signature": "def _sm_relieve_pain(self, *args, **kwargs):", "body": "_logger.info(\"\" % self._blockade_name)self._do_reset_all()millisec = random.randint(self._start_min_delay, self._start_max_delay)self._timer = threading.Timer(millisec/, self.event_timeout)self._timer.start()", "docstring": "End the blockade event and return to a steady state", "id": "f345:c2:m14"} {"signature": "def _sm_cleanup(self, *args, **kwargs):", "body": "if self._done_notification_func is not None:self._done_notification_func()self._timer.cancel()", "docstring": "Delete all state associated with the chaos session", "id": "f345:c2:m16"} {"signature": "def _sm_stop_from_pain(self, *args, **kwargs):", "body": "_logger.info(\"\" % self._blockade_name)self._do_reset_all()", "docstring": "Stop chaos while there is a blockade event in progress", "id": "f345:c2:m15"} {"signature": "def _sm_to_pain(self, *args, **kwargs):", "body": "_logger.info(\"\" % self._blockade_name)self._do_blockade_event()millisec = random.randint(self._run_min_time, self._run_max_time)self._timer = threading.Timer(millisec / , self.event_timeout)self._timer.start()", "docstring": "Start the blockade event", "id": "f345:c2:m12"} {"signature": "def _assure_dir(self):", "body": "try:os.makedirs(self._state_dir)except OSError as err:if err.errno != errno.EEXIST:raise", "docstring": "Make sure the state directory exists", "id": "f351:c0:m11"} {"signature": "def initialize(self, containers):", "body": "self._containers = deepcopy(containers)self.__write(containers, initialize=True)", "docstring": "Initialize a new state file with the given 
contents.\nThis function fails in case the state file already exists.", "id": "f351:c0:m5"} {"signature": "def container_id(self, name):", "body": "container = self._containers.get(name, None)if container is not None:return container.get('', None)return None", "docstring": "Try to find the container ID with the specified name", "id": "f351:c0:m4"} {"signature": "@propertydef blockade_net_name(self):", "body": "return \"\" % self._blockade_id", "docstring": "Generate blockade network name based on the blockade_id", "id": "f351:c0:m2"} {"signature": "def load(self):", "body": "try:with open(self._state_file) as f:state = yaml.safe_load(f)self._containers = state['']except (IOError, OSError) as err:if err.errno == errno.ENOENT:raise NotInitializedError(\"\")raise InconsistentStateError(\"\"+ str(err))except Exception as err:raise InconsistentStateError(\"\"+ str(err))", "docstring": "Try to load a blockade state file in the current directory", "id": "f351:c0:m8"} {"signature": "def translate(self, frame=):", "body": "return Fasta(self.id, ''.join([genetic_codes.codes[genetic_code].get(self.seq[x:x+].upper(), '') for x in range(frame, len(self)--frame, )]))", "docstring": "Returns a Fasta sequence, translated into amino acids. Starts translating from 'frame', where frame is expected to be 0, 1 or 2", "id": "f364:c1:m30"} {"signature": "def gc_content(self, as_decimal=True):", "body": "gc_total = num_bases = n_tuple = tuple('')accepted_bases = tuple('')for base, count in Counter(self.seq).items():if base not in n_tuple:num_bases += countif base in accepted_bases: gc_total += countgc_content = gc_total / num_basesif not as_decimal: gc_content *= return gc_content", "docstring": "Returns the GC content for the sequence.\n Notes:\n This method ignores N when calculating the length of the sequence.\n It does not, however, ignore other ambiguous bases. It also only\n includes the ambiguous base S (G or C). In this sense the method is\n conservative with its calculation.\n\n Args:\n as_decimal (bool): Return the result as a decimal. Setting to False\n will return as a percentage. i.e. for the sequence GCAT it will\n return 0.5 by default and 50.00 if set to False.\n\n Returns:\n float: GC content calculated as the number of G, C, and S divided\n by the number of (non-N) bases (length).", "id": "f364:c1:m31"} {"signature": "def expand_nucleotides(self):", "body": "s = list(self.seq)for i in range(len(s)):if s[i] in redundant_nts:s[i] = ''.join(redundant_nts[s[i]])seqs = []for x in itertools.product(*s):seqs.append(Fasta(self.id + '' + str(len(seqs) + ), ''.join(x)))return seqs", "docstring": "Assumes sequence is nucleotides. Returns list of all combinations of redundant nucleotides. e.g. R is A or G, so CRT would have combinations CAT and CGT", "id": "f364:c1:m7"} {"signature": "def subseq(self, start, end):", "body": "return Fastq(self.id, self.seq[start:end], self.qual[start:end])", "docstring": "Returns Fastq object with the same name, of the bases from start to end, but not including end", "id": "f364:c3:m3"} {"signature": "def subseq(self, start, end):", "body": "return Fasta(self.id, self.seq[start:end])", "docstring": "Returns Fasta object with the same name, of the bases from start to end, but not including end", "id": "f364:c1:m5"} {"signature": "def to_Fastq(self, qual_scores):", "body": "if len(self) != len(qual_scores):raise Error('', self.id)return Fastq(self.id, self.seq, ''.join([chr(max(, min(x, )) + ) for x in qual_scores]))", "docstring": "Returns a Fastq object. 
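`to_Fastq` (f364:c1:m28, whose docstring continues just below) builds the quality string by clamping each score and offsetting into printable ASCII. A sketch assuming the conventional Phred+33 encoding with a 0..93 clamp follows; both values are assumptions, since the record's numeric literals are elided.

```python
# Sketch of the quality-string encoding in to_Fastq (f364:c1:m28):
# assumed Phred+33 with an assumed 0..93 clamp.
def qual_string(qual_scores):
    return ''.join(chr(max(0, min(q, 93)) + 33) for q in qual_scores)

print(qual_string([0, 30, 40]))  # -> '!?I'
```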
qual_scores expected to be a list of numbers, like you would get in a .qual file", "id": "f364:c1:m28"} {"signature": "def replace_bases(self, old, new):", "body": "self.seq = self.seq.replace(old, new)", "docstring": "Replaces all occurrences of 'old' with 'new'", "id": "f364:c1:m14"} {"signature": "def orfs(self, frame=, revcomp=False):", "body": "assert frame in [,,]if revcomp:self.revcomp()aa_seq = self.translate(frame=frame).seq.rstrip('')if revcomp:self.revcomp()orfs = _orfs_from_aa_seq(aa_seq)for i in range(len(orfs)):if revcomp:start = len(self) - (orfs[i].end * + ) - frameend = len(self) - (orfs[i].start * ) - - frameelse:start = orfs[i].start * + frameend = orfs[i].end * + + frameorfs[i] = intervals.Interval(start, end)return orfs", "docstring": "Returns a list of ORFs that the sequence has, starting on the given\n frame. Each returned ORF is an interval.Interval object.\n If revcomp=True, then finds the ORFs of the reverse complement\n of the sequence.", "id": "f364:c1:m19"} {"signature": "def replace_interval(self, start, end, new, qual_string):", "body": "if len(new) != len(qual_string):raise Error('')super().replace_interval(start, end, new)self.qual = self.qual[:start] + qual_string + self.qual[end + :]", "docstring": "Replaces the sequence from start to end with the sequence \"new\"", "id": "f364:c3:m10"} {"signature": "def strip_illumina_suffix(self):", "body": "if self.id.endswith('') or self.id.endswith(''):self.id = self.id[:-]", "docstring": "Removes any trailing /1 or /2 off the end of the name", "id": "f364:c1:m9"} {"signature": "def strip_after_first_whitespace(self):", "body": "self.id = self.id.split()[]", "docstring": "Removes everything in the name after the first whitespace character", "id": "f364:c1:m8"} {"signature": "def revcomp(self):", "body": "self.seq = self.seq.translate(str.maketrans(\"\", \"\"))[::-]", "docstring": "Reverse complements the sequence", "id": "f364:c1:m10"} {"signature": "def gaps(self, min_length = ):", "body": "gaps = []regex = re.compile('', re.IGNORECASE)for m in regex.finditer(self.seq):if m.span()[] - m.span()[] + >= min_length:gaps.append(intervals.Interval(m.span()[], m.span()[] - ))return gaps", "docstring": "Finds the positions of all gaps in the sequence that are at least min_length long. Returns a list of Intervals. 
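`gaps` (f364:c1:m17, continued just below) scans for runs of N with a compiled regex whose pattern is elided. The sketch assumes the pattern is a case-insensitive `n+` and returns plain `(start, end)` tuples instead of `intervals.Interval` objects.

```python
# Sketch of gap detection as in gaps (f364:c1:m17): runs of N become
# zero-based, inclusive (start, end) coordinates. Pattern is assumed.
import re

def n_gaps(seq, min_length=1):
    found = []
    for m in re.finditer('n+', seq, re.IGNORECASE):
        start, end = m.span()
        if end - start >= min_length:
            found.append((start, end - 1))
    return found

print(n_gaps('ACGTnnNNACGT'))  # -> [(4, 7)]
```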
Coords are zero-based", "id": "f364:c1:m17"} {"signature": "def replace_non_acgt(self):", "body": "self.seq = re.sub(r'''''', '', self.seq)", "docstring": "Replace all non acgt characters with an N (case insensitive)", "id": "f364:c1:m15"} {"signature": "def trim(self, start, end):", "body": "super().trim(start, end)self.qual = self.qual[start:len(self.qual) - end]", "docstring": "Removes first 'start'/'end' bases off the start/end of the sequence", "id": "f364:c3:m6"} {"signature": "def to_boulderio(infile, outfile):", "body": "seq_reader = sequences.file_reader(infile)f_out = utils.open_file_write(outfile)for sequence in seq_reader:print(\"\" + sequence.id, file=f_out)print(\"\" + sequence.seq, file=f_out)print(\"\", file=f_out)utils.close(f_out)", "docstring": "Converts input sequence file into a \"Boulder-IO format\", as used by primer3", "id": "f405:m38"} {"signature": "def split_by_fixed_size(infile, outfiles_prefix, chunk_size, tolerance, skip_if_all_Ns=False):", "body": "file_count = coords = []small_sequences = [] seq_reader = sequences.file_reader(infile)f_coords = utils.open_file_write(outfiles_prefix + '')for seq in seq_reader:if skip_if_all_Ns and seq.is_all_Ns():continueif len(seq) < chunk_size:small_sequences.append(copy.copy(seq))elif len(seq) <= chunk_size + tolerance:f = utils.open_file_write(outfiles_prefix + '' + str(file_count))print(seq, file=f)utils.close(f)file_count += else:chunks = [(x,x+chunk_size) for x in range(, len(seq), chunk_size)]if chunks[-][] - > len(seq):chunks[-] = (chunks[-][], len(seq))if len(chunks) > and (chunks[-][] - chunks[-][]) <= tolerance:chunks[-] = (chunks[-][], chunks[-][])chunks.pop()offset = for chunk in chunks:if not(skip_if_all_Ns and seq.is_all_Ns(start=chunk[], end=chunk[]-)):f = utils.open_file_write(outfiles_prefix + '' + str(file_count))chunk_id = seq.id + '' + str(chunk[]+) + '' + str(chunk[])print(sequences.Fasta(chunk_id, seq[chunk[]:chunk[]]), file=f)print(chunk_id, seq.id, offset, sep='', file=f_coords)utils.close(f)file_count += offset += chunk[] - chunk[]if len(small_sequences):f = utils.open_file_write(outfiles_prefix + '' + str(file_count))file_count += base_count = for seq in small_sequences:if base_count > and base_count + len(seq) > chunk_size + tolerance:utils.close(f)f = utils.open_file_write(outfiles_prefix + '' + str(file_count))file_count += base_count = print(seq, file=f)base_count += len(seq)utils.close(f)", "docstring": "Splits fasta/q file into separate files, with up to (chunk_size + tolerance) bases in each file", "id": "f405:m33"} {"signature": "def scaffolds_to_contigs(infile, outfile, number_contigs=False):", "body": "seq_reader = sequences.file_reader(infile)fout = utils.open_file_write(outfile)for seq in seq_reader:contigs = seq.contig_coords()counter = for contig in contigs:if number_contigs:name = seq.id + '' + str(counter)counter += else:name = ''.join([seq.id, str(contig.start + ), str(contig.end + )])print(sequences.Fasta(name, seq[contig.start:contig.end+]), file=fout)utils.close(fout)", "docstring": "Makes a file of contigs from scaffolds by splitting at every N.\n Use number_contigs=True to add .1, .2, etc onto end of each\n contig, instead of default to append coordinates.", "id": "f405:m21"} {"signature": "def split_by_base_count(infile, outfiles_prefix, max_bases, max_seqs=None):", "body": "seq_reader = sequences.file_reader(infile)base_count = file_count = seq_count = fout = Noneif max_seqs is None:max_seqs = float('')for seq in seq_reader:if base_count == :fout = 
utils.open_file_write(outfiles_prefix + '' + str(file_count))file_count += if base_count + len(seq) > max_bases or seq_count >= max_seqs:if base_count == :print(seq, file=fout)utils.close(fout)else:utils.close(fout)fout = utils.open_file_write(outfiles_prefix + '' + str(file_count))print(seq, file=fout)base_count = len(seq)file_count += seq_count = else:base_count += len(seq)seq_count += print(seq, file=fout)utils.close(fout)", "docstring": "Splits a fasta/q file into separate files, file size determined by number of bases.\n\n Puts <= max_bases in each split file. The exception is a single sequence >=max_bases\n is put in its own file. This does not split sequences.", "id": "f405:m32"} {"signature": "def acgtn_only(infile, outfile):", "body": "f = utils.open_file_write(outfile)for seq in sequences.file_reader(infile):seq.replace_non_acgt()print(seq, file=f)utils.close(f)", "docstring": "Replace every non-acgtn (case insensitive) character with an N", "id": "f405:m0"} {"signature": "def interleave(infile_1, infile_2, outfile, suffix1=None, suffix2=None):", "body": "seq_reader_1 = sequences.file_reader(infile_1)seq_reader_2 = sequences.file_reader(infile_2)f_out = utils.open_file_write(outfile)for seq_1 in seq_reader_1:try:seq_2 = next(seq_reader_2)except:utils.close(f_out)raise Error('', seq_1.id, '')if suffix1 is not None and not seq_1.id.endswith(suffix1):seq_1.id += suffix1if suffix2 is not None and not seq_2.id.endswith(suffix2):seq_2.id += suffix2print(seq_1, file=f_out)print(seq_2, file=f_out)try:seq_2 = next(seq_reader_2)except:seq_2 = Noneif seq_2 is not None:utils.close(f_out)raise Error('', seq_2.id, '')utils.close(f_out)", "docstring": "Makes interleaved file from two sequence files. If used, will append suffix1 onto end\n of every sequence name in infile_1, unless it already ends with suffix1. Similar for suffix2.", "id": "f405:m16"} {"signature": "def mean_length(infile, limit=None):", "body": "total = count = seq_reader = sequences.file_reader(infile)for seq in seq_reader:total += len(seq)count += if limit is not None and count >= limit:breakassert count > return total / count", "docstring": "Returns the mean length of the sequences in the input file. By default uses all sequences. To limit to the first N sequences, use limit=N", "id": "f405:m18"} {"signature": "def sort_by_size(infile, outfile, smallest_first=False):", "body": "seqs = {}file_to_dict(infile, seqs)seqs = list(seqs.values())seqs.sort(key=lambda x: len(x), reverse=not smallest_first)fout = utils.open_file_write(outfile)for seq in seqs:print(seq, file=fout)utils.close(fout)", "docstring": "Sorts input sequence file by biggest sequence first, writes sorted output file. 
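The splitting utilities here (`split_by_fixed_size`, f405:m33 above, and `split_by_fixed_size_onefile`, f405:m34 below) share one chunking rule: step through the sequence in `chunk_size` strides, but let the final chunk absorb up to `tolerance` extra bases instead of leaving a tiny remainder. A sketch of just the coordinate computation:

```python
# Sketch of the shared chunking rule in f405:m33 / f405:m34.
def chunk_coords(length, chunk_size, tolerance):
    coords = []
    for i in range(0, length, chunk_size):
        if i + chunk_size + tolerance >= length:
            end = length  # final chunk swallows the short remainder
        else:
            end = i + chunk_size
        coords.append((i, end))
        if end == length:
            break
    return coords

print(chunk_coords(25, 10, 5))  # -> [(0, 10), (10, 25)]
```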
Set smallest_first=True to have smallest first", "id": "f405:m24"} {"signature": "def count_sequences(infile):", "body": "seq_reader = sequences.file_reader(infile)n = for seq in seq_reader:n += return n", "docstring": "Returns the number of sequences in a file", "id": "f405:m3"} {"signature": "def to_fastg(infile, outfile, circular=None):", "body": "if circular is None:to_circularise = set()elif type(circular) is not set:f = utils.open_file_read(circular)to_circularise = set([x.rstrip() for x in f.readlines()])utils.close(f)else:to_circularise = circularseq_reader = sequences.file_reader(infile)fout = utils.open_file_write(outfile)nodes = for seq in seq_reader:new_id = ''.join(['', str(nodes),'', str(len(seq)),'', '','', seq.id])if seq.id in to_circularise:seq.id = new_id + '' + new_id + ''print(seq, file=fout)seq.revcomp()seq.id = new_id + \"\" + new_id + \"\"print(seq, file=fout)else:seq.id = new_id + ''print(seq, file=fout)seq.revcomp()seq.id = new_id + \"\"print(seq, file=fout)nodes += utils.close(fout)", "docstring": "Writes a FASTG file in SPAdes format from input file. Currently only whether or not a sequence is circular is supported. Put circular=set of ids, or circular=filename to make those sequences circular in the output. Puts coverage=1 on all contigs", "id": "f405:m26"} {"signature": "def split_by_fixed_size_onefile(infile, outfile, chunk_size, tolerance, skip_if_all_Ns=False):", "body": "seq_reader = sequences.file_reader(infile)f_out = utils.open_file_write(outfile)for seq in seq_reader:for i in range(, len(seq), chunk_size):if i + chunk_size + tolerance >= len(seq):end = len(seq)else:end = i + chunk_sizesubseq = seq.subseq(i, end)if not (skip_if_all_Ns and subseq.is_all_Ns()):subseq.id += '' + str(i+) + '' + str(end)print(subseq, file=f_out)if end == len(seq):breakutils.close(f_out)", "docstring": "Splits each sequence in infile into chunks of fixed size, last chunk can be up to\n (chunk_size + tolerance) in length", "id": "f405:m34"} {"signature": "def caf_to_fastq(infile, outfile, min_length=, trim=False):", "body": "caf_reader = caf.file_reader(infile)fout = utils.open_file_write(outfile)for c in caf_reader:if trim:if c.clip_start is not None and c.clip_end is not None:c.seq.seq = c.seq.seq[c.clip_start:c.clip_end + ]c.seq.qual = c.seq.qual[c.clip_start:c.clip_end + ]else:print('', c.id, file=sys.stderr)if len(c.seq) >= min_length:print(c.seq, file=fout)utils.close(fout)", "docstring": "Convert a CAF file to fastq. Reads shorter than min_length are not output. If clipping information is in the CAF file (with a line Clipping QUAL ...) 
and trim=True, then trim the reads", "id": "f405:m1"} {"signature": "def intersection(self, i):", "body": "if self.intersects(i):return Interval(max(self.start, i.start), min(self.end, i.end))else:return None", "docstring": "If intervals intersect, returns their intersection, otherwise returns None", "id": "f408:c1:m12"} {"signature": "def union_fill_gap(self, i):", "body": "return Interval(min(self.start, i.start), max(self.end, i.end))", "docstring": "Like union, but ignores whether the two intervals intersect or not", "id": "f408:c1:m11"} {"signature": "def remove_contained_in_list(l):", "body": "i = l.sort()while i < len(l) - :if l[i+].contains(l[i]):l.pop(i)elif l[i].contains(l[i+]):l.pop(i+)else:i += ", "docstring": "Sorts list in place, then removes any intervals that are completely\n contained inside another interval", "id": "f408:m2"} {"signature": "def intersection(l1, l2):", "body": "if len(l1) == or len(l2) == :return []out = []l2_pos = for l in l1:while l2_pos < len(l2) and l2[l2_pos].end < l.start:l2_pos += if l2_pos == len(l2):breakwhile l2_pos < len(l2) and l.intersects(l2[l2_pos]):out.append(l.intersection(l2[l2_pos]))l2_pos += l2_pos = max(, l2_pos - )return out", "docstring": "Returns intersection of two lists. Assumes the lists are sorted by start positions", "id": "f408:m0"} {"signature": "def length_sum_from_list(l):", "body": "return sum([len(x) for x in l])", "docstring": "Returns total length of intervals from a list", "id": "f408:m3"} {"signature": "def contains(self, i):", "body": "return self.start <= i.start and i.end <= self.end", "docstring": "Returns true iff this interval contains the interval i", "id": "f408:c1:m9"} {"signature": "def intersects(self, i):", "body": "return self.start <= i.end and i.start <= self.end", "docstring": "Returns true iff this interval intersects the interval i", "id": "f408:c1:m8"} {"signature": "def union(self, i):", "body": "if self.intersects(i) or self.end + == i.start or i.end + == self.start:return Interval(min(self.start, i.start), max(self.end, i.end))else:return None", "docstring": "If intervals intersect, returns their union, otherwise returns None", "id": "f408:c1:m10"} {"signature": "@classmethoddef to_value(cls, instance):", "body": "if not isinstance(instance, OctaveUserClass) or not instance._attrs:return dict()dtype = []values = []for attr in instance._attrs:dtype.append((str(attr), object))values.append(getattr(instance, attr))struct = np.array([tuple(values)], dtype)return MatlabObject(struct, instance._name)", "docstring": "Convert to a value to send to Octave.", "id": "f410:c7:m2"} {"signature": "@classmethoddef from_value(cls, value):", "body": "instance = OctaveUserClass.__new__(cls)instance._address = '' % (instance._name, id(instance))instance._ref().push(instance._address, value)return instance", "docstring": "This is how an instance is created when we read a\n MatlabObject from a MAT file.", "id": "f410:c7:m1"} {"signature": "def _make_user_class(session, name):", "body": "attrs = session.eval('' % name, nout=).ravel().tolist()methods = session.eval('' % name, nout=).ravel().tolist()ref = weakref.ref(session)doc = _DocDescriptor(ref, name)values = dict(__doc__=doc, _name=name, _ref=ref, _attrs=attrs,__module__='')for method in methods:doc = _MethodDocDescriptor(ref, name, method)cls_name = '' % (name, method)method_values = dict(__doc__=doc)method_cls = type(str(cls_name),(OctaveUserClassMethod,), method_values)values[method] = method_cls(ref, method, name)for attr in attrs:values[attr] = 
OctaveUserClassAttr(ref, attr, attr)return type(str(name), (OctaveUserClass,), values)", "docstring": "Make an Octave class for a given class name", "id": "f410:m0"} {"signature": "@classmethoddef to_pointer(cls, instance):", "body": "return OctavePtr(instance._ref, instance._name, instance._address)", "docstring": "Get a pointer to the private object.", "id": "f410:c7:m3"} {"signature": "def demo(delay=, interactive=True):", "body": "script = \"\"\"\"\"\"if not PY2:script = script.replace('', '')for line in script.strip().split(''):line = line.strip()if not '' in line:time.sleep(delay)print(\"\".format(line))time.sleep(delay)if not interactive:if '' in line or '' in line or '' in line:line = ''exec(line)", "docstring": "Play a demo script showing most of the oct2py API features.\n\nParameters\n==========\ndelay : float\n Time between each command in seconds.", "id": "f411:m0"} {"signature": "def helper(self, base, keys, types):", "body": "for key, type_ in zip(keys, types):if not type(base[key]) == type_:try:assert type_(base[key]) == base[key], keyexcept ValueError:assert np.allclose(type_(base[key]), base[key])", "docstring": "Perform type checking of the values\n\nParameters\n==========\nbase : dict\n Sub-dictionary we are accessing.\nkeys : array-like\n List of keys to test in base.\ntypes : array-like\n List of expected return types for the keys.", "id": "f412:c0:m2"} {"signature": "def __init__(self, shell):", "body": "super(OctaveMagics, self).__init__(shell)self._oct = oct2py.octaveself._display = display", "docstring": "Parameters\n----------\nshell : IPython shell", "id": "f418:c0:m0"} {"signature": "def run(self):", "body": "octave = Oct2Py()octave.push('', self.getName())name = octave.pull('')now = datetime.datetime.now()print(\"\".format(self.getName(), name, now))octave.exit()try:assert self.getName() == nameexcept AssertionError: raise Oct2PyError('')return", "docstring": "Create a unique instance of Octave and verify namespace uniqueness.\n\nRaises\n======\nOct2PyError\n If the thread does not successfully demonstrate independence", "id": "f422:c0:m0"} {"signature": "def _isobject(self, name, exist):", "body": "if exist in [, ]:return Falsecmd = '' % nameresp = self._engine.eval(cmd, silent=True).strip()return resp == ''", "docstring": "Test whether the name is an object.", "id": "f423:c0:m18"} {"signature": "def _get_function_ptr(self, name):", "body": "func = _make_function_ptr_instanceself._function_ptrs.setdefault(name, func(self, name))return self._function_ptrs[name]", "docstring": "Get or create a function pointer of the given name.", "id": "f423:c0:m19"} {"signature": "def pull(self, var, timeout=None, verbose=True):", "body": "if isinstance(var, (str, unicode)):var = [var]outputs = []for name in var:exist = self._exist(name)if exist == :outputs.append(self.feval('', '', name,timeout=timeout, verbose=verbose))else:outputs.append(self.get_pointer(name, timeout=timeout))if len(outputs) == :return outputs[]return outputs", "docstring": "Retrieve a value or values from the Octave session.\n\nParameters\n----------\nvar : str or list\n Name of the variable(s) to retrieve.\ntimeout : float, optional.\n Time to wait for response from Octave (per line).\n**kwargs: Deprecated kwargs, ignored.\n\nReturns\n-------\nout : object\n Object returned by Octave.\n\nRaises\n------\nOct2PyError\n If the variable does not exist in the Octave session.\n\nExamples\n--------\n >>> from oct2py import octave\n >>> y = [1, 2]\n >>> octave.push('y', y)\n >>> octave.pull('y')\n array([[ 1., 
2.]])\n >>> octave.push(['x', 'y'], ['spam', [1, 2, 3, 4]])\n >>> octave.pull(['x', 'y']) # doctest: +SKIP\n [u'spam', array([[1, 2, 3, 4]])]", "id": "f423:c0:m7"} {"signature": "def get_pointer(self, name, timeout=None):", "body": "exist = self._exist(name)isobject = self._isobject(name, exist)if exist == :raise Oct2PyError('' % name)elif exist == :return _make_variable_ptr_instance(self, name)elif isobject:return self._get_user_class(name)elif exist in [, , ]:return self._get_function_ptr(name)raise Oct2PyError('' % name)", "docstring": "Get a pointer to a named object in the Octave workspace.\n\n Parameters\n ----------\n name: str\n The name of the object in the Octave workspace.\n timeout: float, optional.\n Time to wait for response from Octave (per line).\n\n Examples\n --------\n >>> from oct2py import octave\n >>> octave.eval('foo = [1, 2];')\n >>> ptr = octave.get_pointer('foo')\n >>> ptr.value\n array([[ 1., 2.]])\n >>> ptr.address\n 'foo'\n >>> # Can be passed as an argument\n >>> octave.disp(ptr) # doctest: +SKIP\n 1 2\n\n >>> from oct2py import octave\n >>> sin = octave.get_pointer('sin') # equivalent to `octave.sin`\n >>> sin.address\n '@sin'\n >>> x = octave.quad(sin, 0, octave.pi())\n >>> x\n 2.0\n\n Notes\n -----\n Pointers can be passed to `feval` or dynamic functions as function arguments. A pointer passed as a nested value will be passed by value instead.\n\n Raises\n ------\n Oct2PyError\n If the variable does not exist in the Octave session or is of\n unknown type.\n\n Returns\n -------\n A variable, object, user class, or function pointer as appropriate.", "id": "f423:c0:m8"} {"signature": "def _parse_error(self, err):", "body": "self.logger.debug(err)stack = err.get('', [])if not err[''].startswith(''):err[''] = '' + err['']errmsg = '' % err['']if not isinstance(stack, StructArray):return errmsgerrmsg += ''for item in stack[:-]:errmsg += '' % itemtry:errmsg += '' % itemexcept Exception:passreturn errmsg", "docstring": "Create a traceback for an Octave evaluation error.", "id": "f423:c0:m14"} {"signature": "def _feval(self, func_name, func_args=(), dname='', nout=,timeout=None, stream_handler=None, store_as='', plot_dir=None):", "body": "engine = self._engineif engine is None:raise Oct2PyError('')out_file = osp.join(self.temp_dir, '')out_file = out_file.replace(osp.sep, '')in_file = osp.join(self.temp_dir, '')in_file = in_file.replace(osp.sep, '')func_args = list(func_args)ref_indices = []for (i, value) in enumerate(func_args):if isinstance(value, OctavePtr):ref_indices.append(i + )func_args[i] = value.addressref_indices = np.array(ref_indices)req = dict(func_name=func_name, func_args=tuple(func_args),dname=dname or '', nout=nout,store_as=store_as or '',ref_indices=ref_indices)write_file(req, out_file, oned_as=self._oned_as,convert_to_float=self.convert_to_float)engine.stream_handler = stream_handler or self.logger.infoif timeout is None:timeout = self.timeouttry:engine.eval('' % (out_file, in_file),timeout=timeout)except KeyboardInterrupt as e:stream_handler(engine.repl.interrupt())raiseexcept TIMEOUT:stream_handler(engine.repl.interrupt())raise Oct2PyError('')except EOF:stream_handler(engine.repl.child.before)self.restart()raise Oct2PyError('')resp = read_file(in_file, self)if resp['']:msg = self._parse_error(resp[''])raise Oct2PyError(msg)result = resp[''].ravel().tolist()if isinstance(result, list) and len(result) == :result = result[]if (isinstance(result, Cell) andresult.size == andisinstance(result[], string_types) andresult[] == ''):result = Noneif 
plot_dir:self._engine.make_figures(plot_dir)return result", "docstring": "Run the given function with the given args.", "id": "f423:c0:m13"} {"signature": "def _get_user_class(self, name):", "body": "self._user_classes.setdefault(name, _make_user_class(self, name))return self._user_classes[name]", "docstring": "Get or create a user class of the given type.", "id": "f423:c0:m20"} {"signature": "def eval(self, cmds, verbose=True, timeout=None, stream_handler=None,temp_dir=None, plot_dir=None, plot_name='', plot_format='',plot_width=None, plot_height=None, plot_res=None,nout=, **kwargs):", "body": "if isinstance(cmds, (str, unicode)):cmds = [cmds]prev_temp_dir = self.temp_dirself.temp_dir = temp_dir or self.temp_dirprev_log_level = self.logger.levelif kwargs.get('') is False:self.logger.setLevel(logging.WARN)for name in ['', '']:if name not in kwargs:continuemsg = ''warnings.warn(msg % name, stacklevel=)return_both = kwargs.pop('', False)lines = []if return_both and not stream_handler:stream_handler = lines.appendans = Nonefor cmd in cmds:resp = self.feval('', '', cmd,nout=nout, timeout=timeout,stream_handler=stream_handler,verbose=verbose, plot_dir=plot_dir,plot_name=plot_name, plot_format=plot_format,plot_width=plot_width, plot_height=plot_height,plot_res=plot_res)if resp is not None:ans = respself.temp_dir = prev_temp_dirself.logger.setLevel(prev_log_level)if return_both:return ''.join(lines), ansreturn ans", "docstring": "Evaluate an Octave command or commands.\n\nParameters\n----------\ncmds : str or list\n Command(s) to pass to Octave.\nverbose : bool, optional\n Log Octave output at INFO level. If False, log at DEBUG level.\nstream_handler: callable, optional\n A function that is called for each line of output from the\n evaluation.\ntimeout : float, optional\n Time to wait for response from Octave (per line). If not given,\n the instance `timeout` is used.\nnout : int, optional.\n The desired number of returned values, defaults to 0. If nout\n is 0, the `ans` will be returned as the return value.\ntemp_dir: str, optional\n If specified, the session's MAT files will be created in the\n directory, otherwise the instance `temp_dir` is used, e.g.\n a shared memory (tmpfs) path.\nplot_dir: str, optional\n If specified, save the session's plot figures to the plot\n directory instead of displaying the plot window.\nplot_name : str, optional\n Saved plots will start with `plot_name` and\n end with \"_%%.xxx' where %% is the plot number and\n xxx is the `plot_format`.\nplot_format: str, optional\n The format in which to save the plot (PNG by default).\nplot_width: int, optional\n The plot width in pixels.\nplot_height: int, optional\n The plot height in pixels.\nplot_res: int, optional\n The plot resolution in pixels per inch.\n**kwargs: Deprecated kwargs.\n\nExamples\n--------\n>>> from oct2py import octave\n>>> octave.eval('disp(\"hello\")') # doctest: +SKIP\nhello\n>>> x = octave.eval('round(quad(@sin, 0, pi/2));')\n>>> x\n1.0\n\n>>> a = octave.eval('disp(\"hello\");1;') # doctest: +SKIP\nhello\n>>> a = octave.eval('disp(\"hello\");1;', verbose=False)\n>>> a\n1.0\n\n>>> from oct2py import octave\n>>> lines = []\n>>> octave.eval('for i = 1:3; disp(i);end', \\\n stream_handler=lines.append)\n>>> lines # doctest: +SKIP\n[' 1', ' 2', ' 3']\n\nReturns\n-------\nout : object\n Octave \"ans\" variable, or None.\n\nNotes\n-----\nThe deprecated `log` kwarg will temporarily set the `logger` level to\n`WARN`. 
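The body of `eval` (f423:c0:m11) warns once per deprecated kwarg before falling back to the old behavior; its string literals are elided, so in the sketch below the kwarg names come from the Notes section while the message text and `stacklevel` value are assumptions.

```python
# Sketch of eval()'s deprecated-kwarg pass (f423:c0:m11). Message text
# and stacklevel are assumptions; the names come from the Notes above.
import warnings

def warn_deprecated(kwargs):
    for name in ['log', 'return_both']:
        if name not in kwargs:
            continue
        msg = 'The `%s` kwarg is deprecated, see the `Oct2Py.eval()` docs'
        warnings.warn(msg % name, stacklevel=2)

warn_deprecated({'log': False})  # emits one warning for the legacy name
```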
Using the `logger` settings directly is preferred.\nThe deprecated `return_both` kwarg will still work, but the preferred\nmethod is to use the `stream_handler`. If `stream_handler` is given,\nthe `return_both` kwarg will be honored but will give an empty string\nas the response.\n\nRaises\n------\nOct2PyError\n If the command(s) fail.", "id": "f423:c0:m11"} {"signature": "def feval(self, func_path, *func_args, **kwargs):", "body": "if not self._engine:raise Oct2PyError('')nout = kwargs.get('', None)if nout is None:nout = plot_dir = kwargs.get('')settings = dict(backend='' if plot_dir else self.backend,format=kwargs.get(''),name=kwargs.get(''),width=kwargs.get(''),height=kwargs.get(''),resolution=kwargs.get(''))self._engine.plot_settings = settingsdname = osp.dirname(func_path)fname = osp.basename(func_path)func_name, ext = osp.splitext(fname)if ext and not ext == '':raise TypeError('')if func_name == '':raise Oct2PyError('' +'')stream_handler = kwargs.get('')verbose = kwargs.get('', True)store_as = kwargs.get('', '')timeout = kwargs.get('', self.timeout)if not stream_handler:stream_handler = self.logger.info if verbose else self.logger.debugreturn self._feval(func_name, func_args, dname=dname, nout=nout,timeout=timeout, stream_handler=stream_handler,store_as=store_as, plot_dir=plot_dir)", "docstring": "Run a function in Octave and return the result.\n\n Parameters\n ----------\n func_path: str\n Name of function to run or a path to an m-file.\n func_args: object, optional\n Args to send to the function.\n nout: int, optional\n Desired number of return arguments, defaults to 1.\n store_as: str, optional\n If given, saves the result to the given Octave variable name\n instead of returning it.\n verbose : bool, optional\n Log Octave output at INFO level. If False, log at DEBUG level.\n stream_handler: callable, optional\n A function that is called for each line of output from the\n evaluation.\n timeout: float, optional\n The timeout in seconds for the call.\n plot_dir: str, optional\n If specified, save the session's plot figures to the plot\n directory instead of displaying the plot window.\n plot_name : str, optional\n Saved plots will start with `plot_name` and\n end with \"_%%.xxx' where %% is the plot number and\n xxx is the `plot_format`.\n plot_format: str, optional\n The format in which to save the plot.\n plot_width: int, optional\n The plot width in pixels.\n plot_height: int, optional\n The plot height in pixels.\n\n Notes\n -----\n The function arguments passed follow Octave calling convention, not\n Python. 
That is, all values must be passed as a comma separated list,\n not using `x=foo` assignment.\n\n Examples\n --------\n >>> from oct2py import octave\n >>> cell = octave.feval('cell', 10, 10, 10)\n >>> cell.shape\n (10, 10, 10)\n\n >>> from oct2py import octave\n >>> x = octave.feval('linspace', 0, octave.pi() / 2)\n >>> x.shape\n (1, 100)\n\n >>> from oct2py import octave\n >>> x = octave.feval('svd', octave.hilb(3))\n >>> x\n array([[ 1.40831893],\n [ 0.12232707],\n [ 0.00268734]])\n >>> # specify three return values\n >>> (u, v, d) = octave.feval('svd', octave.hilb(3), nout=3)\n >>> u.shape\n (3, 3)\n\n Returns\n -------\n The Python value(s) returned by the Octave function call.", "id": "f423:c0:m10"} {"signature": "def _exist(self, name):", "body": "cmd = '' % nameresp = self._engine.eval(cmd, silent=True).strip()exist = int(resp.split()[-])if exist == :msg = ''raise Oct2PyError(msg % name)return exist", "docstring": "Test whether a name exists and return the name code.\n\n Raises an error when the name does not exist.", "id": "f423:c0:m17"} {"signature": "def get_log(name=None):", "body": "if name is None:name = ''else:name = '' + namelog = logging.getLogger(name)log.setLevel(logging.INFO)return log", "docstring": "Return a console logger.\n\n Output may be sent to the logger using the `debug`, `info`, `warning`,\n `error` and `critical` methods.\n\n Parameters\n ----------\n name : str\n Name of the log.\n\n References\n ----------\n .. [1] Logging facility for Python,\n http://docs.python.org/library/logging.html", "id": "f424:m0"} {"signature": "def large_array_put(self):", "body": "self.octave.push('', self.array)", "docstring": "Create a large matrix and load it into the octave session.", "id": "f425:c0:m2"} {"signature": "def raw_speed(self):", "body": "self.octave.eval(\"\")", "docstring": "Run a fast Octave command and see how long it takes.", "id": "f425:c0:m1"} {"signature": "def __new__(cls, value, session=None):", "body": "value = np.asarray(value)if (value.shape[value.ndim - ] == ):value = value.squeeze(axis=value.ndim - )value = np.atleast_1d(value)if not session:return value.view(cls)obj = np.empty(value.size, dtype=value.dtype).view(cls)for (i, item) in enumerate(value.ravel()):for name in value.dtype.names:obj[i][name] = _extract(item[name], session)return obj.reshape(value.shape)", "docstring": "Create a struct array from a value and optional Octave session.", "id": "f426:c1:m0"} {"signature": "def __getitem__(self, item):", "body": "item = np.recarray.__getitem__(self, item)if isinstance(item, np.ndarray) and item.dtype.kind == '':return Cell(item)return item", "docstring": "Return object arrays as cells and all other values unchanged.", "id": "f426:c1:m3"} {"signature": "def __new__(cls, value, session=None):", "body": "value = np.asarray(value, dtype=object)if (value.shape[value.ndim - ] == ):value = value.squeeze(axis=value.ndim - )value = np.atleast_1d(value)if not session:return value.view(cls)obj = np.empty(value.size, dtype=object).view(cls)for (i, item) in enumerate(value.ravel()):obj[i] = _extract(item, session)return obj.reshape(value.shape)", "docstring": "Create a cell array from a value and optional Octave session.", "id": "f426:c2:m0"} {"signature": "@register.inclusion_tag(\"\", takes_context=True)def stored_messages_list(context, num_elements=):", "body": "if \"\" in context:user = context[\"\"]if user.is_authenticated():qs = Inbox.objects.select_related(\"\").filter(user=user)return {\"\": qs[:num_elements],\"\": qs.count(),}", "docstring": 
"Renders a list of unread stored messages for the current user", "id": "f444:m0"} {"signature": "@login_required@api_view([''])def mark_all_read(request):", "body": "from .settings import stored_messages_settingsbackend = stored_messages_settings.STORAGE_BACKEND()backend.inbox_purge(request.user)return Response({\"\": \"\"})", "docstring": "Mark all messages as read (i.e. delete from inbox) for current logged in user", "id": "f447:m0"} {"signature": "def _prepare_messages(self, messages):", "body": "for message in messages:if not self.backend.can_handle(message):message._prepare()", "docstring": "Like the base class method, prepares a list of messages for storage\nbut avoid to do this for `models.Message` instances.", "id": "f450:c0:m4"} {"signature": "def _store(self, messages, response, *args, **kwargs):", "body": "contrib_messages = []if self.user.is_authenticated():if not messages:self.backend.inbox_purge(self.user)else:for m in messages:try:self.backend.inbox_store([self.user], m)except MessageTypeNotSupported:contrib_messages.append(m)super(StorageMixin, self)._store(contrib_messages, response, *args, **kwargs)", "docstring": "persistent messages are already in the database inside the 'archive',\nso we can say they're already \"stored\".\nHere we put them in the inbox, or remove from the inbox in case the\nmessages were iterated.\n\nmessages contains only new msgs if self.used==True\nelse contains both new and unread messages", "id": "f450:c0:m3"} {"signature": "def _get(self, *args, **kwargs):", "body": "messages, all_retrieved = super(StorageMixin, self)._get(*args, **kwargs)if self.user.is_authenticated():inbox_messages = self.backend.inbox_list(self.user)else:inbox_messages = []return messages + inbox_messages, all_retrieved", "docstring": "Retrieve unread messages for current user, both from the inbox and\nfrom other storages", "id": "f450:c0:m1"} {"signature": "def import_from_string(val, setting_name):", "body": "try:parts = val.split('')module_path, class_name = ''.join(parts[:-]), parts[-]module = importlib.import_module(module_path)return getattr(module, class_name)except ImportError as e:msg = \"\" % (val, setting_name,e.__class__.__name__, e)raise ImportError(msg)", "docstring": "Attempt to import a class from a string representation.", "id": "f453:m1"} {"signature": "def perform_import(val, setting_name):", "body": "if isinstance(val, six.string_types):return import_from_string(val, setting_name)elif isinstance(val, (list, tuple)):return [import_from_string(item, setting_name) for item in val]return val", "docstring": "If the given setting is a string import notation,\nthen perform the necessary import or imports.", "id": "f453:m0"} {"signature": "def _fromJSON(self, json_msg):", "body": "return Message(**json.loads(force_text(json_msg)))", "docstring": "Return a Message instance built from data contained in a JSON string", "id": "f456:c0:m3"} {"signature": "def create_message(self, level, msg_text, extra_tags, date=None):", "body": "raise NotImplementedError()", "docstring": "Create and return a `Message` instance.\nInstance types depend on backends implementation.\n\nParams:\n `level`: message level (see django.contrib.messages)\n `msg_text`: what you think it is\n `extra_tags`: see django.contrib.messages\n `date`: a DateTime (optional)\n\nReturn:\n `Message` instance", "id": "f457:c0:m0"} {"signature": "def _flush(self):", "body": "raise NotImplementedError()", "docstring": "Clear all backend data.\nWarning: heavily destructive! 
Here for convenience, not used by the API anyway.\n\nParams:\n None\n\nReturn:\n None", "id": "f457:c0:m10"} {"signature": "def expired_messages_cleanup(self):", "body": "raise NotImplementedError()", "docstring": "Remove messages that have been expired.\n\nParams:\n None\n\nReturn:\n None", "id": "f457:c0:m9"} {"signature": "def inbox_list(self, user):", "body": "raise NotImplementedError()", "docstring": "Retrieve all the messages in `user`'s Inbox.\n\nParams:\n `user`: Django User instance\n\nReturn:\n An iterable containing `Message` instances", "id": "f457:c0:m1"} {"signature": "def mark_read(user, message):", "body": "BackendClass = stored_messages_settings.STORAGE_BACKENDbackend = BackendClass()backend.inbox_delete(user, message)", "docstring": "Mark message instance as read for user.\nReturns True if the message was `unread` and thus actually marked as `read` or False in case\nit is already `read` or it does not exist at all.\n\n:param user: user instance for the recipient\n:param message: a Message instance to mark as read", "id": "f463:m2"} {"signature": "def get_version(package):", "body": "init_py = open(os.path.join(package, '')).read()return re.search(\"\", init_py).group()", "docstring": "Return package version as listed in `__version__` in `__init__.py`.", "id": "f466:m0"} {"signature": "def can_send(self, user, notice_type):", "body": "from notification.models import NoticeSettingreturn NoticeSetting.for_user(user, notice_type, self.medium_id).send", "docstring": "Determines whether this backend is allowed to send a notification to\nthe given user and notice_type.", "id": "f480:c0:m1"} {"signature": "def deliver(self, recipient, sender, notice_type, extra_context):", "body": "raise NotImplementedError()", "docstring": "Deliver a notification to the given recipient.", "id": "f480:c0:m2"} {"signature": "def i_am_locking(self):", "body": "raise NotImplementedError(\"\")", "docstring": "Return True if this object is locking the file.", "id": "f484:c8:m4"} {"signature": "def acquire(self, timeout=None):", "body": "raise NotImplementedError(\"\")", "docstring": "Acquire the lock.\n\n* If timeout is omitted (or None), wait forever trying to lock the\n file.\n\n* If timeout > 0, try to acquire the lock for that many seconds. If\n the lock period expires and the file is still locked, raise\n LockTimeout.\n\n* If timeout <= 0, raise AlreadyLocked immediately if the file is\n already locked.", "id": "f484:c8:m1"} {"signature": "def __exit__(self, *_exc):", "body": "self.release()", "docstring": "Context manager support.", "id": "f484:c8:m7"} {"signature": "def break_lock(self):", "body": "raise NotImplementedError(\"\")", "docstring": "Remove a lock. 
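`get_version` (f466:m0 above) greps `__version__` out of a package's `__init__.py`; its regex literal is elided, so the pattern below is an assumed equivalent.

```python
# Sketch of get_version (f466:m0) with an assumed regex for
# `__version__ = '...'` lines in __init__.py.
import re

init_py = "__version__ = '1.2.3'\n"
version = re.search(r"__version__ = ['\"]([^'\"]+)['\"]", init_py).group(1)
print(version)  # -> 1.2.3
```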
Useful if a locking thread failed to unlock.", "id": "f484:c8:m5"} {"signature": "def is_locked(self):", "body": "raise NotImplementedError(\"\")", "docstring": "Tell whether or not the file is locked.", "id": "f484:c8:m3"} {"signature": "def send_now(users, label, extra_context=None, sender=None):", "body": "sent = Falseif extra_context is None:extra_context = {}notice_type = NoticeType.objects.get(label=label)current_language = get_language()for user in users:try:language = get_notification_language(user)except LanguageStoreNotAvailable:language = Noneif language is not None:activate(language)for backend in NOTIFICATION_BACKENDS.values():if backend.can_send(user, notice_type):backend.deliver(user, sender, notice_type, extra_context)sent = Trueactivate(current_language)return sent", "docstring": "Creates a new notice.\n\nThis is intended to be how other apps create new notices.\n\nnotification.send(user, \"friends_invite_sent\", {\n \"spam\": \"eggs\",\n \"foo\": \"bar\",\n})", "id": "f485:m2"} {"signature": "def divrank_scipy(G, alpha=, d=, personalization=None,max_iter=, tol=, nstart=None, weight='',dangling=None):", "body": "import scipy.sparseN = len(G)if N == :return {}nodelist = G.nodes()M = nx.to_scipy_sparse_matrix(G, nodelist=nodelist, weight=weight,dtype=float)S = scipy.array(M.sum(axis=)).flatten()S[S != ] = / S[S != ]Q = scipy.sparse.spdiags(S.T, , *M.shape, format='')M = Q * MM = scipy.sparse.lil_matrix(M)M.setdiag()M = alpha * MM.setdiag( - alpha)x = scipy.repeat( / N, N)if personalization is None:p = scipy.repeat( / N, N)else:missing = set(nodelist) - set(personalization)if missing:raise NetworkXError('''''' % missing)p = scipy.array([personalization[n] for n in nodelist],dtype=float)p = p / p.sum()if dangling is None:dangling_weights = pelse:missing = set(nodelist) - set(dangling)if missing:raise NetworkXError('''''' % missing)dangling_weights = scipy.array([dangling[n] for n in nodelist],dtype=float)dangling_weights /= dangling_weights.sum()is_dangling = scipy.where(S == )[]for _ in range(max_iter):xlast = xD_t = M * xx = (d * (x / D_t * M * x + sum(x[is_dangling]) * dangling_weights)+ ( - d) * p)err = scipy.absolute(x - xlast).sum()if err < N * tol:return dict(list(zip(nodelist, list(map(float, x)))))raise NetworkXError('''' % max_iter)", "docstring": "Returns the DivRank (Diverse Rank) of the nodes in the graph.\nThis code is based on networkx.pagerank_scipy", "id": "f489:m1"} {"signature": "@not_implemented_for('')def divrank(G, alpha=, d=, personalization=None,max_iter=, tol=, nstart=None, weight='',dangling=None):", "body": "if len(G) == :return {}if not G.is_directed():D = G.to_directed()else:D = GW = nx.stochastic_graph(D, weight=weight)N = W.number_of_nodes()for n in W.nodes_iter():for n_ in W.nodes_iter():if n != n_ :if n_ in W[n]:W[n][n_][weight] *= alphaelse:if n_ not in W[n]:W.add_edge(n, n_)W[n][n_][weight] = - alphaif nstart is None:x = dict.fromkeys(W, / N)else:s = float(sum(nstart.values()))x = dict((k, v / s) for k, v in list(nstart.items()))if personalization is None:p = dict.fromkeys(W, / N)else:missing = set(G) - set(personalization)if missing:raise NetworkXError('''''' % missing)s = float(sum(personalization.values()))p = dict((k, v / s) for k, v in list(personalization.items()))if dangling is None:dangling_weights = pelse:missing = set(G) - set(dangling)if missing:raise NetworkXError('''''' % missing)s = float(sum(dangling.values()))dangling_weights = dict((k, v/s) for k, v in list(dangling.items()))dangling_nodes = [n for n in W if W.out_degree(n, 
weight=weight) == ]for _ in range(max_iter):xlast = xx = dict.fromkeys(list(xlast.keys()), )danglesum = d * sum(xlast[n] for n in dangling_nodes)for n in x:D_t = sum(W[n][nbr][weight] * xlast[nbr] for nbr in W[n])for nbr in W[n]:x[nbr] += (d * (W[n][nbr][weight] * xlast[nbr] / D_t) * xlast[n])x[n] += danglesum * dangling_weights[n] + ( - d) * p[n]err = sum([abs(x[n] - xlast[n]) for n in x])if err < N*tol:return xraise NetworkXError('''' % max_iter)", "docstring": "Returns the DivRank (Diverse Rank) of the nodes in the graph.\nThis code is based on networkx.pagerank.\n\nArgs: (diff from pagerank)\n alpha: controls strength of self-link [0.0-1.0]\n d: the damping factor\n\nReference:\n Qiaozhu Mei and Jian Guo and Dragomir Radev,\n DivRank: the Interplay of Prestige and Diversity in Information Networks,\n http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.174.7982", "id": "f489:m0"} {"signature": "def lexrank(sentences, continuous=False, sim_threshold=, alpha=,use_divrank=False, divrank_alpha=):", "body": "ranker_params = {'': }if use_divrank:ranker = divrank_scipyranker_params[''] = divrank_alpharanker_params[''] = alphaelse:ranker = networkx.pagerank_scipyranker_params[''] = alphagraph = networkx.DiGraph()sent_tf_list = []for sent in sentences:words = tools.word_segmenter_ja(sent)tf = collections.Counter(words)sent_tf_list.append(tf)sent_vectorizer = DictVectorizer(sparse=True)sent_vecs = sent_vectorizer.fit_transform(sent_tf_list)sim_mat = - pairwise_distances(sent_vecs, sent_vecs, metric='')if continuous:linked_rows, linked_cols = numpy.where(sim_mat > )else:linked_rows, linked_cols = numpy.where(sim_mat >= sim_threshold)graph.add_nodes_from(list(range(sent_vecs.shape[])))for i, j in zip(linked_rows, linked_cols):if i == j:continueweight = sim_mat[i,j] if continuous else graph.add_edge(i, j, {'': weight})scores = ranker(graph, **ranker_params)return scores, sim_mat", "docstring": "Compute centrality score of sentences.\n\nArgs:\n sentences: [u'\u3053\u3093\u306b\u3061\u306f\uff0e', u'\u79c1\u306e\u540d\u524d\u306f\u98ef\u6cbc\u3067\u3059\uff0e', ... ]\n continuous: if True, apply continuous LexRank. (see reference)\n sim_threshold: if continuous is False and similarity is greater or\n equal to sim_threshold, link the sentences.\n alpha: the damping factor of PageRank and DivRank\n use_divrank: if True, apply DivRank instead of PageRank\n divrank_alpha: strength of self-link [0.0-1.0]\n (it's not the damping factor, see divrank.py)\n\nReturns: tuple\n (\n {\n # sentence index -> score\n 0: 0.003,\n 1: 0.002,\n ...\n },\n similarity_matrix\n )\n\nReference:\n G\u00fcnes Erkan and Dragomir R. Radev.\n LexRank: graph-based lexical centrality as salience in text\n summarization. 
{"signature": "def lexrank(sentences, continuous=False, sim_threshold=, alpha=,use_divrank=False, divrank_alpha=):", "body": "ranker_params = {'': }if use_divrank:ranker = divrank_scipyranker_params[''] = divrank_alpharanker_params[''] = alphaelse:ranker = networkx.pagerank_scipyranker_params[''] = alphagraph = networkx.DiGraph()sent_tf_list = []for sent in sentences:words = tools.word_segmenter_ja(sent)tf = collections.Counter(words)sent_tf_list.append(tf)sent_vectorizer = DictVectorizer(sparse=True)sent_vecs = sent_vectorizer.fit_transform(sent_tf_list)sim_mat = - pairwise_distances(sent_vecs, sent_vecs, metric='')if continuous:linked_rows, linked_cols = numpy.where(sim_mat > )else:linked_rows, linked_cols = numpy.where(sim_mat >= sim_threshold)graph.add_nodes_from(list(range(sent_vecs.shape[])))for i, j in zip(linked_rows, linked_cols):if i == j:continueweight = sim_mat[i,j] if continuous else graph.add_edge(i, j, {'': weight})scores = ranker(graph, **ranker_params)return scores, sim_mat", "docstring": "compute centrality score of sentences.\n\nArgs:\n sentences: [u'\u3053\u3093\u306b\u3061\u306f\uff0e', u'\u79c1\u306e\u540d\u524d\u306f\u98ef\u6cbc\u3067\u3059\uff0e', ... ]\n continuous: if True, apply continuous LexRank. (see reference)\n sim_threshold: if continuous is False and similarity is greater or\n equal to sim_threshold, link the sentences.\n alpha: the damping factor of PageRank and DivRank\n use_divrank: if True, apply DivRank instead of PageRank\n divrank_alpha: strength of self-link [0.0-1.0]\n (it's not the damping factor, see divrank.py)\n\nReturns: tuple\n (\n {\n # sentence index -> score\n 0: 0.003,\n 1: 0.002,\n ...\n },\n similarity_matrix\n )\n\nReference:\n G\u00fcnes Erkan and Dragomir R. Radev.\n LexRank: graph-based lexical centrality as salience in text\n summarization. (section 3)\n http://www.cs.cmu.edu/afs/cs/project/jair/pub/volume22/erkan04a-html/erkan04a.html", "id": "f492:m0"} {"signature": "def __init__(self, i2c, device_address, *, debug=False):", "body": "while not i2c.try_lock():passtry:i2c.writeto(device_address, b'')except OSError:try:result = bytearray()i2c.readfrom_into(device_address, result)except OSError:raise ValueError(\"\" % device_address)finally:i2c.unlock()self.i2c = i2cself.device_address = device_addressself._debug = debug", "docstring": "Try to read a byte from an address,\nif you get an OSError it means the device is not there", "id": "f501:c0:m0"} {"signature": "def write_then_readinto(self, out_buffer, in_buffer, *,out_start=, out_end=None, in_start=, in_end=None, stop=True):", "body": "if out_end is None:out_end = len(out_buffer)if in_end is None:in_end = len(in_buffer)if hasattr(self.i2c, ''):if self._debug:print(\"\",[hex(i) for i in out_buffer[out_start:out_end]])self.i2c.writeto_then_readfrom(self.device_address, out_buffer, in_buffer,out_start=out_start, out_end=out_end,in_start=in_start, in_end=in_end, stop=stop)if self._debug:print(\"\",[hex(i) for i in in_buffer[in_start:in_end]])else:self.write(out_buffer, start=out_start, end=out_end, stop=stop)if self._debug:print(\"\",[hex(i) for i in out_buffer[out_start:out_end]])self.readinto(in_buffer, start=in_start, end=in_end)if self._debug:print(\"\",[hex(i) for i in in_buffer[in_start:in_end]])", "docstring": "Write the bytes from ``out_buffer`` to the device, then immediately\nreads into ``in_buffer`` from the device. The number of bytes read\nwill be the length of ``in_buffer``.\nTransmits a stop bit after the write, if ``stop`` is set.\n\nIf ``out_start`` or ``out_end`` is provided, then the output buffer\nwill be sliced as if ``out_buffer[out_start:out_end]``. This will\nnot cause an allocation like ``buffer[out_start:out_end]`` will so\nit saves memory.\n\nIf ``in_start`` or ``in_end`` is provided, then the input buffer\nwill be sliced as if ``in_buffer[in_start:in_end]``. This will not\ncause an allocation like ``in_buffer[in_start:in_end]`` will so\nit saves memory.\n\n:param bytearray out_buffer: buffer containing the bytes to write\n:param bytearray in_buffer: buffer containing the bytes to read into\n:param int out_start: Index to start writing from\n:param int out_end: Index to read up to but not include\n:param int in_start: Index to start writing at\n:param int in_end: Index to write up to but not include\n:param bool stop: If true, output an I2C stop condition after the buffer is written", "id": "f501:c0:m3"} {"signature": "def __iter__(self):", "body": "for id_ in self._items:yield self[id_]", "docstring": "allows you to iterate and use for-loops\n\n The objects in the iterator have the order in which they were appended.", "id": "f506:c0:m5"} {"signature": "def __bool__(self):", "body": "return bool(self._items)", "docstring": ":return: whether there is anything in the collection.\n :rtype: bool", "id": "f506:c0:m4"} {"signature": "def __init__(self):", "body": "self._items = OrderedDict()", "docstring": "Create a new :class:`IdCollection` with no arguments.\n\n You can add objects later using the method :meth:`append`.", "id": "f506:c0:m0"} {"signature": "def __getitem__(self, id_):", "body": "return self._items[id_]", "docstring": "Get the object with the :paramref:`id`\n\n ..
code:: python\n\n ic = IdCollection()\n ic.append(object_1)\n ic.append(object_2)\n assert ic[object_1.id] == object_1\n assert ic[object_2.id] == object_2\n\n :param id_: the id of an object\n :return: the object with the :paramref:`id`\n :raises KeyError: if no object with :paramref:`id` was found", "id": "f506:c0:m3"} {"signature": "def __len__(self):", "body": "return len(self._items)", "docstring": ":return: the number of objects in this collection", "id": "f506:c0:m6"} {"signature": "def __init__(self, process=identity, chooses_path=True):", "body": "self._process = processself._chooses_path = chooses_path", "docstring": "Create a PathLoader object.\n\n :param process: ``process(path)`` is called with the `path` to load.\n The result of :paramref:`process` is returned to the caller. The\n default value is :func:`identity`, so the paths are returned when\n loaded.\n :param chooses_path: ``chooses_path(path)`` is called before\n :paramref:`process` and returns :obj:`True` or :obj:`False`\n depending on whether a specific path should be loaded and passed to\n :paramref:`process`.", "id": "f507:c0:m0"} {"signature": "def example(self, relative_path):", "body": "example_path = os.path.join(\"\", relative_path)return self.relative_file(__file__, example_path)", "docstring": "Load an example from the knitting pattern examples.\n\n :param str relative_path: the path to load\n :return: the result of the processing\n\n You can use :meth:`knittingpattern.Loader.PathLoader.examples`\n to find out the paths of all examples.", "id": "f507:c0:m8"} {"signature": "def string(self, string):", "body": "object_ = json.loads(string)return self.object(object_)", "docstring": "Load an object from a string and return the processed JSON content\n\n :return: the result of the processing step\n :param str string: the string to load the JSON from", "id": "f507:c2:m1"} {"signature": "def relative_folder(self, module, folder):", "body": "folder = self._relative_to_absolute(module, folder)return self.folder(folder)", "docstring": "Load a folder located relative to a module and return the processed\n result.\n\n :param str module: can be\n\n - a path to a folder\n - a path to a file\n - a module name\n\n :param str folder: the path of a folder relative to :paramref:`module`\n :return: a list of the results of the processing\n :rtype: list\n\n Depending on :meth:`chooses_path` some paths may not be loaded.\n Every loaded path is processed and returned part of the returned list.\n You can use :meth:`choose_paths` to find out which paths are chosen to\n load.", "id": "f507:c0:m5"} {"signature": "def choose_paths(self, paths):", "body": "return [path for path in paths if self._chooses_path(path)]", "docstring": ":return: the paths that are chosen by :meth:`chooses_path`\n :rtype: list", "id": "f507:c0:m7"} {"signature": "def relative_file(self, module, file):", "body": "path = self._relative_to_absolute(module, file)return self.path(path)", "docstring": "Load a file relative to a module.\n\n :param str module: can be\n\n - a path to a folder\n - a path to a file\n - a module name\n\n :param str file: the path of a file relative to :paramref:`module`\n :return: the result of the processing", "id": "f507:c0:m6"} {"signature": "def identity(object_):", "body": "return object_", "docstring": ":return: the argument\n :param object_: the object to be returned", "id": "f507:m0"} {"signature": "def rows_in_knit_order(self):", "body": "return walk(self)", "docstring": "Return the rows in the order that they should be
knit.\n\n :rtype: list\n :return: the :attr:`rows` in the order that they should be knit\n\n .. seealso:: :mod:`knittingpattern.walk`", "id": "f508:c0:m5"} {"signature": "@propertydef name(self):", "body": "return self._name", "docstring": "a human readable name", "id": "f508:c0:m2"} {"signature": "def add_row(self, id_):", "body": "row = self._parser.new_row(id_)self._rows.append(row)return row", "docstring": "Add a new row to the pattern.\n\n :param id_: the id of the row", "id": "f508:c0:m4"} {"signature": "@propertydef rows(self):", "body": "return self._rows", "docstring": "a collection of rows that this pattern is made of\n\n Usually this should be a\n :class:`knittingpattern.IdCollection.IdCollection` of\n :class:`knittingpattern.Row.Row`.", "id": "f508:c0:m3"} {"signature": "def __getitem__(self, instruction_type):", "body": "return self.as_instruction({TYPE: instruction_type})", "docstring": ":return: the specification for :paramref:`instruction_type`\n\n .. seealso:: :meth:`as_instruction`", "id": "f509:c0:m7"} {"signature": "@propertydef _instruction_class(self):", "body": "return Instruction", "docstring": ":return: the class for the specifications", "id": "f509:c0:m1"} {"signature": "def __init__(self):", "body": "super().__init__()self.load.relative_folder(__file__, self.INSTRUCTIONS_FOLDER)", "docstring": "Create the default instruction library without arguments.\n\n The default specifications are loaded automatically from this package.", "id": "f509:c1:m0"} {"signature": "def _process_loaded_object(self, obj):", "body": "for instruction in obj:self.add_instruction(instruction)return self", "docstring": "add the loaded instructions from :attr:`load`", "id": "f509:c0:m4"} {"signature": "@propertydef _loader_class(self):", "body": "return JSONLoader", "docstring": ":return: the class for loading the specifications with\n :attr:`load`", "id": "f509:c0:m0"} {"signature": "@fixturedef a1(charlotte):", "body": "return charlotte.patterns[\"\"]", "docstring": ":return: the pattern ``\"A.1\"`` in charlotte", "id": "f512:m1"} {"signature": "@fixturedef a1():", "body": "return _charlotte().patterns[\"\"]", "docstring": ":return: the pattern ``\"A.1\"`` in charlotte", "id": "f524:m0"} {"signature": "@fixturedef pattern(single_instruction_pattern_set):", "body": "return single_instruction_pattern_set.patterns[\"\"]", "docstring": "The pattern which has only one instruction.", "id": "f527:m1"} {"signature": "@fixturedef row(pattern):", "body": "return pattern.rows[]", "docstring": "The row with one instruction.", "id": "f527:m2"} {"signature": "@fixturedef single_instruction_pattern_set():", "body": "return load_from_relative_file(HERE, \"\")", "docstring": "Load the pattern set with only one instruction.", "id": "f527:m0"} {"signature": "@classmethoddef __repr__(cls):", "body": "return \"\".format(cls.__module__, cls.__qualname__)", "docstring": "The string representation of the object.\n\n :return: the string representation\n :rtype: str", "id": "f532:c1:m1"} {"signature": "def __init__(self,new_loader=JSONLoader,new_parser=Parser,new_parsing_error=ParsingError,new_pattern_set=KnittingPatternSet,new_pattern_collection=IdCollection,new_row_collection=IdCollection,new_pattern=KnittingPattern,new_row=Row,new_default_instructions=DefaultInstructions,new_instruction_in_row=InstructionInRow):", "body": "self.new_loader = new_loaderself.new_parser = new_parserself.new_parsing_error = new_parsing_errorself.new_pattern_set = new_pattern_setself.new_pattern_collection =
new_pattern_collectionself.new_row_collection = new_row_collectionself.new_pattern = new_patternself.new_row = new_rowself.new_default_instructions = new_default_instructionsself.new_instruction_in_row = new_instruction_in_row", "docstring": "Create a new parsing specification.", "id": "f532:c0:m0"} {"signature": "def new_knitting_pattern_set_loader(specification=DefaultSpecification()):", "body": "parser = specification.new_parser(specification)loader = specification.new_loader(parser.knitting_pattern_set)return loader", "docstring": "Create a loader for a knitting pattern set.\n\n :param specification: a :class:`specification\n `\n for the knitting pattern set, default\n :class:`DefaultSpecification`", "id": "f532:m0"} {"signature": "def load_from_file(file):", "body": "return load_from().file(file)", "docstring": "Load a knitting pattern from a file-like object.\n\n :rtype: knittingpattern.KnittingPatternSet.KnittingPatternSet", "id": "f534:m3"} {"signature": "def load_from():", "body": "from .ParsingSpecification import new_knitting_pattern_set_loaderreturn new_knitting_pattern_set_loader()", "docstring": "Create a loader to load knitting patterns with.\n\n :return: the loader to load objects with\n :rtype: knittingpattern.Loader.JSONLoader\n\n Example:\n\n .. code:: python\n\n import knittingpattern, webbrowser\n k = knittingpattern.load_from().example(\"Cafe.json\")\n webbrowser.open(k.to_svg(25).temporary_path(\".svg\"))", "id": "f534:m0"} {"signature": "def load_from_object(object_):", "body": "return load_from().object(object_)", "docstring": "Load a knitting pattern from an object.\n\n :rtype: knittingpattern.KnittingPatternSet.KnittingPatternSet", "id": "f534:m1"} {"signature": "def convert_from_image(colors=(\"\", \"\")):", "body": "from .convert.image_to_knittingpattern importconvert_image_to_knitting_patternreturn convert_image_to_knitting_pattern(colors=colors)", "docstring": "Convert an image to a knitting pattern.\n\n :return: a loader\n :rtype: knittingpattern.Loader.PathLoader\n :param tuple colors: the colors to convert to\n\n .. code:: python\n\n convert_from_image().path(\"pattern.png\").path(\"pattern.json\")\n convert_from_image().path(\"pattern.png\").knitting_pattern()\n\n .. seealso:: :mod:`knittingpattern.convert.image_to_knitting_pattern`", "id": "f534:m7"} {"signature": "def load_from_path(path):", "body": "return load_from().path(path)", "docstring": "Load a knitting pattern from a file located at `path`.\n\n :rtype: knittingpattern.KnittingPatternSet.KnittingPatternSet", "id": "f534:m4"}
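Taken together, the loaders above give a one-line entry point into the library. A short, hedged usage sketch (the file name is made up; ``patterns``, ``name`` and ``rows`` are the attributes shown in the surrounding records):

.. code:: python

    import knittingpattern

    # load_from_path wraps load_from().path(...), as the record above shows.
    pattern_set = knittingpattern.load_from_path("my-pattern.json")  # hypothetical file
    for pattern in pattern_set.patterns:  # an IdCollection, so iteration works
        print(pattern.name, "has", len(pattern.rows), "rows")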
{"signature": "@propertydef last_produced_mesh(self):", "body": "for instruction in reversed(self.instructions):if instruction.produces_meshes():return instruction.last_produced_meshraise IndexError(\"\".format(self))", "docstring": "The last produced mesh.\n\n :return: the last produced mesh\n :rtype: knittingpattern.Mesh.Mesh\n :raises IndexError: if no mesh is produced\n\n .. seealso:: :attr:`number_of_produced_meshes`", "id": "f535:c0:m11"} {"signature": "@propertydef color(self):", "body": "return self.get(COLOR)", "docstring": "The color of the row.\n\n :return: the color of the row as specified or :obj:`None`", "id": "f535:c0:m9"} {"signature": "@propertydef instruction_colors(self):", "body": "return unique(instruction.colors for instruction in self.instructions)", "docstring": "The colors of the instructions in the row in the order they appear.\n\n :return: a list of colors of the knitting pattern in the order that\n they appear in\n :rtype: list", "id": "f535:c0:m10"} {"signature": "@propertydef instructions(self):", "body": "return self._instructions", "docstring": "The instructions in this row.\n\n :return: a collection of :class:`instructions inside the row\n `\n :rtype: ObservableList.ObservableList", "id": "f535:c0:m3"} {"signature": "def __repr__(self):", "body": "return \"\".format(self.__class__.__qualname__, self.id)", "docstring": "The string representation of this row.\n\n :return: a string representation of this row\n :rtype: str", "id": "f535:c0:m8"} {"signature": "@propertydef first_produced_mesh(self):", "body": "for instruction in self.instructions:if instruction.produces_meshes():return instruction.first_produced_meshraise IndexError(\"\".format(self))", "docstring": "The first produced mesh.\n\n :return: the first produced mesh\n :rtype: knittingpattern.Mesh.Mesh\n :raises IndexError: if no mesh is produced\n\n .. seealso:: :attr:`number_of_produced_meshes`", "id": "f535:c0:m13"} {"signature": "@propertydef rows_before(self):", "body": "rows_before = []for mesh in self.consumed_meshes:if mesh.is_produced():row = mesh.producing_rowif row not in rows_before:rows_before.append(row)return rows_before", "docstring": "The rows that produce meshes for this row.\n\n :rtype: list\n :return: a list of rows that produce meshes for this row. Each row\n occurs only once. They are sorted by the first occurrence in the\n instructions.", "id": "f535:c0:m15"} {"signature": "@propertydef first_instruction(self):", "body": "return self.instructions[]", "docstring": "The first instruction of the rows instructions.\n\n :rtype: knittingpattern.Instruction.InstructionInRow\n :return: the first instruction in this row's :attr:`instructions`", "id": "f535:c0:m17"} {"signature": "@propertydef number_of_produced_meshes(self):", "body": "return sum(instruction.number_of_produced_meshesfor instruction in self.instructions)", "docstring": "The number of meshes that this row produces.\n\n :return: the number of meshes that this row produces\n :rtype: int\n\n .. seealso::\n :meth:`Instruction.number_of_produced_meshes()\n `,\n :meth:`number_of_consumed_meshes`", "id": "f535:c0:m4"} {"signature": "@propertydef rows_after(self):", "body": "rows_after = []for mesh in self.produced_meshes:if mesh.is_consumed():row = mesh.consuming_rowif row not in rows_after:rows_after.append(row)return rows_after", "docstring": "The rows that consume meshes from this row.\n\n :rtype: list\n :return: a list of rows that consume meshes from this row. Each row\n occurs only once.
They are sorted by the first occurrence in the\n instructions.", "id": "f535:c0:m16"} {"signature": "@propertydef last_instruction(self):", "body": "return self.instructions[-]", "docstring": "The last instruction of the rows instructions.\n\n :rtype: knittingpattern.Instruction.InstructionInRow\n :return: the last instruction in this row's :attr:`instructions`", "id": "f535:c0:m18"} {"signature": "def __init__(self, knittingpattern, layout, instruction_to_svg, builder,zoom):", "body": "self._knittingpattern = knittingpatternself._layout = layoutself._instruction_to_svg = instruction_to_svgself._builder = builderself._zoom = zoomself._instruction_type_color_to_symbol = OrderedDict()self._symbol_id_to_scale = {}", "docstring": ":param knittingpattern.KnittingPattern.KnittingPattern knittingpattern:\n a knitting pattern\n:param knittingpattern.convert.Layout.GridLayout layout:\n:param instruction_to_svg: an\n :class:`~knittingpattern.convert.InstructionToSVG.InstructionToSVG`\n :class:`\n ~knittingpattern.convert.InstructionToSVGCache.InstructionSVGCache`,\n both with instructions already loaded.\n:param knittingpattern.convert.SVGBuilder.SVGBuilder builder:\n:param float zoom: the height and width of a knit instruction", "id": "f536:c0:m0"} {"signature": "def place_svg_use(self, symbol_id, layer_id, group=None):", "body": "self.place_svg_use_coords(, , symbol_id, layer_id, group)", "docstring": "Same as :meth:`place_svg_use_coords`.\n\n With implicit `x` and `y` which are set to `0` in this method and then\n :meth:`place_svg_use_coords` is called.", "id": "f537:c0:m6"} {"signature": "@propertydef bounding_box(self):", "body": "return (self._min_x, self._min_y, self._max_x, self._max_y)", "docstring": "the bounding box of this SVG\n ``(min_x, min_y, max_x, max_y)``.\n\n .. code:: python\n\n svg_builder10x10.bounding_box = (0, 0, 10, 10)\n assert svg_builder10x10.bounding_box == (0, 0, 10, 10)\n\n ``viewBox``, ``width`` and ``height`` are computed from this.\n\n If the bounding box was never set, the result is a tuple of four\n :obj:`None`.", "id": "f537:c0:m1"} {"signature": "def _get_layer(self, layer_id):", "body": "if layer_id not in self._layer_id_to_layer:self._svg.setdefault(\"\", [])layer = {\"\": [],\"\": layer_id,\"\": layer_id,\"\": \"\",\"\": \"\"}self._layer_id_to_layer[layer_id] = layerself._svg[\"\"].append(layer)return self._layer_id_to_layer[layer_id]", "docstring": ":return: the layer with the :paramref:`layer_id`. If the layer\n does not exist, it is created.\n:param str layer_id: the id of the layer", "id": "f537:c0:m7"} {"signature": "def get_svg_dict(self):", "body": "return self._structure", "docstring": "Return the SVG structure generated.", "id": "f537:c0:m9"} {"signature": "def decorate_load_and_dump(create_loader, create_dumper):", "body": "return lambda func: load_and_dump(create_loader, create_dumper, func)", "docstring": "Same as :func:`load_and_dump` but returns a function to enable decorator\n syntax.\n\n Examples:\n\n .. 
code:: Python\n\n @decorate_load_and_dump(ContentLoader, JSONDumper)\n def convert_from_loader_to_dumper(loaded_stuff, other=\"arguments\"):\n # convert\n return converted_stuff\n\n @decorate_load_and_dump(PathLoader, lambda dump: ContentDumper(dump,\n encoding=None))\n def convert_from_loader_to_dumper(loaded_stuff, to_file):\n # convert\n to_file.write(converted_stuff)", "id": "f538:m1"} {"signature": "def load_and_dump(create_loader, create_dumper, load_and_dump_):", "body": "@wraps(load_and_dump_)def load_and_dump__(*args1, **kw):\"\"\"\"\"\"def load(*args2):\"\"\"\"\"\"def dump(*args3):\"\"\"\"\"\"return load_and_dump_(*(args2 + args3 + args1), **kw)return create_dumper(dump)return create_loader(load)return load_and_dump__", "docstring": ":return: a function that has the doc string of\n :paramref:`load_and_dump_`\n additional arguments to this function are passed on to\n :paramref:`load_and_dump_`.\n\n :param create_loader: a loader, e.g.\n :class:`knittingpattern.Loader.PathLoader`\n :param create_dumper: a dumper, e.g.\n :class:`knittingpattern.Dumper.ContentDumper`\n :param load_and_dump_: a function to call with the loaded content.\n The arguments to both, :paramref:`create_dumper` and,\n :paramref:`create_loader`\n will be passed to :paramref:`load_and_dump_`.\n Any additional arguments to the return value are also passed to\n :paramref:`load_and_dump_`.\n The return value of :paramref:`load_and_dump_` is passed back to the\n :paramref:`Dumper`.\n\n .. seealso:: :func:`decorate_load_and_dump`", "id": "f538:m0"} {"signature": "def title(content):", "body": "if isinstance(content, str):return re.findall(\"\", content)[-]return content.title.cdata", "docstring": "returns the title of the svg", "id": "f545:m0"} {"signature": "def connections(layout):", "body": "return list(layout.walk_connections(lambda c: (c.start.xy, c.stop.xy)))", "docstring": "The connections between the rows of the layout.", "id": "f549:m4"} {"signature": "def sizes(layout):", "body": "return list(layout.walk_instructions(lambda p: (p.width, p.height)))", "docstring": "The sizes of the instructions of the layout.", "id": "f549:m1"} {"signature": "@fixture(scope=\"\")def grid(self, pattern):", "body": "return GridLayout(pattern)", "docstring": "The computed grid for the pattern.", "id": "f549:c0:m1"} {"signature": "def __init__(self, min_x, min_y, max_x, max_y,default_color=\"\"):", "body": "self._min_x = min_xself._min_y = min_yself._max_x = max_xself._max_y = max_yself._default_color = default_colorself._image = PIL.Image.new(\"\", (max_x - min_x, max_y - min_y),self._convert_to_image_color(default_color))", "docstring": "Initialize the builder with the bounding box and a default color.\n\n .. _png-builder-bounds:\n\n ``min_x <= x < max_x`` and ``min_y <= y < max_y`` are the bounds of the\n instructions.\n Instructions outside the bounds are not rendered.\n Any Pixel that is not set has the :paramref:`default_color`.\n\n :param int min_x: the lower bound of the x coordinates\n :param int max_x: the upper bound of the x coordinates\n :param int min_y: the lower bound of the y coordinates\n :param int max_y: the upper bound of the y coordinates\n :param default_color: a valid :ref:`color `", "id": "f550:c0:m0"} {"signature": "def set_pixel(self, x, y, color):", "body": "self._set_pixel_and_convert_color(x, y, color)", "docstring": "set the pixel at ``(x, y)`` position to :paramref:`color`\n\n If ``(x, y)`` is out of the :ref:`bounds `\n this does not change the image.\n\n ..
seealso:: :meth:`set_color_in_grid`", "id": "f550:c0:m7"} {"signature": "def _convert_rrggbb_to_image_color(self, rrggbb):", "body": "return webcolors.hex_to_rgb(rrggbb)", "docstring": ":return: the color that is used by the image", "id": "f550:c0:m3"} {"signature": "@propertydef default_color(self):", "body": "return self._default_color", "docstring": ":return: the :ref:`color ` of the pixels that are not set\n\n You can set this color by passing it to the :meth:`constructor\n <__init__>`.", "id": "f550:c0:m11"} {"signature": "def _set_pixel_and_convert_color(self, x, y, color):", "body": "if color is None:returncolor = self._convert_color_to_rrggbb(color)self._set_pixel(x, y, color)", "docstring": "set the pixel but convert the color before.", "id": "f550:c0:m5"} {"signature": "def __init__(self, function_that_returns_a_knitting_pattern_set):", "body": "super().__init__(self._dump_knitting_pattern,text_is_expected=False, encoding=None)self.__on_dump = function_that_returns_a_knitting_pattern_set", "docstring": "Initialize the Dumper with a\n :paramref:`function_that_returns_a_knitting_pattern_set`.\n\n :param function_that_returns_a_knitting_pattern_set: a function that\n takes no arguments but returns a\n :class:`knittingpattern.KnittingPatternSet.KnittingPatternSet`\n\n When a dump is requested, the\n :paramref:`function_that_returns_a_knitting_pattern_set`\n is called and the knitting pattern set is converted and saved to the\n specified location.", "id": "f552:c0:m0"} {"signature": "def walk_connections(self, mapping=identity):", "body": "for start in self.walk_instructions():for stop_instruction in start.instruction.consuming_instructions:if stop_instruction is None:continuestop = self._walk.instruction_in_grid(stop_instruction)connection = Connection(start, stop)if connection.is_visible():yield mapping(connection)", "docstring": "Iterate over connections between instructions.\n\n :return: an iterator over :class:`connections ` between\n :class:`instructions in grid `\n :param mapping: function to map the result, see\n :meth:`walk_instructions` for an example usage", "id": "f553:c5:m3"} {"signature": "@propertydef y(self):", "body": "return self._position.y", "docstring": ":return: y coordinate in the grid\n :rtype: float", "id": "f553:c0:m2"}
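The walk methods of the grid layout are the intended way to traverse a computed layout. A sketch of the mapping style these records use (the ``pattern`` variable and the exact import path are assumptions based on the docstrings):

.. code:: python

    from knittingpattern.convert.Layout import GridLayout  # path as referenced in the docstrings

    layout = GridLayout(pattern)  # `pattern` is a loaded KnittingPattern
    # Map every visible connection to its start and stop coordinates,
    # just like the `connections` test helper above does.
    for start_xy, stop_xy in layout.walk_connections(lambda c: (c.start.xy, c.stop.xy)):
        print("connection", start_xy, "->", stop_xy)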
{"signature": "def walk_instructions(self, mapping=identity):", "body": "instructions = chain(*self.walk_rows(lambda row: row.instructions))return map(mapping, instructions)", "docstring": "Iterate over instructions.\n\n :return: an iterator over :class:`instructions in grid\n `\n :param mapping: function to map the result\n\n .. code:: python\n\n for pos, c in layout.walk_instructions(lambda i: (i.xy, i.color)):\n print(\"color {} at {}\".format(c, pos))", "id": "f553:c5:m1"} {"signature": "def _walk(self):", "body": "while self._todo:args = self._todo.pop()self._step(*args)", "docstring": "Loop through all the instructions that are `_todo`.", "id": "f553:c3:m7"} {"signature": "@propertydef start(self):", "body": "return self._start", "docstring": ":return: the start of the connection\n :rtype: InstructionInGrid", "id": "f553:c4:m1"} {"signature": "@propertydef instructions(self):", "body": "x = self.xy = self.yresult = []for instruction in self._row.instructions:instruction_in_grid = InstructionInGrid(instruction, Point(x, y))x += instruction_in_grid.widthresult.append(instruction_in_grid)return result", "docstring": "The instructions in a grid.\n\n :return: the :class:`instructions in a grid ` of\n this row\n :rtype: list", "id": "f553:c2:m2"} {"signature": "def row_in_grid(self, row):", "body": "return self._rows_in_grid[row]", "docstring": "Returns a `RowInGrid` object for the `row`", "id": "f553:c3:m9"} {"signature": "def __init__(self, row, position):", "body": "super().__init__(position)self._row = row", "docstring": "Create a new row in the grid.", "id": "f553:c2:m0"} {"signature": "def _expand(self, row, consumed_position, passed):", "body": "self._todo.append((row, consumed_position, passed))", "docstring": "Add the arguments `(args, kw)` to `_walk` to the todo list.", "id": "f553:c3:m1"} {"signature": "def _row_should_be_placed(self, row, position):", "body": "placed_row = self._rows_in_grid.get(row)return placed_row is None or placed_row.y < position.y", "docstring": ":return: whether to place this instruction", "id": "f553:c3:m5"} {"signature": "def _place_row(self, row, position):", "body": "self._rows_in_grid[row] = RowInGrid(row, position)", "docstring": "place the instruction on a grid", "id": "f553:c3:m6"} {"signature": "@propertydef width(self):", "body": "return self._width", "docstring": ":return: width of the object on the grid\n :rtype: float", "id": "f553:c0:m5"} {"signature": "def __init__(self, start, stop):", "body": "self._start = startself._stop = stop", "docstring": ":param InstructionInGrid start: the start of the connection\n:param InstructionInGrid stop: the end of the connection", "id": "f553:c4:m0"} {"signature": "def row_in_grid(self, row):", "body": "return self._walk.row_in_grid(row)", "docstring": "Return a RowInGrid for the row with position information.\n\n :return: a row in the grid\n :rtype: RowInGrid", "id": "f553:c5:m5"} {"signature": "@propertydef color(self):", "body": "return self._instruction.color", "docstring": "The color of the instruction.\n\n :return: the color of the :attr:`instruction`", "id": "f553:c1:m3"} {"signature": "@propertydef bounding_box(self):", "body": "min_x, min_y, max_x, max_y = zip(*list(self.walk_rows(lambda row: row.bounding_box)))return min(min_x), min(min_y), max(max_x), max(max_y)", "docstring": "The minimum and maximum bounds of this layout.\n\n :return: ``(min_x, min_y, max_x, max_y)`` the bounding box\n of this layout\n :rtype: tuple", "id": "f553:c5:m4"} {"signature": "@propertydef bounding_box(self):", "body": "return self._bounding_box", "docstring": "The bounding box of this object.\n\n :return: (min x, min y, max x, max y)\n :rtype: tuple", "id": "f553:c0:m8"} {"signature": "def __init__(self, pattern):", "body": "self._pattern = patternself._rows = list(pattern.rows)self._walk =
_RecursiveWalk(self._rows[].instructions[])self._rows.sort(key=lambda row: self._walk.row_in_grid(row).yx)", "docstring": ":param knittingpattern.KnittingPattern.KnittingPattern pattern: the\n pattern to layout", "id": "f553:c5:m0"} {"signature": "@propertydef yx(self):", "body": "return self._position.y, self._position.x", "docstring": ":return: ``(y, x)`` coordinate in the grid\n :rtype: tuple", "id": "f553:c0:m4"} {"signature": "@propertydef instruction(self):", "body": "return self._instruction", "docstring": "The instruction.\n\n :return: instruction that is placed on the grid\n :rtype: knittingpattern.Instruction.InstructionInRow", "id": "f553:c1:m2"} {"signature": "@propertydef height(self):", "body": "return INSTRUCTION_HEIGHT", "docstring": ":return: height of the object on the grid\n :rtype: float", "id": "f553:c0:m6"} {"signature": "def __init__(self, first_instruction):", "body": "self._rows_in_grid = {}self._todo = []self._expand(first_instruction.row, Point(, ), [])self._walk()", "docstring": "Start walking the knitting pattern starting from first_instruction.", "id": "f553:c3:m0"} {"signature": "def instruction_in_grid(self, instruction):", "body": "row_position = self._rows_in_grid[instruction.row].xyx = instruction.index_of_first_consumed_mesh_in_rowposition = Point(row_position.x + x, row_position.y)return InstructionInGrid(instruction, position)", "docstring": "Returns an `InstructionInGrid` object for the `instruction`", "id": "f553:c3:m8"} {"signature": "def _new_svg_dumper(self, on_dump):", "body": "return SVGDumper(on_dump)", "docstring": "Create a new SVGDumper with the function ``on_dump``.\n\n :rtype: knittingpattern.Dumper.SVGDumper", "id": "f554:c0:m2"} {"signature": "def instruction_to_svg_dict(self, instruction_or_id, copy_result=True):", "body": "instruction_id = self.get_instruction_id(instruction_or_id)if instruction_id in self._cache:result = self._cache[instruction_id]else:result = self._instruction_to_svg_dict(instruction_id)self._cache[instruction_id] = resultif copy_result:result = deepcopy(result)return result", "docstring": "Return the SVG dict for the SVGBuilder.\n\n :param instruction_or_id: the instruction or id, see\n :meth:`get_instruction_id`\n :param bool copy_result: whether to copy the result\n :rtype: dict\n\n The result is cached.", "id": "f554:c0:m4"} {"signature": "def to_svg(self, instruction_or_id,i_promise_not_to_change_the_result=False):", "body": "return self._new_svg_dumper(lambda: self.instruction_to_svg_dict(instruction_or_id, not i_promise_not_to_change_the_result))", "docstring": "Return the SVG for an instruction.\n\n :param instruction_or_id: either an\n :class:`~knittingpattern.Instruction.Instruction` or an id\n returned by :meth:`get_instruction_id`\n :param bool i_promise_not_to_change_the_result:\n\n - :obj:`False`: the result is copied, you can alter it.\n - :obj:`True`: the result is directly from the cache. 
If you change\n the result, other calls of this function get the changed result.\n\n :return: an SVGDumper\n :rtype: knittingpattern.Dumper.SVGDumper", "id": "f554:c0:m3"} {"signature": "def default_instructions_to_svg():", "body": "instruction_to_svg = InstructionToSVG()instruction_to_svg.load.relative_folder(__name__, DEFAULT_SVG_FOLDER)return instruction_to_svg", "docstring": "load the default set of svg files for instructions\n\n :return: the default svg files for the instructions in this package\n :rtype: knittingpattern.InstructionToSVG.InstructionToSVG", "id": "f555:m0"} {"signature": "def default_instruction_to_svg(self, instruction):", "body": "svg_dict = self.default_instruction_to_svg_dict(instruction)return xmltodict.unparse(svg_dict)", "docstring": "As :meth:`instruction_to_svg` but it only takes the ``default.svg``\n file into account.\n\n In case no file is found for an instruction in\n :meth:`instruction_to_svg`,\n this method is used to determine the default svg for it.\n\n The content is created by replacing the text ``{instruction.type}`` in\n the whole svg file named ``default.svg``.\n\n If no file ``default.svg`` was loaded, an empty string is returned.", "id": "f555:c0:m8"} {"signature": "@propertydef index_of_last_consumed_mesh_in_row(self):", "body": "index = self.index_of_first_consumed_mesh_in_rowreturn index + self.number_of_consumed_meshes - ", "docstring": "The index of the last consumed mesh of this instruction in its row.\n\n Same as :attr:`index_of_last_produced_mesh_in_row`\n but for the last consumed mesh.", "id": "f558:c1:m16"} {"signature": "@propertydef consuming_instructions(self):", "body": "return [(mesh.consuming_instruction if mesh.is_consumed() else None)for mesh in self.produced_meshes]", "docstring": "Instructions that consume the meshes that this instruction produces.\n\n :return: a list of :class:`instructions\n `\n :rtype: list\n\n .. seealso:: :attr:`producing_instructions`, :attr:`produced_meshes`", "id": "f558:c1:m21"} {"signature": "@propertydef hex_color(self):", "body": "if self.has_color():return convert_color_to_rrggbb(self.color)return None", "docstring": "The color in \"#RRGGBB\" format.\n\n :return: the :attr:`color` in \"#RRGGBB\" format or none if no color is\n given", "id": "f558:c0:m13"} {"signature": "def produces_meshes(self):", "body": "return self.number_of_produced_meshes != ", "docstring": "Whether this instruction produces meshes.\n\n :return: whether this instruction produces any meshes\n :rtype: bool\n\n .. seealso:: :attr:`number_of_produced_meshes`", "id": "f558:c0:m10"} {"signature": "@propertydef _new_produced_mesh(self):", "body": "return ProducedMesh", "docstring": ":return: the class of the produced meshes.", "id": "f558:c1:m2"} {"signature": "def consumes_meshes(self):", "body": "return self.number_of_consumed_meshes != ", "docstring": "Whether this instruction consumes meshes.\n\n :return: whether this instruction consumes any meshes\n :rtype: bool\n\n ..
seealso:: :attr:`number_of_consumed_meshes`", "id": "f558:c0:m11"} {"signature": "def does_knit(self):", "body": "return self.type == KNIT_TYPE", "docstring": "Whether this instruction is a knit instruction.\n\n :return: whether this instruction is a knit instruction\n :rtype: bool", "id": "f558:c0:m8"} {"signature": "def _raise_not_found_error(self):", "body": "raise InstructionNotFoundInRow(self._instruction_not_found_message)", "docstring": "Raise an error that this instruction is in its row no longer.\n\n :raises knittingpattern.Instruction.InstructionNotFoundInRow:\n the instruction was not found\n\n .. warning: private, do not use", "id": "f558:c1:m12"} {"signature": "@propertydef colors(self):", "body": "return [self.color]", "docstring": "All the colors that an instruction has.\n\n :return: a list of colors of the instruction. If the instruction has\n no color, this is ``[None]``.\n :rtype: list", "id": "f558:c0:m3"} {"signature": "@propertydef type(self):", "body": "return self.get(TYPE, DEFAULT_TYPE)", "docstring": "The type of the instruction.\n\n :return: the :data:`type ` of the instruction or\n :data:`DEFAULT_TYPE` if none is specified.\n :rtype: str\n\n The type should be a string.\n Depending on the type, the instruction can receive additional\n attributes.\n\n .. seealso:: :mod:`knittingpattern.InstructionLibrary`", "id": "f558:c0:m1"} {"signature": "def has_color(self):", "body": "return self.color is not None", "docstring": "Whether this instruction has a color.\n\n :return: whether a :data:`color ` is specified\n :rtype: bool", "id": "f558:c0:m7"} {"signature": "@propertydef index_of_last_produced_mesh_in_row(self):", "body": "index = self.index_of_first_produced_mesh_in_rowreturn index + self.number_of_produced_meshes - ", "docstring": "Index of the last mesh produced by this instruction in its row.\n\n :return: an index of the last produced mesh of rows produced meshes\n :rtype: int\n\n .. note:: If this instruction :meth:`produces meshes\n `, this is the index of\n its last produces mesh in the row. However, if this instruction does\n not produce meshes, this is the index **before** the first mesh of\n the instruction if it produced meshes.\n\n .. seealso:: :attr:`index_of_first_produced_mesh_in_row`", "id": "f558:c1:m14"} {"signature": "@propertydef _new_consumed_mesh(self):", "body": "return ConsumedMesh", "docstring": ":return: the class of the consumed meshes.", "id": "f558:c1:m3"} {"signature": "def unique(iterables):", "body": "included_elements = set()def included(element):result = element in included_elementsincluded_elements.add(element)return resultreturn [element for elements in iterables for element in elementsif not included(element)]", "docstring": "Create an iterable from the iterables that contains each element once.\n\n :return: an iterable over the iterables. Each element of the result\n appeared only once in the result. 
They are ordered by the first\n occurrence in the iterables.", "id": "f559:m0"} {"signature": "def new_pattern(self, id_, name, rows=None):", "body": "if rows is None:rows = self.new_row_collection()return self._spec.new_pattern(id_, name, rows, self)", "docstring": "Create a new knitting pattern.\n\n If rows is :obj:`None` it is replaced with the\n :meth:`new_row_collection`.", "id": "f560:c1:m16"} {"signature": "def knitting_pattern_set(self, values):", "body": "self._start()pattern_collection = self._new_pattern_collection()self._fill_pattern_collection(pattern_collection, values)self._create_pattern_set(pattern_collection, values)return self._pattern_set", "docstring": "Parse a knitting pattern set.\n\n :param dict value: the specification of the knitting pattern set\n :rtype: knittingpattern.KnittingPatternSet.KnittingPatternSet\n :raises knittingpattern.KnittingPatternSet.ParsingError: if\n :paramref:`value` does not fulfill the :ref:`specification\n `.", "id": "f560:c1:m4"} {"signature": "def _get_version(self, values):", "body": "return values[VERSION]", "docstring": ":return: the version of :paramref:`values`.", "id": "f560:c1:m20"} {"signature": "def _delay_instructions(self, row):", "body": "self._instruction_todos.append(row)", "docstring": "Add a row whose instruction parsing is delayed and to be resolved later.\n\n When calling :meth:`_finish_instructions` this delayed parsing shall\n be resolved.", "id": "f560:c1:m8"} {"signature": "def instruction_in_row(self, row, specification):", "body": "whole_instruction_ = self._as_instruction(specification)return self._spec.new_instruction_in_row(row, whole_instruction_)", "docstring": "Parse an instruction.\n\n :param row: the row of the instruction\n :param specification: the specification of the instruction\n :return: the instruction in the row", "id": "f560:c1:m14"} {"signature": "def _row(self, values):", "body": "row_id = self._to_id(values[ID])row = self._spec.new_row(row_id, values, self)if SAME_AS in values:self._delay_inheritance(row, self._to_id(values[SAME_AS]))self._delay_instructions(row)self._id_cache[row_id] = rowreturn row", "docstring": "Parse a row.", "id": "f560:c1:m12"} {"signature": "def __init__(self, specification):", "body": "self._spec = specificationself._start()", "docstring": "Create a parser with a specification.\n\n :param specification: the types and classes to use for the resulting\n object structure, preferably a\n :class:`knittingpattern.ParsingSpecification.ParsingSpecification`", "id": "f560:c1:m0"} {"signature": "def _fill_pattern_collection(self, pattern_collection, values):", "body": "pattern = values.get(PATTERNS, [])for pattern_to_parse in pattern:parsed_pattern = self._pattern(pattern_to_parse)pattern_collection.append(parsed_pattern)", "docstring": "Fill a pattern collection.", "id": "f560:c1:m11"} {"signature": "def _get_type(self, values):", "body": "if TYPE not in values:self._error(\"\"\"\".format(KNITTING_PATTERN_TYPE))type_ = values[TYPE]if type_ != KNITTING_PATTERN_TYPE:self._error(\"\"\"\"\"\".format(type_, KNITTING_PATTERN_TYPE))return type_", "docstring": ":return: the type of a knitting pattern set.", "id": "f560:c1:m19"} {"signature": "def _delay_inheritance(self, prototype, parent_id):", "body": "self._inheritance_todos.append((prototype, parent_id))", "docstring": "Add a delayed inheritance that is to be resolved later.\n\n When calling :meth:`_finish_inheritance` this inheritance chain shall\n be resolved.", "id": "f560:c1:m6"}
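The ``_delay_inheritance`` and ``_delay_instructions`` records above show the parser's two-phase strategy: rows that reference a parent are queued while parsing and only inherit once every id is in the cache. A self-contained sketch of that todo-list pattern (all names here are illustrative, not the library's):

.. code:: python

    # Queue (child, parent_id) pairs while parsing, resolve when all ids are known.
    inheritance_todos = []
    id_cache = {}

    def delay_inheritance(child, parent_id):
        inheritance_todos.append((child, parent_id))

    def finish_inheritance():
        while inheritance_todos:
            child, parent_id = inheritance_todos.pop()
            child.setdefault("inherits", []).append(id_cache[parent_id])

    id_cache["row1"] = {"id": "row1"}
    row2 = {"id": "row2"}
    id_cache["row2"] = row2
    delay_inheritance(row2, "row1")  # "row1" might not have been parsed yet
    finish_inheritance()             # safe now: every id is in the cache
    assert row2["inherits"] == [id_cache["row1"]]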
{"signature": "@staticmethoddef _to_id(id_):", "body": "return tuple(id_) if isinstance(id_, list) else id_", "docstring": "Converts the argument to an object suitable as an identifier.\n\n :return: a hashable object", "id": "f560:c1:m2"} {"signature": "def _new_pattern_collection(self):", "body": "return self._spec.new_pattern_collection()", "docstring": "Create a new pattern collection.\n\n :return: a new specified pattern collection for\n :meth:`knitting_pattern_set`", "id": "f560:c1:m9"} {"signature": "def _start(self):", "body": "self._instruction_library = self._spec.new_default_instructions()self._as_instruction = self._instruction_library.as_instructionself._id_cache = {}self._pattern_set = Noneself._inheritance_todos = []self._instruction_todos = []", "docstring": "Initialize the parsing process.", "id": "f560:c1:m1"} {"signature": "def _finish_inheritance(self):", "body": "while self._inheritance_todos:prototype, parent_id = self._inheritance_todos.pop()parent = self._id_cache[parent_id]prototype.inherit_from(parent)", "docstring": "Finish those who still need to inherit.", "id": "f560:c1:m5"} {"signature": "def new_row_collection(self):", "body": "return self._spec.new_row_collection()", "docstring": "Create a new row collection.\n\n :return: a new specified row collection for the\n :meth:`knitting pattern `", "id": "f560:c1:m10"} {"signature": "@abstractmethoddef _is_connected_to(self, other_mesh):", "body": "", "docstring": "Replace this method.", "id": "f561:c0:m11"} {"signature": "@abstractmethoddef _connect_to(self, other_mesh):", "body": "", "docstring": "Replace this method.", "id": "f561:c0:m8"} {"signature": "@abstractmethoddef _is_consumed(self):", "body": "", "docstring": "Replace this method.", "id": "f561:c0:m5"} {"signature": "def disconnect(self):", "body": "if self.is_connected():self._disconnect()", "docstring": "Remove the connection between two rows through this mesh.\n\n After disconnecting this mesh, it can be connected anew.", "id": "f561:c0:m26"} {"signature": "def can_connect_to(self, other):", "body": "assert other.is_mesh()disconnected = not other.is_connected() and not self.is_connected()types_differ = self._is_consumed_mesh() != other._is_consumed_mesh()return disconnected and types_differ", "docstring": "Whether a connection can be established between those two meshes.", "id": "f561:c0:m33"} {"signature": "@propertydef index_in_consuming_instruction(self):", "body": "self._assert_is_consumed()return self._consuming_instruction_and_index()[]", "docstring": "Index in instruction as consumed mesh.\n\n :return: the index of the mesh in the list of meshes that\n :attr:`consuming_instruction` consumes\n :rtype: int\n\n .. code:: python\n\n instruction = mesh.consuming_instruction\n index = mesh.index_in_consuming_instruction\n assert instruction.consumed_meshes[index] == mesh\n\n .. seealso:: :attr:`consuming_instruction`,\n :attr:`index_in_consuming_instruction`\n\n .. warning:: Check with :meth:`is_consumed` before!", "id": "f561:c0:m23"} {"signature": "@propertydef index_in_consuming_row(self):", "body": "self._assert_is_consumed()return self._consuming_row_and_index()[]", "docstring": "Index in row as consumed mesh.\n\n :return: the index of the mesh in the list of meshes that\n :attr:`consuming_row` consumes\n :rtype: int\n\n .. code:: python\n\n row = mesh.consuming_row\n index = mesh.index_in_consuming_row\n assert row.consumed_meshes[index] == mesh\n\n .. seealso:: :attr:`consuming_row`, :attr:`index_in_producing_row`\n\n ..
warning:: Check with :meth:`is_consumed` before!", "id": "f561:c0:m20"} {"signature": "def __init__(self, consuming_instruction,index_in_consuming_instruction):", "body": "self.__consuming_instruction_and_index = (consuming_instruction,index_in_consuming_instruction)self._produced_part = None", "docstring": ":param consuming_instruction: the\n :class:`instruction `\n that consumes the mesh\n:param int index_in_consuming_instruction: the index of the mesh\n in the list of meshes that :attr:`consuming_instruction`\n consumes\n\n.. note:: There should be no necessity to create instances of this\n directly. You should be able to use\n ``instruction.produced_meshes`` or ``instruction.consumed_meshes``\n to access the :class:`meshes `.", "id": "f561:c2:m0"} {"signature": "@abstractmethoddef _consuming_instruction_and_index(self):", "body": "", "docstring": "Replace this method.", "id": "f561:c0:m2"} {"signature": "def is_connected(self):", "body": "return self._is_consumed() and self._is_produced()", "docstring": "Returns whether this mesh is already connected.\n\n :return: whether this mesh is connected to an other.\n :rtype: bool", "id": "f561:c0:m28"} {"signature": "@abstractmethoddef _is_produced(self):", "body": "", "docstring": "Replace this method.", "id": "f561:c0:m4"} {"signature": "@propertydef consuming_row(self):", "body": "self._assert_is_consumed()return self._consuming_row_and_index()[]", "docstring": "Row which consumes this mesh.\n\n :return: the row that consumes this mesh\n :rtype: knittingpattern.Row.Row\n\n .. seealso:: :attr:`index_in_consuming_row`,\n :attr:`consuming_instruction`, :attr:`producing_row`\n\n .. warning:: Check with :meth:`is_consumed` before!", "id": "f561:c0:m21"} {"signature": "def is_consumed(self):", "body": "return self._is_consumed()", "docstring": "Whether the mesh has an instruction that consumed it.\n\n :return: whether the mesh is consumed by an instruction\n :rtype: bool\n\n If you get this mesh from\n :attr:`knittingpattern.Instruction.InstructionInRow.consumed_meshes` or\n :attr:`knittingpattern.Row.Row.consumed_meshes`,\n this should be :obj:`True`.\n\n .. warning:: Before you use any methods on how the mesh is consumed,\n you should check with ``mesh.is_consumed()``.", "id": "f561:c0:m15"} {"signature": "@propertydef index_in_producing_instruction(self):", "body": "self._assert_is_produced()return self._producing_instruction_and_index()[]", "docstring": "Index in instruction as a produced mesh.\n\n :return: the index of the mesh in the list of meshes that\n :attr:`producing_instruction` produces\n :rtype: int\n\n .. code:: python\n\n instruction = mesh.producing_instruction\n index = mesh.index_in_producing_instruction\n assert instruction.produced_meshes[index] == mesh\n\n .. seealso:: :attr:`producing_instruction`,\n :attr:`index_in_consuming_instruction`\n\n .. warning:: Check with :meth:`is_produced` before!", "id": "f561:c0:m16"} {"signature": "@propertydef index_in_producing_row(self):", "body": "self._assert_is_produced()return self._producing_row_and_index()[]", "docstring": "Index in row as produced mesh.\n\n :return: the index of the mesh in the :attr:`producing_row`\n :rtype: int\n\n .. code:: python\n\n row = mesh.producing_row\n index = mesh.index_in_producing_row\n assert row[index] == mesh\n\n .. seealso:: :attr:`producing_row`, :attr:`index_in_consuming_row`\n\n .. 
warning:: Check with :meth:`is_produced` before!", "id": "f561:c0:m19"} {"signature": "@abstractmethoddef _as_produced_mesh(self):", "body": "", "docstring": "Replace this method.", "id": "f561:c0:m9"} {"signature": "@abstractmethoddef _producing_instruction_and_index(self):", "body": "", "docstring": "Replace this method.", "id": "f561:c0:m0"} {"signature": "def string(self):", "body": "if self.__text_is_expected:return self._string()else:return self._bytes().decode(self.__encoding)", "docstring": ":return: the dump as a string", "id": "f564:c0:m2"} {"signature": "def binary_file(self, file=None):", "body": "if file is None:file = BytesIO()self._binary_file(file)return file", "docstring": "Same as :meth:`file` but for binary content.", "id": "f564:c0:m8"} {"signature": "@propertydef encoding(self):", "body": "return self.__encoding", "docstring": ":return: the encoding for byte to string conversion\n :rtype: str", "id": "f564:c0:m1"} {"signature": "def bytes(self):", "body": "if self.__text_is_expected:return self.string().encode(self.__encoding)else:return self._bytes()", "docstring": ":return: the dump as bytes.", "id": "f564:c0:m4"} {"signature": "def temporary_file(self, delete_when_closed=True):", "body": "return self._temporary_file(delete_when_closed)", "docstring": "Saves the dump in a temporary file and returns the open file object.\n\n :param bool delete_when_closed: whether to delete the temporary file\n when it is closed.\n :return: a file-like object\n\n If :paramref:`delete_when_closed` is :obj:`True` (default) the file\n on the hard drive will be deleted if it is closed or not referenced\n any more.\n\n If :paramref:`delete_when_closed` is :obj:`False` the returned\n temporary file is not deleted when closed or unreferenced.\n The user of this method has then the responsibility to free the\n space on the host system.\n\n The returned file-like object has an attribute ``name`` that holds\n the location of the file.", "id": "f564:c0:m15"} {"signature": "def path(self, path):", "body": "self._path(path)", "docstring": "Saves the dump in a file named :paramref:`path`.\n\n :param str path: a valid path to a file location. 
The file can exist.", "id": "f564:c0:m11"} {"signature": "def __repr__(self):", "body": "return \"\".format(self.__class__.__name__,self.__encoding)", "docstring": "the string representation for people to read\n\n :return: the string representation of this object\n :rtype: str", "id": "f564:c0:m18"} {"signature": "def _string(self):", "body": "file = StringIO()self.__dump_to_file(file)file.seek()return file.read()", "docstring": ":return: the string from a :class:`io.StringIO`", "id": "f564:c0:m3"} {"signature": "def __init__(self, on_dump):", "body": "super().__init__(self._dump_to_file)self.__dump_object = on_dump", "docstring": "Create a new JSONDumper object with the callable `on_dump`.\n\n `on_dump` takes no arguments and returns the object that should be\n serialized to JSON.", "id": "f565:c0:m0"} {"signature": "def _dump_to_file(self, file):", "body": "json.dump(self.object(), file)", "docstring": "dump to the file", "id": "f565:c0:m2"} {"signature": "def write(self, string):", "body": "bytes_ = string.encode(self._encoding)self._file.write(bytes_)", "docstring": "Write a string to the file.", "id": "f567:c1:m1"} {"signature": "def write(self, bytes_):", "body": "string = bytes_.decode(self._encoding)self._file.write(string)", "docstring": "Write bytes to the file.", "id": "f567:c0:m1"} {"signature": "def inherit_from(self, new_specification):", "body": "self.__specification.insert(, new_specification)", "docstring": "Inherit from a :paramref:`new_specification`\n\n :param new_specification: a specification as passed to :meth:`__init__`\n\n The :paramref:`new_specification` is inserted before the first\n :paramref:`inherited value <__init__.inherited_values>`.\n\n If the order is\n\n 1. :paramref:`~__init__.specification`\n 2. :paramref:`~__init__.inherited_values`\n\n after calling ``prototype.inherit_from(new_specification)`` the lookup\n order is\n\n 1. :paramref:`~__init__.specification`\n 2. :paramref:`new_specification`\n 3. :paramref:`~__init__.inherited_values`", "id": "f568:c0:m4"} {"signature": "def __init__(self, specification, inherited_values=()):", "body": "self.__specification = [specification] + list(inherited_values)", "docstring": "create a new prototype\n\n :param specification: the specification of the prototype.\n This specification can be inherited by other prototypes.\n It can be a :class:`dict` or another\n :class:`knittingpattern.Prototype.Prototype` or anything else that\n supports :meth:`__contains__` and :meth:`__getitem__`\n\n To look up a key in the specification it will be walked through\n\n 1. :paramref:`specification`\n 2. :paramref:`inherited_values` in order\n\n However, new lookups can be inserted before\n :paramref:`inherited_values`, by calling :meth:`inherit_from`.", "id": "f568:c0:m0"}
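The Prototype records above fully specify the lookup order: the own specification first, then each inherited specification, with ``inherit_from`` inserting new specifications right before the inherited ones. A runnable sketch of that order (a local helper, not the library's API):

.. code:: python

    def lookup(key, specifications):
        """Walk the specification chain in order, as the Prototype docstring describes."""
        for spec in specifications:
            if key in spec:
                return spec[key]
        raise KeyError(key)

    base = {"type": "knit", "color": "white"}
    own = {"color": "blue"}
    chain = [own, base]                      # own specification first, inherited after
    assert lookup("color", chain) == "blue"  # own value shadows the inherited one
    assert lookup("type", chain) == "knit"   # falls back to the inherited value
    chain.insert(1, {"color": "red"})        # what inherit_from does: insert before inherited
    assert lookup("color", chain) == "blue"  # the own specification still wins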
{"signature": "def __init__(self, type_, version, patterns, parser, comment=None):", "body": "self._version = versionself._type = type_self._patterns = patternsself._comment = commentself._parser = parser", "docstring": "Create a new knitting pattern set.\n\n This is the class for a set of :class:`knitting patterns\n `.\n\n :param str type: the type of the knitting pattern set, see the\n :ref:`specification `.\n :param str version: the version of the knitting pattern set.\n This is not the version of the library but the version of the\n :ref:`specification `.\n :param patterns: a collection of patterns. This should be a\n :class:`~knittingpattern.IdCollection.IdCollection` of\n :class:`KnittingPatterns\n `.\n :param comment: a comment about the knitting pattern", "id": "f569:c0:m0"} {"signature": "@propertydef comment(self):", "body": "return self._comment", "docstring": "The comment about the knitting pattern.\n\n :return: the comment for the knitting pattern set or None,\n see :meth:`__init__`.", "id": "f569:c0:m4"} {"signature": "def add_new_pattern(self, id_, name=None):", "body": "if name is None:name = id_pattern = self._parser.new_pattern(id_, name)self._patterns.append(pattern)return pattern", "docstring": "Add a new, empty knitting pattern to the set.\n\n :param id_: the id of the pattern\n :param name: the name of the pattern to add or if :obj:`None`, the\n :paramref:`id_` is used\n :return: a new, empty knitting pattern\n :rtype: knittingpattern.KnittingPattern.KnittingPattern", "id": "f569:c0:m7"} {"signature": "def absjoin(*args):", "body": "return os.path.abspath(os.path.join(*args))", "docstring": ":return: an absolute path to the joined arguments\n:param args: the parts of the path to join", "id": "f571:m0"} {"signature": "@fixturedef warnings(sphinx_build):", "body": "return re.findall(WARNING_PATTERN, sphinx_build)", "docstring": ":return: the warnings during the build process.", "id": "f573:m3"} {"signature": "def print_bytes(bytes_):", "body": "try:print(bytes_.decode())except UnicodeDecodeError:print(bytes_)", "docstring": "Print bytes safely as string.", "id": "f573:m0"} {"signature": "@fixture(scope=\"\")def sphinx_build():", "body": "if os.path.exists(BUILD_DIRECTORY):shutil.rmtree(BUILD_DIRECTORY)output = subprocess.check_output(\"\", shell=True, cwd=DOCS_DIRECTORY,stderr=subprocess.STDOUT)output += subprocess.check_output(\"\", shell=True, cwd=DOCS_DIRECTORY,stderr=subprocess.STDOUT)print(output.decode())return output", "docstring": "Build the documentation with sphinx and return the output.", "id": "f573:m1"} {"signature": "def _encrypt(self, value):", "body": "value = json.dumps(value)with warnings.catch_warnings():warnings.simplefilter(\"\")encrypted_value = self.cipher.encrypt(value.encode(''))hexified_value = binascii.hexlify(encrypted_value).decode('')return hexified_value", "docstring": "Turn a json serializable value into a jsonified, encrypted,\n hex string.", "id": "f575:c0:m19"} {"signature": "def delete(self, key_name):", "body": "self._assert_valid_stash()if key_name == '':raise GhostError('''')if not self.get(key_name):raise GhostError(''.format(key_name))key = self._storage.get(key_name)if key.get(''):raise GhostError(''''.format(key_name))deleted = self._storage.delete(key_name)audit(storage=self._storage.db_path,action='',message=json.dumps(dict(key_name=key_name)))if not deleted:raise GhostError(''.format(key_name))", "docstring": "Delete a key if it exists.", "id": "f575:c0:m9"} {"signature": "def put(self,name,value=None,modify=False,metadata=None,description='',encrypt=True,lock=False,key_type='',add=False):", "body": "def assert_key_is_unlocked(existing_key):if existing_key and existing_key.get(''):raise GhostError(''''.format(name))def assert_value_provided_for_new_key(value, existing_key):if not value and not existing_key.get(''):raise GhostError('')self._assert_valid_stash()self._validate_key_schema(value, key_type)if value and encrypt and not isinstance(value, dict):raise GhostError('')key = self._handle_existing_key(name, modify or add)assert_key_is_unlocked(key)assert_value_provided_for_new_key(value, key)new_key = dict(name=name,
lock=lock)if value:if add:value = self._update_existing_key(key, value)new_key[''] = self._encrypt(value) if encrypt else valueelse:new_key[''] = key.get('')new_key[''] = description or key.get('')new_key[''] = key.get('') or _get_current_time()new_key[''] = _get_current_time()new_key[''] = metadata or key.get('')new_key[''] = key.get('') or str(uuid.uuid4())new_key[''] = key.get('') or key_typekey_id = self._storage.put(new_key)audit(storage=self._storage.db_path,action='' if (modify or add) else '',message=json.dumps(dict(key_name=new_key[''],value='',description=new_key[''],uid=new_key[''],metadata=json.dumps(new_key['']),lock=new_key[''],type=new_key[''])))return key_id", "docstring": "Put a key inside the stash\n\n if key exists and modify true: delete and create\n if key exists and modify false: fail\n if key doesn't exist and modify true: fail\n if key doesn't exist and modify false: create\n\n `name` is unique and cannot be changed.\n\n `value` must be provided if the key didn't already exist, otherwise,\n the previous value will be retained.\n\n `created_at` will be left unmodified if the key\n already existed. Otherwise, the current time will be used.\n\n `modified_at` will be changed to the current time\n if the field is being modified.\n\n `metadata` will be updated if provided. If it wasn't\n provided the field from the existing key will be used and the\n same goes for the `uid` which will be generated if it didn't\n previously exist.\n\n `lock` will lock the key to prevent it from being modified or deleted\n\n `add` allows to add values to an existing key instead of overwriting.\n\n Returns the id of the key in the database", "id": "f575:c0:m4"} {"signature": "def put(self, key):", "body": "self.client.write(self._key_path(key['']), **key)return self._key_path(key[''])", "docstring": "Put and return the only unique identifier possible, its path", "id": "f575:c4:m3"} {"signature": "def put(self, key):", "body": "return self.db.insert(key)", "docstring": "Insert the key and return its database id", "id": "f575:c1:m3"} {"signature": "def delete(self, key_name):", "body": "self.client.delete_object(Bucket=self.db_path,Key=key_name)return self.get(key_name) == {}", "docstring": "Delete the key.\n :return: True if it was deleted, False otherwise", "id": "f575:c6:m5"} {"signature": "@main.command(name='', short_help='')@click.argument('')@click.argument('', nargs=-, required=True)@click.option('','',help=\"\")@click.option('',metavar='',multiple=True,help='''')@click.option('','',is_flag=True,help='')@click.option('','',is_flag=True,help='')@click.option('',is_flag=True,help='''')@click.option('','',type=click.Choice(['', '']),default='',help='')@stash_option@passphrase_option@backend_optiondef put_key(key_name,value,description,meta,modify,add,lock,key_type,stash,passphrase,backend):", "body": "stash = _get_stash(backend, stash, passphrase)try:click.echo(''.format(key_type))stash.put(name=key_name,value=_build_dict_from_key_value(value),modify=modify,metadata=_build_dict_from_key_value(meta),description=description,lock=lock,key_type=key_type,add=add)click.echo('')except GhostError as ex:sys.exit(ex)", "docstring": "Insert a key to the stash\n\n `KEY_NAME` is the name of the key to insert\n\n `VALUE` is a key=value argument which can be provided multiple times.\n it is the encrypted value of your key", "id": "f575:m14"} {"signature": "def load(self, origin_passphrase, keys=None, key_file=None):", "body": "self._assert_valid_stash()if not (bool(keys) ^ bool(key_file)):raise 
GhostError('''')if key_file:with open(key_file) as stash_file:keys = json.loads(stash_file.read())decrypt = origin_passphrase != self.passphraseif decrypt:stub = Stash(TinyDBStorage(''), origin_passphrase)for key in keys:self.put(name=key[''],value=stub._decrypt(key['']) if decrypt else key[''],metadata=key[''],description=key[''],lock=key.get(''),key_type=key.get(''),encrypt=decrypt)", "docstring": "Import keys to the stash from either a list of keys or a file\n\n `keys` is a list of dictionaries created by `self.export`\n `key_file` is a path to a file created by `self.export`\n\n Exactly one of `keys` and `key_file` must be provided.", "id": "f575:c0:m16"} {"signature": "@click.group(context_settings=CLICK_CONTEXT_SETTINGS)def main():", "body": "", "docstring": "Ghost generates a secret-store in which you can\n keep your secrets encrypted. Ghost isn't real. It's just in your head.", "id": "f575:m12"} {"signature": "def list(self):", "body": "return self.db.search(Query().name.matches(''))", "docstring": "Return a list of all keys (not just key names, but rather the keys\n themselves).\n\n e.g.\n [{u'created_at': u'2016-10-10 08:31:53',\n u'description': None,\n u'metadata': None,\n u'modified_at': u'2016-10-10 08:31:53',\n u'name': u'aws',\n u'uid': u'459f12c0-f341-413e-9d7e-7410f912fb74',\n u'value': u'the_value'},\n {u'created_at': u'2016-10-10 08:32:29',\n u'description': u'my gcp token',\n u'metadata': {u'owner': u'nir'},\n u'modified_at': u'2016-10-10 08:32:29',\n u'name': u'gcp',\n u'uid': u'a51a0043-f241-4d52-93c1-266a3c5de15e',\n u'value': u'the_value'}]", "id": "f575:c1:m5"} {"signature": "def _decode(self, data):", "body": "return json.loads(base64.b64decode(data['']).decode(''))", "docstring": "Decode one key as returned by consul.\n\n The format of the data returned is [{'Value': base-64-encoded-json,\n 'Key': keyname}]. 
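The _encrypt record above (f575:c0:m19) and the get record below (f575:c0:m7) imply a value round-trip of JSON-serialize, encrypt, hex-encode, and the inverse on read. A minimal sketch of that pipeline follows; Fernet is a stand-in because the records do not say which cipher `self.cipher` actually is, and the function names are illustrative.

import binascii
import json

from cryptography.fernet import Fernet  # assumption: any symmetric cipher works here

cipher = Fernet(Fernet.generate_key())

def encrypt_value(value):
    # jsonify -> encrypt -> hexify, mirroring the _encrypt record
    serialized = json.dumps(value)
    encrypted = cipher.encrypt(serialized.encode('utf-8'))
    return binascii.hexlify(encrypted).decode('ascii')

def decrypt_value(hexified):
    # the inverse: unhexify -> decrypt -> parse JSON
    encrypted = binascii.unhexlify(hexified)
    return json.loads(cipher.decrypt(encrypted).decode('utf-8'))

secret = {'user': 'me', 'password': 'hunter2'}
assert decrypt_value(encrypt_value(secret)) == secret
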
We need to decode and return just the values.", "id": "f575:c3:m7"} {"signature": "def get(self, key_name, decrypt=True):", "body": "self._assert_valid_stash()key = self._storage.get(key_name).copy()if not key.get(''):return Noneif decrypt:key[''] = self._decrypt(key[''])audit(storage=self._storage.db_path,action='',message=json.dumps(dict(key_name=key_name)))return key", "docstring": "Return a key with its parameters if it was found.", "id": "f575:c0:m7"} {"signature": "@main.command(name='', short_help='')@click.argument('')@click.option('',help='')@stash_option@passphrase_option@backend_optiondef load_keys(key_file, origin_passphrase, stash, passphrase, backend):", "body": "stash = _get_stash(backend, stash, passphrase)click.echo(''.format(key_file))stash.load(origin_passphrase, key_file=key_file)click.echo('')", "docstring": "Load all keys from an exported key file to the stash\n\n `KEY_FILE` is the exported stash file to load keys from", "id": "f575:m22"} {"signature": "def lock(self, key_name):", "body": "self._change_lock_state(key_name, lock=True)", "docstring": "Lock a key to prevent it from being deleted, purged and modified", "id": "f575:c0:m11"} {"signature": "@main.command(name='')@click.option('','',default='',help='')@stash_option@passphrase_option@backend_optiondef export_keys(output_path, stash, passphrase, backend):", "body": "stash = _get_stash(backend, stash, passphrase)try:click.echo(''.format(output_path))stash.export(output_path=output_path)click.echo('')except GhostError as ex:sys.exit(ex)", "docstring": "Export all keys to a file", "id": "f575:m21"} {"signature": "def _build_ssh_command(conn_info, no_tunnel=False):", "body": "command = ['', '', conn_info[''], conn_info['']]if conn_info.get('') and not no_tunnel:command.insert(, conn_info.get(''))command.insert(, '')command.insert(, '')if conn_info.get(''):command.extend(_build_proxy_command(conn_info))if conn_info.get(''):command.append(conn_info.get(''))return command", "docstring": "# TODO: Document clearly\nIndetityFile=\"~/.ssh/id_rsa\"\nProxyCommand=\"ssh -i ~/.ssh/id_rsa proxy_IP nc HOST_IP HOST_PORT\"", "id": "f575:m26"} {"signature": "@main.command(name='', short_help='')@click.argument('')@stash_option@passphrase_option@backend_optiondef lock_key(key_name,stash,passphrase,backend):", "body": "stash = _get_stash(backend, stash, passphrase)try:click.echo('')stash.lock(key_name=key_name)click.echo('')except GhostError as ex:sys.exit(ex)", "docstring": "Lock a key to prevent it from being deleted, purged or modified\n\n `KEY_NAME` is the name of the key to lock", "id": "f575:m15"} {"signature": "@propertydef is_initialized(self):", "body": "return True", "docstring": "...and therefore, this should always return true", "id": "f575:c3:m2"} {"signature": "def init(self):", "body": "self.es.indices.create(index=self.params[''], ignore=)", "docstring": "Create an Elasticsearch index if necessary", "id": "f575:c5:m1"} {"signature": "def _key_path(self, key_name):", "body": "return '' + self._stash_name + '' + key_name", "docstring": "Return a valid vault path\n\n Note that we don't use os.path.join as the path is read by vault using\n slashes even on Windows.", "id": "f575:c4:m7"} {"signature": "def init(self):", "body": "", "docstring": "Consul creates directories on the fly, so no init is required.", "id": "f575:c3:m1"} {"signature": "@main.command(name='',short_help='')@click.argument('', type=click.STRING)@click.argument('', 
type=click.STRING)@click.option('','',default=None,type=click.STRING,help='')@click.option('','',type=click.Choice(STORAGE_MAPPING.keys()),help='')@click.option('','',default=None,type=click.STRING,help='')@click.option('','',type=click.Choice(STORAGE_MAPPING.keys()),help='')def migrate_stash(source_stash_path,source_passphrase,source_backend,destination_stash_path,destination_passphrase,destination_backend):", "body": "click.echo(''.format(source_stash_path, destination_stash_path))try:migrate(src_path=source_stash_path,src_passphrase=source_passphrase,src_backend=source_backend,dst_path=destination_stash_path,dst_passphrase=destination_passphrase,dst_backend=destination_backend)except GhostError as ex:sys.exit(ex)click.echo('')", "docstring": "Migrate all keys from a source stash to a destination stash.\n\n `SOURCE_STASH_PATH` and `DESTINATION_STASH_PATH` are the paths\n to the stashs you wish to perform the migration on.", "id": "f575:m23"} {"signature": "def generate_passphrase(size=):", "body": "chars = string.ascii_lowercase + string.ascii_uppercase + string.digitsreturn str(''.join(random.choice(chars) for _ in range(size)))", "docstring": "Return a generate string `size` long based on lowercase, uppercase,\n and digit chars", "id": "f575:m5"} {"signature": "def init(self):", "body": "try:self.client.create_bucket(Bucket=self.db_path,CreateBucketConfiguration=self.bucket_configuration)except botocore.exceptions.ClientError as e:if '' not in str(e.response['']['']):raise e", "docstring": "Create a bucket.", "id": "f575:c6:m1"} {"signature": "def search(self, body, filter_path, **kwargs):", "body": "if '' in body['']:items = list(self.store.items())for name, key in items:return self.store[name]else:return {'': {'': []}}else:return self.store.get(body[''][''][''])", "docstring": "{\n u'hits': {\n u'hits': [\n {\n u'_id': u'AVewADAWUnUKEMeMQ4QB',\n u'_source': {\n u'description': None,\n u'created_at':\n u'2016-10-10 22:09:44',\n u'modified_at':\n u'2016-10-10 22:09:44',\n u'value': u'the_value',\n u'name': u'aws',\n u'uid': u'7a1caa7d-14d4-4045-842c-66adf22190b5',\n u'metadata': None\n }\n },\n ]\n }\n}", "id": "f576:c8:m1"} {"signature": "def send_message(self, fakeid, content):", "body": "url = ''payload = {'': fakeid,'': ,'': self.__token,'': content,'': ,}headers = {'': '','': ''.format(fakeid=fakeid,token=self.__token,),'': self.__cookies,}r = requests.post(url, data=payload, headers=headers)try:message = json.loads(r.text)except ValueError:raise NeedLoginError(r.text)try:if message[''][''] == -:raise ValueError('')if message[''][''] != :raise NeedLoginError(r.text)except KeyError:raise NeedLoginError(r.text)", "docstring": "\u4e3b\u52a8\u53d1\u9001\u6587\u672c\u6d88\u606f\n:param fakeid: \u7528\u6237\u7684 UID (\u5373 fakeid )\n:param content: \u53d1\u9001\u7684\u5185\u5bb9\n:raises NeedLoginError: \u64cd\u4f5c\u672a\u6267\u884c\u6210\u529f, \u9700\u8981\u518d\u6b21\u5c1d\u8bd5\u767b\u5f55, \u5f02\u5e38\u5185\u5bb9\u4e3a\u670d\u52a1\u5668\u8fd4\u56de\u7684\u9519\u8bef\u6570\u636e\n:raises ValueError: \u53c2\u6570\u51fa\u9519, \u5177\u4f53\u5185\u5bb9\u6709 ``fake id not exist``", "id": "f584:c0:m5"} {"signature": "def login(self, verify_code=''):", "body": "url = ''payload = {'': self.__username,'': self.__password,'': verify_code,'': '',}headers = {'': '','': '','': self.__cookies,}r = requests.post(url, data=payload, headers=headers)s = re.search(r'', r.text)if not s:try:error_code = json.loads(r.text)['']['']except (KeyError, ValueError):raise LoginError(r.text)if error_code 
in [-, -]:raise LoginVerifyCodeError(r.text)elif re.search(r'', r.text):raise LoginError('')else:raise LoginError(r.text)self.__token = int(s.group())self.__cookies = ''for cookie in r.cookies:self.__cookies += cookie.name + '' + cookie.value + ''", "docstring": "\u767b\u5f55\u5fae\u4fe1\u516c\u4f17\u5e73\u53f0\n\u6ce8\u610f\u5728\u5b9e\u4f8b\u5316 ``WechatExt`` \u7684\u65f6\u5019\uff0c\u5982\u679c\u6ca1\u6709\u4f20\u5165 ``token`` \u53ca ``cookies`` \uff0c\u5c06\u4f1a\u81ea\u52a8\u8c03\u7528\u8be5\u65b9\u6cd5\uff0c\u65e0\u9700\u624b\u52a8\u8c03\u7528\n\u5f53\u4e14\u4ec5\u5f53\u6355\u83b7\u5230 ``NeedLoginError`` \u5f02\u5e38\u65f6\u624d\u9700\u8981\u8c03\u7528\u6b64\u65b9\u6cd5\u8fdb\u884c\u767b\u5f55\u91cd\u8bd5\n:param verify_code: \u9a8c\u8bc1\u7801, \u4e0d\u4f20\u5165\u5219\u4e3a\u65e0\u9a8c\u8bc1\u7801\n:raises LoginVerifyCodeError: \u9700\u8981\u9a8c\u8bc1\u7801\u6216\u9a8c\u8bc1\u7801\u51fa\u9519\uff0c\u8be5\u5f02\u5e38\u4e3a ``LoginError`` \u7684\u5b50\u7c7b\n:raises LoginError: \u767b\u5f55\u51fa\u9519\u5f02\u5e38\uff0c\u5f02\u5e38\u5185\u5bb9\u4e3a\u5fae\u4fe1\u670d\u52a1\u5668\u54cd\u5e94\u7684\u5185\u5bb9\uff0c\u53ef\u4f5c\u4e3a\u65e5\u5fd7\u8bb0\u5f55\u4e0b\u6765", "id": "f584:c0:m1"} {"signature": "def _init_appid(self):", "body": "if not self.__appid:self._init_plugin_token_appid()", "docstring": "\u521d\u59cb\u5316\u516c\u4f17\u53f7\u81ea\u8eab\u7684 ``appid`` \u503c\n:raises NeedLoginError: \u64cd\u4f5c\u672a\u6267\u884c\u6210\u529f, \u9700\u8981\u518d\u6b21\u5c1d\u8bd5\u767b\u5f55, \u5f02\u5e38\u5185\u5bb9\u4e3a\u670d\u52a1\u5668\u8fd4\u56de\u7684\u9519\u8bef\u6570\u636e", "id": "f584:c0:m30"} {"signature": "def add_news(self, news):", "body": "if not news:raise ValueError('')for item in news:if '' not in item or '' not in item:raise ValueError('')url = ''.format(token=self.__token,)payload = {'': self.__token,'': ,'': '','': '','': ,'': '','': '',}headers = {'': ''.format(token=self.__token),'': self.__cookies,}i = for item in news:payload[''+str(i)] = item.get('')payload[''+str(i)] = item.get('')payload[''+str(i)] = item.get('')payload[''+str(i)] = item.get('')payload[''+str(i)] = item.get('')payload[''+str(i)] = item.get('')i += payload[''] = ir = requests.post(url, data=payload, headers=headers)try:message = json.loads(r.text)except ValueError:raise NeedLoginError(r.text)try:if message[''] != '':raise ValueError(r.text)except KeyError:raise NeedLoginError(r.text)", "docstring": "\u5728\u7d20\u6750\u5e93\u4e2d\u521b\u5efa\u56fe\u6587\u6d88\u606f\n\n:param news: list \u5bf9\u8c61, \u5176\u4e2d\u7684\u6bcf\u4e2a\u5143\u7d20\u4e3a\u4e00\u4e2a dict \u5bf9\u8c61, \u4ee3\u8868\u4e00\u6761\u56fe\u6587, key \u503c\u5206\u522b\u4e3a ``title``, ``author``, ``summary``,\n ``content``, ``picture_id``, ``from_url``, \u5bf9\u5e94\u5185\u5bb9\u4e3a\u6807\u9898, \u4f5c\u8005, \u6458\u8981, \u5185\u5bb9, \u7d20\u6750\u5e93\u91cc\u7684\n \u56fe\u7247ID(\u53ef\u901a\u8fc7 ``upload_file`` \u51fd\u6570\u4e0a\u4f20\u83b7\u53d6), \u6765\u6e90\u94fe\u63a5\u3002\n\n \u5176\u4e2d\u5fc5\u987b\u63d0\u4f9b\u7684 key \u503c\u4e3a ``title`` \u548c ``content``\n\n \u793a\u4f8b::\n\n [\n {\n 'title': '\u56fe\u6587\u6807\u9898',\n 'author': '\u56fe\u6587\u4f5c\u8005',\n 'summary': '\u56fe\u6587\u6458\u8981',\n 'content': '\u56fe\u6587\u5185\u5bb9',\n 'picture_id': '23412341',\n 'from_url': 'http://www.baidu.com',\n },\n {\n 'title': '\u6700\u5c11\u56fe\u6587\u6807\u9898',\n 'content': '\u56fe\u6587\u5185\u5bb9',\n }\n ]\n:raises ValueError: 
\u53c2\u6570\u63d0\u4f9b\u9519\u8bef\u65f6\u629b\u51fa\n:raises NeedLoginError: \u64cd\u4f5c\u672a\u6267\u884c\u6210\u529f, \u9700\u8981\u518d\u6b21\u5c1d\u8bd5\u767b\u5f55, \u5f02\u5e38\u5185\u5bb9\u4e3a\u670d\u52a1\u5668\u8fd4\u56de\u7684\u9519\u8bef\u6570\u636e", "id": "f584:c0:m12"} {"signature": "def send_audio(self, fakeid, fid):", "body": "return self.send_file(fakeid, fid, )", "docstring": "\u7ed9\u6307\u5b9a\u7528\u6237 fakeid \u53d1\u9001\u8bed\u97f3\u4fe1\u606f\n:param fakeid: \u7528\u6237\u7684 UID (\u5373 fakeid)\n:param fid: \u6587\u4ef6 ID\n:raises NeedLoginError: \u64cd\u4f5c\u672a\u6267\u884c\u6210\u529f, \u9700\u8981\u518d\u6b21\u5c1d\u8bd5\u767b\u5f55, \u5f02\u5e38\u5185\u5bb9\u4e3a\u670d\u52a1\u5668\u8fd4\u56de\u7684\u9519\u8bef\u6570\u636e\n:raises ValueError: \u53c2\u6570\u51fa\u9519, \u9519\u8bef\u539f\u56e0\u76f4\u63a5\u6253\u5370\u5f02\u5e38\u5373\u53ef (\u5e38\u89c1\u9519\u8bef\u5185\u5bb9: ``system error`` \u6216 ``can not send this type of msg``: \u6587\u4ef6\u7c7b\u578b\u4e0d\u5339\u914d, ``user not exist``: \u7528\u6237 fakeid \u4e0d\u5b58\u5728, ``file not exist``: \u6587\u4ef6 fid \u4e0d\u5b58\u5728, \u8fd8\u6709\u5176\u4ed6\u9519\u8bef\u8bf7\u81ea\u884c\u68c0\u67e5)", "id": "f584:c0:m17"} {"signature": "def send_news(self, fakeid, msgid):", "body": "url = ''payload = {'': '','': '','': fakeid,'': ,'': self.__token,'': msgid,'': msgid,'': '','': ,'': random.random(),}headers = {'': '','': ''.format(fakeid=fakeid,),'': self.__cookies,}r = requests.post(url, data=payload, headers=headers)try:message = json.loads(r.text)except ValueError:raise NeedLoginError(r.text)try:if message[''][''] == or message[''][''] == -:raise ValueError('')if message[''][''] == :raise ValueError('')if message[''][''] != :raise NeedLoginError(r.text)except KeyError:raise NeedLoginError(r.text)", "docstring": "\u5411\u6307\u5b9a\u7528\u6237\u53d1\u9001\u56fe\u6587\u6d88\u606f \uff08\u5fc5\u987b\u4ece\u56fe\u6587\u5e93\u91cc\u9009\u53d6\u6d88\u606fID\u4f20\u5165)\n:param fakeid: \u7528\u6237\u7684 UID (\u5373 fakeid)\n:param msgid: \u56fe\u6587\u6d88\u606f ID\n:raises NeedLoginError: \u64cd\u4f5c\u672a\u6267\u884c\u6210\u529f, \u9700\u8981\u518d\u6b21\u5c1d\u8bd5\u767b\u5f55, \u5f02\u5e38\u5185\u5bb9\u4e3a\u670d\u52a1\u5668\u8fd4\u56de\u7684\u9519\u8bef\u6570\u636e\n:raises ValueError: \u53c2\u6570\u51fa\u9519, \u5177\u4f53\u5185\u5bb9\u6709 ``fake id not exist`` \u53ca ``message id not exist``", "id": "f584:c0:m11"} {"signature": "def send_image(self, fakeid, fid):", "body": "return self.send_file(fakeid, fid, )", "docstring": "\u7ed9\u6307\u5b9a\u7528\u6237 fakeid \u53d1\u9001\u56fe\u7247\u4fe1\u606f\n:param fakeid: \u7528\u6237\u7684 UID (\u5373 fakeid)\n:param fid: \u6587\u4ef6 ID\n:raises NeedLoginError: \u64cd\u4f5c\u672a\u6267\u884c\u6210\u529f, \u9700\u8981\u518d\u6b21\u5c1d\u8bd5\u767b\u5f55, \u5f02\u5e38\u5185\u5bb9\u4e3a\u670d\u52a1\u5668\u8fd4\u56de\u7684\u9519\u8bef\u6570\u636e\n:raises ValueError: \u53c2\u6570\u51fa\u9519, \u9519\u8bef\u539f\u56e0\u76f4\u63a5\u6253\u5370\u5f02\u5e38\u5373\u53ef (\u5e38\u89c1\u9519\u8bef\u5185\u5bb9: ``system error`` \u6216 ``can not send this type of msg``: \u6587\u4ef6\u7c7b\u578b\u4e0d\u5339\u914d, ``user not exist``: \u7528\u6237 fakeid \u4e0d\u5b58\u5728, ``file not exist``: \u6587\u4ef6 fid \u4e0d\u5b58\u5728, \u8fd8\u6709\u5176\u4ed6\u9519\u8bef\u8bf7\u81ea\u884c\u68c0\u67e5)", "id": "f584:c0:m16"} {"signature": "def get_file_list(self, type, page, count=):", "body": "url = 
''.format(token=self.__token,type=type,random=round(random.random(), ),begin=page*count,count=count,)headers = {'': '','': ''.format(token=self.__token,),'': self.__cookies,}r = requests.get(url, headers=headers)try:message = json.dumps(json.loads(r.text)[''], ensure_ascii=False)except (KeyError, ValueError):raise NeedLoginError(r.text)return message", "docstring": "\u83b7\u53d6\u7d20\u6750\u5e93\u6587\u4ef6\u5217\u8868\n\n\u8fd4\u56deJSON\u793a\u4f8b::\n\n {\n \"type\": 2,\n \"file_item\": [\n {\n \"update_time\": 1408723089,\n \"name\": \"Doraemonext.png\",\n \"play_length\": 0,\n \"file_id\": 206471048,\n \"type\": 2,\n \"size\": \"53.7 K\"\n },\n {\n \"update_time\": 1408722328,\n \"name\": \"Doraemonext.png\",\n \"play_length\": 0,\n \"file_id\": 206470809,\n \"type\": 2,\n \"size\": \"53.7 K\"\n }\n ],\n \"file_cnt\": {\n \"voice_cnt\": 1,\n \"app_msg_cnt\": 10,\n \"commondity_msg_cnt\": 0,\n \"video_cnt\": 0,\n \"img_cnt\": 29,\n \"video_msg_cnt\": 0,\n \"total\": 40\n }\n }\n\n:param type: \u6587\u4ef6\u7c7b\u578b (2: \u56fe\u7247, 3: \u97f3\u9891, 4: \u89c6\u9891)\n:param page: \u9875\u7801 (\u4ece 0 \u5f00\u59cb)\n:param count: \u6bcf\u9875\u5927\u5c0f\n:return: \u8fd4\u56de\u7684 JSON \u6570\u636e\n:raises NeedLoginError: \u64cd\u4f5c\u672a\u6267\u884c\u6210\u529f, \u9700\u8981\u518d\u6b21\u5c1d\u8bd5\u767b\u5f55, \u5f02\u5e38\u5185\u5bb9\u4e3a\u670d\u52a1\u5668\u8fd4\u56de\u7684\u9519\u8bef\u6570\u636e", "id": "f584:c0:m15"} {"signature": "def stat_article_detail_list(self, page=, start_date=str(date.today()+timedelta(days=-)), end_date=str(date.today())):", "body": "self._init_plugin_token_appid()url = ''.format(page=page,appid=self.__appid,token=self.__plugin_token,rnd=int(time.time()),start_date=start_date,end_date=end_date,)headers = {'': '','': ''.format(page=page,appid=self.__appid,token=self.__plugin_token,rnd=int(time.time()),start_date=start_date,end_date=end_date,),'': self.__cookies,}r = requests.get(url, headers=headers)if not re.search(r'', self.__cookies):for cookie in r.cookies:self.__cookies += cookie.name + '' + cookie.value + ''try:data = json.loads(r.text)if data.get(''):raise NeedLoginError(r.text)message = json.dumps(data, ensure_ascii=False)except (KeyError, ValueError):raise NeedLoginError(r.text)return message", "docstring": "\u83b7\u53d6\u56fe\u6587\u5206\u6790\u6570\u636e\n\n\u8fd4\u56deJSON\u793a\u4f8b ::\n\n {\n \"hasMore\": true, // \u8bf4\u660e\u662f\u5426\u53ef\u4ee5\u589e\u52a0 page \u9875\u7801\u6765\u83b7\u53d6\u6570\u636e\n \"data\": [\n {\n \"index\": [\n \"20,816\", // \u9001\u8fbe\u4eba\u6570\n \"1,944\", // \u56fe\u6587\u9875\u9605\u8bfb\u4eba\u6570\n \"2,554\", // \u56fe\u6587\u9875\u9605\u8bfb\u6b21\u6570\n \"9.34%\", // (\u56fe\u6587\u9875\u9605\u8bfb\u4eba\u6570 / \u9001\u8fbe\u4eba\u6570)\n \"0\", // \u539f\u6587\u9875\u9605\u8bfb\u4eba\u6570\n \"0\", // \u539f\u6587\u9875\u9605\u8bfb\u6b21\u6570\n \"0%\", // \uff08\u539f\u6587\u9875\u9605\u8bfb\u4eba\u6570 / \u56fe\u6587\u9875\u9605\u8bfb\u4eba\u6570)\n \"47\", // \u5206\u4eab\u8f6c\u53d1\u4eba\u6570\n \"61\", // \u5206\u4eab\u8f6c\u53d1\u6b21\u6570\n \"1\" // \u5fae\u4fe1\u6536\u85cf\u4eba\u6570\n ],\n \"time\": \"2015-01-21\",\n \"table_data\": 
\"{\\\"fields\\\":{\\\"TargetUser\\\":{\\\"thText\\\":\\\"\\\\u9001\\\\u8fbe\\\\u4eba\\\\u6570\\\",\\\"number\\\":false,\\\"colAlign\\\":\\\"center\\\",\\\"needOrder\\\":false,\\\"precision\\\":0},\\\"IntPageReadUser\\\":{\\\"thText\\\":\\\"\\\\u4eba\\\\u6570\\\",\\\"number\\\":true,\\\"colAlign\\\":\\\"right\\\",\\\"needOrder\\\":false,\\\"precision\\\":0},\\\"IntPageReadCount\\\":{\\\"thText\\\":\\\"\\\\u6b21\\\\u6570\\\",\\\"number\\\":true,\\\"colAlign\\\":\\\"right\\\",\\\"needOrder\\\":false,\\\"precision\\\":0},\\\"PageConversion\\\":{\\\"thText\\\":\\\"\\\\u56fe\\\\u6587\\\\u8f6c\\\\u5316\\\\u7387\\\",\\\"number\\\":true,\\\"colAlign\\\":\\\"right\\\",\\\"needOrder\\\":false,\\\"precision\\\":\\\"2\\\"},\\\"OriPageReadUser\\\":{\\\"thText\\\":\\\"\\\\u4eba\\\\u6570\\\",\\\"number\\\":true,\\\"colAlign\\\":\\\"right\\\",\\\"needOrder\\\":false,\\\"precision\\\":0},\\\"OriPageReadCount\\\":{\\\"thText\\\":\\\"\\\\u6b21\\\\u6570\\\",\\\"number\\\":true,\\\"colAlign\\\":\\\"right\\\",\\\"needOrder\\\":false,\\\"precision\\\":0},\\\"Conversion\\\":{\\\"thText\\\":\\\"\\\\u539f\\\\u6587\\\\u8f6c\\\\u5316\\\\u7387\\\",\\\"number\\\":true,\\\"colAlign\\\":\\\"right\\\",\\\"needOrder\\\":false,\\\"precision\\\":\\\"2\\\"},\\\"ShareUser\\\":{\\\"thText\\\":\\\"\\\\u4eba\\\\u6570\\\",\\\"number\\\":true,\\\"colAlign\\\":\\\"right\\\",\\\"needOrder\\\":false,\\\"precision\\\":0},\\\"ShareCount\\\":{\\\"thText\\\":\\\"\\\\u6b21\\\\u6570\\\",\\\"number\\\":true,\\\"colAlign\\\":\\\"right\\\",\\\"needOrder\\\":false,\\\"precision\\\":0},\\\"AddToFavUser\\\":{\\\"thText\\\":\\\"\\\\u5fae\\\\u4fe1\\\\u6536\\\\u85cf\\\\u4eba\\\\u6570\\\",\\\"number\\\":true,\\\"colAlign\\\":\\\"right\\\",\\\"needOrder\\\":false,\\\"precision\\\":0}},\\\"data\\\":[{\\\"MsgId\\\":\\\"205104027_1\\\",\\\"Title\\\":\\\"\\\\u56de\\\\u5bb6\\\\u5927\\\\u4f5c\\\\u6218 | 
\\\\u5feb\\\\u6765\\\\u5e26\\\\u6211\\\\u56de\\\\u5bb6\\\",\\\"RefDate\\\":\\\"20150121\\\",\\\"TargetUser\\\":\\\"20,816\\\",\\\"IntPageReadUser\\\":\\\"1,944\\\",\\\"IntPageReadCount\\\":\\\"2,554\\\",\\\"OriPageReadUser\\\":\\\"0\\\",\\\"OriPageReadCount\\\":\\\"0\\\",\\\"ShareUser\\\":\\\"47\\\",\\\"ShareCount\\\":\\\"61\\\",\\\"AddToFavUser\\\":\\\"1\\\",\\\"Conversion\\\":\\\"0%\\\",\\\"PageConversion\\\":\\\"9.34%\\\"}],\\\"fixedRow\\\":false,\\\"cssSetting\\\":{\\\"\\\":\\\"\\\"},\\\"complexHeader\\\":[[{\\\"field\\\":\\\"TargetUser\\\",\\\"thText\\\":\\\"\\\\u9001\\\\u8fbe\\\\u4eba\\\\u6570\\\",\\\"rowSpan\\\":2,\\\"colSpan\\\":1},{\\\"thText\\\":\\\"\\\\u56fe\\\\u6587\\\\u9875\\\\u9605\\\\u8bfb\\\",\\\"colSpan\\\":3},{\\\"thText\\\":\\\"\\\\u539f\\\\u6587\\\\u9875\\\\u9605\\\\u8bfb\\\",\\\"colSpan\\\":3},{\\\"thText\\\":\\\"\\\\u5206\\\\u4eab\\\\u8f6c\\\\u53d1\\\",\\\"colSpan\\\":2},{\\\"field\\\":\\\"AddToFavUser\\\",\\\"thText\\\":\\\"\\\\u5fae\\\\u4fe1\\\\u6536\\\\u85cf\\\\u4eba\\\\u6570\\\",\\\"rowSpan\\\":2,\\\"enable\\\":true}],[{\\\"field\\\":\\\"IntPageReadUser\\\",\\\"thText\\\":\\\"\\\\u4eba\\\\u6570\\\"},{\\\"field\\\":\\\"IntPageReadCount\\\",\\\"thText\\\":\\\"\\\\u6b21\\\\u6570\\\"},{\\\"field\\\":\\\"PageConversion\\\",\\\"thText\\\":\\\"\\\\u56fe\\\\u6587\\\\u8f6c\\\\u5316\\\\u7387\\\"},{\\\"field\\\":\\\"OriPageReadUser\\\",\\\"thText\\\":\\\"\\\\u4eba\\\\u6570\\\"},{\\\"field\\\":\\\"OriPageReadCount\\\",\\\"thText\\\":\\\"\\\\u6b21\\\\u6570\\\"},{\\\"field\\\":\\\"Conversion\\\",\\\"thText\\\":\\\"\\\\u539f\\\\u6587\\\\u8f6c\\\\u5316\\\\u7387\\\"},{\\\"field\\\":\\\"ShareUser\\\",\\\"thText\\\":\\\"\\\\u4eba\\\\u6570\\\"},{\\\"field\\\":\\\"ShareCount\\\",\\\"thText\\\":\\\"\\\\u6b21\\\\u6570\\\"}]]}\",\n \"id\": \"205104027_1\",\n \"title\": \"\u56de\u5bb6\u5927\u4f5c\u6218 | \u5feb\u6765\u5e26\u6211\u56de\u5bb6\"\n },\n {\n \"index\": [\n \"20,786\", // \u9001\u8fbe\u4eba\u6570\n \"2,598\", // \u56fe\u6587\u9875\u9605\u8bfb\u4eba\u6570\n \"3,368\", // \u56fe\u6587\u9875\u9605\u8bfb\u6b21\u6570\n \"12.5%\", // (\u56fe\u6587\u9875\u9605\u8bfb\u4eba\u6570 / \u9001\u8fbe\u4eba\u6570)\n \"0\", // \u539f\u6587\u9875\u9605\u8bfb\u4eba\u6570\n \"0\", // \u539f\u6587\u9875\u9605\u8bfb\u6b21\u6570\n \"0%\", // \uff08\u539f\u6587\u9875\u9605\u8bfb\u4eba\u6570 / \u56fe\u6587\u9875\u9605\u8bfb\u4eba\u6570)\n \"73\", // \u5206\u4eab\u8f6c\u53d1\u4eba\u6570\n \"98\", // \u5206\u4eab\u8f6c\u53d1\u6b21\u6570\n \"1\" // \u5fae\u4fe1\u6536\u85cf\u4eba\u6570\n ],\n \"time\": \"2015-01-20\",\n \"table_data\": 
\"{\\\"fields\\\":{\\\"TargetUser\\\":{\\\"thText\\\":\\\"\\\\u9001\\\\u8fbe\\\\u4eba\\\\u6570\\\",\\\"number\\\":false,\\\"colAlign\\\":\\\"center\\\",\\\"needOrder\\\":false,\\\"precision\\\":0},\\\"IntPageReadUser\\\":{\\\"thText\\\":\\\"\\\\u4eba\\\\u6570\\\",\\\"number\\\":true,\\\"colAlign\\\":\\\"right\\\",\\\"needOrder\\\":false,\\\"precision\\\":0},\\\"IntPageReadCount\\\":{\\\"thText\\\":\\\"\\\\u6b21\\\\u6570\\\",\\\"number\\\":true,\\\"colAlign\\\":\\\"right\\\",\\\"needOrder\\\":false,\\\"precision\\\":0},\\\"PageConversion\\\":{\\\"thText\\\":\\\"\\\\u56fe\\\\u6587\\\\u8f6c\\\\u5316\\\\u7387\\\",\\\"number\\\":true,\\\"colAlign\\\":\\\"right\\\",\\\"needOrder\\\":false,\\\"precision\\\":\\\"2\\\"},\\\"OriPageReadUser\\\":{\\\"thText\\\":\\\"\\\\u4eba\\\\u6570\\\",\\\"number\\\":true,\\\"colAlign\\\":\\\"right\\\",\\\"needOrder\\\":false,\\\"precision\\\":0},\\\"OriPageReadCount\\\":{\\\"thText\\\":\\\"\\\\u6b21\\\\u6570\\\",\\\"number\\\":true,\\\"colAlign\\\":\\\"right\\\",\\\"needOrder\\\":false,\\\"precision\\\":0},\\\"Conversion\\\":{\\\"thText\\\":\\\"\\\\u539f\\\\u6587\\\\u8f6c\\\\u5316\\\\u7387\\\",\\\"number\\\":true,\\\"colAlign\\\":\\\"right\\\",\\\"needOrder\\\":false,\\\"precision\\\":\\\"2\\\"},\\\"ShareUser\\\":{\\\"thText\\\":\\\"\\\\u4eba\\\\u6570\\\",\\\"number\\\":true,\\\"colAlign\\\":\\\"right\\\",\\\"needOrder\\\":false,\\\"precision\\\":0},\\\"ShareCount\\\":{\\\"thText\\\":\\\"\\\\u6b21\\\\u6570\\\",\\\"number\\\":true,\\\"colAlign\\\":\\\"right\\\",\\\"needOrder\\\":false,\\\"precision\\\":0},\\\"AddToFavUser\\\":{\\\"thText\\\":\\\"\\\\u5fae\\\\u4fe1\\\\u6536\\\\u85cf\\\\u4eba\\\\u6570\\\",\\\"number\\\":true,\\\"colAlign\\\":\\\"right\\\",\\\"needOrder\\\":false,\\\"precision\\\":0}},\\\"data\\\":[{\\\"MsgId\\\":\\\"205066833_1\\\",\\\"Title\\\":\\\"\\\\u56de\\\\u5bb6\\\\u5927\\\\u4f5c\\\\u6218 | 
\\\\u5982\\\\u4f55\\\\u4f18\\\\u96c5\\\\u5730\\\\u53bb\\\\u5f80\\\\u8f66\\\\u7ad9\\\\u548c\\\\u673a\\\\u573a\\\",\\\"RefDate\\\":\\\"20150120\\\",\\\"TargetUser\\\":\\\"20,786\\\",\\\"IntPageReadUser\\\":\\\"2,598\\\",\\\"IntPageReadCount\\\":\\\"3,368\\\",\\\"OriPageReadUser\\\":\\\"0\\\",\\\"OriPageReadCount\\\":\\\"0\\\",\\\"ShareUser\\\":\\\"73\\\",\\\"ShareCount\\\":\\\"98\\\",\\\"AddToFavUser\\\":\\\"1\\\",\\\"Conversion\\\":\\\"0%\\\",\\\"PageConversion\\\":\\\"12.5%\\\"}],\\\"fixedRow\\\":false,\\\"cssSetting\\\":{\\\"\\\":\\\"\\\"},\\\"complexHeader\\\":[[{\\\"field\\\":\\\"TargetUser\\\",\\\"thText\\\":\\\"\\\\u9001\\\\u8fbe\\\\u4eba\\\\u6570\\\",\\\"rowSpan\\\":2,\\\"colSpan\\\":1},{\\\"thText\\\":\\\"\\\\u56fe\\\\u6587\\\\u9875\\\\u9605\\\\u8bfb\\\",\\\"colSpan\\\":3},{\\\"thText\\\":\\\"\\\\u539f\\\\u6587\\\\u9875\\\\u9605\\\\u8bfb\\\",\\\"colSpan\\\":3},{\\\"thText\\\":\\\"\\\\u5206\\\\u4eab\\\\u8f6c\\\\u53d1\\\",\\\"colSpan\\\":2},{\\\"field\\\":\\\"AddToFavUser\\\",\\\"thText\\\":\\\"\\\\u5fae\\\\u4fe1\\\\u6536\\\\u85cf\\\\u4eba\\\\u6570\\\",\\\"rowSpan\\\":2,\\\"enable\\\":true}],[{\\\"field\\\":\\\"IntPageReadUser\\\",\\\"thText\\\":\\\"\\\\u4eba\\\\u6570\\\"},{\\\"field\\\":\\\"IntPageReadCount\\\",\\\"thText\\\":\\\"\\\\u6b21\\\\u6570\\\"},{\\\"field\\\":\\\"PageConversion\\\",\\\"thText\\\":\\\"\\\\u56fe\\\\u6587\\\\u8f6c\\\\u5316\\\\u7387\\\"},{\\\"field\\\":\\\"OriPageReadUser\\\",\\\"thText\\\":\\\"\\\\u4eba\\\\u6570\\\"},{\\\"field\\\":\\\"OriPageReadCount\\\",\\\"thText\\\":\\\"\\\\u6b21\\\\u6570\\\"},{\\\"field\\\":\\\"Conversion\\\",\\\"thText\\\":\\\"\\\\u539f\\\\u6587\\\\u8f6c\\\\u5316\\\\u7387\\\"},{\\\"field\\\":\\\"ShareUser\\\",\\\"thText\\\":\\\"\\\\u4eba\\\\u6570\\\"},{\\\"field\\\":\\\"ShareCount\\\",\\\"thText\\\":\\\"\\\\u6b21\\\\u6570\\\"}]]}\",\n \"id\": \"205066833_1\",\n \"title\": \"\u56de\u5bb6\u5927\u4f5c\u6218 | \u5982\u4f55\u4f18\u96c5\u5730\u53bb\u5f80\u8f66\u7ad9\u548c\u673a\u573a\"\n },\n {\n \"index\": [\n \"20,745\", // \u9001\u8fbe\u4eba\u6570\n \"1,355\", // \u56fe\u6587\u9875\u9605\u8bfb\u4eba\u6570\n \"1,839\", // \u56fe\u6587\u9875\u9605\u8bfb\u6b21\u6570\n \"6.53%\", // (\u56fe\u6587\u9875\u9605\u8bfb\u4eba\u6570 / \u9001\u8fbe\u4eba\u6570)\n \"145\", // \u539f\u6587\u9875\u9605\u8bfb\u4eba\u6570\n \"184\", // \u539f\u6587\u9875\u9605\u8bfb\u6b21\u6570\n \"10.7%\", // \uff08\u539f\u6587\u9875\u9605\u8bfb\u4eba\u6570 / \u56fe\u6587\u9875\u9605\u8bfb\u4eba\u6570)\n \"48\", // \u5206\u4eab\u8f6c\u53d1\u4eba\u6570\n \"64\", // \u5206\u4eab\u8f6c\u53d1\u6b21\u6570\n \"5\" // \u5fae\u4fe1\u6536\u85cf\u4eba\u6570\n ],\n \"time\": \"2015-01-19\",\n \"table_data\": 
\"{\\\"fields\\\":{\\\"TargetUser\\\":{\\\"thText\\\":\\\"\\\\u9001\\\\u8fbe\\\\u4eba\\\\u6570\\\",\\\"number\\\":false,\\\"colAlign\\\":\\\"center\\\",\\\"needOrder\\\":false,\\\"precision\\\":0},\\\"IntPageReadUser\\\":{\\\"thText\\\":\\\"\\\\u4eba\\\\u6570\\\",\\\"number\\\":true,\\\"colAlign\\\":\\\"right\\\",\\\"needOrder\\\":false,\\\"precision\\\":0},\\\"IntPageReadCount\\\":{\\\"thText\\\":\\\"\\\\u6b21\\\\u6570\\\",\\\"number\\\":true,\\\"colAlign\\\":\\\"right\\\",\\\"needOrder\\\":false,\\\"precision\\\":0},\\\"PageConversion\\\":{\\\"thText\\\":\\\"\\\\u56fe\\\\u6587\\\\u8f6c\\\\u5316\\\\u7387\\\",\\\"number\\\":true,\\\"colAlign\\\":\\\"right\\\",\\\"needOrder\\\":false,\\\"precision\\\":\\\"2\\\"},\\\"OriPageReadUser\\\":{\\\"thText\\\":\\\"\\\\u4eba\\\\u6570\\\",\\\"number\\\":true,\\\"colAlign\\\":\\\"right\\\",\\\"needOrder\\\":false,\\\"precision\\\":0},\\\"OriPageReadCount\\\":{\\\"thText\\\":\\\"\\\\u6b21\\\\u6570\\\",\\\"number\\\":true,\\\"colAlign\\\":\\\"right\\\",\\\"needOrder\\\":false,\\\"precision\\\":0},\\\"Conversion\\\":{\\\"thText\\\":\\\"\\\\u539f\\\\u6587\\\\u8f6c\\\\u5316\\\\u7387\\\",\\\"number\\\":true,\\\"colAlign\\\":\\\"right\\\",\\\"needOrder\\\":false,\\\"precision\\\":\\\"2\\\"},\\\"ShareUser\\\":{\\\"thText\\\":\\\"\\\\u4eba\\\\u6570\\\",\\\"number\\\":true,\\\"colAlign\\\":\\\"right\\\",\\\"needOrder\\\":false,\\\"precision\\\":0},\\\"ShareCount\\\":{\\\"thText\\\":\\\"\\\\u6b21\\\\u6570\\\",\\\"number\\\":true,\\\"colAlign\\\":\\\"right\\\",\\\"needOrder\\\":false,\\\"precision\\\":0},\\\"AddToFavUser\\\":{\\\"thText\\\":\\\"\\\\u5fae\\\\u4fe1\\\\u6536\\\\u85cf\\\\u4eba\\\\u6570\\\",\\\"number\\\":true,\\\"colAlign\\\":\\\"right\\\",\\\"needOrder\\\":false,\\\"precision\\\":0}},\\\"data\\\":[{\\\"MsgId\\\":\\\"205028693_1\\\",\\\"Title\\\":\\\"\\\\u5145\\\\u7535\\\\u65f6\\\\u95f4 | 
\\\\u542c\\\\u542c\\\\u7535\\\\u53f0\\\\uff0c\\\\u4f18\\\\u96c5\\\\u5730\\\\u63d0\\\\u5347\\\\u5b66\\\\u4e60\\\\u6548\\\\u7387\\\",\\\"RefDate\\\":\\\"20150119\\\",\\\"TargetUser\\\":\\\"20,745\\\",\\\"IntPageReadUser\\\":\\\"1,355\\\",\\\"IntPageReadCount\\\":\\\"1,839\\\",\\\"OriPageReadUser\\\":\\\"145\\\",\\\"OriPageReadCount\\\":\\\"184\\\",\\\"ShareUser\\\":\\\"48\\\",\\\"ShareCount\\\":\\\"64\\\",\\\"AddToFavUser\\\":\\\"5\\\",\\\"Conversion\\\":\\\"10.7%\\\",\\\"PageConversion\\\":\\\"6.53%\\\"}],\\\"fixedRow\\\":false,\\\"cssSetting\\\":{\\\"\\\":\\\"\\\"},\\\"complexHeader\\\":[[{\\\"field\\\":\\\"TargetUser\\\",\\\"thText\\\":\\\"\\\\u9001\\\\u8fbe\\\\u4eba\\\\u6570\\\",\\\"rowSpan\\\":2,\\\"colSpan\\\":1},{\\\"thText\\\":\\\"\\\\u56fe\\\\u6587\\\\u9875\\\\u9605\\\\u8bfb\\\",\\\"colSpan\\\":3},{\\\"thText\\\":\\\"\\\\u539f\\\\u6587\\\\u9875\\\\u9605\\\\u8bfb\\\",\\\"colSpan\\\":3},{\\\"thText\\\":\\\"\\\\u5206\\\\u4eab\\\\u8f6c\\\\u53d1\\\",\\\"colSpan\\\":2},{\\\"field\\\":\\\"AddToFavUser\\\",\\\"thText\\\":\\\"\\\\u5fae\\\\u4fe1\\\\u6536\\\\u85cf\\\\u4eba\\\\u6570\\\",\\\"rowSpan\\\":2,\\\"enable\\\":true}],[{\\\"field\\\":\\\"IntPageReadUser\\\",\\\"thText\\\":\\\"\\\\u4eba\\\\u6570\\\"},{\\\"field\\\":\\\"IntPageReadCount\\\",\\\"thText\\\":\\\"\\\\u6b21\\\\u6570\\\"},{\\\"field\\\":\\\"PageConversion\\\",\\\"thText\\\":\\\"\\\\u56fe\\\\u6587\\\\u8f6c\\\\u5316\\\\u7387\\\"},{\\\"field\\\":\\\"OriPageReadUser\\\",\\\"thText\\\":\\\"\\\\u4eba\\\\u6570\\\"},{\\\"field\\\":\\\"OriPageReadCount\\\",\\\"thText\\\":\\\"\\\\u6b21\\\\u6570\\\"},{\\\"field\\\":\\\"Conversion\\\",\\\"thText\\\":\\\"\\\\u539f\\\\u6587\\\\u8f6c\\\\u5316\\\\u7387\\\"},{\\\"field\\\":\\\"ShareUser\\\",\\\"thText\\\":\\\"\\\\u4eba\\\\u6570\\\"},{\\\"field\\\":\\\"ShareCount\\\",\\\"thText\\\":\\\"\\\\u6b21\\\\u6570\\\"}]]}\",\n \"id\": \"205028693_1\",\n \"title\": \"\u5145\u7535\u65f6\u95f4 | \u542c\u542c\u7535\u53f0\uff0c\u4f18\u96c5\u5730\u63d0\u5347\u5b66\u4e60\u6548\u7387\"\n }\n ]\n }\n\n:param page: \u9875\u7801 (\u7531\u4e8e\u817e\u8baf\u63a5\u53e3\u9650\u5236\uff0cpage \u4ece 1 \u5f00\u59cb\uff0c3 \u6761\u6570\u636e\u4e3a 1 \u9875)\n:param start_date: \u5f00\u59cb\u65f6\u95f4\uff0c\u9ed8\u8ba4\u662f\u4eca\u5929-30\u5929 (\u7c7b\u578b: str \u683c\u5f0f\u793a\u4f8b: \"2015-01-15\")\n:param end_date: \u7ed3\u675f\u65f6\u95f4\uff0c\u9ed8\u8ba4\u662f\u4eca\u5929 (\u7c7b\u578b: str \u683c\u5f0f\u793a\u4f8b: \"2015-02-01\")\n:return: \u8fd4\u56de\u7684 JSON \u6570\u636e\uff0c\u5177\u4f53\u7684\u5404\u9879\u5185\u5bb9\u89e3\u91ca\u53c2\u89c1\u4e0a\u9762\u7684 JSON \u8fd4\u56de\u793a\u4f8b\n:raises NeedLoginError: \u64cd\u4f5c\u672a\u6267\u884c\u6210\u529f, \u9700\u8981\u518d\u6b21\u5c1d\u8bd5\u767b\u5f55, \u5f02\u5e38\u5185\u5bb9\u4e3a\u670d\u52a1\u5668\u8fd4\u56de\u7684\u9519\u8bef\u6570\u636e", "id": "f584:c0:m7"} {"signature": "def get_user_info(self, fakeid):", "body": "url = ''payloads = {'': ,'': '','': round(random.random(), ),'': self.__token,'': '','': fakeid,}headers = {'': '','': ''.format(token=self.__token,),'': self.__cookies,}r = requests.post(url, data=payloads, headers=headers)try:message = json.dumps(json.loads(r.text)[''], ensure_ascii=False)except (KeyError, ValueError):raise NeedLoginError(r.text)return message", "docstring": "\u83b7\u53d6\u6307\u5b9a\u7528\u6237\u7684\u4e2a\u4eba\u4fe1\u606f\n\n\u8fd4\u56deJSON\u793a\u4f8b::\n\n {\n \"province\": \"\u6e56\u5317\",\n \"city\": \"\u6b66\u6c49\",\n \"gender\": 1,\n \"nick_name\": \"Doraemonext\",\n \"country\": \"\u4e2d\u56fd\",\n 
\"remark_name\": \"\",\n \"fake_id\": 844735403,\n \"signature\": \"\",\n \"group_id\": 0,\n \"user_name\": \"\"\n }\n\n:param fakeid: \u7528\u6237\u7684 UID (\u5373 fakeid)\n:return: \u8fd4\u56de\u7684 JSON \u6570\u636e\n:raises NeedLoginError: \u64cd\u4f5c\u672a\u6267\u884c\u6210\u529f, \u9700\u8981\u518d\u6b21\u5c1d\u8bd5\u767b\u5f55, \u5f02\u5e38\u5185\u5bb9\u4e3a\u670d\u52a1\u5668\u8fd4\u56de\u7684\u9519\u8bef\u6570\u636e", "id": "f584:c0:m19"} {"signature": "def _init_self_information(self):", "body": "url = ''.format(token=self.__token)headers = {'': '','': '','': self.__cookies,}r = requests.get(url, headers=headers)ticket_id = re.search(r'', r.text)if not ticket_id:raise NeedLoginError(r.text)self.__ticket_id = ticket_id.group()ticket = re.search(r'', r.text)if not ticket:raise NeedLoginError(r.text)self.__ticket = ticket.group()fakeid = re.search(r'', r.text)if not fakeid:raise NeedLoginError(r.text)self.__fakeid = fakeid.group()", "docstring": "\u521d\u59cb\u5316\u516c\u4f17\u53f7\u81ea\u8eab\u7684\u5c5e\u6027\u503c (\u76ee\u524d\u5305\u62ec ``Ticket`` \u503c \u53ca \u516c\u4f17\u53f7\u81ea\u8eab\u7684 ``fakeid`` \u503c)\n:raises NeedLoginError: \u64cd\u4f5c\u672a\u6267\u884c\u6210\u529f, \u9700\u8981\u518d\u6b21\u5c1d\u8bd5\u767b\u5f55, \u5f02\u5e38\u5185\u5bb9\u4e3a\u670d\u52a1\u5668\u8fd4\u56de\u7684\u9519\u8bef\u6570\u636e", "id": "f584:c0:m29"} {"signature": "def get_top_message(self):", "body": "return self.get_message_list(count=)", "docstring": "\u83b7\u53d6\u6700\u65b0\u4e00\u6761\u6d88\u606f\n\n\u8fd4\u56deJSON\u793a\u4f8b::\n\n {\n \"msg_item\": [\n {\n \"id\": 206448489,\n \"type\": 2,\n \"fakeid\": \"844735403\",\n \"nick_name\": \"Doraemonext\",\n \"date_time\": 1408696938,\n \"source\": \"\",\n \"msg_status\": 4,\n \"has_reply\": 0,\n \"refuse_reason\": \"\",\n \"multi_item\": [ ],\n \"to_uin\": 2391068708,\n \"send_stat\": {\n \"total\": 0,\n \"succ\": 0,\n \"fail\": 0\n }\n }\n ]\n }\n\n:return: \u8fd4\u56de\u7684 JSON \u6570\u636e\n:raises NeedLoginError: \u64cd\u4f5c\u672a\u6267\u884c\u6210\u529f, \u9700\u8981\u518d\u6b21\u5c1d\u8bd5\u767b\u5f55, \u5f02\u5e38\u5185\u5bb9\u4e3a\u670d\u52a1\u5668\u8fd4\u56de\u7684\u9519\u8bef\u6570\u636e", "id": "f584:c0:m22"} {"signature": "@classmethoddef _transcoding_list(cls, data):", "body": "if not isinstance(data, list):raise ValueError('')result = []for item in data:if isinstance(item, dict):result.append(cls._transcoding_dict(item))elif isinstance(item, list):result.append(cls._transcoding_list(item))else:result.append(item)return result", "docstring": "\u7f16\u7801\u8f6c\u6362 for list\n :param data: \u9700\u8981\u8f6c\u6362\u7684 list \u6570\u636e\n :return: \u8f6c\u6362\u597d\u7684 list", "id": "f585:c0:m1"} {"signature": "@classmethoddef _transcoding(cls, data):", "body": "if not data:return dataresult = Noneif isinstance(data, str) and hasattr(data, ''):result = data.decode('')else:result = datareturn result", "docstring": "\u7f16\u7801\u8f6c\u6362\n :param data: \u9700\u8981\u8f6c\u6362\u7684\u6570\u636e\n :return: \u8f6c\u6362\u597d\u7684\u6570\u636e", "id": "f585:c0:m0"} {"signature": "@classmethoddef _transcoding_dict(cls, data):", "body": "if not isinstance(data, dict):raise ValueError('')result = {}for k, v in data.items():k = cls._transcoding(k)if isinstance(v, dict):v = cls._transcoding_dict(v)elif isinstance(v, list):v = cls._transcoding_list(v)else:v = cls._transcoding(v)result.update({k: v})return result", "docstring": "\u7f16\u7801\u8f6c\u6362 for dict\n:param data: \u9700\u8981\u8f6c\u6362\u7684 dict 
\u6570\u636e\n:return: \u8f6c\u6362\u597d\u7684 dict", "id": "f585:c0:m2"} {"signature": "@token.setterdef token(self, token):", "body": "self.__token = tokenself._update_crypto()", "docstring": "\u8bbe\u7f6e\u5f53\u524d Token", "id": "f587:c0:m2"} {"signature": "@propertydef appid(self):", "body": "return self.__appid", "docstring": "\u83b7\u53d6\u5f53\u524d App ID", "id": "f587:c0:m3"} {"signature": "@propertydef paysignkey(self):", "body": "return self.__paysignkey", "docstring": "\u83b7\u53d6\u5546\u6237\u7b7e\u540d\u5bc6\u94a5 Key", "id": "f587:c0:m15"} {"signature": "def _update_crypto(self):", "body": "if self.__encrypt_mode in ['', ''] and self.__encoding_aes_key is not None:if self.__token is None or self.__appid is None:raise NeedParamError('')self.__crypto = BasicCrypto(self.__token, self.__encoding_aes_key, self.__appid)else:self.__crypto = None", "docstring": "\u6839\u636e\u5f53\u524d\u914d\u7f6e\u5185\u5bb9\u66f4\u65b0 Crypto \u7c7b", "id": "f587:c0:m22"} {"signature": "def set_appid_appsecret(self, appid, appsecret):", "body": "self.__appid = appidself.__appsecret = appsecretself._update_crypto()", "docstring": "\u8bbe\u7f6e\u5f53\u524d App ID \u53ca App Secret", "id": "f587:c0:m5"} {"signature": "@encoding_aes_key.setterdef encoding_aes_key(self, encoding_aes_key):", "body": "self.__encoding_aes_key = encoding_aes_keyself._update_crypto()", "docstring": "\u8bbe\u7f6e\u5f53\u524d EncodingAESKey", "id": "f587:c0:m7"} {"signature": "@propertydef crypto(self):", "body": "return self.__crypto", "docstring": "\u83b7\u53d6\u5f53\u524d Crypto \u5b9e\u4f8b", "id": "f587:c0:m10"} {"signature": "def grant_jsapi_ticket(self):", "body": "self._check_appid_appsecret()if callable(self.__jsapi_ticket_refreshfunc):self.__jsapi_ticket, self.__jsapi_ticket_expires_at = self.__jsapi_ticket_refreshfunc()returnresponse_json = self.__request.get(url=\"\",params={\"\": \"\",},access_token=self.access_token,)self.__jsapi_ticket = response_json['']self.__jsapi_ticket_expires_at = int(time.time()) + response_json['']if callable(self.__jsapi_ticket_setfunc):self.__jsapi_ticket_setfunc(self.__jsapi_ticket, self.__jsapi_ticket_expires_at)return response_json", "docstring": "\u83b7\u53d6 jsapi ticket \u5e76\u66f4\u65b0\u5f53\u524d\u914d\u7f6e\n:return: \u8fd4\u56de\u7684 JSON \u6570\u636e\u5305 (\u4f20\u5165 jsapi_ticket_refreshfunc \u53c2\u6570\u540e\u8fd4\u56de None)", "id": "f587:c0:m17"} {"signature": "def grant_access_token(self):", "body": "self._check_appid_appsecret()if callable(self.__access_token_refreshfunc):self.__access_token, self.__access_token_expires_at = self.__access_token_refreshfunc()returnresponse_json = self.__request.get(url=\"\",params={\"\": \"\",\"\": self.__appid,\"\": self.__appsecret,},access_token=self.__access_token)self.__access_token = response_json['']self.__access_token_expires_at = int(time.time()) + response_json['']if callable(self.__access_token_setfunc):self.__access_token_setfunc(self.__access_token, self.__access_token_expires_at)return response_json", "docstring": "\u83b7\u53d6 access token \u5e76\u66f4\u65b0\u5f53\u524d\u914d\u7f6e\n:return: \u8fd4\u56de\u7684 JSON \u6570\u636e\u5305 (\u4f20\u5165 access_token_refreshfunc \u53c2\u6570\u540e\u8fd4\u56de None)", "id": "f587:c0:m16"} {"signature": "@propertydef jsapi_ticket(self):", "body": "self._check_appid_appsecret()if callable(self.__jsapi_ticket_getfunc):self.__jsapi_ticket, self.__jsapi_ticket_expires_at = self.__jsapi_ticket_getfunc()if self.__jsapi_ticket:now = time.time()if 
self.__jsapi_ticket_expires_at - now > :return self.__jsapi_ticketself.grant_jsapi_ticket() return self.__jsapi_ticket", "docstring": "\u83b7\u53d6\u5f53\u524d jsapi ticket \u503c, \u672c\u65b9\u6cd5\u4f1a\u81ea\u884c\u7ef4\u62a4 jsapi ticket \u6709\u6548\u6027", "id": "f587:c0:m12"} {"signature": "def __init__(self, token=None, appid=None, appsecret=None, partnerid=None,partnerkey=None, paysignkey=None, access_token=None, access_token_expires_at=None,jsapi_ticket=None, jsapi_ticket_expires_at=None, checkssl=False, conf=None):", "body": "if conf is not None:self.__conf = confelif isinstance(token, WechatConf): self.__conf = tokenelse: self.__conf = WechatConf(token=token,appid=appid,appsecret=appsecret,access_token=access_token,access_token_expires_at=access_token_expires_at,jsapi_ticket=jsapi_ticket,jsapi_ticket_expires_at=jsapi_ticket_expires_at,encrypt_mode='',partnerid=partnerid,partnerkey=partnerkey,paysignkey=paysignkey,checkssl=checkssl,)self.__request = WechatRequest(conf=self.__conf)self.__is_parse = Falseself.__message = None", "docstring": ":param token: \u5fae\u4fe1 Token\n:param appid: App ID\n:param appsecret: App Secret\n:param partnerid: \u8d22\u4ed8\u901a\u5546\u6237\u8eab\u4efd\u6807\u8bc6, \u652f\u4ed8\u6743\u9650\u4e13\u7528\n:param partnerkey: \u8d22\u4ed8\u901a\u5546\u6237\u6743\u9650\u5bc6\u94a5 Key, \u652f\u4ed8\u6743\u9650\u4e13\u7528\n:param paysignkey: \u5546\u6237\u7b7e\u540d\u5bc6\u94a5 Key, \u652f\u4ed8\u6743\u9650\u4e13\u7528\n:param access_token: \u76f4\u63a5\u5bfc\u5165\u7684 access_token \u503c, \u8be5\u503c\u9700\u8981\u5728\u4e0a\u4e00\u6b21\u8be5\u7c7b\u5b9e\u4f8b\u5316\u4e4b\u540e\u624b\u52a8\u8fdb\u884c\u7f13\u5b58\u5e76\u5728\u6b64\u5904\u4f20\u5165, \u5982\u679c\u4e0d\u4f20\u5165, \u5c06\u4f1a\u5728\u9700\u8981\u65f6\u81ea\u52a8\u91cd\u65b0\u83b7\u53d6\n:param access_token_expires_at: \u76f4\u63a5\u5bfc\u5165\u7684 access_token \u7684\u8fc7\u671f\u65e5\u671f\uff0c\u8be5\u503c\u9700\u8981\u5728\u4e0a\u4e00\u6b21\u8be5\u7c7b\u5b9e\u4f8b\u5316\u4e4b\u540e\u624b\u52a8\u8fdb\u884c\u7f13\u5b58\u5e76\u5728\u6b64\u5904\u4f20\u5165, \u5982\u679c\u4e0d\u4f20\u5165, \u5c06\u4f1a\u5728\u9700\u8981\u65f6\u81ea\u52a8\u91cd\u65b0\u83b7\u53d6\n:param jsapi_ticket: \u76f4\u63a5\u5bfc\u5165\u7684 jsapi_ticket \u503c, \u8be5\u503c\u9700\u8981\u5728\u4e0a\u4e00\u6b21\u8be5\u7c7b\u5b9e\u4f8b\u5316\u4e4b\u540e\u624b\u52a8\u8fdb\u884c\u7f13\u5b58\u5e76\u5728\u6b64\u5904\u4f20\u5165, \u5982\u679c\u4e0d\u4f20\u5165, \u5c06\u4f1a\u5728\u9700\u8981\u65f6\u81ea\u52a8\u91cd\u65b0\u83b7\u53d6\n:param jsapi_ticket_expires_at: \u76f4\u63a5\u5bfc\u5165\u7684 jsapi_ticket \u7684\u8fc7\u671f\u65e5\u671f\uff0c\u8be5\u503c\u9700\u8981\u5728\u4e0a\u4e00\u6b21\u8be5\u7c7b\u5b9e\u4f8b\u5316\u4e4b\u540e\u624b\u52a8\u8fdb\u884c\u7f13\u5b58\u5e76\u5728\u6b64\u5904\u4f20\u5165, \u5982\u679c\u4e0d\u4f20\u5165, \u5c06\u4f1a\u5728\u9700\u8981\u65f6\u81ea\u52a8\u91cd\u65b0\u83b7\u53d6\n:param checkssl: \u662f\u5426\u68c0\u67e5 SSL, \u9ed8\u8ba4\u4e3a False, \u53ef\u907f\u514d urllib3 \u7684 InsecurePlatformWarning \u8b66\u544a\n:param conf: WechatConf \u914d\u7f6e\u7c7b, \u63d0\u4f9b\u6b64\u53c2\u6570\u5c06\u9ed8\u8ba4\u5ffd\u7565\u5176\u4ed6\u6240\u6709\u53c2\u6570, \u6240\u6709\u6570\u636e\u5747\u4ece\u6b64\u914d\u7f6e\u7c7b\u4e2d\u83b7\u53d6", "id": "f588:c0:m0"} {"signature": "def response_video(self, media_id, title=None, description=None):", "body": "self._check_parse()title = self._transcoding(title)description = self._transcoding(description)response = VideoReply(message=self.__message, 
media_id=media_id, title=title, description=description).render()return self._encrypt_response(response)", "docstring": "\u5c06 media_id \u6240\u4ee3\u8868\u7684\u89c6\u9891\u7ec4\u88c5\u4e3a\u7b26\u5408\u5fae\u4fe1\u670d\u52a1\u5668\u8981\u6c42\u7684\u54cd\u5e94\u6570\u636e\n:param media_id: \u89c6\u9891\u7684 MediaID\n:param title: \u89c6\u9891\u6d88\u606f\u7684\u6807\u9898\n:param description: \u89c6\u9891\u6d88\u606f\u7684\u63cf\u8ff0\n:return: \u7b26\u5408\u5fae\u4fe1\u670d\u52a1\u5668\u8981\u6c42\u7684 XML \u54cd\u5e94\u6570\u636e", "id": "f588:c0:m16"} {"signature": "def response_news(self, articles):", "body": "self._check_parse()for article in articles:if article.get(''):article[''] = self._transcoding(article[''])if article.get(''):article[''] = self._transcoding(article[''])if article.get(''):article[''] = self._transcoding(article[''])if article.get(''):article[''] = self._transcoding(article[''])news = ArticleReply(message=self.__message)for article in articles:article = Article(**article)news.add_article(article)response = news.render()return self._encrypt_response(response)", "docstring": "\u5c06\u65b0\u95fb\u4fe1\u606f\u7ec4\u88c5\u4e3a\u7b26\u5408\u5fae\u4fe1\u670d\u52a1\u5668\u8981\u6c42\u7684\u54cd\u5e94\u6570\u636e\n:param articles: list \u5bf9\u8c61, \u6bcf\u4e2a\u5143\u7d20\u4e3a\u4e00\u4e2a dict \u5bf9\u8c61, key \u5305\u542b `title`, `description`, `picurl`, `url`\n:return: \u7b26\u5408\u5fae\u4fe1\u670d\u52a1\u5668\u8981\u6c42\u7684 XML \u54cd\u5e94\u6570\u636e", "id": "f588:c0:m18"} {"signature": "def _check_parse(self):", "body": "if not self.__is_parse:raise NeedParseError()", "docstring": "\u68c0\u67e5\u662f\u5426\u6210\u529f\u89e3\u6790\u5fae\u4fe1\u670d\u52a1\u5668\u4f20\u6765\u7684\u6570\u636e\n:raises NeedParseError: \u9700\u8981\u89e3\u6790\u5fae\u4fe1\u670d\u52a1\u5668\u4f20\u6765\u7684\u6570\u636e", "id": "f588:c0:m49"} {"signature": "def _check_official_error(self, json_data):", "body": "if \"\" in json_data and json_data[\"\"] != :raise OfficialAPIError(errcode=json_data.get(''), errmsg=json_data.get('', ''))", "docstring": "\u68c0\u6d4b\u5fae\u4fe1\u516c\u4f17\u5e73\u53f0\u8fd4\u56de\u503c\u4e2d\u662f\u5426\u5305\u542b\u9519\u8bef\u7684\u8fd4\u56de\u7801\n:raises OfficialAPIError: \u5982\u679c\u8fd4\u56de\u7801\u63d0\u793a\u6709\u9519\u8bef\uff0c\u629b\u51fa\u5f02\u5e38\uff1b\u5426\u5219\u8fd4\u56de True", "id": "f588:c0:m50"} {"signature": "def send_video_message(self, user_id, media_id, title=None, description=None):", "body": "video_data = {'': media_id,}if title:video_data[''] = titleif description:video_data[''] = descriptionreturn self.request.post(url='',data={'': user_id,'': '','': video_data,})", "docstring": "\u53d1\u9001\u89c6\u9891\u6d88\u606f\n\u8be6\u60c5\u8bf7\u53c2\u8003 http://mp.weixin.qq.com/wiki/7/12a5a320ae96fecdf0e15cb06123de9f.html\n:param user_id: \u7528\u6237 ID, \u5c31\u662f\u4f60\u6536\u5230\u7684 WechatMessage \u7684 source\n:param media_id: \u53d1\u9001\u7684\u89c6\u9891\u7684\u5a92\u4f53ID\u3002 \u53ef\u4ee5\u901a\u8fc7 :func:`upload_media` \u4e0a\u4f20\u3002\n:param title: \u89c6\u9891\u6d88\u606f\u7684\u6807\u9898\n:param description: \u89c6\u9891\u6d88\u606f\u7684\u63cf\u8ff0\n:return: \u8fd4\u56de\u7684 JSON \u6570\u636e\u5305", "id": "f588:c0:m39"} {"signature": "def get_group_by_id(self, openid):", "body": "return self.request.post(url='',data={'': openid,})", "docstring": "\u67e5\u8be2\u7528\u6237\u6240\u5728\u5206\u7ec4\n\u8be6\u60c5\u8bf7\u53c2\u8003 
http://mp.weixin.qq.com/wiki/13/be5272dc4930300ba561d927aead2569.html\n:param openid: \u7528\u6237\u7684OpenID\n:return: \u8fd4\u56de\u7684 JSON \u6570\u636e\u5305", "id": "f588:c0:m31"} {"signature": "def show_qrcode(self, ticket):", "body": "return requests.get(url='',params={'': ticket})", "docstring": "\u901a\u8fc7ticket\u6362\u53d6\u4e8c\u7ef4\u7801\n\u8be6\u60c5\u8bf7\u53c2\u8003 http://mp.weixin.qq.com/wiki/18/28fc21e7ed87bec960651f0ce873ef8a.html\n:param ticket: \u4e8c\u7ef4\u7801 ticket \u3002\u53ef\u4ee5\u901a\u8fc7 :func:`create_qrcode` \u83b7\u53d6\u5230\n:return: \u8fd4\u56de\u7684 Request \u5bf9\u8c61", "id": "f588:c0:m43"} {"signature": "def get_groups(self):", "body": "return self.request.get('')", "docstring": "\u67e5\u8be2\u6240\u6709\u5206\u7ec4\n\u8be6\u60c5\u8bf7\u53c2\u8003 http://mp.weixin.qq.com/wiki/13/be5272dc4930300ba561d927aead2569.html\n:return: \u8fd4\u56de\u7684 JSON \u6570\u636e\u5305", "id": "f588:c0:m30"} {"signature": "def create_group(self, name):", "body": "return self.request.post(url='',data={'': {'': name,},})", "docstring": "\u521b\u5efa\u5206\u7ec4\n\u8be6\u60c5\u8bf7\u53c2\u8003 http://mp.weixin.qq.com/wiki/13/be5272dc4930300ba561d927aead2569.html\n:param name: \u5206\u7ec4\u540d\u5b57\uff0830\u4e2a\u5b57\u7b26\u4ee5\u5185\uff09\n:return: \u8fd4\u56de\u7684 JSON \u6570\u636e\u5305\n:raise HTTPError: \u5fae\u4fe1api http \u8bf7\u6c42\u5931\u8d25", "id": "f588:c0:m29"} {"signature": "def parse_data(self, data, msg_signature=None, timestamp=None, nonce=None):", "body": "result = {}if isinstance(data, six.text_type): data = data.encode('')if self.conf.encrypt_mode == '':if not (msg_signature and timestamp and nonce):raise ParseError('')data = self.conf.crypto.decrypt_message(msg=data,msg_signature=msg_signature,timestamp=timestamp,nonce=nonce,)try:xml = XMLStore(xmlstring=data)except Exception:raise ParseError()result = xml.xml2dictresult[''] = dataresult[''] = result.pop('').lower()message_type = MESSAGE_TYPES.get(result[''], UnknownMessage)self.__message = message_type(result)self.__is_parse = True", "docstring": "\u89e3\u6790\u5fae\u4fe1\u670d\u52a1\u5668\u53d1\u9001\u8fc7\u6765\u7684\u6570\u636e\u5e76\u4fdd\u5b58\u7c7b\u4e2d\n:param data: HTTP Request \u7684 Body \u6570\u636e\n:param msg_signature: EncodingAESKey \u7684 msg_signature\n:param timestamp: EncodingAESKey \u7528\u65f6\u95f4\u6233\n:param nonce: EncodingAESKey \u7528\u968f\u673a\u6570\n:raises ParseError: \u89e3\u6790\u5fae\u4fe1\u670d\u52a1\u5668\u6570\u636e\u9519\u8bef, \u6570\u636e\u4e0d\u5408\u6cd5", "id": "f588:c0:m7"} {"signature": "def send_image_message(self, user_id, media_id):", "body": "return self.request.post(url='',data={'': user_id,'': '','': {'': media_id,},})", "docstring": "\u53d1\u9001\u56fe\u7247\u6d88\u606f\n\u8be6\u60c5\u8bf7\u53c2\u8003 http://mp.weixin.qq.com/wiki/7/12a5a320ae96fecdf0e15cb06123de9f.html\n:param user_id: \u7528\u6237 ID, \u5c31\u662f\u4f60\u6536\u5230\u7684 WechatMessage \u7684 source\n:param media_id: \u56fe\u7247\u7684\u5a92\u4f53ID\u3002 \u53ef\u4ee5\u901a\u8fc7 :func:`upload_media` \u4e0a\u4f20\u3002\n:return: \u8fd4\u56de\u7684 JSON \u6570\u636e\u5305", "id": "f588:c0:m37"} {"signature": "def set_template_industry(self, industry_id1, industry_id2):", "body": "return self.request.post(url='',data={'': str(industry_id1),'': str(industry_id2),})", "docstring": "\u8bbe\u7f6e\u6240\u5c5e\u884c\u4e1a\n\u8be6\u60c5\u8bf7\u53c2\u8003 http://mp.weixin.qq.com/wiki/17/304c1885ea66dbedf7dc170d84999a9d.html\n:param industry_id1: 
\u4e3b\u8425\u884c\u4e1a\u4ee3\u7801\n:param industry_id2: \u526f\u8425\u884c\u4e1a\u4ee3\u7801\n:return: \u8fd4\u56de\u7684 JSON \u6570\u636e\u5305", "id": "f588:c0:m44"} {"signature": "def grant_token(self, **kwargs):", "body": "return self.conf.grant_access_token()", "docstring": "\u83b7\u53d6 Access Token\n\u8be6\u60c5\u8bf7\u53c2\u8003 http://mp.weixin.qq.com/wiki/11/0e4b294685f817b95cbed85ba5e82b8f.html\n:return: \u8fd4\u56de\u7684 JSON \u6570\u636e\u5305", "id": "f588:c0:m20"} {"signature": "def send_article_message(self, user_id, articles=None, media_id=None):", "body": "if articles is None and media_id is None:raise TypeError('')if articles:articles_data = []for article in articles:article = Article(**article)articles_data.append({'': article.title,'': article.description,'': article.url,'': article.picurl,})return self.request.post(url='',data={'': user_id,'': '','': {'': articles_data,},})return self.request.post(url='',data={'': user_id,'': '','': {'': media_id,},})", "docstring": "\u53d1\u9001\u56fe\u6587\u6d88\u606f\n\u8be6\u60c5\u8bf7\u53c2\u8003 http://mp.weixin.qq.com/wiki/7/12a5a320ae96fecdf0e15cb06123de9f.html\n:param user_id: \u7528\u6237 ID, \u5c31\u662f\u4f60\u6536\u5230\u7684 WechatMessage \u7684 source\n:param articles: list \u5bf9\u8c61, \u6bcf\u4e2a\u5143\u7d20\u4e3a\u4e00\u4e2a dict \u5bf9\u8c61, key \u5305\u542b `title`, `description`, `picurl`, `url`\n:param media_id: \u5f85\u53d1\u9001\u7684\u56fe\u6587 Media ID\n:return: \u8fd4\u56de\u7684 JSON \u6570\u636e\u5305", "id": "f588:c0:m41"} {"signature": "@conf.setterdef conf(self, conf):", "body": "self.__conf = confself.__request = WechatRequest(conf=self.__conf)", "docstring": "\u8bbe\u7f6e\u5f53\u524d WechatConf \u5b9e\u4f8b", "id": "f588:c0:m2"} {"signature": "@request.setterdef request(self, request):", "body": "self.__request = request", "docstring": "\u8bbe\u7f6e\u5f53\u524d WechatConf \u5b9e\u4f8b", "id": "f588:c0:m4"} {"signature": "def get_user_info(self, user_id, lang=''):", "body": "return self.request.get(url='',params={'': user_id,'': lang,})", "docstring": "\u83b7\u53d6\u7528\u6237\u57fa\u672c\u4fe1\u606f\n\u8be6\u60c5\u8bf7\u53c2\u8003 http://mp.weixin.qq.com/wiki/14/bb5031008f1494a59c6f71fa0f319c66.html\n:param user_id: \u7528\u6237 ID, \u5c31\u662f\u4f60\u6536\u5230\u7684 WechatMessage \u7684 source\n:param lang: \u8fd4\u56de\u56fd\u5bb6\u5730\u533a\u8bed\u8a00\u7248\u672c\uff0czh_CN \u7b80\u4f53\uff0czh_TW \u7e41\u4f53\uff0cen \u82f1\u8bed\n:return: \u8fd4\u56de\u7684 JSON \u6570\u636e\u5305", "id": "f588:c0:m34"} {"signature": "def update_group(self, group_id, name):", "body": "return self.request.post(url='',data={'': {'': int(group_id),'': name,}})", "docstring": "\u4fee\u6539\u5206\u7ec4\u540d\n\u8be6\u60c5\u8bf7\u53c2\u8003 http://mp.weixin.qq.com/wiki/13/be5272dc4930300ba561d927aead2569.html\n:param group_id: \u5206\u7ec4id\uff0c\u7531\u5fae\u4fe1\u5206\u914d\n:param name: \u5206\u7ec4\u540d\u5b57\uff0830\u4e2a\u5b57\u7b26\u4ee5\u5185\uff09\n:return: \u8fd4\u56de\u7684 JSON \u6570\u636e\u5305", "id": "f588:c0:m32"} {"signature": "def get_menu(self):", "body": "return self.request.get('')", "docstring": "\u67e5\u8be2\u81ea\u5b9a\u4e49\u83dc\u5355\n\u8be6\u60c5\u8bf7\u53c2\u8003 http://mp.weixin.qq.com/wiki/16/ff9b7b85220e1396ffa16794a9d95adc.html\n:return: \u8fd4\u56de\u7684 JSON \u6570\u636e\u5305", "id": "f588:c0:m23"} {"signature": "@propertydef request(self):", "body": "return self.__request", "docstring": "\u83b7\u53d6\u5f53\u524d WechatConf \u914d\u7f6e\u5b9e\u4f8b", "id": 
"f588:c0:m3"} {"signature": "def response_image(self, media_id):", "body": "self._check_parse()response = ImageReply(message=self.__message, media_id=media_id).render()return self._encrypt_response(response)", "docstring": "\u5c06 media_id \u6240\u4ee3\u8868\u7684\u56fe\u7247\u7ec4\u88c5\u4e3a\u7b26\u5408\u5fae\u4fe1\u670d\u52a1\u5668\u8981\u6c42\u7684\u54cd\u5e94\u6570\u636e\n:param media_id: \u56fe\u7247\u7684 MediaID\n:return: \u7b26\u5408\u5fae\u4fe1\u670d\u52a1\u5668\u8981\u6c42\u7684 XML \u54cd\u5e94\u6570\u636e", "id": "f588:c0:m14"} {"signature": "def _element2dict(self, parent):", "body": "d = {}for node in parent.childNodes:if not isinstance(node, minidom.Element):continueif not node.hasChildNodes():continueif node.childNodes[].nodeType == minidom.Node.ELEMENT_NODE:try:d[node.tagName]except KeyError:d[node.tagName] = []d[node.tagName].append(self._element2dict(node))elif len(node.childNodes) == and node.childNodes[].nodeType in [minidom.Node.CDATA_SECTION_NODE, minidom.Node.TEXT_NODE]:d[node.tagName] = node.childNodes[].datareturn d", "docstring": "\u5c06\u5355\u4e2a\u8282\u70b9\u8f6c\u6362\u4e3a dict", "id": "f590:c0:m2"} {"signature": "def _check_signature(self, msg_signature, timestamp, nonce, echostr):", "body": "signature = get_sha1_signature(self.__token, timestamp, nonce, echostr)if not signature == msg_signature:raise ValidateSignatureError()try:return self.__pc.decrypt(echostr, self.__id)except DecryptAESError as e:raise ValidateSignatureError(e)", "docstring": "\u9a8c\u8bc1\u7b7e\u540d\u6709\u6548\u6027\n\n :param msg_signature: \u7b7e\u540d\u4e32\uff0c\u5bf9\u5e94URL\u53c2\u6570\u7684msg_signature\n :param timestamp: \u65f6\u95f4\u6233\uff0c\u5bf9\u5e94URL\u53c2\u6570\u7684timestamp\n :param nonce: \u968f\u673a\u4e32\uff0c\u5bf9\u5e94URL\u53c2\u6570\u7684nonce\n :param echostr: \u968f\u673a\u4e32\uff0c\u5bf9\u5e94URL\u53c2\u6570\u7684echostr\n :return: \u89e3\u5bc6\u4e4b\u540e\u7684echostr\n :raise ValidateSignatureError: \u7b7e\u540d\u65e0\u6548\u5f02\u5e38", "id": "f592:c0:m1"} {"signature": "def __init__(self, token, encoding_aes_key, _id):", "body": "self.__key = base64.b64decode(to_binary(encoding_aes_key) + to_binary(''))if len(self.__key) != :raise ValidateAESKeyError(encoding_aes_key)self.__token = to_binary(token)self.__id = to_binary(_id)self.__pc = BaseCrypto(self.__key)", "docstring": "\u6784\u9020\u51fd\u6570\n\n :param token: \u516c\u4f17\u5e73\u53f0\u4e0a\uff0c\u5f00\u53d1\u8005\u8bbe\u7f6e\u7684Token\n :param encoding_aes_key: \u516c\u4f17\u5e73\u53f0\u4e0a\uff0c\u5f00\u53d1\u8005\u8bbe\u7f6e\u7684EncodingAESKey\n :param _id: \u516c\u4f17\u53f7\u7684 appid \u6216\u4f01\u4e1a\u53f7\u7684 corpid", "id": "f592:c0:m0"} {"signature": "def decrypt(self, text, appid):", "body": "try:cryptor = AES.new(self.key, self.mode, self.key[:])plain_text = cryptor.decrypt(base64.b64decode(text))except Exception as e:raise DecryptAESError(e)try:if six.PY2:pad = ord(plain_text[-])else:pad = plain_text[-]content = plain_text[:-pad]xml_len = socket.ntohl(struct.unpack(\"\", content[: ])[])xml_content = content[: xml_len + ]from_appid = content[xml_len + :]except Exception as e:raise IllegalBuffer(e)if from_appid != appid:raise ValidateAppIDError()return xml_content", "docstring": "\u5bf9\u89e3\u5bc6\u540e\u7684\u660e\u6587\u8fdb\u884c\u8865\u4f4d\u5220\u9664\n\n @param text: \u5bc6\u6587\n @return: \u5220\u9664\u586b\u5145\u8865\u4f4d\u540e\u7684\u660e\u6587", "id": "f593:c0:m2"} {"signature": "def encrypt(self, text, appid):", "body": "text = self.get_random_str() + 
struct.pack(\"\", socket.htonl(len(text))) + to_binary(text) + appidpkcs7 = PKCS7Encoder()text = pkcs7.encode(text)cryptor = AES.new(self.key, self.mode, self.key[:])try:ciphertext = cryptor.encrypt(text)return base64.b64encode(ciphertext)except Exception as e:raise EncryptAESError(e)", "docstring": "\u5bf9\u660e\u6587\u8fdb\u884c\u52a0\u5bc6\n\n @param text: \u9700\u8981\u52a0\u5bc6\u7684\u660e\u6587\n @return: \u52a0\u5bc6\u5f97\u5230\u7684\u5b57\u7b26\u4e32", "id": "f593:c0:m1"} {"signature": "def get_random_str(self):", "body": "rule = string.ascii_letters + string.digitsreturn \"\".join(random.sample(rule, ))", "docstring": "\u968f\u673a\u751f\u621016\u4f4d\u5b57\u7b26\u4e32\n\n @return: 16\u4f4d\u5b57\u7b26\u4e32", "id": "f593:c0:m3"} {"signature": "def __init__(self, message, media_id, title=None, description=None):", "body": "title = title or ''description = description or ''super(VideoReply, self).__init__(message=message, media_id=media_id, title=title, description=description)", "docstring": ":param message: WechatMessage\u5bf9\u8c61\n:param media_id: \u89c6\u9891\u7684 MediaID\n:param title: \u89c6\u9891\u6d88\u606f\u7684\u6807\u9898\n:param description: \u89c6\u9891\u6d88\u606f\u7684\u63cf\u8ff0", "id": "f598:c4:m0"} {"signature": "def __init__(self, message):", "body": "super(GroupTransferReply, self).__init__(message=message)", "docstring": ":param message: WechatMessage \u5bf9\u8c61", "id": "f598:c8:m0"} {"signature": "def __init__(self, message, media_id):", "body": "super(VoiceReply, self).__init__(message=message, media_id=media_id)", "docstring": ":param message: WechatMessage \u5bf9\u8c61\n:param media_id: \u8bed\u97f3\u7684 MediaID", "id": "f598:c3:m0"} {"signature": "def convert_ext_to_mime(extension):", "body": "table = {'': '','': '','': '','': '','': '',}if extension in table:return table[extension]raise ValueError(\"\")", "docstring": "\u5c06\u6269\u5c55\u540d\u8f6c\u6362\u4e3a MIME \u683c\u5f0f\n :return: mime string", "id": "f599:m5"} {"signature": "def exists(self, openid):", "body": "raise NotImplementedError('')", "docstring": "\u5f53 openid \u5b58\u5728\u65f6\u8fd4\u56de True", "id": "f604:c1:m26"} {"signature": "@_check_player_is_activedef show_video(self):", "body": "self._player_interface.UnHideVideo()", "docstring": "Shows the video (to undo a `hide_video`)", "id": "f624:c2:m48"} {"signature": "@_check_player_is_active@_from_dbus_typedef previous(self):", "body": "return self._player_interface.Previous()", "docstring": "Skip to the previous chapter\n\nReturns:\n bool: Whether the player skipped to the previous chapter", "id": "f624:c2:m61"} {"signature": "@_check_player_is_active@_from_dbus_typedef playback_status(self):", "body": "return self._player_interface_property('')", "docstring": "Returns:\n str: one of (\"Playing\" | \"Paused\" | \"Stopped\")", "id": "f624:c2:m19"} {"signature": "@_check_player_is_active@_from_dbus_typedef set_aspect_mode(self, mode):", "body": "self._player_interface.SetAspectMode(ObjectPath(''), String(mode))", "docstring": "Set the aspect mode of the video\n\nArgs:\n mode (str): One of (\"letterbox\" | \"fill\" | \"stretch\")", "id": "f624:c2:m43"} {"signature": "def quit(self):", "body": "if self._process is None:logger.debug('')returntry:logger.debug('')process_group_id = os.getpgid(self._process.pid)os.killpg(process_group_id, signal.SIGTERM)logger.debug('' % process_group_id)self._process_monitor.join()except OSError:logger.error('')self._process = None", "docstring": "Quit the player, blocking until the process has 
died", "id": "f624:c2:m68"} {"signature": "def position(self):", "body": "return self._position_us() / ( * )", "docstring": "Returns:\n int: position in seconds", "id": "f624:c2:m23"} {"signature": "@_check_player_is_active@_from_dbus_typedef can_pause(self):", "body": "return self._player_interface_property('')", "docstring": "Returns:\n bool: whether the player can pause", "id": "f624:c2:m18"} {"signature": "@_check_player_is_active@_from_dbus_typedef maximum_rate(self):", "body": "return self._player_interface_property('')", "docstring": "Returns:\n float: maximum playback rate (as proportion of normal rate)", "id": "f624:c2:m25"} {"signature": "@_check_player_is_active@_from_dbus_typedef next(self):", "body": "return self._player_interface.Next()", "docstring": "Skip to the next chapter\n\nReturns:\n bool: Whether the player skipped to the next chapter", "id": "f624:c2:m60"} {"signature": "@_check_player_is_active@_from_dbus_typedef set_video_crop(self, x1, y1, x2, y2):", "body": "crop = \"\" % (str(x1),str(y1),str(x2),str(y2))self._player_interface.SetVideoCropPos(ObjectPath(''), String(crop))", "docstring": "Args:\n x1 (int): Top left x coordinate (px)\n y1 (int): Top left y coordinate (px)\n x2 (int): Bottom right x coordinate (px)\n y2 (int): Bottom right y coordinate (px)", "id": "f624:c2:m46"} {"signature": "@_check_player_is_active@_from_dbus_typedef play(self):", "body": "if not self.is_playing():self.play_pause()self._is_playing = Trueself.playEvent(self)", "docstring": "Play the video asynchronously returning control immediately to the calling code", "id": "f624:c2:m59"} {"signature": "@_check_player_is_active@_from_dbus_typedef list_audio(self):", "body": "return self._player_interface.ListAudio()", "docstring": "Returns:\n [str]: A list of all known audio streams, each item is in the\n format: ``::::``", "id": "f624:c2:m49"} {"signature": "@_check_player_is_activedef play_pause(self):", "body": "self._player_interface.PlayPause()self._is_playing = not self._is_playingif self._is_playing:self.playEvent(self)else:self.pauseEvent(self)", "docstring": "Pause playback if currently playing, otherwise start playing if currently paused.", "id": "f624:c2:m36"} {"signature": "@_check_player_is_active@_from_dbus_typedef can_seek(self):", "body": "return self._player_interface_property('')", "docstring": "Returns:\n bool: whether the player can seek", "id": "f624:c2:m15"} {"signature": "@_check_player_is_active@_from_dbus_typedef set_video_pos(self, x1, y1, x2, y2):", "body": "position = \"\" % (str(x1),str(y1),str(x2),str(y2))self._player_interface.VideoPos(ObjectPath(''), String(position))", "docstring": "Set the video position on the screen\n\nArgs:\n x1 (int): Top left x coordinate (px)\n y1 (int): Top left y coordinate (px)\n x2 (int): Bottom right x coordinate (px)\n y2 (int): Bottom right y coordinate (px)", "id": "f624:c2:m44"} {"signature": "@_check_player_is_activedef show_subtitles(self):", "body": "return self._player_interface.ShowSubtitles()", "docstring": "Shows subtitles after :class:`hide_subtitles`", "id": "f624:c2:m54"} {"signature": "@_check_player_is_active@_from_dbus_typedef set_alpha(self, alpha):", "body": "self._player_interface.SetAlpha(ObjectPath(''), Int64(alpha))", "docstring": "Set the transparency of the video overlay\n\nArgs:\n alpha (float): The transparency (0..255)", "id": "f624:c2:m40"} {"signature": "@_check_player_is_active@_from_dbus_typedef rate(self):", "body": "return self._rate", "docstring": "Returns:\n float: playback rate, 1 is the normal 
rate, 2 would be double speed.", "id": "f624:c2:m26"} {"signature": "@_check_player_is_active@_from_dbus_typedef set_rate(self, rate):", "body": "self._rate = self._player_interface_property('', dbus.Double(rate))return self._rate", "docstring": "Set the playback rate of the video as a multiple of the default playback speed\n\nExamples:\n >>> player.set_rate(2)\n # Will play twice as fast as normal speed\n >>> player.set_rate(0.5)\n # Will play half speed", "id": "f624:c2:m27"} {"signature": "@_check_player_is_activedef select_audio(self, index):", "body": "return self._player_interface.SelectAudio(dbus.Int32(index))", "docstring": "Select audio stream specified by the index of the stream in :class:`list_audio`\n\nArgs:\n index (int): index of audio stream returned by :class:`list_audio`", "id": "f624:c2:m53"} {"signature": "@_check_player_is_active@_from_dbus_typedef set_volume(self, volume):", "body": "if volume == :volume = return self._player_interface_property('', dbus.Double(volume))", "docstring": "Args:\n float: volume in the interval [0, 10]", "id": "f624:c2:m21"} {"signature": "@_check_player_is_active@_from_dbus_typedef stop(self):", "body": "self._player_interface.Stop()self.stopEvent(self)", "docstring": "Stop the player, causing it to quit", "id": "f624:c2:m37"} {"signature": "@_check_player_is_active@_from_dbus_typedef supported_uri_schemes(self):", "body": "return self._root_interface_property('')", "docstring": "Returns:\n str: list of supported URI schemes\nExamples:\n >>> player.supported_uri_schemes()\n [\"file\", \"http\", \"rtsp\", \"rtmp\"]", "id": "f624:c2:m12"} {"signature": "@_check_player_is_activedef hide_subtitles(self):", "body": "return self._player_interface.HideSubtitles()", "docstring": "Hide subtitles", "id": "f624:c2:m55"} {"signature": "@_check_player_is_active@_from_dbus_typedef can_quit(self):", "body": "return self._root_interface_property('')", "docstring": "Returns:\n bool: whether the player can quit or not", "id": "f624:c2:m6"} {"signature": "@_check_player_is_active@_from_dbus_typedef volume(self):", "body": "if self._is_muted:return return self._player_interface_property('')", "docstring": "Returns:\n float: current player volume", "id": "f624:c2:m20"} {"signature": "@_check_player_is_activedef mute(self):", "body": "self._is_muted = Trueself._player_interface.Mute()", "docstring": "Mute audio. If already muted, then this does not do anything", "id": "f624:c2:m41"} {"signature": "@_check_player_is_active@_from_dbus_typedef width(self):", "body": "return self._player_interface_property('')", "docstring": "Returns:\n int: video width in px", "id": "f624:c2:m31"} {"signature": "@_check_player_is_activedef select_subtitle(self, index):", "body": "return self._player_interface.SelectSubtitle(dbus.Int32(index))", "docstring": "Enable a subtitle specified by the index it is listed in :class:`list_subtitles`\n\nArgs:\n index (int): index of subtitle listing returned by :class:`list_subtitles`", "id": "f624:c2:m52"} {"signature": "@_check_player_is_active@_from_dbus_typedef can_go_next(self):", "body": "return self._player_interface_property('')", "docstring": "Returns:\n bool: whether the player can move to the next item in the playlist", "id": "f624:c2:m13"} {"signature": "@_check_player_is_activedef unmute(self):", "body": "self._is_muted = Falseself._player_interface.Unmute()", "docstring": "Unmutes the video. If already unmuted, then this does not do anything", "id": "f624:c2:m42"}
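Taken together, the player records above sketch a DBus-backed media player control API (status, volume, rate, aspect, subtitles). A hypothetical usage sequence; the OMXPlayer class name, constructor and import path follow the python-omxplayer-wrapper project these records resemble, and are assumptions:

# Hedged usage sketch of the player API catalogued in these records.
from omxplayer.player import OMXPlayer  # assumed import path

player = OMXPlayer('/home/pi/video.mp4')   # spawns the player process
player.set_volume(5.0)                     # interval [0, 10] per the records
player.set_rate(2)                         # double speed
player.set_aspect_mode('letterbox')        # letterbox | fill | stretch
if player.can_seek():
    player.play()                          # asynchronous; returns immediately
player.quit()                              # blocks until the process has died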
{"signature": "@_check_player_is_active@_from_dbus_typedef has_track_list(self):", "body": "return self._root_interface_property('')", "docstring": "Returns:\n bool: whether the player has a track list or not", "id": "f624:c2:m10"} {"signature": "def find_address_file(self):", "body": "possible_address_files = []while not possible_address_files:isnt_pid_file = lambda path: not path.endswith('')possible_address_files = list(filter(isnt_pid_file,glob('')))possible_address_files.sort(key=lambda path: os.path.getmtime(path))time.sleep()self.path = possible_address_files[-]", "docstring": "Finds the OMXPlayer DBus connection\nAssumes there is an alive OMXPlayer process.\n:return:", "id": "f628:c0:m2"} {"signature": "def exit_on_keyboard_interrupt(f):", "body": "@wraps(f)def wrapper(*args, **kwargs):raise_exception = kwargs.pop('', False)try:return f(*args, **kwargs)except KeyboardInterrupt:if not raise_exception:sys.exit()raise KeyboardInterruptreturn wrapper", "docstring": "Decorator that allows the user to exit the script by sending a keyboard interrupt\n (ctrl + c) without raising an exception.", "id": "f635:m0"} {"signature": "def format_answers(self, fmt=''):", "body": "fmts = ('', '', '')if fmt not in fmts:eprint(\"\".format(fmt, fmts))returndef stringify(val):if type(val) in (list, tuple):return ''.join(str(e) for e in val)return valif fmt == '':return json.dumps(self.answers)elif fmt == '':answers = [[k, v] for k, v in self.answers.items()]return json.dumps(answers)elif fmt == '':answers = ''.join(''.format(k, stringify(v)) for k, v in self.answers.items())return answers", "docstring": "Formats answers depending on `fmt`.", "id": "f635:c2:m14"} {"signature": "def remove(self, key):", "body": "return self.questions.pop(key)", "docstring": "Remove all questions associated with `key`. Raises exception if `key`\n doesn't exist.", "id": "f635:c2:m5"} {"signature": "def add(self, *args, **kwargs):", "body": "if '' in kwargs and isinstance(kwargs[''], Question):question = kwargs['']else:question = Question(*args, **kwargs)self.questions.setdefault(question.key, []).append(question)return question", "docstring": "Add a Question instance to the questions dict. Each key points\n to a list of Question instances with that key. Use the `question`\n kwarg to pass a Question instance if you want, or pass in the same\n args you would pass to instantiate a question.", "id": "f635:c2:m1"}
{"signature": "@register(key='')def one(prompt, *args, **kwargs):", "body": "indicator = ''if sys.version_info < (, ):indicator = '>'def go_back(picker):return None, -options, verbose_options = prepare_options(args)idx = kwargs.get('', )picker = Picker(verbose_options, title=prompt, indicator=indicator, default_index=idx)picker.register_custom_handler(ord(''), go_back)picker.register_custom_handler(curses.KEY_LEFT, go_back)with stdout_redirected(sys.stderr):option, index = picker.start()if index == -:raise QuestionnaireGoBackif kwargs.get('', False):return indexreturn options[index]", "docstring": "Instantiates a picker, registers custom handlers for going back,\n and starts the picker.", "id": "f636:m3"} {"signature": "@register(key='')def many(prompt, *args, **kwargs):", "body": "def get_options(options, chosen):return [options[i] for i, c in enumerate(chosen) if c]def get_verbose_options(verbose_options, chosen):no, yes = '', ''if sys.version_info < (, ):no, yes = '', ''opts = [''.format(yes if c else no, verbose_options[i]) for i, c in enumerate(chosen)]return opts + [''.format('', kwargs.get('', ''))]options, verbose_options = prepare_options(args)chosen = [False] * len(options)index = kwargs.get('', )default = kwargs.get('', None)if isinstance(default, list):for idx in default:chosen[idx] = Trueif isinstance(default, int):chosen[default] = Truewhile True:try:index = one(prompt, *get_verbose_options(verbose_options, chosen), return_index=True, idx=index)except QuestionnaireGoBack:if any(chosen):raise QuestionnaireGoBack()else:raise QuestionnaireGoBackif index == len(options):return get_options(options, chosen)chosen[index] = not chosen[index]", "docstring": "Calls `pick` in a while loop to allow the user to pick many\n options. Returns a list of chosen options.", "id": "f636:m4"}
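The Questionnaire records above (add/remove/format_answers) and the 'one'/'many' prompters combine into a small terminal-survey API. A heavily hedged sketch; the Questionnaire import, the run() entry point and the fmt value are assumptions, since the concrete string literals are stripped in this dump:

# Hedged sketch of the questionnaire API catalogued above; names that do
# not appear in the records (run, fmt='plain') are guesses.
from questionnaire import Questionnaire  # assumed import path

q = Questionnaire()
q.add('day', 'monday', 'friday', 'saturday')          # single-choice picker
q.add('plans', 'hiking', 'reading', prompter='many')  # multi-choice picker
q.run()                                               # assumed entry point
print(q.format_answers(fmt='plain'))                  # valid fmt values are stripped above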
{"signature": "def register(key=''):", "body": "def decorate(func):prompters[key] = funcreturn funcreturn decorate", "docstring": "Add decorated functions to prompters dict.", "id": "f636:m2"} {"signature": "def delete_file_on_return(path):", "body": "def decorator(func):@functools.wraps(func)def wrapper(*args, **kwds):try:return func(*args, **kwds)finally:try:os.remove(path)except (IOError, OSError):passreturn wrapperreturn decorator", "docstring": "Decorator that deletes the file at `path` once the decorated function returns.\n\n:type path: str\n:arg path: relative path from repository root (e.g., 'pyqode' or 'test').", "id": "f656:m1"} {"signature": "def spam(self):", "body": "pass", "docstring": "Spam", "id": "f660:c0:m1"} {"signature": "def open_file(self, path, line=None):", "body": "editor = Noneif path:interpreter, pyserver, args = self._get_backend_parameters()editor = self.tabWidget.open_document(path, None, interpreter=interpreter, server_script=pyserver,args=args)if editor:self.setup_editor(editor)self.recent_files_manager.open_file(path)self.menu_recents.update_actions()if line is not None:TextHelper(self.tabWidget.current_widget()).goto_line(line)return editor", "docstring": "Creates a new GenericCodeEdit, opens the requested file and adds it\nto the tab widget.\n\n:param path: Path of the file to open\n\n:return The opened editor if open succeeded.", "id": "f666:c0:m8"} {"signature": "def setup_mnu_edit(self, editor):", "body": "self.menuEdit.addActions(editor.actions())self.menuEdit.addSeparator()self.setup_mnu_style(editor)", "docstring": "Setup the edit menu for the current editor. We show the current editor\ncontext menu and a menu to change the python interpreter.\n\n:param editor: new editor", "id": "f666:c0:m14"} {"signature": "def on_goto_out_of_doc(self, assignment):", "body": "editor = self.open_file(assignment.module_path)if editor:TextHelper(editor).goto_line(assignment.line, assignment.column)", "docstring": "Open a new tab when goto goes out of the current document.\n\n:param assignment: Destination", "id": "f666:c0:m27"} {"signature": "def setup_recent_files_menu(self):", "body": "self.recent_files_manager = widgets.RecentFilesManager('', '')self.menu_recents = widgets.MenuRecentFiles(self.menuFile, title='',recent_files_manager=self.recent_files_manager)self.menu_recents.open_requested.connect(self.open_file)self.menuFile.insertMenu(self.actionSave, self.menu_recents)self.menuFile.insertSeparator(self.actionSave)", "docstring": "Setup the recent files menu and manager", "id": "f666:c0:m5"} {"signature": "def on_open(self):", "body": "filename, filter = QtWidgets.QFileDialog.getOpenFileName(self, '')if filename:self.open_file(filename)self.actionRun.setEnabled(True)self.actionConfigure_run.setEnabled(True)", "docstring": "Shows an open file dialog and opens the file if the dialog was\naccepted.", "id": "f666:c0:m11"} {"signature": "def closeEvent(self, QCloseEvent):", "body": "self.tabWidget.closeEvent(QCloseEvent)", "docstring": "Delegates the close event to the tabWidget to be sure we do not quit\nthe application while there are still some unsaved tabs.", "id": "f666:c0:m6"} {"signature": "def on_run(self):", "body": "filename = self.tabWidget.current_widget().file.pathwd = os.path.dirname(filename)args = Settings().get_run_config_for_file(filename)self.interactiveConsole.start_process(Settings().interpreter, args=[filename] + args, cwd=wd)self.dockWidget.show()self.actionRun.setEnabled(False)self.actionConfigure_run.setEnabled(False)", "docstring": "Run the current script", "id": "f666:c0:m26"}
"docstring": "Run the current current script", "id": "f666:c0:m26"} {"signature": "def setup_actions(self):", "body": "self.actionOpen.triggered.connect(self.on_open)self.actionNew.triggered.connect(self.on_new)self.actionSave.triggered.connect(self.on_save)self.actionSave_as.triggered.connect(self.on_save_as)self.actionQuit.triggered.connect(QtWidgets.QApplication.instance().quit)self.tabWidget.current_changed.connect(self.on_current_tab_changed)self.tabWidget.last_tab_closed.connect(self.on_last_tab_closed)self.actionAbout.triggered.connect(self.on_about)self.actionRun.triggered.connect(self.on_run)self.interactiveConsole.process_finished.connect(self.on_process_finished)self.actionConfigure_run.triggered.connect(self.on_configure_run)", "docstring": "Connects slots to signals", "id": "f666:c0:m3"} {"signature": "def on_panel_state_changed(self):", "body": "action = self.sender()action.panel.enabled = action.isChecked()action.panel.setVisible(action.isChecked())", "docstring": "Enable disable the selected panel.", "id": "f666:c0:m23"} {"signature": "@propertydef run_configs(self):", "body": "string = self.settings.value('', '')return json.loads(string)", "docstring": "Returns the dictionary of run configurations. A run configuration is\njust a list of arguments to append to the run command.\n\nThis is internally stored as a json object", "id": "f671:c0:m3"} {"signature": "def detect_fold_level(self, prev_block, block):", "body": "lvl = super(PythonFoldDetector, self).detect_fold_level(prev_block, block)prev_lvl = TextBlockHelper.get_fold_lvl(prev_block)if prev_block and lvl > prev_lvl and not (self._strip_comments(prev_block).endswith('')):lvl = prev_lvllvl = self._handle_docstrings(block, lvl, prev_block)lvl = self._handle_imports(block, lvl, prev_block)return lvl", "docstring": "Perfoms fold level detection for current block (take previous block\ninto account).\n\n:param prev_block: previous block, None if `block` is the first block.\n:param block: block to analyse.\n:return: block fold level", "id": "f694:c0:m3"} {"signature": "def any(name, alternates):", "body": "return \"\" % name + \"\".join(alternates) + \"\"", "docstring": "Return a named group pattern matching list of alternates.", "id": "f695:m0"} {"signature": "def indent(self):", "body": "if not self.tab_always_indent:super(PyIndenterMode, self).indent()else:cursor = self.editor.textCursor()assert isinstance(cursor, QtGui.QTextCursor)if cursor.hasSelection():self.indent_selection(cursor)else:tab_len = self.editor.tab_lengthcursor.beginEditBlock()if self.editor.use_spaces_instead_of_tabs:cursor.insertText(tab_len * \"\")else:cursor.insertText('')cursor.endEditBlock()self.editor.setTextCursor(cursor)", "docstring": "Performs an indentation", "id": "f698:c0:m3"} {"signature": "def on_state_changed(self, state):", "body": "if state:self.action.triggered.connect(self.comment)self.editor.add_action(self.action, sub_menu='')if '' in os.environ[''].lower():self.editor.key_pressed.connect(self.on_key_pressed)else:self.editor.remove_action(self.action, sub_menu='')self.action.triggered.disconnect(self.comment)if '' in os.environ[''].lower():self.editor.key_pressed.disconnect(self.on_key_pressed)", "docstring": "Called when the mode is activated/deactivated", "id": "f700:c0:m1"} {"signature": "def update_terminal_colors(self):", "body": "self.color_scheme = self.create_color_scheme(background=self.syntax_highlighter.color_scheme.background,foreground=self.syntax_highlighter.color_scheme.formats[''].foreground().color())", "docstring": 
"Update terminal color scheme based on the pygments color scheme colors", "id": "f708:c2:m3"} {"signature": "def mousePressEvent(self, e):", "body": "super(PyInteractiveConsole, self).mousePressEvent(e)cursor = self.cursorForPosition(e.pos())p = cursor.positionInBlock()usd = cursor.block().userData()if usd and usd.start_pos_in_block <= p <= usd.end_pos_in_block:if e.button() == QtCore.Qt.LeftButton:self.open_file_requested.emit(usd.filename, usd.line)", "docstring": "Emits open_file_requested if the press event occured over\na file location string.", "id": "f709:c0:m4"} {"signature": "def mouseMoveEvent(self, e):", "body": "super(PyInteractiveConsole, self).mouseMoveEvent(e)cursor = self.cursorForPosition(e.pos())assert isinstance(cursor, QtGui.QTextCursor)p = cursor.positionInBlock()usd = cursor.block().userData()if usd and usd.start_pos_in_block <= p <= usd.end_pos_in_block:if QtWidgets.QApplication.overrideCursor() is None:QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))else:if QtWidgets.QApplication.overrideCursor() is not None:QtWidgets.QApplication.restoreOverrideCursor()", "docstring": "Extends mouseMoveEvent to display a pointing hand cursor when the\nmouse cursor is over a file location", "id": "f709:c0:m3"} {"signature": "def icon_from_typename(name, icon_type):", "body": "ICONS = {'': ICON_CLASS,'': ICON_NAMESPACE,'': ICON_VAR,'': ICON_VAR,'': ICON_VAR,'': ICON_VAR,'': ICON_VAR,'': ICON_NAMESPACE,'': ICON_KEYWORD,'': ICON_VAR,'': ICON_VAR,'': ICON_VAR,'': ICON_VAR,'': ICON_VAR,'': ICON_VAR,'': ICON_FUNC,'': ICON_FUNC,'': ICON_FUNC_PRIVATE,'': ICON_FUNC_PROTECTED}ret_val = Noneicon_type = icon_type.upper()if hasattr(name, \"\"):name = name.stringif icon_type == \"\" or icon_type == \"\":icon_type = \"\"if icon_type == \"\" or icon_type == \"\":if name.startswith(\"\"):icon_type += \"\"elif name.startswith(\"\"):icon_type += \"\"if icon_type in ICONS:ret_val = ICONS[icon_type]elif icon_type:_logger().warning(\"\", icon_type)return ret_val", "docstring": "Returns the icon resource filename that corresponds to the given typename.\n\n:param name: name of the completion. 
{"signature": "def run_pyflakes(request_data):", "body": "global prev_resultsfrom pyflakes import checkerimport _astWARNING = ERROR = ret_val = []code = request_data['']path = request_data['']encoding = request_data['']if not encoding:encoding = ''if not path:path = os.path.join(tempfile.gettempdir(), '')if not code:return []else:try:tree = compile(code.encode(encoding), path, \"\",_ast.PyCF_ONLY_AST)except SyntaxError as value:msg = '' % value.args[](lineno, offset, text) = value.lineno - , value.offset, value.textif text is None:_logger().warning(\"\",path)else:ret_val.append((msg, ERROR, lineno))else:w = checker.Checker(tree, os.path.split(path)[])w.messages.sort(key=lambda m: m.lineno)for message in w.messages:msg = \"\" % str(message).split('')[-].strip()line = message.lineno - status = WARNINGif message.__class__ not in PYFLAKES_ERROR_MESSAGESelse ERRORret_val.append((msg, status, line))prev_results = ret_valreturn ret_val", "docstring": "Worker that runs a frosted (the fork of pyflakes) code analysis on the\ncurrent editor text.", "id": "f714:m7"} {"signature": "@staticmethoddef complete(code, line, column, path, encoding, prefix):", "body": "ret_val = []try:script = jedi.Script(code, line + , column, path, encoding)completions = script.completions()print('' % completions)except jedi.NotFoundError:completions = []for completion in completions:ret_val.append({'': completion.name,'': icon_from_typename(completion.name, completion.type),'': completion.description})return ret_val", "docstring": "Completes python code using `jedi`_.\n\n:returns: a list of completion.", "id": "f714:c0:m0"} {"signature": "def calltips(request_data):", "body": "code = request_data['']line = request_data[''] + column = request_data['']path = request_data['']encoding = ''script = jedi.Script(code, line, column, path, encoding)signatures = script.call_signatures()for sig in signatures:results = (str(sig.module_name), str(sig.name),[p.description for p in sig.params], sig.index,sig.bracket_start, column)return resultsreturn []", "docstring": "Worker that returns a list of calltips.\n\nA calltip is a tuple made of the following parts:\n - module_name: name of the module of the function invoked\n - call_name: name of the function that is being called\n - params: the list of parameter names.\n - index: index of the current parameter\n - bracket_start\n\n:returns tuple(module_name, call_name, params)", "id": "f714:m1"} {"signature": "def acquire(self):", "body": "try:pidfile = open(self._pidfile, \"\")except IOError as err:raise SystemExit(err)try:fcntl.flock(pidfile.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)except IOError:raise SystemExit('' + self._pidfile)pidfile.seek()pidfile.truncate()pidfile.write(str(os.getpid()) + '')pidfile.flush()self.pidfile = pidfileatexit.register(self.release)", "docstring": "Acquire the pidfile.\n\n Create the pidfile, lock it, write the pid into it\n and register the release with atexit.\n\n\n :return: None\n :raise: SystemExit", "id": "f731:c0:m3"} {"signature": "@propertydef _files_preserve(self):", "body": "result = set()files = [] if not self.files_preserve else self.files_preservefiles.extend([self.stdin, self.stdout, self.stderr])for item in files:if hasattr(item, ''):result.add(item.fileno())if isinstance(item, int):result.add(item)return result", "docstring": "create a set of protected files\n\n create a set of files, based on self.files_preserve and\n self.stdin, self.stdout and self.stderr, that should not get\n closed while daemonizing.\n\n :return: set", "id": "f733:c1:m4"}
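The run_pyflakes, complete and calltips workers above drive pyflakes and jedi directly. A standalone sketch of the jedi calls they rely on; the Script(source, line, column, path, encoding) signature matches the older jedi releases these records assume (newer jedi changed this API):

# Hedged sketch of the jedi usage shown in the complete/calltips workers.
import jedi  # assumes an older jedi release with this Script signature

source = "import os\nos.pa"
script = jedi.Script(source, 2, 5, 'example.py', 'utf-8')
for completion in script.completions():
    print(completion.name, completion.type, completion.description)
for sig in script.call_signatures():
    print(sig.module_name, sig.name, [p.description for p in sig.params])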
"docstring": "create a set of protected files\n\n create a set of files, based on self.files_preserve and\n self.stdin, self,stdout and self.stderr, that should not get\n closed while daemonizing.\n\n :return: set", "id": "f733:c1:m4"} {"signature": "def parent_is_inet():", "body": "result = Falsesock = socket.fromfd(sys.__stdin__.fileno(),socket.AF_INET,socket.SOCK_RAW)try:sock.getsockopt(socket.SOL_SOCKET, socket.SO_TYPE)result = Trueexcept (OSError, socket.error) as err:if not err.args[] == errno.ENOTSOCK:result = Truereturn result", "docstring": "Check if parent is inet\n\n Check if our parent seems ot be a superserver, aka inetd/xinetd.\n\n This is done by checking if sys.__stdin__ is a network socket.\n\n :return: bool", "id": "f733:m3"} {"signature": "@propertydef _signal_handler_map(self):", "body": "result = {}for signum, handler in self.signal_map.items():result[signum] = self._get_signal_handler(handler)return result", "docstring": "Create the signal handler map\n\n create a dictionary with signal:handler mapping based on\n self.signal_map\n\n :return: dict", "id": "f733:c1:m5"} {"signature": "def open(self):", "body": "if self.is_open:returntry:os.chdir(self.working_directory)if self.chroot_directory:os.chroot(self.chroot_directory)os.setgid(self.gid)os.setuid(self.uid)os.umask(self.umask)except OSError as err:raise DaemonError(''.format(err))if self.prevent_core:try:resource.setrlimit(resource.RLIMIT_CORE, (, ))except Exception as err:raise DaemonError(''.format(err))if self.detach_process:try:if os.fork() > :os._exit()except OSError as err:raise DaemonError(''.format(err))os.setsid()try:if os.fork() > :os._exit()except OSError as err:raise DaemonError(''.format(err))for (signal_number, handler) in self._signal_handler_map.items():signal.signal(signal_number, handler)close_filenos(self._files_preserve)redirect_stream(sys.stdin, self.stdin)redirect_stream(sys.stdout, self.stdout)redirect_stream(sys.stderr, self.stderr)if self.pidfile:self.pidfile.acquire()self._is_open = True", "docstring": "Daemonize this process\n\n Do everything that is needed to become a Unix daemon.\n\n :return: None\n :raise: DaemonError", "id": "f733:c1:m10"} {"signature": "def parent_is_init():", "body": "if os.getppid() == :return Truereturn False", "docstring": "Check if parent is Init\n\n Check if the parent process is init, or something else that\n owns PID 1.\n\n :return: bool", "id": "f733:m2"} {"signature": "def close_filenos(preserve):", "body": "maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[]if maxfd == resource.RLIM_INFINITY:maxfd = for fileno in range(maxfd):if fileno not in preserve:try:os.close(fileno)except OSError as err:if not err.errno == errno.EBADF:raise DaemonError(''.format(fileno, err))", "docstring": "Close unprotected file descriptors\n\n Close all open file descriptors that are not in preserve.\n\n If ulimit -nofile is \"unlimited\", all is defined filenos <= 4096,\n else all is <= the output of resource.getrlimit().\n\n :param preserve: set with protected files\n :type preserve: set\n\n :return: None", "id": "f733:m0"} {"signature": "def __exit__(self, exc_type, exc_val, exc_tb):", "body": "self.close()", "docstring": "Context Handler, wrapping self.close()\n\n :return: None", "id": "f733:c1:m2"} {"signature": "@propertydef is_open(self):", "body": "return self._is_open", "docstring": "True when this instances open method was called\n\n :return: bool", "id": "f733:c1:m8"} {"signature": "def __init__(self, chroot_directory=None, working_directory='',umask=, uid=None, 
{"signature": "def __init__(self, chroot_directory=None, working_directory='',umask=, uid=None, gid=None, prevent_core=True,detach_process=None, files_preserve=None, pidfile=None,stdin=None, stdout=None, stderr=None, signal_map=None):", "body": "self._is_open = Falseself._working_directory = Noneself.chroot_directory = chroot_directoryself.umask = umaskself.uid = uid if uid else os.getuid()self.gid = gid if gid else os.getgid()if detach_process is None:self.detach_process = detach_required()else:self.detach_process = detach_processself.signal_map = signal_map if signal_map else default_signal_map()self.files_preserve = files_preserveself.pidfile = pidfileself.prevent_core = prevent_coreself.stdin = stdinself.stdout = stdoutself.stderr = stderrself.working_directory = working_directory", "docstring": "Initialize a new instance", "id": "f733:c1:m0"} {"signature": "def default_signal_map():", "body": "name_map = {'': None,'': None,'': None,'': ''}signal_map = {}for name, target in name_map.items():if hasattr(signal, name):signal_map[getattr(signal, name)] = targetreturn signal_map", "docstring": "Create the default signal map for this system.\n\n :return: dict", "id": "f733:m1"} {"signature": "def redirect_stream(system, target):", "body": "if target is None:target_fd = os.open(os.devnull, os.O_RDWR)else:target_fd = target.fileno()try:os.dup2(target_fd, system.fileno())except OSError as err:raise DaemonError(''.format(system, target, err))", "docstring": "Redirect Unix streams\n\n If None, redirect Stream to /dev/null, else redirect to target.\n\n :param system: either sys.stdin, sys.stdout, or sys.stderr\n :type system: file object\n\n :param target: File like object, or None\n :type target: None, File Object\n\n :return: None\n :raise: DaemonError", "id": "f733:m5"} {"signature": "@working_directory.setterdef working_directory(self, value):", "body": "self._working_directory = value", "docstring": "Set working directory\n\n New value is ignored if already daemonized.\n\n :param value: str\n :return:", "id": "f733:c1:m7"} {"signature": "def create_and_load(self):", "body": "create_db_and_user()initialize_database()populate_database(self.daily_files, self.quarterly_files)", "docstring": "Use this to create a user, a database, and load the database with files.\nIt will take a while to run and will only work if your network allows FTP\nfile transfer. It also requires you to have a postgres server running locally.", "id": "f737:c0:m1"}
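The DaemonContext records above (open/close, pidfile acquisition, stream redirection, signal maps) compose into the usual double-fork Unix daemon pattern. A hedged usage sketch; the pep3143daemon import path is an assumption based on the shape of these records:

# Hedged usage sketch of the daemonization API catalogued above.
import time
from pep3143daemon import DaemonContext, PidFile  # assumed import path

pidfile = PidFile('/tmp/mydaemon.pid')
daemon = DaemonContext(pidfile=pidfile, working_directory='/')
with daemon:            # open(): fork twice, redirect streams, lock the pidfile
    while True:
        time.sleep(60)  # placeholder for the daemon's real work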
{"signature": "def retrieve_document(file_path, directory=''):", "body": "ftp = FTP('', timeout=None)ftp.login()name = file_path.replace('', '')if not os.path.exists(directory):os.makedirs(directory)with tempfile.TemporaryFile() as temp:ftp.retrbinary('' % file_path, temp.write)temp.seek()with open(''.format(directory, name), '') as f:f.write(temp.read().decode(\"\"))f.closedrecords = tempretry = Falseftp.close()", "docstring": "This function takes a file path beginning with edgar and stores the form in a directory.\nThe default directory is sec_filings but can be changed through a keyword argument.", "id": "f740:m14"} {"signature": "def generate(grammar=None, num=, output=sys.stdout, max_recursion=, seed=None):", "body": "if seed is not None:gramfuzz.rand.seed(seed)fuzzer = gramfuzz.GramFuzzer()fuzzer.load_grammar(grammar)cat_group = os.path.basename(grammar).replace(\"\", \"\")results = fuzzer.gen(cat_group=cat_group, num=num, max_recursion=max_recursion)for res in results:output.write(res)", "docstring": "Load and generate ``num`` number of top-level rules from the specified grammar.\n\n :param list grammar: The grammar file to load and generate data from\n :param int num: The number of times to generate data\n :param output: The output destination (an open, writable stream-type object. default=``sys.stdout``)\n :param int max_recursion: The maximum reference-recursion when generating data (default=``10``)\n :param int seed: The seed to initialize the PRNG with. If None, will not initialize it.", "id": "f748:m0"} {"signature": "def make_present_participles(verbs):", "body": "res = []for verb in verbs:parts = verb.split()if parts[].endswith(\"\"):parts[] = parts[][:-] + \"\"else:parts[] = parts[] + \"\"res.append(\"\".join(parts))return res", "docstring": "Make the list of verbs into present participles\n\n E.g.:\n\n empower -> empowering\n drive -> driving", "id": "f751:m1"} {"signature": "def __init__(self, refname, **kwargs):", "body": "self.refname = refnameself.cat = kwargs.setdefault(\"\", self.cat)self.failsafe = kwargs.setdefault(\"\", self.failsafe)self.fuzzer = GramFuzzer.instance()", "docstring": "Create a new ``Ref`` instance\n\n :param str refname: The name of the rule to reference\n :param str cat: The name of the category the rule is defined in", "id": "f756:c13:m0"} {"signature": "def __init__(self, *values, **kwargs):", "body": "self.shortest_vals = Noneself.values = list(values)if \"\" in kwargs and len(values) == :self.values = kwargs[\"\"]self.rolling = kwargs.setdefault(\"\", False)", "docstring": "Create a new ``Or`` instance with the provided values\n\n :param list values: The list of values to choose randomly from", "id": "f756:c10:m0"} {"signature": "def __init__(self, value=None, **kwargs):", "body": "super(String, self).__init__(value, **kwargs)self.charset = kwargs.setdefault(\"\", self.charset)", "docstring": "Create a new instance of the ``String`` field.\n\n :param value: The hard-coded value of the String field\n :param int min: The minimum size of the String when built\n :param int max: The maximum size of the String when built\n :param str charset: The character-set to be used when building the string", "id": "f756:c6:m0"}
self.options.setdefault(\"\", self.no_prune)self.fuzzer = GramFuzzer.instance()frame,mod_path,_,_,_,_ = inspect.stack()[]module_name = os.path.basename(mod_path).replace(\"\", \"\").replace(\"\", \"\")if \"\" in frame.f_locals:self.fuzzer.cat_group_defaults[module_name] = frame.f_locals[\"\"]self.fuzzer.add_definition(self.cat, self.name, self, no_prune=self.no_prune, gram_file=module_name)", "docstring": "Create a new rule definition. Simply instantiating a new rule definition\n will add it to the current ``GramFuzzer`` instance.\n\n :param str name: The name of the rule being defined\n :param list values: The list of values that define the value of the rule\n (will be concatenated when built)\n :param str cat: The category to create the rule in (default=``\"default\"``).\n :param bool no_prune: If this rule should not be pruned *EVEN IF* it is found to be\n unreachable (default=``False``)", "id": "f756:c12:m0"} {"signature": "def __init__(self, *values, **kwargs):", "body": "super(Q, self).__init__(*values, **kwargs)self.escape = kwargs.setdefault(\"\", self.escape)self.html_js_escape = kwargs.setdefault(\"\", self.html_js_escape)self.quote = kwargs.setdefault(\"\", self.quote)", "docstring": "Create the new ``Quote`` instance\n\n :param bool escape: Whether or not quoted data should be escaped (default=``False``)\n :param bool html_js_escape: Whether or not quoted data should be html-javascript escaped (default=``False``)\n :param str quote: The quote character to be used if ``escape`` and ``html_js_escape`` are ``False``", "id": "f756:c9:m0"} {"signature": "def _odds_val(self):", "body": "if len(self.odds) == :self.odds = [(, [self.min, self.max])]rand_val = rand.random()total = for percent,v in self.odds:if total <= rand_val < total+percent:found_v = vbreaktotal += percentres = Noneif isinstance(v, (tuple,list)):rand_func = rand.randfloat if type(v[]) is float else rand.randintif len(v) == :res = rand_func(v[], v[])elif len(v) == :res = v[]else:res = vreturn res", "docstring": "Determine a new random value derived from the\n defined :any:`gramfuzz.fields.Field.odds` value.\n\n :returns: The derived value", "id": "f756:c1:m2"} {"signature": "def seed(val):", "body": "RANDOM.seed(val)", "docstring": "Set the seed for any subsequent random values/choices\n\n :param val: The random seed value", "id": "f757:m0"} {"signature": "def randint(a, b=None):", "body": "if b is None:return _randint(, a-)else:return _randint(a, b-)", "docstring": "Return a random integer\n\n :param int a: Either the minimum value (inclusive) if ``b`` is set, or\n the maximum value if ``b`` is not set (non-inclusive, in which case the minimum\n is implicitly 0)\n :param int b: The maximum value to generate (non-inclusive)\n :returns: int", "id": "f757:m1"} {"signature": "@classmethoddef instance(cls):", "body": "if cls.__instance__ is None:cls()return cls.__instance__", "docstring": "Return the singleton instance of the ``GramFuzzer``", "id": "f758:c0:m0"} {"signature": "def add_to_cat_group(self, cat, cat_group, def_name):", "body": "self.cat_groups.setdefault(cat, {}).setdefault(cat_group, deque()).append(def_name)", "docstring": "Associate the provided rule definition name ``def_name`` with the\n category group ``cat_group`` in the category ``cat``.\n\n :param str cat: The category the rule definition was declared in\n :param str cat_group: The group within the category the rule belongs to\n :param str def_name: The name of the rule definition", "id": "f758:c0:m12"} {"signature": "def preprocess_rules(self):", 
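Def, Ref, Or, Q and String above form gramfuzz's small grammar DSL: Def declares a named rule in a category, Ref points at one, Or picks among alternatives, and the fuzzer concatenates built values. A hedged sketch of a grammar defined inline (gramfuzz normally loads grammars from files via load_grammar, so inline definition and the exact keyword arguments here are assumptions):

# Hedged sketch of a tiny gramfuzz grammar using the fields above.
import gramfuzz
from gramfuzz.fields import Def, Or, Q, Ref, String

fuzzer = gramfuzz.GramFuzzer()  # Def instances register themselves here
Def("name", Q(String(min=3, max=8)), cat="example")
Def("greeting", Or("hello", "hi"), " ", Ref("name", cat="example"), cat="example")
for result in fuzzer.gen(cat="example", num=5, max_recursion=10):
    print(result)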
"body": "to_prune = self._find_shortest_paths()self._prune_rules(to_prune)self._rules_processed = True", "docstring": "Calculate shortest reference-paths of each rule (and Or field),\n and prune all unreachable rules.", "id": "f758:c0:m4"} {"signature": "def set_max_recursion(self, level):", "body": "import gramfuzz.fieldsgramfuzz.fields.Ref.max_recursion = level", "docstring": "Set the maximum reference-recursion depth (not the Python system maximum stack\n recursion level). This controls how many levels deep of nested references are allowed\n before gramfuzz attempts to generate the shortest (reference-wise) rules possible.\n\n :param int level: The new maximum reference level", "id": "f758:c0:m3"} {"signature": "def load_grammar(self, path):", "body": "if not os.path.exists(path):raise Exception(\"\".format(path))grammar_path = os.path.dirname(path)if grammar_path not in sys.path:sys.path.append(grammar_path)with open(path, \"\") as f:data = f.read()code = compile(data, path, \"\")locals_ = {\"\": self, \"\": path}exec(code, locals_)if \"\" in locals_:cat_group = os.path.basename(path).replace(\"\", \"\")self.set_cat_group_top_level_cat(cat_group, locals_[\"\"])", "docstring": "Load a grammar file (python file containing grammar definitions) by\n file path. When loaded, the global variable ``GRAMFUZZER`` will be set\n within the module. This is not always needed, but can be useful.\n\n :param str path: The path to the grammar file", "id": "f758:c0:m2"} {"signature": "def post_revert(self, cat, res, total_num, num, info):", "body": "if self._staged_defs is None:returnfor cat,def_name,def_value in self._staged_defs:self.defs.setdefault(cat, {}).setdefault(def_name, deque()).append(def_value)self._staged_defs = None", "docstring": "Commit any staged rule definition changes (rule generation went\n smoothly).", "id": "f758:c0:m16"} {"signature": "def revert(self, info=None):", "body": "self._staged_defs = None", "docstring": "Revert after a single def errored during generate (throw away all\n staged rule definition changes)", "id": "f758:c0:m17"} {"signature": "@propertydef safe(self):", "body": "return self._dumper_class is yaml.SafeDumper", "docstring": "Returns ``True`` if the safe mode is being used with (de)serialization.", "id": "f767:c0:m4"} {"signature": "def default_decoder(self, obj):", "body": "typename, marshalled_state = self.unwrap_callback(obj)if typename is None:return objtry:cls, unmarshaller = self.serializer.unmarshallers[typename]except KeyError:raise LookupError(''.format(typename)) from Noneif cls is not None:instance = cls.__new__(cls)unmarshaller(instance, marshalled_state)return instanceelse:return unmarshaller(marshalled_state)", "docstring": "Handle a dict that might contain a wrapped state for a custom type.", "id": "f771:c0:m2"} {"signature": "@abstractmethoddef serialize(self, obj) -> bytes:", "body": "", "docstring": "Serialize a Python object into bytes.", "id": "f773:c0:m0"} {"signature": "def register_custom_type(self, cls: type, marshaller: Optional[Callable[[Any], Any]] = default_marshaller,unmarshaller: Union[Callable[[Any, Any], None],Callable[[Any], Any], None] = default_unmarshaller, *,typename: str = None, wrap_state: bool = True) -> None:", "body": "assert check_argument_types()typename = typename or qualified_name(cls)if marshaller:self.marshallers[cls] = typename, marshaller, wrap_stateself.custom_type_codec.register_object_encoder_hook(self)if unmarshaller and self.custom_type_codec is not None:target_cls = cls if 
{"signature": "@abstractmethoddef deserialize(self, payload: bytes):", "body": "", "docstring": "Deserialize bytes into a Python object.", "id": "f773:c0:m1"} {"signature": "def get_response(word):", "body": "url = URL + \"\" + API_KEY + \"\" + word + \"\"return requests.get(url)", "docstring": "Fetch the translation result from the Baidu API\n Args:\n word(str): query word\n Returns:\n (requests.models.Response): response object", "id": "f778:m0"} {"signature": "@taskdef travis_setpass():", "body": "print(\"\")", "docstring": "Stores the PyPI password (encrypted) in the .travis.yml file.", "id": "f782:m2"} {"signature": "def assert_equal_files(self, obtained_fn, expected_fn, fix_callback=lambda x:x, binary=False, encoding=None):", "body": "import osfrom zerotk.easyfs import GetFileContents, GetFileLines__tracebackhide__ = Trueimport iodef FindFile(filename):data_filename = self.get_filename(filename)if os.path.isfile(data_filename):return data_filenameif os.path.isfile(filename):return filenamefrom ._exceptions import MultipleFilesNotFoundraise MultipleFilesNotFound([filename, data_filename])obtained_fn = FindFile(obtained_fn)expected_fn = FindFile(expected_fn)if binary:obtained_lines = GetFileContents(obtained_fn, binary=True)expected_lines = GetFileContents(expected_fn, binary=True)assert obtained_lines == expected_lineselse:obtained_lines = fix_callback(GetFileLines(obtained_fn, encoding=encoding))expected_lines = GetFileLines(expected_fn, encoding=encoding)if obtained_lines != expected_lines:html_fn = os.path.splitext(obtained_fn)[] + ''html_diff = self._generate_html_diff(expected_fn, expected_lines, obtained_fn, obtained_lines)with io.open(html_fn, '') as f:f.write(html_diff)import difflibdiff = ['', obtained_fn, expected_fn]diff += ['' % html_fn]diff += difflib.context_diff(obtained_lines, expected_lines)raise AssertionError(''.join(diff) + '')", "docstring": "Compare the contents of two files. If the files differ, show the diff and write a nice HTML\ndiff file into the data directory.\n\nSearches for the filenames both inside and outside the data directory (in that order).\n\n:param unicode obtained_fn: basename to obtained file into the data directory, or full path.\n\n:param unicode expected_fn: basename to expected file into the data directory, or full path.\n\n:param bool binary:\n Treat both files as binary files.\n\n:param unicode encoding:\n File's encoding. If not None, contents obtained from file will be decoded using this\n `encoding`.\n\n:param callable fix_callback:\n A callback to \"fix\" the contents of the obtained (first) file.\n This callback receives a list of strings (lines) and must also return a list of lines,\n changed as needed.\n The resulting lines will be used to compare with the contents of expected_fn.\n\n:param bool binary:\n .. seealso:: zerotk.easyfs.GetFileContents", "id": "f785:c0:m6"}
{"signature": "def get_data_dir(self):", "body": "return self._data_dir", "docstring": ":rtype: unicode\n:returns:\n Returns the absolute path to data-directory name to use, standardized by StandardizePath.\n\n@remarks:\n This method triggers the data-directory creation.", "id": "f785:c0:m3"} {"signature": "def _generate_html_diff(self, expected_fn, expected_lines, obtained_fn, obtained_lines):", "body": "import difflibdiffer = difflib.HtmlDiff()return differ.make_file(fromlines=expected_lines,fromdesc=expected_fn,tolines=obtained_lines,todesc=obtained_fn,)", "docstring": "Returns a nice side-by-side diff of the given files, as a string.", "id": "f785:c0:m7"} {"signature": "def _CopyFileLocal(source_filename, target_filename, copy_symlink=True):", "body": "import shutiltry:dir_name = os.path.dirname(target_filename)if dir_name and not os.path.isdir(dir_name):os.makedirs(dir_name)if copy_symlink and IsLink(source_filename):if os.path.isfile(target_filename) or IsLink(target_filename):DeleteFile(target_filename)source_filename = ReadLink(source_filename)CreateLink(source_filename, target_filename)else:if sys.platform == '':while IsLink(source_filename):link = ReadLink(source_filename)if os.path.isabs(link):source_filename = linkelse:source_filename = os.path.join(os.path.dirname(source_filename), link)shutil.copyfile(source_filename, target_filename)shutil.copymode(source_filename, target_filename)except Exception as e:reraise(e, '' % (source_filename, target_filename))", "docstring": "Copy a file locally to a directory.\n\n:param unicode source_filename:\n The filename to copy from.\n\n:param unicode target_filename:\n The filename to copy to.\n\n:param bool copy_symlink:\n If True and source_filename is a symlink, target_filename will also be created as\n a symlink.\n\n If False, the file being linked will be copied instead.", "id": "f788:m9"} {"signature": "def CreateFile(filename, contents, eol_style=EOL_STYLE_NATIVE, create_dir=True, encoding=None, binary=False):", "body": "if binary:if isinstance(contents, six.text_type):raise TypeError('')else:if not isinstance(contents, six.text_type):raise TypeError('')contents = _HandleContentsEol(contents, eol_style)encoding = encoding or sys.getfilesystemencoding()contents = contents.encode(encoding)binary = Trueif create_dir:dirname = os.path.dirname(filename)if dirname:CreateDirectory(dirname)from six.moves.urllib.parse import urlparsefilename_url = urlparse(filename)if _UrlIsLocal(filename_url):with open(filename, '') as oss:oss.write(contents)elif filename_url.scheme == '':from ._exceptions import NotImplementedProtocolraise NotImplementedProtocol(directory_url.scheme)else:from ._exceptions import NotImplementedProtocolraise NotImplementedProtocol(filename_url.scheme)return filename", "docstring": "Create a file with the given contents.\n\n:param unicode filename:\n Filename and path to be created.\n\n:param unicode contents:\n The file contents as a string.\n\n:type eol_style: EOL_STYLE_XXX constant\n:param eol_style:\n Replaces the EOL by the appropriate EOL depending on the eol_style value.\n Considers that all content is using only \"\\n\" as EOL.\n\n:param bool create_dir:\n If True, also creates directories needed in filename's path\n\n:param unicode encoding:\n Target file's content encoding. Defaults to sys.getfilesystemencoding()\n Ignored if `binary` = True\n\n:param bool binary:\n If True, file is created in binary mode. In this case, `contents` must be `bytes` and not\n `unicode`\n\n:return unicode:\n Returns the name of the file created.\n\n:raises NotImplementedProtocol:\n If file protocol is not local or FTP\n\n:raises ValueError:\n If trying to mix unicode `contents` without `encoding`, or `encoding` without\n unicode `contents`\n\n.. seealso:: FTP LIMITATIONS at this module's doc for performance issues information", "id": "f788:m27"}
{"signature": "def CreateMD5(source_filename, target_filename=None):", "body": "if target_filename is None:target_filename = source_filename + ''from six.moves.urllib.parse import urlparsesource_url = urlparse(source_filename)if _UrlIsLocal(source_url):md5_contents = Md5Hex(filename=source_filename)else:md5_contents = Md5Hex(contents=GetFileContents(source_filename, binary=True))CreateFile(target_filename, md5_contents)", "docstring": "Creates an md5 file from a source file (contents are the md5 hash of the source file)\n\n:param unicode source_filename:\n Path to source file\n\n:type target_filename: unicode or None\n:param target_filename:\n Name of the target file with the md5 contents\n\n If None, defaults to source_filename + '.md5'", "id": "f788:m6"} {"signature": "def MoveDirectory(source_dir, target_dir):", "body": "if not IsDir(source_dir):from ._exceptions import DirectoryNotFoundErrorraise DirectoryNotFoundError(source_dir)if Exists(target_dir):from ._exceptions import DirectoryAlreadyExistsErrorraise DirectoryAlreadyExistsError(target_dir)from six.moves.urllib.parse import urlparsesource_url = urlparse(source_dir)target_url = urlparse(target_dir)if _UrlIsLocal(source_url) and _UrlIsLocal(target_url):import shutilshutil.move(source_dir, target_dir)elif source_url.scheme == '' and target_url.scheme == '':from ._exceptions import NotImplementedProtocolraise NotImplementedProtocol(target_url.scheme)else:raise NotImplementedError('')", "docstring": "Moves a directory.\n\n:param unicode source_dir:\n\n:param unicode target_dir:\n\n:raises NotImplementedError:\n If trying to move anything other than:\n Local dir -> local dir\n FTP dir -> FTP dir (same host)", "id": "f788:m20"} {"signature": "def CreateDirectory(directory):", "body": "from six.moves.urllib.parse import urlparsedirectory_url = urlparse(directory)if _UrlIsLocal(directory_url):if not os.path.exists(directory):os.makedirs(directory)return directoryelif directory_url.scheme == '':from ._exceptions import NotImplementedProtocolraise NotImplementedProtocol(directory_url.scheme)else:from ._exceptions import NotImplementedProtocolraise NotImplementedProtocol(directory_url.scheme)", "docstring": "Create directory including any missing intermediate directory.\n\n:param unicode directory:\n\n:return unicode|urlparse.ParseResult:\n Returns the created directory or url (see urlparse).\n\n:raises NotImplementedProtocol:\n If protocol is not local or FTP.\n\n.. seealso:: FTP LIMITATIONS at this module's doc for performance issues information", "id": "f788:m29"}
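The easyfs helpers catalogued here (CreateFile, CreateMD5, CreateDirectory) chain naturally. A short usage sketch; the zerotk.easyfs import path is taken from the assert_equal_files record earlier, and importing EOL_STYLE_UNIX from the same module is an assumption:

# Hedged usage sketch of the easyfs helpers above.
from zerotk.easyfs import CreateDirectory, CreateFile, CreateMD5, EOL_STYLE_UNIX

CreateDirectory('build/out')           # also creates intermediate directories
filename = CreateFile('build/out/hello.txt', 'hello\n', eol_style=EOL_STYLE_UNIX)
CreateMD5(filename)                    # writes build/out/hello.txt.md5 beside it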
directory:\n\n:return unicode|urlparse.ParseResult:\n Returns the created directory or url (see urlparse).\n\n:raises NotImplementedProtocol:\n If protocol is not local or FTP.\n\n.. seealso:: FTP LIMITATIONS at this module's doc for performance issues information", "id": "f788:m29"} {"signature": "def IsLink(path):", "body": "_AssertIsLocal(path)if sys.platform != '':return os.path.islink(path)import jaraco.windows.filesystemreturn jaraco.windows.filesystem.islink(path)", "docstring": ":param unicode path:\n Path being tested\n\n:returns bool:\n True if `path` is a link", "id": "f788:m35"} {"signature": "def CopyFiles(source_dir, target_dir, create_target_dir=False, md5_check=False):", "body": "import fnmatchif IsDir(source_dir):source_mask = ''else:source_dir, source_mask = os.path.split(source_dir)if not IsDir(target_dir):if create_target_dir:CreateDirectory(target_dir)else:from ._exceptions import DirectoryNotFoundErrorraise DirectoryNotFoundError(target_dir)filenames = ListFiles(source_dir)if filenames is None:returnfor i_filename in filenames:if md5_check and i_filename.endswith(''):continue if fnmatch.fnmatch(i_filename, source_mask):source_path = source_dir + '' + i_filenametarget_path = target_dir + '' + i_filenameif IsDir(source_path):CopyFiles(source_path, target_path, create_target_dir=True, md5_check=md5_check)else:CopyFile(source_path, target_path, md5_check=md5_check)", "docstring": "Copy files from the given source to the target.\n\n:param unicode source_dir:\n A filename, URL or a file mask.\n Ex.\n x:\\coilib50\n x:\\coilib50\\*\n http://server/directory/file\n ftp://server/directory/file\n\n\n:param unicode target_dir:\n A directory or a URL\n Ex.\n d:\\Temp\n ftp://server/directory\n\n:param bool create_target_dir:\n If True, creates the target path if it doesn't exist.\n\n:param bool md5_check:\n .. seealso:: CopyFile\n\n:raises DirectoryNotFoundError:\n If target_dir does not exist, and create_target_dir is False\n\n.. seealso:: CopyFile for documentation on accepted protocols\n\n.. 
seealso:: FTP LIMITATIONS at this module's doc for performance issues information", "id": "f788:m10"} {"signature": "def CopyDirectory(source_dir, target_dir, override=False):", "body": "_AssertIsLocal(source_dir)_AssertIsLocal(target_dir)if override and IsDir(target_dir):DeleteDirectory(target_dir, skip_on_error=False)import shutilshutil.copytree(source_dir, target_dir)", "docstring": "Recursively copy a directory tree.\n\n:param unicode source_dir:\n Where files will come from\n\n:param unicode target_dir:\n Where files will go to\n\n:param bool override:\n If True and target_dir already exists, it will be deleted before copying.\n\n:raises NotImplementedForRemotePathError:\n If trying to copy to/from remote directories", "id": "f788:m16"} {"signature": "def MoveFile(source_filename, target_filename):", "body": "_AssertIsLocal(source_filename)_AssertIsLocal(target_filename)import shutilshutil.move(source_filename, target_filename)", "docstring": "Moves a file.\n\n:param unicode source_filename:\n\n:param unicode target_filename:\n\n:raises NotImplementedForRemotePathError:\n If trying to operate with non-local files.", "id": "f788:m19"} {"signature": "def CreateLink(target_path, link_path, override=True):", "body": "_AssertIsLocal(target_path)_AssertIsLocal(link_path)if override and IsLink(link_path):DeleteLink(link_path)dirname = os.path.dirname(link_path)if dirname:CreateDirectory(dirname)if sys.platform != '':return os.symlink(target_path, link_path) else:import jaraco.windows.filesystemreturn jaraco.windows.filesystem.symlink(target_path, link_path)from ._easyfs_win32 import CreateSymbolicLinktry:dw_flags = if target_path and os.path.isdir(target_path):dw_flags = return CreateSymbolicLink(target_path, link_path, dw_flags)except Exception as e:reraise(e, '' % locals())", "docstring": "Create a symbolic link at `link_path` pointing to `target_path`.\n\n:param unicode target_path:\n Link target\n\n:param unicode link_path:\n Fullpath to link name\n\n:param bool override:\n If True and `link_path` already exists as a link, that link is overridden.", "id": "f788:m34"} {"signature": "def _GetNativeEolStyle(platform=sys.platform):", "body": "_NATIVE_EOL_STYLE_MAP = {'' : EOL_STYLE_WINDOWS,'' : EOL_STYLE_UNIX,'' : EOL_STYLE_UNIX,'' : EOL_STYLE_MAC,}result = _NATIVE_EOL_STYLE_MAP.get(platform)if result is None:from ._exceptions import UnknownPlatformErrorraise UnknownPlatformError(platform)return result", "docstring": "Internal function that determines EOL_STYLE_NATIVE constant with the proper value for the\ncurrent platform.", "id": "f788:m0"} {"signature": "def _DoCopyFile(source_filename, target_filename, copy_symlink=True):", "body": "from six.moves.urllib.parse import urlparsesource_url = urlparse(source_filename)target_url = urlparse(target_filename)if _UrlIsLocal(source_url):if not Exists(source_filename):from ._exceptions import FileNotFoundErrorraise FileNotFoundError(source_filename)if _UrlIsLocal(target_url):_CopyFileLocal(source_filename, target_filename, copy_symlink=copy_symlink)elif target_url.scheme in ['']:from ._exceptions import NotImplementedProtocolraise NotImplementedProtocol(target_url.scheme)else:from ._exceptions import NotImplementedProtocolraise NotImplementedProtocol(target_url.scheme)elif source_url.scheme in ['', '', '']:if _UrlIsLocal(target_url):from ._exceptions import NotImplementedProtocolraise NotImplementedProtocol(target_url.scheme)else:from ._exceptions import NotImplementedProtocolraise NotImplementedProtocol(target_url.scheme)else:from ._exceptions 
import NotImplementedProtocol raise NotImplementedProtocol(source_url.scheme)", "docstring": ":param unicode source_filename:\n The source filename.\n Schemas: local, ftp, http\n\n:param unicode target_filename:\n Target filename.\n Schemas: local, ftp\n\n:param copy_symlink:\n @see _CopyFileLocal\n\n:raises FileNotFoundError:\n If source_filename does not exist", "id": "f788:m8"} {"signature": "def _HandleContentsEol(contents, eol_style):", "body": "if eol_style == EOL_STYLE_NONE:return contentsif eol_style == EOL_STYLE_UNIX:return contents.replace('', eol_style).replace('', eol_style)if eol_style == EOL_STYLE_MAC:return contents.replace('', eol_style).replace('', eol_style)if eol_style == EOL_STYLE_WINDOWS:return contents.replace('', '').replace('', '').replace('', EOL_STYLE_WINDOWS)raise ValueError('' % (eol_style,))", "docstring": "Replaces eol on each line by the given eol_style.\n\n:param unicode contents:\n:type eol_style: EOL_STYLE_XXX constant\n:param eol_style:", "id": "f788:m39"} {"signature": "def GetFileLines(filename, newline=None, encoding=None):", "body": "return GetFileContents(filename,binary=False,encoding=encoding,newline=newline,).split('')", "docstring": "Reads a file and returns its contents as a list of lines. Works for both local and remote files.\n\n:param unicode filename:\n\n:param None|''|'\\n'|'\\r'|'\\r\\n' newline:\n Controls universal newlines.\n See 'io.open' newline parameter documentation for more details.\n\n:param unicode encoding:\n File's encoding. If not None, contents obtained from file will be decoded using this\n `encoding`.\n\n:returns list(unicode):\n The file's lines\n\n.. seealso:: FTP LIMITATIONS at this module's doc for performance issues information", "id": "f788:m22"} {"signature": "def CheckIsFile(filename):", "body": "if not IsFile(filename):from ._exceptions import FileNotFoundErrorraise FileNotFoundError(filename)", "docstring": "Check if the given file exists.\n\n@filename: unicode\n The filename to check for existence.\n\n@raise: FileNotFoundError\n Raises if the file does not exist.", "id": "f788:m25"} {"signature": "def ReplaceInFile(filename, old, new, encoding=None):", "body": "contents = GetFileContents(filename, encoding=encoding)contents = contents.replace(old, new)CreateFile(filename, contents, encoding=encoding)return contents", "docstring": "Replaces all occurrences of \"old\" by \"new\" in the given file.\n\n:param unicode filename:\n The name of the file.\n\n:param unicode old:\n The string to search for.\n\n:param unicode new:\n Replacement string.\n\n:return unicode:\n The new contents of the file.", "id": "f788:m28"} {"signature": "def ListFiles(directory):", "body": "from six.moves.urllib.parse import urlparsedirectory_url = urlparse(directory)if _UrlIsLocal(directory_url):if not os.path.isdir(directory):return Nonereturn os.listdir(directory)elif directory_url.scheme == '':from ._exceptions import NotImplementedProtocolraise NotImplementedProtocol(directory_url.scheme)else:from ._exceptions import NotImplementedProtocolraise NotImplementedProtocol(directory_url.scheme)", "docstring": "Lists the files in the given directory\n\n:type directory: unicode | unicode\n:param directory:\n A directory or URL\n\n:rtype: list(unicode) | list(unicode)\n:returns:\n List of filenames/directories found in the given directory.\n Returns None if the given directory does not exist.\n\n If `directory` is a unicode string, all files returned will also be unicode\n\n:raises NotImplementedProtocol:\n If file protocol is not local or 
FTP\n\n.. seealso:: FTP LIMITATIONS at this module's doc for performance issues information", "id": "f788:m24"} {"signature": "def GetFileContents(filename, binary=False, encoding=None, newline=None):", "body": "source_file = OpenFile(filename, binary=binary, encoding=encoding, newline=newline)try:contents = source_file.read()finally:source_file.close()return contents", "docstring": "Reads a file and returns its contents. Works for both local and remote files.\n\n:param unicode filename:\n\n:param bool binary:\n If True returns the file as is, ignore any EOL conversion.\n\n:param unicode encoding:\n File's encoding. If not None, contents obtained from file will be decoded using this\n `encoding`.\n\n:param None|''|'\\n'|'\\r'|'\\r\\n' newline:\n Controls universal newlines.\n See 'io.open' newline parameter documentation for more details.\n\n:returns str|unicode:\n The file's contents.\n Returns unicode string when `encoding` is not None.\n\n.. seealso:: FTP LIMITATIONS at this module's doc for performance issues information", "id": "f788:m21"} {"signature": "def find_image(conn, name):", "body": "for item in conn.list_images()['']:if (item[''][''] == configuration.LOCATION anditem[''][''] == '' andname in item['']['']):return itemreturn None", "docstring": "Find image by partial name and location.", "id": "f792:m1"} {"signature": "def get_ipblock(self, ipblock_id):", "body": "response = self._perform_request('' % ipblock_id)return response", "docstring": "Retrieves a single IP block by ID.\n\n:param ipblock_id: The unique ID of the IP block.\n:type ipblock_id: ``str``", "id": "f811:c0:m21"} {"signature": "def list_snapshots(self, depth=):", "body": "response = self._perform_request('' % str(depth))return response", "docstring": "Retrieves a list of snapshots available in the account.", "id": "f811:c0:m66"} {"signature": "def get_attached_cdroms(self, datacenter_id, server_id, depth=):", "body": "response = self._perform_request('' % (datacenter_id,server_id,str(depth)))return response", "docstring": "Retrieves a list of CDROMs attached to the server.\n\n:param datacenter_id: The unique ID of the data center.\n:type datacenter_id: ``str``\n\n:param server_id: The unique ID of the server.\n:type server_id: ``str``\n\n:param depth: The depth of the response data.\n:type depth: ``int``", "id": "f811:c0:m58"} {"signature": "def get_attached_cdrom(self, datacenter_id, server_id, cdrom_id):", "body": "response = self._perform_request('' % (datacenter_id,server_id,cdrom_id))return response", "docstring": "Retrieves an attached CDROM.\n\n:param datacenter_id: The unique ID of the data center.\n:type datacenter_id: ``str``\n\n:param server_id: The unique ID of the server.\n:type server_id: ``str``\n\n:param cdrom_id: The unique ID of the CDROM.\n:type cdrom_id: ``str``", "id": "f811:c0:m59"} {"signature": "def remove_group_user(self, group_id, user_id):", "body": "response = self._perform_request(url='' % (group_id, user_id),method='')return response", "docstring": "Removes a user from a group.\n\n:param group_id: The unique ID of the group.\n:type group_id: ``str``\n\n:param user_id: The unique ID of the user.\n:type user_id: ``str``", "id": "f811:c0:m89"} {"signature": "def get_snapshot(self, snapshot_id):", "body": "response = self._perform_request('' % snapshot_id)return response", "docstring": "Retrieves a single snapshot by ID.\n\n:param snapshot_id: The unique ID of the snapshot.\n:type snapshot_id: ``str``", "id": "f811:c0:m65"} {"signature": "def get_image(self, image_id):", "body": 
"response = self._perform_request('' % image_id)return response", "docstring": "Retrieves a single image by ID.\n\n:param image_id: The unique ID of the image.\n:type image_id: ``str``", "id": "f811:c0:m17"} {"signature": "def get_group(self, group_id, depth=):", "body": "response = self._perform_request('' % (group_id, str(depth)))return response", "docstring": "Retrieves a single group by ID.\n\n:param group_id: The unique ID of the group.\n:type group_id: ``str``\n\n:param depth: The depth of the response data.\n:type depth: ``int``", "id": "f811:c0:m73"} {"signature": "def get_loadbalanced_nic(self, datacenter_id,loadbalancer_id, nic_id, depth=):", "body": "response = self._perform_request('' % (datacenter_id,loadbalancer_id,nic_id,str(depth)))return response", "docstring": "Gets the properties of a load balanced NIC.\n\n:param datacenter_id: The unique ID of the data center.\n:type datacenter_id: ``str``\n\n:param loadbalancer_id: The unique ID of the load balancer.\n:type loadbalancer_id: ``str``\n\n:param nic_id: The unique ID of the NIC.\n:type nic_id: ``str``\n\n:param depth: The depth of the response data.\n:type depth: ``int``", "id": "f811:c0:m38"} {"signature": "def __init__(self, name=None, location=None, size=None):", "body": "self.name = nameself.location = locationself.size = size", "docstring": "IPBlock class initializer.\n\n:param name: The name of the IP block.\n:type name: ``str``\n\n:param location: The location for the IP block.\n:type location: ``str``\n\n:param size: The number of IPs in the block.\n:type size: ``str``", "id": "f811:c3:m0"} {"signature": "def remove_snapshot(self, snapshot_id):", "body": "response = self._perform_request(url='' + snapshot_id, method='')return response", "docstring": "Removes a snapshot.\n\n:param snapshot_id: The ID of the snapshot\n you wish to remove.\n:type snapshot_id: ``str``", "id": "f811:c0:m71"} {"signature": "def delete_datacenter(self, datacenter_id):", "body": "response = self._perform_request(url='' % (datacenter_id),method='')return response", "docstring": "Removes the data center and all its components such as servers, NICs,\nload balancers, volumes.\n\n:param datacenter_id: The unique ID of the data center.\n:type datacenter_id: ``str``", "id": "f811:c0:m9"} {"signature": "def create_nic(self, datacenter_id, server_id, nic):", "body": "data = json.dumps(self._create_nic_dict(nic))response = self._perform_request(url='' % (datacenter_id,server_id),method='',data=data)return response", "docstring": "Creates a NIC on the specified server.\n\n:param datacenter_id: The unique ID of the data center.\n:type datacenter_id: ``str``\n\n:param server_id: The unique ID of the server.\n:type server_id: ``str``\n\n:param nic: A NIC dict.\n:type nic: ``dict``", "id": "f811:c0:m45"} {"signature": "def add_group_user(self, group_id, user_id):", "body": "data = {\"\": user_id}response = self._perform_request(url='' % group_id,method='',data=json.dumps(data))return response", "docstring": "Adds an existing user to a group.\n\n:param group_id: The unique ID of the group.\n:type group_id: ``str``\n\n:param user_id: The unique ID of the user.\n:type user_id: ``str``", "id": "f811:c0:m88"} {"signature": "def _read_config(self, filename=None):", "body": "if filename:self._config_filename = filenameelse:try:import appdirsexcept ImportError:raise Exception(\"\"\"\")self._config_filename = appdirs.user_config_dir(_LIBRARY_NAME, \"\") + \"\"if not self._config:self._config = configparser.ConfigParser()self._config.optionxform = 
strself._config.read(self._config_filename)", "docstring": "Read the user configuration", "id": "f811:c0:m1"} {"signature": "def update_image(self, image_id, **kwargs):", "body": "data = {}for attr, value in kwargs.items():data[self._underscore_to_camelcase(attr)] = valueresponse = self._perform_request(url='' + image_id,method='',data=json.dumps(data))return response", "docstring": "Replace all properties of an image.", "id": "f811:c0:m20"} {"signature": "def create_user(self, user):", "body": "data = self._create_user_dict(user=user)response = self._perform_request(url='',method='',data=json.dumps(data))return response", "docstring": "Creates a new user.\n\n:param user: The user object to be created.\n:type user: ``dict``", "id": "f811:c0:m84"} {"signature": "def create_datacenter(self, datacenter):", "body": "server_items = []volume_items = []lan_items = []loadbalancer_items = []entities = dict()properties = {\"\": datacenter.name}if datacenter.location:properties[''] = datacenter.locationif datacenter.description:properties[''] = datacenter.descriptionif datacenter.servers:for server in datacenter.servers:server_items.append(self._create_server_dict(server))servers = {\"\": server_items}server_entities = {\"\": servers}entities.update(server_entities)if datacenter.volumes:for volume in datacenter.volumes:volume_items.append(self._create_volume_dict(volume))volumes = {\"\": volume_items}volume_entities = {\"\": volumes}entities.update(volume_entities)if datacenter.loadbalancers:for loadbalancer in datacenter.loadbalancers:loadbalancer_items.append(self._create_loadbalancer_dict(loadbalancer))loadbalancers = {\"\": loadbalancer_items}loadbalancer_entities = {\"\": loadbalancers}entities.update(loadbalancer_entities)if datacenter.lans:for lan in datacenter.lans:lan_items.append(self._create_lan_dict(lan))lans = {\"\": lan_items}lan_entities = {\"\": lans}entities.update(lan_entities)if not entities:raw = {\"\": properties,}else:raw = {\"\": properties,\"\": entities}data = json.dumps(raw)response = self._perform_request(url='',method='',data=data)return response", "docstring": "Creates a data center -- both simple and complex are supported.", "id": "f811:c0:m10"} {"signature": "def attach_cdrom(self, datacenter_id, server_id, cdrom_id):", "body": "data = '' + cdrom_id + ''response = self._perform_request(url='' % (datacenter_id,server_id),method='',data=data)return response", "docstring": "Attaches a CDROM to a server.\n\n:param datacenter_id: The unique ID of the data center.\n:type datacenter_id: ``str``\n\n:param server_id: The unique ID of the server.\n:type server_id: ``str``\n\n:param cdrom_id: The unique ID of the CDROM.\n:type cdrom_id: ``str``", "id": "f811:c0:m60"} {"signature": "def delete_user(self, user_id):", "body": "response = self._perform_request(url='' % user_id,method='')return response", "docstring": "Removes a user.\n\n:param user_id: The unique ID of the user.\n:type user_id: ``str``", "id": "f811:c0:m86"} {"signature": "def add_share(self, group_id, resource_id, **kwargs):", "body": "properties = {}for attr, value in kwargs.items():properties[self._underscore_to_camelcase(attr)] = valuedata = {\"\": properties}response = self._perform_request(url='' % (group_id, resource_id),method='',data=json.dumps(data))return response", "docstring": "Shares a resource through a group.\n\n:param group_id: The unique ID of the group.\n:type group_id: ``str``\n\n:param resource_id: The unique ID of the resource.\n:type resource_id: ``str``", "id": "f811:c0:m79"} {"signature": "def 
get_attached_volume(self, datacenter_id, server_id, volume_id):", "body": "response = self._perform_request('' % (datacenter_id,server_id,volume_id))return response", "docstring": "Retrieves volume information.\n\n:param datacenter_id: The unique ID of the data center.\n:type datacenter_id: ``str``\n\n:param server_id: The unique ID of the server.\n:type server_id: ``str``\n\n:param volume_id: The unique ID of the volume.\n:type volume_id: ``str``", "id": "f811:c0:m55"} {"signature": "def __init__(self, name=None, ips=None, dhcp=None, lan=None, firewall_active=None,firewall_rules=None, nat=None, **kwargs):", "body": "if firewall_rules is None:firewall_rules = []self.name = nameself.nat = natself.ips = ipsself.dhcp = dhcpself.lan = lanself.firewall_active = firewall_activeself.firewall_rules = firewall_rules", "docstring": "NIC class initializer.\n\n:param name: The name of the NIC.\n:type name: ``str``\n\n:param ips: A list of IPs.\n:type ips: ``list``\n\n:param dhcp: Enable or disable DHCP. Default is enabled.\n:type dhcp: ``bool``\n\n:param lan: ID of the LAN in which the NIC should reside.\n:type lan: ``str``\n\n:param nat: Enable or disable NAT. Default is disabled.\n:type nat: ``bool``\n\n:param firewall_active: Turns the firewall on or off;\n default is disabled.\n:type firewall_active: ``bool``\n\n:param firewall_rules: List of firewall rule dicts.\n:type firewall_rules: ``list``", "id": "f811:c6:m0"} {"signature": "def get_resource(self, resource_type, resource_id, depth=):", "body": "response = self._perform_request('' % (resource_type, resource_id, str(depth)))return response", "docstring": "Retrieves a single resource of a particular type.\n\n:param resource_type: The resource type: datacenter, image,\n snapshot or ipblock.\n:type resource_type: ``str``\n\n:param resource_id: The unique ID of the resource.\n:type resource_id: ``str``\n\n:param depth: The depth of the response data.\n:type depth: ``int``", "id": "f811:c0:m91"} {"signature": "def create_lan(self, datacenter_id, lan):", "body": "data = json.dumps(self._create_lan_dict(lan))response = self._perform_request(url='' % datacenter_id,method='',data=data)return response", "docstring": "Creates a LAN in the data center.\n\n:param datacenter_id: The unique ID of the data center.\n:type datacenter_id: ``str``\n\n:param lan: The LAN object to be created.\n:type lan: ``dict``", "id": "f811:c0:m28"} {"signature": "def list_loadbalancers(self, datacenter_id, depth=):", "body": "response = self._perform_request('' % (datacenter_id, str(depth)))return response", "docstring": "Retrieves a list of load balancers in the data center.\n\n:param datacenter_id: The unique ID of the data center.\n:type datacenter_id: ``str``\n\n:param depth: The depth of the response data.\n:type depth: ``int``", "id": "f811:c0:m32"} {"signature": "def detach_volume(self, datacenter_id, server_id, volume_id):", "body": "response = self._perform_request(url='' % (datacenter_id,server_id,volume_id),method='')return response", "docstring": "Detaches a volume from a server.\n\n:param datacenter_id: The unique ID of the data center.\n:type datacenter_id: ``str``\n\n:param server_id: The unique ID of the server.\n:type server_id: ``str``\n\n:param volume_id: The unique ID of the volume.\n:type volume_id: ``str``", "id": "f811:c0:m57"} {"signature": "def get_attached_volumes(self, datacenter_id, server_id, depth=):", "body": "response = self._perform_request('' % (datacenter_id,server_id,str(depth)))return response", "docstring": "Retrieves a list of volumes 
attached to the server.\n\n:param datacenter_id: The unique ID of the data center.\n:type datacenter_id: ``str``\n\n:param server_id: The unique ID of the server.\n:type server_id: ``str``\n\n:param depth: The depth of the response data.\n:type depth: ``int``", "id": "f811:c0:m54"} {"signature": "def list_ipblocks(self, depth=):", "body": "response = self._perform_request('' % str(depth))return response", "docstring": "Retrieves a list of IP blocks available in the account.", "id": "f811:c0:m22"} {"signature": "def create_snapshot(self, datacenter_id, volume_id,name=None, description=None):", "body": "data = {'': name, '': description}response = self._perform_request('' % (datacenter_id, volume_id),method='',data=urlencode(data))return response", "docstring": "Creates a snapshot of the specified volume.\n\n:param datacenter_id: The unique ID of the data center.\n:type datacenter_id: ``str``\n\n:param volume_id: The unique ID of the volume.\n:type volume_id: ``str``\n\n:param name: The name given to the volume.\n:type name: ``str``\n\n:param description: The description given to the volume.\n:type description: ``str``", "id": "f811:c0:m69"} {"signature": "def delete_group(self, group_id):", "body": "response = self._perform_request(url='' % group_id,method='')return response", "docstring": "Removes a group.\n\n:param group_id: The unique ID of the group.\n:type group_id: ``str``", "id": "f811:c0:m76"} {"signature": "def start_server(self, datacenter_id, server_id):", "body": "response = self._perform_request(url='' % (datacenter_id,server_id),method='')return response", "docstring": "Starts the server.\n\n:param datacenter_id: The unique ID of the data center.\n:type datacenter_id: ``str``\n\n:param server_id: The unique ID of the server.\n:type server_id: ``str``", "id": "f811:c0:m62"} {"signature": "def remove_loadbalanced_nic(self, datacenter_id,loadbalancer_id, nic_id):", "body": "response = self._perform_request(url='' % (datacenter_id,loadbalancer_id,nic_id),method='')return response", "docstring": "Removes a NIC from the load balancer.\n\n:param datacenter_id: The unique ID of the data center.\n:type datacenter_id: ``str``\n\n:param loadbalancer_id: The unique ID of the load balancer.\n:type loadbalancer_id: ``str``\n\n:param nic_id: The unique ID of the NIC.\n:type nic_id: ``str``", "id": "f811:c0:m39"} {"signature": "def list_resources(self, resource_type=None, depth=):", "body": "if resource_type is not None:response = self._perform_request('' % (resource_type, str(depth)))else:response = self._perform_request('' + str(depth))return response", "docstring": "Retrieves a list of all resources.\n\n:param resource_type: The resource type: datacenter, image,\n snapshot or ipblock. 
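Taken together, the snapshot helpers above reduce to a create/delete pair built on `_perform_request`. A minimal usage sketch, assuming an already-configured `client` instance, placeholder UUIDs, and that the returned dict exposes the new snapshot's `id` (an assumption about the response shape, not confirmed by the stripped bodies):

```python
# Hypothetical IDs and client; create_snapshot posts form-encoded data,
# unlike most other calls here, which send JSON bodies.
snapshot = client.create_snapshot(
    datacenter_id='00000000-0000-0000-0000-000000000000',
    volume_id='11111111-1111-1111-1111-111111111111',
    name='nightly-backup',
    description='snapshot taken before maintenance')

# ...later, remove it again by ID:
client.delete_snapshot(snapshot['id'])
```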
Default is None,\n i.e., all resources are listed.\n:type resource_type: ``str``\n\n:param depth: The depth of the response data.\n:type depth: ``int``", "id": "f811:c0:m90"} {"signature": "@staticmethoddef _underscore_to_camelcase(value):", "body": "def camelcase():yield str.lowerwhile True:yield str.capitalizec = camelcase()return \"\".join(next(c)(x) if x else '' for x in value.split(\"\"))", "docstring": "Convert Python snake case back to mixed case.", "id": "f811:c0:m103"} {"signature": "def get_firewall_rule(self, datacenter_id,server_id, nic_id, firewall_rule_id):", "body": "response = self._perform_request('' % (datacenter_id,server_id,nic_id,firewall_rule_id))return response", "docstring": "Retrieves a single firewall rule by ID.\n\n:param datacenter_id: The unique ID of the data center.\n:type datacenter_id: ``str``\n\n:param server_id: The unique ID of the server.\n:type server_id: ``str``\n\n:param nic_id: The unique ID of the NIC.\n:type nic_id: ``str``\n\n:param firewall_rule_id: The unique ID of the firewall rule.\n:type firewall_rule_id: ``str``", "id": "f811:c0:m12"} {"signature": "def list_images(self, depth=):", "body": "response = self._perform_request('' + str(depth))return response", "docstring": "Retrieves a list of images available in the data center.\n\n:param depth: The depth of the response data.\n:type depth: ``int``", "id": "f811:c0:m18"} {"signature": "def __init__(self, name=None, cores=None, ram=None, availability_zone=None,boot_volume_id=None, boot_cdrom=None, cpu_family=None,create_volumes=None, attach_volumes=None, nics=None):", "body": "if create_volumes is None:create_volumes = []if attach_volumes is None:attach_volumes = []if nics is None:nics = []self.name = nameself.cores = coresself.ram = ramself.availability_zone = availability_zoneself.boot_volume_id = boot_volume_idself.boot_cdrom = boot_cdromself.cpu_family = cpu_familyself.create_volumes = create_volumesself.attach_volumes = attach_volumesself.nics = nics", "docstring": "Server class initializer.\n\n:param name: The name of your server..\n:type name: ``str``\n\n:param cores: The number of cores for the server.\n:type cores: ``str``\n\n:param ram: The amount of memory for the server.\n:type ram: ``str``\n\n:param availability_zone: The availability zone for the server.\n:type availability_zone: ``str``\n\n:param boot_volume_id: The ID of the boot volume.\n:type boot_volume_id: ``str``\n\n:param boot_cdrom: Attach a CDROM.\n:type boot_cdrom: ``str``\n\n:param cpu_family: Set the desired CPU type.\n:type cpu_family: ``str``\n\n:param create_volumes: List of volume dicts to create.\n:type create_volumes: ``list``\n\n:param attach_volumes: List of volume IDs to attach.\n:type attach_volumes: ``list``\n\n:param nics: List of NIC dicts to create.\n:type nics: ``list``", "id": "f811:c7:m0"} {"signature": "def list_group_users(self, group_id, depth=):", "body": "response = self._perform_request('' % (group_id, str(depth)))return response", "docstring": "Retrieves a list of all users that are members of a particular group.\n\n:param group_id: The unique ID of the group.\n:type group_id: ``str``\n\n:param depth: The depth of the response data.\n:type depth: ``int``", "id": "f811:c0:m87"} {"signature": "def get_volume(self, datacenter_id, volume_id):", "body": "response = self._perform_request('' % (datacenter_id, volume_id))return response", "docstring": "Retrieves a single volume by ID.\n\n:param datacenter_id: The unique ID of the data center.\n:type datacenter_id: ``str``\n\n:param volume_id: The 
unique ID of the volume.\n:type volume_id: ``str``", "id": "f811:c0:m92"} {"signature": "def get_share(self, group_id, resource_id, depth=):", "body": "response = self._perform_request(''% (group_id, resource_id, str(depth)))return response", "docstring": "Retrieves a specific resource share available to a group.\n\n:param group_id: The unique ID of the group.\n:type group_id: ``str``\n\n:param resource_id: The unique ID of the resource.\n:type resource_id: ``str``\n\n:param depth: The depth of the response data.\n:type depth: ``int``", "id": "f811:c0:m78"} {"signature": "def delete_snapshot(self, snapshot_id):", "body": "response = self._perform_request(url='' + snapshot_id, method='')return response", "docstring": "Removes a snapshot from your account.\n\n:param snapshot_id: The unique ID of the snapshot.\n:type snapshot_id: ``str``", "id": "f811:c0:m67"} {"signature": "def get_nic(self, datacenter_id, server_id, nic_id, depth=):", "body": "response = self._perform_request('' % (datacenter_id,server_id,nic_id,str(depth)))return response", "docstring": "Retrieves a NIC by its ID.\n\n:param datacenter_id: The unique ID of the data center.\n:type datacenter_id: ``str``\n\n:param server_id: The unique ID of the server.\n:type server_id: ``str``\n\n:param nic_id: The unique ID of the NIC.\n:type nic_id: ``str``\n\n:param depth: The depth of the response data.\n:type depth: ``int``", "id": "f811:c0:m42"} {"signature": "def __init__(self, name=None, public=None, nics=None):", "body": "if nics is None:nics = []self.name = nameself.public = publicself.nics = nics", "docstring": "LAN class initializer.\n\n:param name: The name of the LAN.\n:type name: ``str``\n\n:param public: Indicates if the LAN is public.\n:type public: ``bool``\n\n:param nics: A list of NICs\n:type nics: ``list``", "id": "f811:c4:m0"} {"signature": "def wait_for_completion(self, response, timeout=, initial_wait=, scaleup=):", "body": "if not response:returnlogger = logging.getLogger(__name__)wait_period = initial_waitnext_increase = time.time() + wait_period * scaleupif timeout:timeout = time.time() + timeoutwhile True:request = self.get_request(request_id=response[''], status=True)if request[''][''] == '':breakelif request[''][''] == '':raise PBFailedRequest(''.format(response[''], request['']['']),response[''])current_time = time.time()if timeout and current_time > timeout:raise PBTimeoutError(''.format(response['']), response[''])if current_time > next_increase:wait_period *= next_increase = time.time() + wait_period * scaleupscaleup *= logger.info(\"\",response[''], request[''][''], wait_period)time.sleep(wait_period)", "docstring": "Poll resource request status until resource is provisioned.\n\n:param response: A response dict, which needs to have a 'requestId' item.\n:type response: ``dict``\n\n:param timeout: Maximum waiting time in seconds. 
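The `wait_for_completion` entry here describes a polling loop whose interval grows over time: the wait period doubles every `scaleup` polls, and the step count itself doubles after each increase. The numeric literals are stripped from the body, so the sketch below assumes the multipliers are 2; it reproduces only the back-off schedule, independent of the client:

```python
import time

def backoff_schedule(timeout=3600, initial_wait=5, scaleup=10):
    # Yield successive sleep intervals. A falsy timeout means "poll forever";
    # otherwise the generator simply stops yielding past the deadline.
    wait_period = initial_wait
    next_increase = time.time() + wait_period * scaleup
    deadline = time.time() + timeout if timeout else None
    while deadline is None or time.time() < deadline:
        yield wait_period
        if time.time() > next_increase:
            wait_period *= 2  # assumed multiplier; literal stripped above
            next_increase = time.time() + wait_period * scaleup
            scaleup *= 2
```

A caller would poll once per yielded value and `time.sleep(delay)` in between.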
None means infinite waiting time.\n:type timeout: ``int``\n\n:param initial_wait: Initial polling interval in seconds.\n:type initial_wait: ``int``\n\n:param scaleup: Double polling interval every scaleup steps, which will be doubled.\n:type scaleup: ``int``", "id": "f811:c0:m97"} {"signature": "def __init__(self, name=None, location=None, description=None, volumes=None, servers=None, lans=None, loadbalancers=None,**kwargs):", "body": "if volumes is None:volumes = []if servers is None:servers = []if lans is None:lans = []if loadbalancers is None:loadbalancers = []self.name = nameself.description = descriptionself.location = locationself.servers = serversself.volumes = volumesself.lans = lansself.loadbalancers = loadbalancers", "docstring": "The Datacenter class initializer.\n\n:param name: The data center name..\n:type name: ``str``\n\n:param location: The data center geographical location.\n:type location: ``str``\n\n:param description: Optional description.\n:type description: ``str``\n\n:param volumes: List of volume dicts.\n:type volumes: ``list``\n\n:param servers: List of server dicts.\n:type servers: ``list``\n\n:param lans: List of LAN dicts.\n:type lans: ``list``\n\n:param loadbalancers: List of load balancer dicts.\n:type loadbalancers: ``list``", "id": "f811:c1:m0"} {"signature": "def delete_volume(self, datacenter_id, volume_id):", "body": "response = self._perform_request(url='' % (datacenter_id, volume_id), method='')return response", "docstring": "Removes a volume from the data center.\n\n:param datacenter_id: The unique ID of the data center.\n:type datacenter_id: ``str``\n\n:param volume_id: The unique ID of the volume.\n:type volume_id: ``str``", "id": "f811:c0:m94"} {"signature": "def update_firewall_rule(self, datacenter_id, server_id,nic_id, firewall_rule_id, **kwargs):", "body": "data = {}for attr, value in kwargs.items():data[self._underscore_to_camelcase(attr)] = valueif attr == '':data[''] = valueelif attr == '':data[''] = valueelif attr == '':data[''] = valueelif attr == '':data[''] = valueelif attr == '':data[''] = valueelif attr == '':data[''] = valueelif attr == '':data[''] = valueelse:data[self._underscore_to_camelcase(attr)] = valueresponse = self._perform_request(url='' % (datacenter_id,server_id,nic_id,firewall_rule_id),method='',data=json.dumps(data))return response", "docstring": "Updates a firewall rule.\n\n:param datacenter_id: The unique ID of the data center.\n:type datacenter_id: ``str``\n\n:param server_id: The unique ID of the server.\n:type server_id: ``str``\n\n:param nic_id: The unique ID of the NIC.\n:type nic_id: ``str``\n\n:param firewall_rule_id: The unique ID of the firewall rule.\n:type firewall_rule_id: ``str``", "id": "f811:c0:m16"} {"signature": "def delete_lan(self, datacenter_id, lan_id):", "body": "response = self._perform_request(url='' % (datacenter_id, lan_id), method='')return response", "docstring": "Removes a LAN from the data center.\n\n:param datacenter_id: The unique ID of the data center.\n:type datacenter_id: ``str``\n\n:param lan_id: The unique ID of the LAN.\n:type lan_id: ``str``", "id": "f811:c0:m27"} {"signature": "def create_server(self, datacenter_id, server):", "body": "data = json.dumps(self._create_server_dict(server))response = self._perform_request(url='' % (datacenter_id),method='',data=data)return response", "docstring": "Creates a server within the data center.\n\n:param datacenter_id: The unique ID of the data center.\n:type datacenter_id: ``str``\n\n:param server: A dict of the server to be created.\n:type 
server: ``dict``", "id": "f811:c0:m52"} {"signature": "def __init__(self, name=None, protocol=None, source_mac=None, source_ip=None,target_ip=None, port_range_start=None,port_range_end=None, icmp_type=None,icmp_code=None, **kwargs):", "body": "self.name = nameself.protocol = protocolself.source_mac = source_macself.source_ip = source_ipself.target_ip = target_ipself.port_range_start = port_range_startself.port_range_end = port_range_endif icmp_type is not None:icmp_type = str(icmp_type)self.icmp_type = icmp_typeif icmp_code is not None:icmp_code = str(icmp_code)self.icmp_code = icmp_code", "docstring": "FirewallRule class initializer.\n\n:param name: The name of the firewall rule.\n:type name: ``str``\n\n:param protocol: Either TCP or UDP\n:type protocol: ``str``\n\n:param source_mac: Source MAC you want to restrict.\n:type source_mac: ``str``\n\n:param source_ip: Source IP you want to restrict.\n:type source_ip: ``str``\n\n:param target_ip: Target IP you want to restrict.\n:type target_ip: ``str``\n\n:param port_range_start: Optional port range.\n:type port_range_start: ``str``\n\n:param port_range_end: Optional port range.\n:type port_range_end: ``str``\n\n:param icmp_type: Defines the allowed type.\n:type icmp_type: ``str``\n\n:param icmp_code: Defines the allowed code.\n:type icmp_code: ``str``", "id": "f811:c2:m0"} {"signature": "def __init__(self, name=None, ip=None, dhcp=None, balancednics=None, **kwargs):", "body": "if balancednics is None:balancednics = []self.name = nameself.ip = ipself.dhcp = dhcpself.balancednics = balancednics", "docstring": "LoadBalancer class initializer.\n\n:param name: The name of the load balancer.\n:type name: ``str``\n\n:param ip: The IP for the load balancer.\n:type ip: ``str``\n\n:param dhcp: Indicates if the load balancer\n uses DHCP or not.\n:type dhcp: ``bool``\n\n:param balancednics: A list of NICs associated\n with the load balancer.\n:type balancednics: ``list``", "id": "f811:c5:m0"} {"signature": "def list_locations(self, depth=):", "body": "response = self._perform_request('' % (depth))return response", "docstring": "Retrieves a list of locations available in the account.", "id": "f811:c0:m41"} {"signature": "def update_server(self, datacenter_id, server_id, **kwargs):", "body": "data = {}for attr, value in kwargs.items():if attr == '':boot_volume_properties = {\"\": value}boot_volume_entities = {\"\": boot_volume_properties}data.update(boot_volume_entities)else:data[self._underscore_to_camelcase(attr)] = valueresponse = self._perform_request(url='' % (datacenter_id,server_id),method='',data=json.dumps(data))return response", "docstring": "Updates a server with the parameters provided.\n\n:param datacenter_id: The unique ID of the data center.\n:type datacenter_id: ``str``\n\n:param server_id: The unique ID of the server.\n:type server_id: ``str``", "id": "f811:c0:m53"} {"signature": "def ask(question, options, default):", "body": "assert default in optionsquestion += \"\".format(\"\".join(o.upper() if o == default else o for o in options))selected = Nonewhile selected not in options:selected = input(question).strip().lower()if selected == \"\":selected = defaultelse:if selected not in options:question = \"\".format(\"\".join(options[:-]), options[-],comma='' if len(options) > else '',)return selected", "docstring": "Ask the user a question with a list of allowed answers (like yes or no).\n\nThe user is presented with a question and asked to select an answer from\nthe given options list. 
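The `ask` helper being documented here keeps re-prompting until the reply is one of the allowed options, falling back to the default on empty input. Since its string literals are stripped in the body above, here is a minimal reconstruction with a placeholder prompt format:

```python
def ask(question, options, default):
    # The default answer is highlighted by upper-casing it, e.g. "(y/N)".
    assert default in options
    prompt = '{} ({})? '.format(
        question,
        '/'.join(o.upper() if o == default else o for o in options))
    selected = None
    while selected not in options:
        selected = input(prompt).strip().lower() or default
    return selected
```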
The default will be returned if the user enters\nnothing. The user is asked to repeat his answer if his answer does not\nmatch any of the allowed anwsers.\n\n:param question: Question to present to the user (without question mark)\n:type question: ``str``\n\n:param options: List of allowed anwsers\n:type options: ``list``\n\n:param default: Default answer (if the user enters no text)\n:type default: ``str``", "id": "f813:m0"} {"signature": "def wait_for_requests(pbclient, request_ids=None,timeout=, initial_wait=, scaleup=):", "body": "done = dict()if not request_ids:print(\"\")return donetotal_wait = wait_period = initial_waitnext_scaleup = scaleup * wait_periodwait = Truewhile wait:for request_id in request_ids:if request_id in done:continuerequest_status = pbclient.get_request(request_id, status=True)state = request_status['']['']if state == \"\":done[request_id] = (, state, request_status[''][''])print(\"\".format(request_id, state))if state == '':done[request_id] = (, state, request_status[''][''])print(\"\".format(request_id, state))if len(done) == len(request_ids):wait = Falseelse:print(\"\".format(len(done), len(request_ids), wait_period))sleep(wait_period)total_wait += wait_periodif timeout != and total_wait > timeout:wait = Falsenext_scaleup -= wait_periodif next_scaleup == :wait_period += initial_waitnext_scaleup = scaleup * wait_periodprint(\"\".format(wait_period, next_scaleup))if len(done) != len(request_ids):for request_id in request_ids:if request_id in done:continuedone[request_id] = (-, state, \"\")return done", "docstring": "Waits for a list of requests to finish until timeout.\ntimeout==0 is interpreted as infinite wait time.\nReturns a dict of request_id -> result.\nresult is a tuple (return code, request status, message) where return code\n0 : request successful\n1 : request failed\n-1 : timeout exceeded\nThe wait_period is increased every scaleup steps to adjust for long\nrunning requests.", "id": "f814:m2"} {"signature": "def _nsattr(self, attr, ns=None):", "body": "if ns is None:return attrreturn '' + self._ns[ns] + '' + attr", "docstring": "returns an attribute name w/ namespace prefix", "id": "f816:c1:m2"} {"signature": "def __init__(self, file=None):", "body": "self._ns = {'': \"\",'': \"\",'': \"\"\"\",'': \"\",'': \"\"\"\"}self.file = fileself.root = Noneself.name = Noneself.osid = Noneself.licenseType = \"\"self.cpus = Noneself.ram = Noneself.disks = []self.lans = dict()self.nics = []self.resourceTypes = {'': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': ''} self.osTypeOther = {'': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '',} self.osTypeLinux = {'': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '',} self.osTypeWindows = {'': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': '','': 
'','': '','': '','': '','': '','': '','': '','': ''}", "docstring": "OVF meta data initializer", "id": "f816:c1:m0"} {"signature": "def main(argv=None): ", "body": "if argv is None:argv = sys.argvelse:sys.argv.extend(argv)program_name = os.path.basename(sys.argv[])program_version = \"\" % __version__program_build_date = str(__updated__)program_version_message = '' % (program_version, program_build_date)program_shortdesc = __import__('').__doc__.split(\"\")[]program_license =", "docstring": "Command line options.", "id": "f819:m7"} {"signature": "def wait_for_datacenter(client, data_center_id):", "body": "total_sleep_time = seconds = while True:state = client.get_datacenter(data_center_id)['']['']if verbose:print(\"\".format(state))if state == \"\":breaktime.sleep(seconds)total_sleep_time += secondsif total_sleep_time == :seconds = elif total_sleep_time == :seconds = ", "docstring": "Poll the data center to become available (for the next provisioning job)", "id": "f820:m5"} {"signature": "def getServerStates(pbclient=None, dc_id=None, serverid=None, servername=None):", "body": "if pbclient is None:raise ValueError(\"\")if dc_id is None:raise ValueError(\"\")server = Noneif serverid is None:if servername is None:raise ValueError(\"\")server_info = select_where(getServerInfo(pbclient, dc_id),['', '', '', ''],name=servername)if len(server_info) > :raise NameError(\"\".format(servername))if len(server_info) == :server = server_info[]else:try:server_info = pbclient.get_server(dc_id, serverid, )server = dict(id=server_info[''],name=server_info[''][''],state=server_info[''][''],vmstate=server_info[''][''])except Exception:ex = sys.exc_info()[]if ex.args[] is not None and ex.args[] == :print(\"\".format(serverid))server = Noneelse:raise exreturn server", "docstring": "gets states of a server", "id": "f820:m3"} {"signature": "def wait_for_server(pbclient=None, dc_id=None, serverid=None,indicator='', state='', timeout=):", "body": "if pbclient is None:raise ValueError(\"\")if dc_id is None:raise ValueError(\"\")if serverid is None:raise ValueError(\"\")total_sleep_time = seconds = while total_sleep_time < timeout:time.sleep(seconds)total_sleep_time += secondsif total_sleep_time == :seconds = elif total_sleep_time == :seconds = server = getServerStates(pbclient, dc_id, serverid)if server[indicator] == state:breakreturn server", "docstring": "wait for a server/VM to reach a defined state for a specified time\nindicator := {state|vmstate} specifies if server or VM state is tested\nstate specifies the status the indicator should have", "id": "f820:m4"} {"signature": "def getServerInfo(pbclient=None, dc_id=None):", "body": "if pbclient is None:raise ValueError(\"\")if dc_id is None:raise ValueError(\"\")server_info = []servers = pbclient.list_servers(dc_id, )for server in servers['']:props = server['']info = dict(id=server[''], name=props[''],state=server[''][''],vmstate=props[''])server_info.append(info)return server_info", "docstring": "gets info of servers of a data center", "id": "f822:m5"} {"signature": "def main(argv=None):", "body": "if argv is None:argv = sys.argvelse:sys.argv.extend(argv)program_name = os.path.basename(sys.argv[])program_version = \"\" % __version__program_build_date = str(__updated__)program_version_message = '' % (program_version, program_build_date)program_shortdesc = __import__('').__doc__.split(\"\")[]program_license =", "docstring": "Parse command line options and dump a datacenter to snapshots and file.", "id": "f822:m10"} {"signature": "def getServerStates(pbclient=None, 
dc_id=None, serverid=None, servername=None):", "body": "if pbclient is None:raise ValueError(\"\")if dc_id is None:raise ValueError(\"\")server = Noneif serverid is None:if servername is None:raise ValueError(\"\")server_info = select_where(getServerInfo(pbclient, dc_id),['', '', '', ''],name=servername)if len(server_info) > :raise NameError(\"\".format(servername))if len(server_info) == :server = server_info[]else:try:server_info = pbclient.get_server(dc_id, serverid, )server = dict(id=server_info[''],name=server_info[''][''],state=server_info[''][''],vmstate=server_info[''][''])except Exception:ex = sys.exc_info()[]if ex.args[] is not None and ex.args[] == :print(\"\".format(serverid))server = Noneelse:raise exreturn server", "docstring": "gets states of a server", "id": "f822:m7"} {"signature": "def wait_for_request(pbclient, request_id,timeout=, initial_wait=, scaleup=):", "body": "total_wait = wait_period = initial_waitnext_scaleup = scaleup * wait_periodwait = Truewhile wait:request_status = pbclient.get_request(request_id, status=True)state = request_status['']['']if state == \"\":return(, state, request_status[''][''])if state == '':return(, state, request_status[''][''])print(\"\".format(request_id, state, wait_period))sleep(wait_period)total_wait += wait_periodif timeout != and total_wait > timeout:wait = Falsenext_scaleup -= wait_periodif next_scaleup == :wait_period += initial_waitnext_scaleup = scaleup * wait_periodprint(\"\".format(wait_period, next_scaleup))return(-, state, \"\")", "docstring": "Waits for a request to finish until timeout.\ntimeout==0 is interpreted as infinite wait time.\nReturns a tuple (return code, request status, message) where return code\n0 : request successful\n1 : request failed\n-1 : timeout exceeded\nThe wait_period is increased every scaleup steps to adjust for long\nrunning requests.", "id": "f822:m3"} {"signature": "def wait_for_requests(pbclient, request_ids=None,timeout=, initial_wait=, scaleup=):", "body": "done = dict()if not request_ids:print(\"\")return donetotal_wait = wait_period = initial_waitnext_scaleup = scaleup * wait_periodwait = Truewhile wait:for request_id in request_ids:if request_id in done:continuerequest_status = pbclient.get_request(request_id, status=True)state = request_status['']['']if state == \"\":done[request_id] = (, state, request_status[''][''])print(\"\".format(request_id, state))if state == '':done[request_id] = (, state, request_status[''][''])print(\"\".format(request_id, state))if len(done) == len(request_ids):wait = Falseelse:print(\"\".format(len(done), len(request_ids), wait_period))sleep(wait_period)total_wait += wait_periodif timeout != and total_wait > timeout:wait = Falsenext_scaleup -= wait_periodif next_scaleup == :wait_period += initial_waitnext_scaleup = scaleup * wait_periodprint(\"\".format(wait_period, next_scaleup))if len(done) != len(request_ids):for request_id in request_ids:if request_id in done:continuedone[request_id] = (-, state, \"\")return done", "docstring": "Waits for a list of requests to finish until timeout.\ntimeout==0 is interpreted as infinite wait time.\nReturns a dict of request_id -> result.\nresult is a tuple (return code, request status, message) where return code\n0 : request successful\n1 : request failed\n-1 : timeout exceeded\nThe wait_period is increased every scaleup steps to adjust for long\nrunning requests.", "id": "f822:m4"} {"signature": "def getLogin(filename, user, passwd):", "body": "if filename is None:return (user, passwd)if 
os.path.exists(filename):print(\"\".format(filename))with open(filename, \"\") as loginfile:encoded_cred = loginfile.read()print(\"\".format(encoded_cred))decoded_cred = b64decode(encoded_cred)login = decoded_cred.split('', )return (login[], login[])else:if user is None or passwd is None:raise ValueError(\"\")print(\"\".format(filename))with open(filename, \"\") as loginfile:encoded_cred = b64encode(user+\"\"+passwd)print(\"\".format(encoded_cred))loginfile.write(encoded_cred)return (user, passwd)", "docstring": "write user/passwd to login file or get them from file.\nThis method is not Py3 safe (byte vs. str)", "id": "f827:m0"} {"signature": "def main(argv=None):", "body": "if argv is None:argv = sys.argvelse:sys.argv.extend(argv)program_name = os.path.basename(sys.argv[])program_version = \"\" % __version__program_build_date = str(__updated__)program_version_message = '' % (program_version,program_build_date)program_shortdesc = __import__('').__doc__.split(\"\")[]program_license =", "docstring": "Command line options.", "id": "f827:m5"} {"signature": "def main(argv=None):", "body": "if argv is None:argv = sys.argvelse:sys.argv.extend(argv)program_name = os.path.basename(sys.argv[])program_version = \"\" % __version__program_build_date = str(__updated__)program_version_message = '' % (program_version,program_build_date)program_shortdesc = __import__('').__doc__.split(\"\")[]program_license =", "docstring": "Parse command line options and create a server/volume composite.", "id": "f828:m2"} {"signature": "def getLogin(filename, user, passwd):", "body": "if filename is None:return (user, passwd)if os.path.exists(filename):print(\"\".format(filename))with open(filename, \"\") as loginfile:encoded_cred = loginfile.read()decoded_cred = b64decode(encoded_cred)login = decoded_cred.split('', )return (login[], login[])else:if user is None or passwd is None:raise ValueError(\"\")print(\"\".format(filename))with open(filename, \"\") as loginfile:encoded_cred = b64encode(user+\"\"+passwd)loginfile.write(encoded_cred)return (user, passwd)", "docstring": "write user/passwd to login file or get them from file.\nThis method is not Py3 safe (byte vs. 
str)", "id": "f828:m0"} {"signature": "def wait_for_request(pbclient, request_id,timeout=, initial_wait=, scaleup=):", "body": "total_wait = wait_period = initial_waitnext_scaleup = scaleup * wait_periodwait = Truewhile wait:request_status = pbclient.get_request(request_id, status=True)state = request_status['']['']if state == \"\":return(, state, request_status[''][''])if state == '':return(, state, request_status[''][''])if verbose > :print(\"\".format(request_id, state, wait_period))sleep(wait_period)total_wait += wait_periodif timeout != and total_wait > timeout:wait = Falsenext_scaleup -= wait_periodif next_scaleup == :wait_period += initial_waitnext_scaleup = scaleup * wait_periodif verbose > :print(\"\".format(wait_period, next_scaleup))return(-, state, \"\")", "docstring": "Waits for a request to finish until timeout.\ntimeout==0 is interpreted as infinite wait time.\nReturns a tuple (return code, request status, message) where return code\n0 : request successful\n1 : request failed\n-1 : timeout exceeded\nThe wait_period is increased every scaleup steps to adjust for long\nrunning requests.", "id": "f828:m1"} {"signature": "def __init__(self, hub, zid, online, red, green, blue, level):", "body": "self._hub = hubself._zid = zidself._online = online == self._red = int(red)self._green = int(green)self._blue = int(blue)self._level = int(level)", "docstring": "Construct a Bulb (light) based on current values.", "id": "f831:c1:m0"} {"signature": "def turn_off(self):", "body": "command = \"\".format(self._zid)response = self._hub.send_command(command)_LOGGER.debug(\"\", repr(command), response)return response", "docstring": "Turn bulb off (zero brightness).", "id": "f831:c1:m7"} {"signature": "@propertydef zid(self):", "body": "return self._zid", "docstring": "Return the bulb ID.", "id": "f831:c1:m3"} {"signature": "def set_brightness(self, brightness):", "body": "command = \"\".format(self._zid, brightness)response = self._hub.send_command(command)_LOGGER.debug(\"\", repr(command), response)return response", "docstring": "Set brightness of bulb.", "id": "f831:c1:m9"} {"signature": "@propertydef brightness(self):", "body": "self.update()return self._level", "docstring": "Return the brightness level.", "id": "f831:c1:m1"} {"signature": "def set_all(self, red, green, blue, brightness):", "body": "command = \"\".format(self._zid, red, green, blue,brightness)response = self._hub.send_command(command)_LOGGER.debug(\"\", repr(command), response)return response", "docstring": "Set color and brightness of bulb.", "id": "f831:c1:m10"} {"signature": "@propertydef available(self):", "body": "response = self.send_command(\"\")return \"\" in response", "docstring": "Check if hub is responsive.", "id": "f831:c0:m2"} {"signature": "def demo():", "body": "hub = Hub()if hub.available:LOGGER.info(\"\")else:LOGGER.info(\"\")bulbs = hub.get_lights()light = get_bulb(ZID_TO_TEST, bulbs)if light is not None:if light.available:LOGGER.info(\"\")assert light.availablelight.turn_on()time.sleep(SECONDS_TO_WAIT)assert light.is_onlight.update()light.update()light.set_rgb_color(, , )time.sleep(SECONDS_TO_WAIT)light.update()assert light.rgb_color == [, , ]light.set_brightness()time.sleep(SECONDS_TO_WAIT)assert light.brightness == assert light.is_onlight.turn_off()time.sleep(SECONDS_TO_WAIT)assert not light.is_onlight.set_all(, , , )time.sleep(SECONDS_TO_WAIT)LOGGER.info(\"\", light.rgb_color,light.brightness)assert light.brightness == assert light.rgb_color == [, , 
]light.turn_off()else:LOGGER.info(\"\")else:LOGGER.error(\"\")", "docstring": "Demo some specific functionality. Needs to be customised.", "id": "f833:m1"} {"signature": "def get_bulb(zid, bulbs):", "body": "for bulb in bulbs:if bulb.zid == zid:return bulbreturn None", "docstring": "Retrieve a bulb by its zid from a list of Bulb objects.", "id": "f833:m2"} {"signature": "def _generate_token( self, length= ):", "body": "return ''.join( choice( ascii_letters + digits ) for x in range( length ) )", "docstring": "_generate_token - internal function for generating randomized alphanumeric\nstrings of a given length", "id": "f835:c1:m6"} {"signature": "def authenticate_token( self, token ):", "body": "token_data = self.data_store.fetch( '', token=token )if not token_data:raise Proauth2Error( '','' )return token_data['']", "docstring": "authenticate_token checks the passed token and returns the user_id it is\nassociated with. it is assumed that this method won't be directly exposed to\nthe oauth client, but some kind of framework or wrapper. this allows the\nframework to have the user_id without doing additional DB calls.", "id": "f835:c1:m4"} {"signature": "def request_access_token( self, client_id, key, code, grant_type,redirect_uri=None, method='' ):", "body": "if grant_type != '':raise Proauth2Error( '','' )self._auth( client_id, key, method )user_id = self._validate_request_code( code, client_id )access_token = self._generate_token( )self.data_store.store( '', token=access_token, user_id=user_id,client_id=client_id )return { '':access_token, '':'' }", "docstring": "request_access_token validates the client_id and client_secret, using the\nprovided method, then generates an access_token, stores it with the user_id\nfrom the nonce, and returns a dictionary containing an access_token and\nbearer token.\n---\nfrom the spec, it looks like there are different types of\ntokens, but i don't understand the distinctions, so someone else can fix\nthis if need be.\nregarding the method: it appears that it is intended for there to be\nmultiple ways to verify the client_id. my assumption is that you use the\nsecret as the salt and pass the hash of the client_id or something, and\nthen compare hashes on the server end. 
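The pluggable client verification this docstring describes can be sketched concretely. Per the docstring, direct comparison is the only implemented method; the dict key `'direct_auth'` and the HMAC variant below are purely hypothetical illustrations of how `proauth2.auth_methods` could be extended (each entry is called as `auth_methods[method](key, client_secret)`, matching the `_auth` body):

```python
import hmac

# Direct comparison, said to be the only implemented method; the key
# name 'direct_auth' is a hypothetical stand-in for the real one.
auth_methods = {
    'direct_auth': lambda key, client_secret: key == client_secret,
}

# Hypothetical additional method: the client sends an HMAC of its id,
# keyed with its secret; the server recomputes and compares digests.
def hmac_auth(key, client_secret, client_id=b'client'):
    expected = hmac.new(client_secret, client_id, 'sha256').hexdigest()
    return hmac.compare_digest(key, expected)
```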
currently the only implemented method\nis direct comparison of the client_ids and client_secrets.\nadditional methods can be added to proauth2.auth_methods", "id": "f835:c1:m3"} {"signature": "def _auth( self, client_id, key, method ):", "body": "available = auth_methods.keys()if method not in available:raise Proauth2Error( '',''''% ( method, ''.join( available ) ) )client = self.data_store.fetch( '', client_id=client_id )if not client: raise Proauth2Error( '' )if not auth_methods[method]( key, client[''] ):raise Proauth2Error( '' )", "docstring": "_auth - internal method to ensure the client_id and client_secret passed with\nthe nonce match", "id": "f835:c1:m7"} {"signature": "def revoke_token( self, token ):", "body": "self.data_store.remove( '', token=token )", "docstring": "revoke_token removes the access token from the data_store", "id": "f835:c1:m5"} {"signature": "def request_authorization( self, client_id, user_id, response_type,redirect_uri=None, scope=None, state=None,expires= ):", "body": "if response_type != '':raise Proauth2Error( '','', state=state )client = self.data_store.fetch( '', client_id=client_id )if not client: raise Proauth2Error( '' )if redirect_uri and client[''] != redirect_uri:raise Proauth2Error( '', \"\" )nonce_code = self._generate_token()expires = time() + expirestry:self.data_store.store( '', code=nonce_code,client_id=client_id, expires=expires,user_id=user_id, scope=scope )except Proauth2Error as e:e.state = stateraise ereturn { '':nonce_code, '':state }", "docstring": "request_authorization generates a nonce, and stores it in the data_store along with the\nclient_id, user_id, and expiration timestamp.\nIt then returns a dictionary containing the nonce as \"code,\" and the passed\nstate.\n---\nresponse_type MUST be \"code.\" this is directly from the OAuth2 spec.\nthis probably doesn't need to be checked here, but if it's in the spec I\nguess it should be verified somewhere.\nscope has not been implemented here. 
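Together, `request_authorization` and `_validate_request_code` implement one-time authorization codes: issue a nonce bound to a client and user with an expiry, then consume it exactly once. A minimal in-memory sketch of that lifecycle (the real class delegates to a pluggable `data_store`; the dict, TTL default, and error strings here are assumptions):

```python
import secrets
import time

class NonceStore:
    """In-memory sketch of the one-time "code" handling in
    request_authorization / _validate_request_code above."""

    def __init__(self):
        self._codes = {}

    def issue(self, client_id, user_id, ttl=600):
        code = secrets.token_urlsafe(16)                 # the nonce / "code"
        self._codes[code] = (client_id, user_id, time.time() + ttl)
        return code

    def redeem(self, code, client_id):
        entry = self._codes.pop(code, None)              # one-time use: always removed
        if entry is None or entry[0] != client_id:
            raise ValueError('invalid_grant')
        if time.time() > entry[2]:
            raise ValueError('code expired')
        return entry[1]                                  # the user_id bound to the nonce
```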
it will be stored, but there is no\nscope-checking built in here at this time.\nif a redirect_uri is passed, it must match the registered redirect_uri.\nagain, this is per spec.", "id": "f835:c1:m2"} {"signature": "def _validate_request_code( self, code, client_id):", "body": "nonce = self.data_store.fetch( '', code=code )if not nonce:raise Proauth2Error( '', '' % code )if client_id != nonce['']: raise Proauth2Error( '', '' % code )user_id = nonce['']expires = nonce['']self.data_store.remove( '', code=code, client_id=client_id,user_id=user_id )if time() > expires:raise Proauth2Error( '', '' % code )return user_id", "docstring": "_validate_request_code - internal method for verifying the given nonce.\nalso removes the nonce from the data_store, as they are intended for\none-time use.", "id": "f835:c1:m8"} {"signature": "@enginedef register_app(self, name, redirect_uri, callback):", "body": "client_id = self._generate_token()client_secret = self._generate_token()yield Task(self.data_store.store, '', client_id=client_id,client_secret=client_secret, name=name,redirect_uri=redirect_uri)callback({'':client_id, '':client_secret})", "docstring": "register_app takes an application name and redirect_uri\nIt generates client_id (client_key) and client_secret,\nthen stores all of the above in the data_store,\nand returns a dictionary containing the client_id and client_secret.", "id": "f836:c0:m1"} {"signature": "def _generate_token(self, length=):", "body": "return ''.join(choice(ascii_letters + digits) for x in range(length))", "docstring": "_generate_token - internal function for generating randomized alphanumeric\nstrings of a given length", "id": "f836:c0:m8"} {"signature": "def __init__(self, database='', host='', port=,user=None, pwd=None):", "body": "if user and pwd:connection_string = '' %(user, pwd, host, port)else:connection_string = '' %(host, port)self.db = MotorClient(connection_string).open_sync()[database]", "docstring": "initialize a mongodb connection to mongodb://user:pass@host:port\nuse database", "id": "f838:c0:m0"} {"signature": "@enginedef store(self, collection, **kwargs):", "body": "callback = kwargs.pop('')key = validate(collection, **kwargs)data = yield Task(self.fetch, collection, **{key: kwargs[key]})if data is not None:raise Proauth2Error('')yield Op(self.db[collection].insert, kwargs)callback()", "docstring": "validate the passed values in kwargs based on the collection,\nstore them in the mongodb collection", "id": "f838:c0:m3"} {"signature": "def fetch( self, collection, **kwargs ):", "body": "return self.db[collection].find_one( kwargs )", "docstring": "return one record from the collection whose parameters match kwargs\n---\nkwargs should be a dictionary whose keys match column names (in\ntraditional SQL / fields in NoSQL) and whose values are the values of\nthose fields.\ne.g. 
kwargs={name='my application name',client_id=12345}", "id": "f839:c0:m1"} {"signature": "def remove( self, collection, **kwargs ):", "body": "self.db[collection].remove( kwargs )", "docstring": "remove records from collection whose parameters match kwargs", "id": "f839:c0:m2"} {"signature": "def validate( table, **data ):", "body": "if table not in good.keys():raise Proauth2Error( '', '' % table )for req in good[table]['']:if not data.get( req, None ):raise Proauth2Error( '','' % req )for key in data.keys():if key not in good[table][''] andkey not in good[table]['']:raise Proauth2Error( '', '' % key )return good[table]['']", "docstring": "theoretically, any data store can be implemented to work with this package,\nwhich means basic data validation must be done in-package, so that weird\nstuff can't be stored in the data store.\nthis function raises an exception if an invalid table name is passed, not\nall of the required fields are in the data kwargs, or if a field that was\npassed is not expected.\nit also returns the key field name, for ensuring uniqueness (again, that may\nnot be built into whatever data store is implemented.)", "id": "f840:m0"} {"signature": "def direct_auth( key, secret ):", "body": "if key == secret: return Truereturn False", "docstring": "directly compare the stored secret and the passed secret.", "id": "f841:m0"} {"signature": "def clean_data_dir_async(self, *, data_dir):", "body": "return super().__getattr__('')(data_dir=data_dir)", "docstring": "Clean a data directory (async version)\n\n------------\n\n:param str data_dir: name of the directory to clean; supports `image`, `record`, `show`, `bface`\n:return: None\n:rtype: None\n\n------------\n\nUsed to clean data directories that have accumulated too many old files, such as `image`.\n\nAdded in HTTP API v3.3.4", "id": "f847:c0:m36"} {"signature": "def get_cookies(self):", "body": "return super().__getattr__('')()", "docstring": "Get Cookies\n\n------------\n\n:return: { \"cookies\": (Cookies: str)}\n:rtype: dict[ str, str ]\n\n------------\n\n======== =========== =========\nResponse data\n--------------------------------\nType Field Description\n======== =========== =========\nstr cookies Cookies\n======== =========== =========", "id": "f847:c0:m28"} {"signature": "def set_group_card(self, *, group_id, user_id, card=None):", "body": "return super().__getattr__('')(group_id=group_id, user_id=user_id, card=card)", "docstring": "Set a group card (group remark)\n\n------------\n\n:param int group_id: group number\n:param int user_id: QQ number whose card to set\n:param str | None card: card content; omit or pass an empty string to delete the card\n:return: None\n:rtype: None", "id": "f847:c0:m17"} {"signature": "def _get_friend_list(self):", "body": "return super().__getattr__('')()", "docstring": "Get the friend list (experimental)\n\n------------\n\n:return: [{ \"friend_group_id\": (friend group ID: int), \"friend_group_name\": (friend group name: str), \"friends\": (friends in the group: [{ \"nickname\": (friend nickname: str), \"remark\": (friend remark: str), \"user_id\": (friend QQ number: int) }, ...]) }, ...]\n:rtype: list[ dict[ str, int | str | list[ dict[ str, int | str ] ] ] ]\n\n------------\n\nThe response data is provided as a **list** of wrapped dicts. `( List[ Dict[ ...] ] )`\n\n======== ================== ===============================\nResponse data\n-------------------------------------------------------------\nType Field Description\n======== ================== ===============================\nint friend_group_id friend group ID\nstr friend_group_name friend group name\nlist friends friends in the group\n======== ================== ===============================\n\nEach friend entry is stored as a **dict** inside the **list** under `friends`. `( List[ Dict[ ...] ] )`\n\n======== ================== ===============================\nFriend entry\n-------------------------------------------------------------\nType Field Description\n======== ================== ===============================\nstr nickname friend nickname\nstr remark friend remark\nint user_id friend QQ number\n======== ================== ===============================", "id": "f847:c0:m37"} {"signature": "def set_group_ban(self, *, group_id, user_id, duration= * ):", "body": "return super().__getattr__('')(group_id=group_id, user_id=user_id, duration=duration)", "docstring": "Ban a single group member\n\n------------\n\n:param int group_id: group number\n:param int user_id: QQ number to ban\n:param int duration: ban duration in seconds; 0 lifts the ban\n:return: None\n:rtype: None", "id": "f847:c0:m12"} {"signature": "def delete_msg(self, *, message_id):", "body": "return super().__getattr__('')(message_id=message_id)", "docstring": "Recall a message\n\n------------\n\n:param int message_id: message ID\n:return: None\n:rtype: None", "id": "f847:c0:m9"} {"signature": "def send_like(self, *, user_id, times=):", "body": "return super().__getattr__('')(user_id=user_id, times=times)", "docstring": "Send friend likes\n\n------------\n\n:param int user_id: target QQ number\n:param int times: number of likes, at most 10 per friend per day\n:return: None\n:rtype: None", "id": "f847:c0:m10"} {"signature": "def send_msg(self, *, message_type, user_id=None, group_id=None, discuss_id=None, message, auto_escape=False):", "body": "return super().__getattr__('')(message_type=message_type, user_id=user_id, group_id=group_id,discuss_id=discuss_id, message=message, auto_escape=auto_escape)", "docstring": "Send a message\n\n------------\n\n:param str message_type: message type; supports `private`, `group` and `discuss`, for private, group and discussion-group chats respectively\n:param int user_id: target QQ number (required when the message type is `private`)\n:param int group_id: group number (required when the message type is `group`)\n:param int discuss_id: discussion group ID (taken from reported messages; required when the message type is `discuss`)\n:param str | list[ dict[ str, unknown ] ] message: content to send\n:param bool auto_escape: whether to send the content as plain text (i.e. without parsing CQ codes); ignored when `message` is a `list`\n:return: {\"message_id\": int message ID}\n:rtype: dict[string, int]", "id": "f847:c0:m7"} {"signature": "def send_discuss_msg(self, *, discuss_id, message, auto_escape=False):", "body": "return super().__getattr__('')(discuss_id=discuss_id, message=message, auto_escape=auto_escape)", "docstring": "Send a discussion-group message\n\n------------\n\n:param int discuss_id: discussion group ID (normally not visible; must be taken from reported discussion-group messages)\n:param str | list[ dict[ str, unknown ] ] message: content to send\n:param bool auto_escape: whether to send the content as plain text (i.e. without parsing CQ codes); ignored when `message` is a `list`\n:return: {\"message_id\": int message ID}\n:rtype: dict[string, int]", "id": "f847:c0:m5"} {"signature": "def get_login_info(self):", "body": "return super().__getattr__('')()", "docstring": "Get information about the logged-in account\n\n------------\n\n:return: { \"user_id\": (QQ number: int), \"nickname\": (QQ nickname: str) }\n:rtype: dict[ str, int | str ]\n\n------------\n\n========= ========= =========\nResponse data\n-------------------------------\nType Field Description\n========= ========= =========\nint user_id QQ number\nstr nickname QQ nickname\n========= ========= =========", "id": "f847:c0:m23"} {"signature": "def set_group_anonymous(self, *, group_id, enable=True):", "body": "return super().__getattr__('')(group_id=group_id, enable=enable)", "docstring": "Toggle anonymous chat in a group\n\n------------\n\n:param int group_id: group number\n:param bool enable: whether to allow anonymous chat\n:return: None\n:rtype: None", "id": "f847:c0:m16"} {"signature": "def clean_data_dir(self, *, data_dir):", "body": "return super().__getattr__('')(data_dir=data_dir)", "docstring": "Clean a data directory\n\n------------\n\n:param str data_dir: name of the directory to clean; supports `image`, `record`, `show`, `bface`\n:return: None\n:rtype: None\n\n------------\n\nUsed to clean data directories that have accumulated too many old files, such as `image`.\n\nAdded in HTTP API v3.3.4", "id": "f847:c0:m35"} {"signature": "def send_group_msg_async(self, *, group_id, message, auto_escape=False):", "body": "return super().__getattr__('')(group_id=group_id, message=message, auto_escape=auto_escape)", "docstring": "Send a group message (async version)\n\n------------\n\n:param int group_id: group number\n:param str | list[ dict[ str, unknown ] ] message: content to send\n:param bool auto_escape: whether to send the content as plain text (i.e. without parsing CQ codes); ignored when `message` is a `list`\n:return: None\n:rtype: None", "id": "f847:c0:m4"} {"signature": "def get_version_info(self):", "body": "return super().__getattr__('')()", "docstring": "Get version information for CoolQ and the HTTP API plugin\n\n------------\n\n:return: { \"coolq_directory\": (CoolQ root directory: str), \"coolq_edition\": (CoolQ edition: str in ['air', 'pro']), \"plugin_version\": (API plugin version: str), \"plugin_build_number\": (API plugin build number: int), \"plugin_build_configuration\": (API plugin build configuration: str in ['debug', 'release']) }\n:rtype: dict[ str, int | str ]\n\n\n------------\n\n======== ========================== ===============================\nResponse data\n---------------------------------------------------------------------\nType Field Description\n======== ========================== ===============================\nstr coolq_directory CoolQ root directory\nstr coolq_edition CoolQ edition, `air` or `pro`\nstr plugin_version HTTP API plugin version, e.g. 2.1.3\nint plugin_build_number HTTP API plugin build number\nstr plugin_build_configuration HTTP API plugin build configuration, `debug` or `release`\n======== ========================== ===============================", "id": "f847:c0:m32"} {"signature": "def send_msg_async(self, *, message_type, user_id=None, group_id=None, discuss_id=None, message, auto_escape=False):", "body": "return super().__getattr__('')(message_type=message_type, user_id=user_id, group_id=group_id,discuss_id=discuss_id, message=message, auto_escape=auto_escape)", "docstring": "Send a message (async version)\n\n------------\n\n:param str message_type: message type; supports `private`, `group` and `discuss`, for private, group and discussion-group chats respectively\n:param int user_id: target QQ number (required when the message type is `private`)\n:param int group_id: group number (required when the message type is `group`)\n:param int discuss_id: discussion group ID (taken from reported messages; required when the message type is `discuss`)\n:param str | list[ dict[ str, unknown ] ] message: content to send\n:param bool auto_escape: whether to send the content as plain text (i.e. without parsing CQ codes); ignored when `message` is a `list`\n:return: None\n:rtype: None", "id": "f847:c0:m8"} {"signature": "def set_group_whole_ban(self, *, group_id, enable=True):", "body": "return super().__getattr__('')(group_id=group_id, enable=enable)", "docstring": "Mute or unmute an entire group\n\n------------\n\n:param int group_id: group number\n:param bool enable: whether to enable the mute\n:return: None\n:rtype: None", "id": "f847:c0:m14"} {"signature": "def send_private_msg(self, *, user_id, message, auto_escape=False):", "body": "return super().__getattr__('')(user_id=user_id, message=message, auto_escape=auto_escape)", "docstring": "Send a private message\n\n------------\n\n:param int user_id: target QQ number\n:param str | list[ dict[ str, unknown ] ] message: content to send\n:param bool auto_escape: whether to send the content as plain text (i.e. without parsing CQ codes); ignored when `message` is a `list`\n:return: {\"message_id\": int message ID}\n:rtype: dict[string, int]", "id": "f847:c0:m1"} {"signature": "def set_group_leave(self, *, group_id, is_dismiss=False):", "body": "return super().__getattr__('')(group_id=group_id, is_dismiss=is_dismiss)", "docstring": "Leave a group\n\n------------\n\n:param int group_id: group number\n:param bool is_dismiss: whether to dismiss the group; only works when the logged-in account is the group owner and this is true\n:return: None\n:rtype: None", "id": "f847:c0:m18"} {"signature": "def set_group_special_title(self, *, group_id, user_id, special_title, duration=-):", "body": "return super().__getattr__('')(group_id=group_id, user_id=user_id, special_title=special_title, duration=duration)", "docstring": "Set a member's special group title\n\n------------\n\n:param int group_id: group number\n:param int user_id: QQ number whose title to set\n:param str special_title: the special title; omit or pass an empty string to remove it. Only about the first 6 letters/characters are kept; emoji count for their actual character length, so at most 3 or even fewer fit, and anything longer is truncated\n:param int duration: title lifetime in seconds; -1 means permanent. This parameter seems to have no effect; possibly only certain special durations work, untested\n:return: None\n:rtype: None", "id": "f847:c0:m19"} {"signature": "def get_group_member_list(self, *, group_id):", "body": "return super().__getattr__('')(group_id=group_id)", "docstring": "Get the group member list\n\n------------\n\n:param int group_id: group number\n:return: [{ \"group_id\": (group number: int), \"user_id\": (QQ number: int), \"nickname\": (nickname: str), \"card\": (group card/remark: str), \"sex\": (sex: str in ['male', 'female', 'unknown']), \"age\": (age: int), \"area\": (area: str), \"join_time\": (join timestamp: int), \"last_sent_time\": (last-message timestamp: int), \"level\": (member level: str), \"role\": (role: str in ['owner', 'admin', 'member']), \"unfriendly\": (has a bad record: bool), \"title\": (special title: str), \"title_expire_time\": (special title expiry timestamp: int), \"card_changeable\": (whether the card can be changed: bool) }, ...]\n:rtype: list[ dict[ str, int | str | bool ] ]\n\n------------\n\nThe response data is provided as a **list** of wrapped dicts. `( List[ Dict[ ...] ] )`\n\n======== =================== ======================================\n Response data\n---------------------------------------------------------------------\nType Field Description\n======== =================== ======================================\nint group_id group number\nint user_id QQ number\nstr nickname nickname\nstr card group card/remark\nstr sex sex, `male` or `female` or `unknown`\nint age age\nstr area area\nint join_time join timestamp\nint last_sent_time last-message timestamp\nstr level member level\nstr role role, `owner` or `admin` or `member`\nbool unfriendly whether the member has a bad record\nstr title special title\nint title_expire_time special title expiry timestamp\nbool card_changeable whether the group card can be changed\n======== =================== ======================================\n\n**Note:** the response is a list of dicts *( List[ Dict[] ] )*; each element has the same content as the `get_group_member_info` endpoint, but for the same member of the same group some fields may differ between the list call and the single-member call, e.g. `area` and `title` are unavailable in the list call, so the single-member info should be treated as authoritative.", "id": "f847:c0:m27"} {"signature": "def set_discuss_leave(self, *, discuss_id):", "body": "return super().__getattr__('')(discuss_id=discuss_id)", "docstring": "Leave a discussion group\n\n------------\n\n:param int discuss_id: discussion group ID (normally not visible; must be taken from reported discussion-group messages)\n:return: None\n:rtype: None", "id": "f847:c0:m20"} {"signature": "def _import_module(name):", "body": "__import__(name)return sys.modules[name]", "docstring": "Import module, returning the module after the last dot.", "id": "f851:m1"} {"signature": "def iteritems(d, **kw):", "body": "return iter(getattr(d, _iteritems)(**kw))", "docstring": "Return an iterator over the (key, value) pairs of a dictionary.", "id": "f851:m6"} {"signature": "def _add_doc(func, doc):", "body": "func.__doc__ = doc", "docstring": "Add documentation to a function.", "id": "f851:m0"} {"signature": "def scanAllProcessesForMapping(searchPortion, isExactMatch=False, ignoreCase=False):", "body": "pids = getAllRunningPids()mappingResults = [scanProcessForMapping(pid, searchPortion, isExactMatch, ignoreCase) for pid in pids]ret = {}for i in range(len(pids)):if mappingResults[i] is not None:ret[pids[i]] = mappingResults[i]return ret", "docstring": "scanAllProcessesForMapping - Scans all processes on the system for a given search pattern.\n\n @param searchPortion - A mapping for which to search, example: libc or python or libz.so.1. 
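Every f847 wrapper above follows the same dispatch pattern: a typed method forwards its keyword arguments through `super().__getattr__(...)` to a dynamically resolved API action (the action-name strings were stripped from the source). A self-contained sketch of that proxy pattern, with the transport stubbed out and the action name assumed:

```python
class ApiProxy:
    """Resolve any attribute into a callable that forwards
    (action, params) to a transport -- the style used by the f847 wrappers."""

    def __init__(self, transport):
        self._transport = transport   # e.g. an HTTP session; stubbed here

    def __getattr__(self, action):
        def call(**params):
            return self._transport(action, params)
        return call

class Client(ApiProxy):
    # A typed wrapper over the dynamic proxy, like send_private_msg above.
    # The action string is an assumption; the original literal was stripped.
    def send_private_msg(self, *, user_id, message, auto_escape=False):
        return super().__getattr__('send_private_msg')(
            user_id=user_id, message=message, auto_escape=auto_escape)

client = Client(lambda action, params: (action, params))  # echo transport
print(client.send_private_msg(user_id=123, message='hi'))
```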
Give empty string to return all mappings.\n @param isExactMatch Default False - If match should be exact, otherwise a partial match is performed.\n @param ignoreCase Default False - If True, search will be performed case-insensitively\n\n@return - A dictionary of pid -> mappingResults for each pid that matched the search pattern. For format of \"mappingResults\", @see scanProcessForMapping", "id": "f857:m9"} {"signature": "def getProcessCommandLineStr(pid):", "body": "try:with open('' %(int(pid),), '') as f:cmdline = f.read()return cmdline.replace('', '')except:return None", "docstring": "getProcessCommandLineStr - Gets the commandline (program + arguments) of a given pid\n\n@param pid - Process ID\n\n@return - None if process not found or can't be determined. Otherwise a string of the commandline.\n\n@note Caution, args may have spaces in them, and you cannot surmise that from this method. If you care (like trying to replay a command), use getProcessCommandLineList instead", "id": "f857:m2"} {"signature": "def scanProcessForMapping(pid, searchPortion, isExactMatch=False, ignoreCase=False):", "body": "try: try:pid = int(pid)except ValueError as e:sys.stderr.write('' %(str(type(pid)),))raise ewith open('' %(pid,), '') as f:contents = f.read()lines = contents.split('')matchedMappings = []if isExactMatch is True:if ignoreCase is False:isMatch = lambda searchFor, searchIn : bool(searchFor == searchIn)else:isMatch = lambda searchFor, searchIn : bool(searchFor.lower() == searchIn.lower())else:if ignoreCase is False:isMatch = lambda searchFor, searchIn : bool(searchFor in searchIn)else:isMatch = lambda searchFor, searchIn : bool(searchFor.lower() in searchIn.lower())for line in lines:portion = ''.join(line.split('')[:]).lstrip()if isMatch(searchPortion, portion):matchedMappings.append('' + line)if len(matchedMappings) == :return Nonecmdline = getProcessCommandLineStr(pid)owner = getProcessOwnerStr(pid)return {'' : searchPortion,'' : pid,'' : owner,'' : cmdline,'' : matchedMappings,}except OSError:return Noneexcept IOError:return Noneexcept FileNotFoundError:return Noneexcept PermissionError:return None", "docstring": "scanProcessForMapping - Searches a given pid's mappings for a certain pattern.\n\n @param pid - A running process ID on this system\n @param searchPortion - A mapping for which to search, example: libc or python or libz.so.1. Give empty string to return all mappings.\n @param isExactMatch Default False - If match should be exact, otherwise a partial match is performed.\n @param ignoreCase Default False - If True, search will be performed case-insensitively\n\n @return - If result is found, the following dict is returned. If no match found on the given pid, or pid is not found running, None is returned.\n {\n 'searchPortion' : The passed search pattern\n 'pid' : The passed pid (as an integer)\n 'owner' : String of process owner, or uid if no mapping can be found, or \"unknown\" if neither could be determined.\n 'cmdline' : Commandline string\n 'matchedMappings' : All mapping lines that matched the given search pattern\n }", "id": "f857:m8"} {"signature": "def getProcessCwd(pid):", "body": "try:cwd = os.readlink('' %(int(pid), ))return cwdexcept:return None", "docstring": "getProcessCwd - Gets the cwd (current working directory) of a given pid\n\n@param pid - Process ID\n\n@return - None if process not found or can't be determined. 
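The f857 helpers all read from the Linux `/proc` filesystem; the path literals were stripped from the records. A condensed, Linux-only sketch combining pid enumeration, cmdline parsing, and mapping search (paths such as `/proc/<pid>/maps` are the standard locations, reconstructed here):

```python
import os

def all_pids():
    """All numeric entries in /proc, as in getAllRunningPids above."""
    return [int(x) for x in os.listdir('/proc') if x.isdigit()]

def cmdline(pid):
    """argv of a process; /proc/<pid>/cmdline is NUL-separated."""
    try:
        with open('/proc/%d/cmdline' % pid, 'rb') as f:
            return f.read().decode().split('\0')
    except OSError:
        return None

def pids_with_mapping(search):
    """pids whose /proc/<pid>/maps mention `search` (cf. scanProcessForMapping)."""
    hits = []
    for pid in all_pids():
        try:
            with open('/proc/%d/maps' % pid) as f:
                if search in f.read():
                    hits.append(pid)
        except OSError:   # process vanished or permission denied
            pass
    return hits
```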
Otherwise, a string of the CWD", "id": "f857:m4"} {"signature": "def getAllRunningPids():", "body": "return [int(x) for x in os.listdir('') if x.isdigit()]", "docstring": "getAllRunningPids - Gets list of all pids that are running on a given system\n\n@return list<int> - A list of pids (process IDs).", "id": "f857:m5"} {"signature": "def getProcessCommandLineList(pid):", "body": "try:with open('' %(int(pid),), '') as f:cmdline = f.read()return cmdline.split('')except:return None", "docstring": "getProcessCommandLineList - Gets the commandline (program + arguments) of a given pid as a list.\n\n@param pid - Process ID\n\n@return - None if process not found or can't be determined. Otherwise a list representing argv. First argument is process name, remainder are arguments.\n\n@note - Use this if you care about whether a process had a space in its arguments", "id": "f857:m3"} {"signature": "def run(self):", "body": "config = config_creator()debug = config.debugbranch_thread_sleep = config.branch_thread_sleepwhile :url = self.branch_queue.get()if debug:print(''.format(url))branch_spider = self.branch_spider(url)sleep(random.randrange(*branch_thread_sleep))branch_spider.request_page()if debug:print(''.format(url))self.branch_queue.task_done()", "docstring": "run your main spider here\n as for branch spider result data, you can return everything or do whatever with it\n in your own code\n\n :return: None", "id": "f867:c0:m1"} {"signature": "def run(self):", "body": "global existed_urls_listconfig = config_creator()debug = config.debugmain_thread_sleep = config.main_thread_sleepbranch_thread_num = config.branch_thread_numwhile :url = self.main_queue.get()if debug:print(''.format(url))main_spider = self.main_spider(url)sleep(random.randrange(*main_thread_sleep))links = main_spider.request_urls()try:assert type(links) in VALIDATE_URLSexcept AssertionError:error_message('')links = list()branch_queue = queue.Queue(branch_thread_num)for i in range(branch_thread_num):branch_thread = BranchThread(branch_queue=branch_queue,branch_spider=self.branch_spider)branch_thread.daemon = Truebranch_thread.start()for link in links:if link not in existed_urls_list:existed_urls_list.append(link)branch_queue.put(link)branch_queue.join()if debug:print(''.format(url))self.main_queue.task_done()", "docstring": "run your main spider here, and get a list/tuple of urls as result\n then make the instances of the branch threads\n\n :return: None", "id": "f869:c0:m1"} {"signature": "def error_message(message=''):", "body": "print(colorful_text(message, Fore.RED))", "docstring": "print the error message in red color\n\n :param message: error message\n :return: None", "id": "f872:m1"} {"signature": "def colorful_text(text, color=Fore.RESET):", "body": "return color + text + Fore.RESET", "docstring": "make target text colorful\n\n :param text: target text\n :param color: target color\n :return: colored text", "id": "f872:m0"} {"signature": "def value_from_datadict(self, *args, **kwargs):", "body": "value = super(RichTextWidget, self).value_from_datadict(*args, **kwargs)if value is not None:value = self.get_sanitizer()(value)return value", "docstring": "Pass the submitted value through the sanitizer before returning it.", "id": "f907:c0:m4"} {"signature": "def clean(self, value, model_instance):", "body": "value = self.to_python(value)if value is not None:value = self.get_sanitizer()(value)self.validate(value, model_instance)self.run_validators(value)return value", "docstring": "Convert the value's type, sanitize it, and run validation. 
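The two `run` records above (f867:c0:m1 and f869:c0:m1) drain a `queue.Queue` from daemon worker threads and rely on `task_done`/`join` for completion. A self-contained sketch of that worker-pool pattern:

```python
import queue
import threading

def worker(q):
    while True:
        url = q.get()
        try:
            pass  # fetch/parse `url` here, like BranchThread.run above
        finally:
            q.task_done()   # always mark done so join() can return

q = queue.Queue(maxsize=8)
for _ in range(8):
    # daemon threads die with the main thread, as in the spider code above
    threading.Thread(target=worker, args=(q,), daemon=True).start()

for url in ['http://example.com/%d' % i for i in range(20)]:
    q.put(url)
q.join()  # block until every queued url has been processed
```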
Validation\nerrors from to_python() and validate() are propagated. Return the\ncorrect value if no error is raised.", "id": "f908:c0:m2"} {"signature": "def setup(app):", "body": "lexer = MarkdownLexer()for alias in lexer.aliases:app.add_lexer(alias, lexer)return dict(version=__version__)", "docstring": "Initializer for Sphinx extension API.\n\n See http://www.sphinx-doc.org/en/stable/extdev/index.html#dev-extensions.", "id": "f916:m0"} {"signature": "def _build_metadata(): ", "body": "expected_keys = ('', '', '', '', '', '', '')metadata = {}with io.open(srcfile('', package_name, ''), encoding='') as handle:pkg_init = handle.read()metadata[''] = re.search(r'', pkg_init, re.DOTALL|re.MULTILINE).group()for line in pkg_init.splitlines():match = re.match(r\"\"\"\"\"\".format(''.join(expected_keys)), line)if match:metadata[match.group()] = match.group()if not all(i in metadata for i in expected_keys):raise RuntimeError(\"\".format(name, ''.join(sorted(set(expected_keys) - set(metadata.keys()))),))text = metadata[''].strip()if text:metadata[''], text = text.split('', )metadata[''] = ''.join(metadata[''].split()).strip() + '' metadata[''] = textwrap.dedent(text).strip()metadata[''] = metadata[''].replace('', '').strip().split()requirements_files = dict(install = '',setup = '',test = '',)requires = {}for key, filename in requirements_files.items():requires[key] = []if os.path.exists(srcfile(filename)):with io.open(srcfile(filename), encoding='') as handle:for line in handle:line = line.strip()if line and not line.startswith(''):if any(line.startswith(i) for i in ('', '', '')):line = line.split('')[]requires[key].append(line)if not any('' == re.split('', i.lower())[] for i in requires['']):requires[''].append('') console_scripts = []for path, dirs, files in os.walk(srcfile('', package_name)):dirs = [i for i in dirs if not i.startswith('')]if '' in files:path = path[len(srcfile('') + os.sep):]appname = path.split(os.sep)[-]with io.open(srcfile('', path, ''), encoding='') as handle:for line in handle.readlines():match = re.match(r\"\"\"\"\"\", line)if match:appname = match.group()console_scripts.append(''.format(appname, path.replace(os.sep, '')))candidate_files = ['', '','', '', '', '','', '', '',]data_files = defaultdict(list)for filename in candidate_files:if os.path.exists(srcfile(filename)):data_files[''].append(filename)classifiers = []for classifiers_txt in ('', ''):classifiers_txt = srcfile(classifiers_txt)if os.path.exists(classifiers_txt):with io.open(classifiers_txt, encoding='') as handle:classifiers = [i.strip() for i in handle if i.strip() and not i.startswith('')]breakentry_points.setdefault('', []).extend(console_scripts)metadata.update(dict(name = name,package_dir = {'': ''},packages = find_packages(srcfile(''), exclude=['']),data_files = data_files.items(),zip_safe = False,include_package_data = True,install_requires = requires[''],setup_requires = requires[''],tests_require = requires[''],classifiers = classifiers,cmdclass = dict(test = PyTest,),entry_points = entry_points,))return metadata", "docstring": "Return project's metadata as a dict.", "id": "f921:m1"} {"signature": "def predict_peptides(self, peptides):", "body": "from mhcflurry.encodable_sequences import EncodableSequencesbinding_predictions = []encodable_sequences = EncodableSequences.create(peptides)for allele in self.alleles:predictions_df = self.predictor.predict_to_dataframe(encodable_sequences, allele=allele)for (_, row) in predictions_df.iterrows():binding_prediction = 
BindingPrediction(allele=allele,peptide=row.peptide,affinity=row.prediction,percentile_rank=(row.prediction_percentileif '' in row else nan),prediction_method_name=\"\")binding_predictions.append(binding_prediction)return BindingPredictionCollection(binding_predictions)", "docstring": "Predict MHC affinity for peptides.", "id": "f925:c0:m1"} {"signature": "def predict(self, sequences):", "body": "with tempfile.NamedTemporaryFile(suffix=\"\", mode=\"\") as input_fd:for (i, sequence) in enumerate(sequences):input_fd.write(\"\" % i)input_fd.write(sequence)input_fd.write(\"\")input_fd.flush()try:output = subprocess.check_output([\"\", input_fd.name])except subprocess.CalledProcessError as e:logging.error(\"\" % (e, e.output))raiseparsed = self.parse_netchop(output)assert len(parsed) == len(sequences),\"\" % (len(sequences), len(parsed))assert [len(x) for x in parsed] == [len(x) for x in sequences]return parsed", "docstring": "Return netChop predictions for each position in each sequence.\n\nParameters\n-----------\nsequences : list of string\n Amino acid sequences to predict cleavage for\n\nReturns\n-----------\nlist of list of float\n\nThe i'th list corresponds to the i'th sequence. Each list gives\nthe cleavage probability for each position in the sequence.", "id": "f928:c0:m0"} {"signature": "def create_input_peptides_files(peptides,max_peptides_per_file=None,group_by_length=False):", "body": "if group_by_length:peptide_lengths = {len(p) for p in peptides}peptide_groups = {l: [] for l in peptide_lengths}for p in peptides:peptide_groups[len(p)].append(p)else:peptide_groups = {\"\": peptides}file_names = []for key, group in peptide_groups.items():n_peptides = len(group)if not max_peptides_per_file:max_peptides_per_file = n_peptidesinput_file = Nonefor i, p in enumerate(group):if i % max_peptides_per_file == :if input_file is not None:file_names.append(input_file.name)input_file.close()input_file = make_writable_tempfile(prefix_number=i // max_peptides_per_file,prefix_name=key,suffix=\"\")input_file.write(\"\" % p)if input_file is not None:file_names.append(input_file.name)input_file.close()return file_names", "docstring": "Creates one or more files containing one peptide per line,\nreturns names of files.", "id": "f929:m1"} {"signature": "def __init__(self,peptide,allele,affinity,percentile_rank,source_sequence_name=None,offset=,log_affinity=None,prediction_method_name=\"\"):", "body": "if invalid_affinity(affinity) and np.isfinite(log_affinity):affinity = ** (-log_affinity + )if invalid_affinity(affinity):raise ValueError(\"\" % (affinity,peptide,allele))if invalid_percentile_rank(percentile_rank):raise ValueError(\"\" % (percentile_rank, peptide, allele))self.source_sequence_name = source_sequence_nameself.offset = offsetself.allele = alleleself.peptide = peptideself.affinity = affinityself.percentile_rank = percentile_rankself.prediction_method_name = prediction_method_name", "docstring": "Parameters\n----------\npeptide : str\n Short amino acid sequence\n\nallele : str\n HLA allele, e.g. 
HLA-A*02:01\n\naffinity : float\n Predicted binding affinity\n\npercentile_rank : float\n Percentile rank of the binding affinity for that allele\n\nsource_sequence_name : str\n Name of sequence from which peptide was extracted\n\noffset : int\n Base0 starting position in source sequence that all epitopes were\n extracted from\n\nlog_affinity : float, optional\n NetMHC sometimes gives invalid IC50 values but we can still\n reconstruct the value from its (1.0 - log_50000(IC50)) score.\n\nprediction_method_name : str, optional\n Name of predictor used to generate this prediction.", "id": "f930:c0:m0"} {"signature": "def clone_with_updates(self, **kwargs):", "body": "fields_dict = self.to_dict()fields_dict.update(kwargs)return BindingPrediction(**fields_dict)", "docstring": "Returns new BindingPrediction with updated fields", "id": "f930:c0:m2"} {"signature": "def run_multiple_commands_redirect_stdout(multiple_args_dict,print_commands=True,process_limit=-,polling_freq=,**kwargs):", "body": "assert len(multiple_args_dict) > assert all(len(args) > for args in multiple_args_dict.values())assert all(hasattr(f, '') for f in multiple_args_dict.keys())if process_limit < :logger.debug(\"\" % cpu_count())process_limit = cpu_count()start_time = time.time()processes = Queue(maxsize=process_limit)def add_to_queue(process):process.start()if print_commands:handler = logging.FileHandler(process.redirect_stdout_file.name)handler.setLevel(logging.DEBUG)logger.addHandler(handler)logger.debug(\"\".join(process.args))logger.removeHandler(handler)processes.put(process)for f, args in multiple_args_dict.items():p = AsyncProcess(args,redirect_stdout_file=f,**kwargs)if not processes.full():add_to_queue(p)else:while processes.full():to_remove = []for possibly_done in processes.queue:if possibly_done.poll() is not None:possibly_done.wait()to_remove.append(possibly_done)if to_remove:for process_to_remove in to_remove:processes.queue.remove(process_to_remove)breaktime.sleep(polling_freq)add_to_queue(p)while not processes.empty():processes.get().wait()elapsed_time = time.time() - start_timelogger.info(\"\",len(multiple_args_dict),elapsed_time)", "docstring": "Run multiple shell commands in parallel, write each of their\nstdout output to files associated with each command.\n\nParameters\n----------\nmultiple_args_dict : dict\n A dictionary whose keys are files and values are args list.\n Run each args list as a subprocess and write stdout to the\n corresponding file.\n\nprint_commands : bool\n Print shell commands before running them.\n\nprocess_limit : int\n Limit the number of concurrent processes to this number. 0\n if there is no limit, -1 to use max number of processors\n\npolling_freq : int\n Number of seconds between checking for done processes, if\n we have a process limit", "id": "f931:m1"} {"signature": "def poll(self):", "body": "if self.process is None:self.start()return self.process.poll()", "docstring": "Peeks at whether the process is done or not, without\nwaiting for it. 
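`run_multiple_commands_redirect_stdout` above caps concurrency by keeping a queue of running processes and polling them until slots free up. A compact stdlib equivalent of that bounded polling loop (the AsyncProcess wrapper is replaced by plain `subprocess.Popen`):

```python
import subprocess
import time

def run_bounded(commands, limit=4, poll_freq=0.5):
    """Run `commands` (each a list of args) with at most `limit` in
    flight, polling like run_multiple_commands_redirect_stdout above."""
    pending = list(commands)
    running = []
    while pending or running:
        running = [p for p in running if p.poll() is None]  # drop finished
        while pending and len(running) < limit:
            running.append(subprocess.Popen(pending.pop(0)))
        time.sleep(poll_freq)

# Example: run_bounded([['echo', str(i)] for i in range(10)], limit=3)
```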
Leaves exception handling and such to wait().", "id": "f931:c0:m2"} {"signature": "def predict_subsequences(self, sequence_dict, peptide_lengths=None):", "body": "sequence_dict = check_sequence_dictionary(sequence_dict)peptide_lengths = self._check_peptide_lengths(peptide_lengths)binding_predictions = []expected_peptides = set([])normalized_alleles = []for key, amino_acid_sequence in sequence_dict.items():for l in peptide_lengths:for i in range(len(amino_acid_sequence) - l + ):expected_peptides.add(amino_acid_sequence[i:i + l])self._check_peptide_inputs(expected_peptides)for allele in self.alleles:allele = normalize_allele_name(allele, omit_dra1=True)normalized_alleles.append(allele)request = self._get_iedb_request_params(amino_acid_sequence, allele)logger.info(\"\",self.url,request)response_df = _query_iedb(request, self.url)for _, row in response_df.iterrows():binding_predictions.append(BindingPrediction(source_sequence_name=key,offset=row[''] - ,allele=row[''],peptide=row[''],affinity=row[''],percentile_rank=row[''],prediction_method_name=\"\" + self.prediction_method))self._check_results(binding_predictions,alleles=normalized_alleles,peptides=expected_peptides)return BindingPredictionCollection(binding_predictions)", "docstring": "Given a dictionary mapping unique keys to amino acid sequences,\n run MHC binding predictions on all candidate epitopes extracted from\n sequences and return a EpitopeCollection.\n\n Parameters\n ----------\n fasta_dictionary : dict or string\n Mapping of protein identifiers to protein amino acid sequences.\n If string then converted to dictionary.", "id": "f933:c0:m4"} {"signature": "def prepare_allele_name(self, allele_name):", "body": "return allele_name.replace(\"\", \"\")", "docstring": "How does the predictor expect to see allele names?", "id": "f934:c0:m2"} {"signature": "def __init__(self,program_name,alleles,parse_output_fn,supported_alleles_flag,input_file_flag,length_flag,allele_flag,peptide_mode_flags=[\"\"],tempdir_flag=None,extra_flags=[],max_peptides_per_file= ** ,process_limit=-,default_peptide_lengths=[],group_peptides_by_length=False,min_peptide_length=,max_peptide_length=None,):", "body": "require_string(program_name, \"\")self.program_name = program_nameif supported_alleles_flag is not None:require_string(supported_alleles_flag, \"\")self.supported_alleles_flag = supported_alleles_flagrequire_string(input_file_flag, \"\")self.input_file_flag = input_file_flagrequire_string(length_flag, \"\")self.length_flag = length_flagrequire_string(allele_flag, \"\")self.allele_flag = allele_flagrequire_iterable_of(peptide_mode_flags, string_types)self.peptide_mode_flags = peptide_mode_flagsif tempdir_flag is not None:require_string(tempdir_flag, \"\")self.tempdir_flag = tempdir_flagrequire_iterable_of(extra_flags, string_types)self.extra_flags = extra_flagsrequire_integer(max_peptides_per_file,\"\")self.max_peptides_per_file = max_peptides_per_filerequire_integer(process_limit, \"\")self.process_limit = process_limitself.parse_output_fn = parse_output_fnif isinstance(default_peptide_lengths, int):default_peptide_lengths = [default_peptide_lengths]self.group_peptides_by_length = group_peptides_by_lengthif self.supported_alleles_flag:valid_alleles = self._determine_supported_alleles(self.program_name,self.supported_alleles_flag)else:try:run_command([self.program_name])except:raise SystemError(\"\" % self.program_name)valid_alleles = 
Nonetry:BasePredictor.__init__(self,alleles=alleles,valid_alleles=valid_alleles,default_peptide_lengths=default_peptide_lengths,min_peptide_length=min_peptide_length,max_peptide_length=max_peptide_length)except UnsupportedAllele as e:if self.supported_alleles_flag:additional_message = (\"\" % (self.program_name,self.supported_alleles_flag))else:additional_message = \"\"raise UnsupportedAllele(str(e) + additional_message)", "docstring": "Parameters\n----------\nprogram_name : str\n Name of prediction program to run\n (e.g. \"netMHCcons\" or \"netMHCIIpan\")\n\nalleles : list of str\n MHC alleles\n\nsupported_alleles_flag : str\n Flag to pass to the predictor to get a list of supported alleles\n (e.g. \"-A\", \"-list\", \"-listMHC\")\n\nparse_output_fn : fn\n Takes the stdout string from the predictor and returns a collection\n of BindingPrediction objects\n\ninput_file_flag : str\n How to specify the input FASTA file of source sequences (e.g. \"-f\")\n\nlength_flag : str\n How to specify the desired predicted peptide length (e.g. \"-length\")\n\nallele_flag : str\n How to specify the allele we want predictions for (e.g. \"-a\")\n\npeptide_mode_flags : list of str\n How to switch from the default FASTA subsequences input mode to\n where peptides are explicitly given one per line of a text file.\n\ntempdir_flag : str, optional\n How to specify the predictor's temporary directory (e.g. \"-tdir\")\n\nextra_flags : list of str\n Extra flags to pass to the predictor\n\nmax_peptides_per_file : int, optional\n Maximum number of lines per file when predicting peptides directly.\n\nprocess_limit : int, optional\n Maximum number of parallel processes to start\n (0 for no limit, -1 for use all available processors)\n\ndefault_peptide_lengths : list of int, optional\n When making predictions across subsequences of protein sequences,\n what peptide lengths to predict for.\n\ngroup_peptides_by_length : bool\n Run commandline predictor on groups of peptides of equal length\n\nmin_peptide_length : int\n Shortest peptide this predictor can handle\n\nmax_peptide_length : int\n Longest peptide this predictor can handle", "id": "f934:c0:m0"} {"signature": "@staticmethoddef _determine_supported_alleles(command, supported_allele_flag):", "body": "try:supported_alleles_output = check_output([command, supported_allele_flag])supported_alleles_str = supported_alleles_output.decode(\"\", \"\")assert len(supported_alleles_str) > ,'' % commandsupported_alleles = set([])for line in supported_alleles_str.split(\"\"):line = line.strip()if not line.startswith('') and len(line) > :try:supported_alleles.add(normalize_allele_name(line))except AlleleParseError as error:logger.info(\"\", line, error)continueif len(supported_alleles) == :raise ValueError(\"\")return supported_allelesexcept Exception as e:logger.exception(e)raise SystemError(\"\" % (command,supported_allele_flag))", "docstring": "Try asking the commandline predictor (e.g. 
netMHCpan)\nwhich alleles it supports.", "id": "f934:c0:m1"} {"signature": "def predict_peptides(self, peptides):", "body": "raise NotImplementedError(\"\" % (self.__class__.__name__))", "docstring": "Given a list of peptide sequences, returns a BindingPredictionCollection", "id": "f935:c0:m3"} {"signature": "def _check_peptide_lengths(self, peptide_lengths=None):", "body": "if not peptide_lengths:peptide_lengths = self.default_peptide_lengthsif not peptide_lengths:raise ValueError((\"\"\"\"))if isinstance(peptide_lengths, int):peptide_lengths = [peptide_lengths]require_iterable_of(peptide_lengths, int)for peptide_length in peptide_lengths:if (self.min_peptide_length is not None andpeptide_length < self.min_peptide_length):raise ValueError(\"\" % (peptide_length,self.min_peptide_length))elif (self.max_peptide_length is not None andpeptide_length > self.max_peptide_length):raise ValueError(\"\" % (peptide_length,self.max_peptide_length))return peptide_lengths", "docstring": "If peptide lengths not specified, then try using the default\nlengths associated with this predictor object. If those aren't\na valid non-empty sequence of integers, then raise an exception.\nOtherwise return the peptide lengths.", "id": "f935:c0:m5"} {"signature": "def main(args_list=None):", "body": "args = parse_args(args_list)binding_predictions = run_predictor(args)df = binding_predictions.to_dataframe()logger.info('', df)if args.output_csv:df.to_csv(args.output_csv, index=False)print(\"\" % args.output_csv)", "docstring": "Script to make pMHC binding predictions from amino acid sequences.\n\nUsage example:\n mhctools\n --sequence SFFPIQQQQQAAALLLI \\\n --sequence SILQQQAQAQQAQAASSSC \\\n --extract-subsequences \\\n --mhc-predictor netmhc \\\n --mhc-alleles HLA-A0201 H2-Db \\\n --mhc-predictor netmhc \\\n --output-csv epitope.csv", "id": "f939:m4"} {"signature": "def parse_int_list(string):", "body": "integers = []for comma_part in string.split(\"\"):for substring in comma_part.split(\"\"):if len(substring) == :continueif \"\" in substring:left, right = substring.split(\"\")left_val = int(left.strip())right_val = int(right.strip())integers.extend(range(left_val, right_val + ))else:integers.append(int(substring.strip()))return integers", "docstring": "Parses a string of numbers and ranges into a list of integers. 
Ranges\nare separated by dashes and inclusive of both the start and end number.\n\nExample:\n parse_int_list(\"8 9 10,11-13\") == [8,9,10,11,12,13]", "id": "f941:m0"} {"signature": "def parse_netmhcpan4_stdout(stdout,prediction_method_name=\"\",sequence_key_mapping=None):", "body": "return parse_netmhcpan3_stdout(stdout=stdout,prediction_method_name=prediction_method_name,sequence_key_mapping=sequence_key_mapping)", "docstring": "# NetMHCpan version 4.0\n\n# Tmpdir made /var/folders/jc/fyrvcrcs3sb8g4mkdg6nl_t80000gp/T//netMHCpanuH3SvY\n# Input is in PEPTIDE format\n\n# Make binding affinity predictions\n\nHLA-A02:01 : Distance to training data 0.000 (using nearest neighbor HLA-A02:01)\n\n# Rank Threshold for Strong binding peptides 0.500\n# Rank Threshold for Weak binding peptides 2.000\n-----------------------------------------------------------------------------------\n Pos HLA Peptide Core Of Gp Gl Ip Il Icore Identity Score Aff(nM) %Rank BindLevel\n-----------------------------------------------------------------------------------\n 1 HLA-A*02:01 SIINFEKL SIINF-EKL 0 0 0 5 1 SIINFEKL PEPLIST 0.1141340 14543.1 18.9860\n-----------------------------------------------------------------------------------\n\nProtein PEPLIST. Allele HLA-A*02:01. Number of high binders 0. Number of weak binders 0. Number of peptides 1", "id": "f943:m8"} {"signature": "def NetMHCpan(alleles,program_name=\"\",process_limit=-,default_peptide_lengths=[],extra_flags=[]):", "body": "with open(os.devnull, '') as devnull:output = check_output([program_name, \"\", \"\"],stderr=devnull)output_str = output.decode(\"\", \"\")common_kwargs = {\"\": alleles,\"\": default_peptide_lengths,\"\": program_name,\"\": process_limit,\"\": extra_flags,}if \"\" in output_str:return NetMHCpan28(**common_kwargs)elif \"\" in output_str:return NetMHCpan3(**common_kwargs)elif \"\" in output_str:return NetMHCpan4(**common_kwargs)else:raise RuntimeError(\"\")", "docstring": "This function wraps NetMHCpan28 and NetMHCpan3 to automatically detect which class\nto use, with the help of the miraculous and strange '--version' netmhcpan argument.", "id": "f950:m0"} {"signature": "def create_tables(self):", "body": "for cls in self:cls.create_table(fail_silently=True)", "docstring": "Create database tables", "id": "f967:c0:m1"} {"signature": "def get_paginator(self):", "body": "return self.paginator", "docstring": "Return pagination for our model", "id": "f967:c8:m1"} {"signature": "def to_cursor_ref(self):", "body": "fields = self._meta.get_primary_keys()assert fieldsvalues = {field.name:self.__data__[field.name] for field in fields}return values", "docstring": "Returns dict of values to uniquely reference this item", "id": "f967:c4:m4"} {"signature": "def get_database(self, model):", "body": "for router in self.routers:r = router.get_database(model)if r is not None:return rreturn self.get('')", "docstring": "Find matching database router", "id": "f967:c1:m3"} {"signature": "@classmethoddef from_cursor_ref(self, cursor):", "body": "return self.get(**cursor)", "docstring": "Returns model instance from unique cursor reference", "id": "f967:c4:m5"} {"signature": "@classmethoddef get_or_none(cls, **kwargs):", "body": "try:return cls.get(**kwargs)except cls.DoesNotExist:return None", "docstring": "XXX: needs unit test", "id": "f967:c4:m2"} {"signature": "def refetch(self):", "body": "ref = self.to_cursor_ref()return self.from_cursor_ref(ref)", "docstring": "Return new model instance with fresh data from database\nOnly works on models which have a 
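The `parse_int_list` record (f941:m0) lost a few literals to extraction (the separators and the `+ 1` that makes ranges inclusive). A runnable reconstruction that satisfies the example in its own docstring:

```python
def parse_int_list(string):
    """parse_int_list("8 9 10,11-13") == [8, 9, 10, 11, 12, 13]"""
    integers = []
    for comma_part in string.split(','):
        for substring in comma_part.split(' '):
            if not substring:
                continue
            if '-' in substring:
                left, right = substring.split('-')
                integers.extend(range(int(left), int(right) + 1))  # inclusive
            else:
                integers.append(int(substring))
    return integers

assert parse_int_list("8 9 10,11-13") == [8, 9, 10, 11, 12, 13]
```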
primary or compound key\nSee https://github.com/coleifer/peewee/issues/638\n\nXXX: Add support for models without PK", "id": "f967:c4:m6"} {"signature": "def list(self, filters, cursor, count):", "body": "assert isinstance(filters, dict), \"\"assert isinstance(cursor, dict), \"\"query = self.get_query()assert isinstance(query, peewee.Query)paginator = self.get_paginator()assert isinstance(paginator, Pagination)count += pquery = paginator.filter_query(query, cursor, count)items = [ item for item in pquery ]next_item = items.pop()next_cursor = next_item.to_cursor_ref()''''''return items, next_cursor", "docstring": "List items from query", "id": "f967:c8:m3"} {"signature": "def utcnow_no_ms():", "body": "return datetime.datetime.utcnow().replace(microsecond=)", "docstring": "Returns utcnow without microseconds", "id": "f967:m0"} {"signature": "@classmethoddef paginate_query(self, query, count, offset=None, sort=None):", "body": "assert isinstance(query, peewee.Query)assert isinstance(count, int)assert isinstance(offset, (str, int, type(None)))assert isinstance(sort, (list, set, tuple, type(None)))fields = query.model._meta.get_primary_keys()if len(fields) == :raise peewee.ProgrammingError('')if len(fields) > :raise peewee.ProgrammingError('')if offset is not None:query = query.where(fields[] >= offset)order_bys = []if sort:for field, direction in sort:if not isinstance(direction, str):raise ValueError(\"\".format(field))direction = direction.lower().strip()if direction not in ['', '']:raise ValueError(\"\".format(field))order_by = peewee.SQL(field)order_by = getattr(order_by, direction)()order_bys += [order_by]order_bys += [fields[].asc()]query = query.order_by(*order_bys)query = query.limit(count)return query", "docstring": "Apply pagination to query\n\n:attr query: Instance of `peewee.Query`\n:attr count: Max rows to return\n:attr offset: Pagination offset, str/int\n:attr sort: List of tuples, e.g. 
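The `list`/`paginate_query` records above implement keyset pagination: fetch `count + 1` rows ordered by the primary key, then pop the extra row and use its key as the next cursor. The same limit-plus-one trick over a plain sorted sequence:

```python
def paginate(rows, key, cursor=None, count=10):
    """Keyset pagination: rows must be sorted by `key`; `cursor` is the
    key value to resume from (cf. paginate_query's `offset` above)."""
    if cursor is not None:
        rows = [r for r in rows if key(r) >= cursor]
    page = rows[:count + 1]             # fetch one extra row...
    next_cursor = None
    if len(page) > count:
        next_cursor = key(page.pop())   # ...and turn it into the next cursor
    return page, next_cursor

rows = [{'id': i} for i in range(1, 26)]
page, nxt = paginate(rows, key=lambda r: r['id'], count=10)
assert [r['id'] for r in page] == list(range(1, 11)) and nxt == 11
```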
[('id', 'asc')]\n\n:returns: Instance of `peewee.Query`", "id": "f967:c7:m0"} {"signature": "def retrieve(self, cursor):", "body": "assert isinstance(cursor, dict), \"\"query = self.get_query()assert isinstance(query, peewee.Query)queryreturn query.get(**cursor)", "docstring": "Retrieve items from query", "id": "f967:c8:m4"} {"signature": "def get_query(self):", "body": "return self.query", "docstring": "Return query for our model", "id": "f967:c8:m0"} {"signature": "def populate_models(self):", "body": "fake = Faker()fake.seed()cities = ['', '', '', '']items = []for x in range():city = cities[x % len(cities)]items += [dict(name=fake.name(), city=city)]Person.insert_many(items).execute()assert Person.select().count() == ", "docstring": "Populate test models with (predictable) fake data", "id": "f970:c1:m0"} {"signature": "def upgrade(self):", "body": "if not self.is_valid:raise PolyaxonDeploymentConfigError(''.format(self.deployment_type))if self.is_kubernetes:self.upgrade_on_kubernetes()elif self.is_docker_compose:self.upgrade_on_docker_compose()elif self.is_docker:self.upgrade_on_docker()elif self.is_heroku:self.upgrade_on_heroku()", "docstring": "Upgrade deployment.", "id": "f1019:c0:m23"} {"signature": "def install(self):", "body": "if not self.is_valid:raise PolyaxonDeploymentConfigError(''.format(self.deployment_type))if self.is_kubernetes:self.install_on_kubernetes()elif self.is_docker_compose:self.install_on_docker_compose()elif self.is_docker:self.install_on_docker()elif self.is_heroku:self.install_on_heroku()", "docstring": "Install polyaxon using the current config to the correct platform.", "id": "f1019:c0:m18"} {"signature": "def teardown(self, hooks=True):", "body": "if not self.is_valid:raise PolyaxonDeploymentConfigError(''.format(self.deployment_type))if self.is_kubernetes:self.teardown_on_kubernetes(hooks=hooks)elif self.is_docker_compose:self.teardown_on_docker_compose()elif self.is_docker:self.teardown_on_docker(hooks=hooks)elif self.is_heroku:self.teardown_on_heroku(hooks=hooks)", "docstring": "Teardown Polyaxon.", "id": "f1019:c0:m28"} {"signature": "@classmethoddef find_matching(cls, path, patterns):", "body": "for pattern in patterns:if pattern.match(path):yield pattern", "docstring": "Yield all matching patterns for path.", "id": "f1025:c1:m3"} {"signature": "@staticmethoddef _matches_patterns(path, patterns):", "body": "for glob in patterns:try:if PurePath(path).match(glob):return Trueexcept TypeError:passreturn False", "docstring": "Given a list of patterns, returns True if a path matches any pattern.", "id": "f1025:c1:m9"} {"signature": "@classmethoddef _ignore_path(cls, path, ignore_list=None, white_list=None):", "body": "ignore_list = ignore_list or []white_list = white_list or []return (cls._matches_patterns(path, ignore_list) andnot cls._matches_patterns(path, white_list))", "docstring": "Returns whether a path should be ignored or not.", "id": "f1025:c1:m10"} {"signature": "@click.group()@click.option('', '', is_flag=True, default=False, help='')@click.pass_context@clean_outputsdef cli(context, verbose):", "body": "configure_logger(verbose or GlobalConfigManager.get_value(''))non_check_cmds = ['', '', '', '', '', '', '']if context.invoked_subcommand not in non_check_cmds:check_cli_version()", "docstring": "Polyaxon CLI tool to:\n\n * Parse, Validate, and Check Polyaxonfiles.\n\n * Interact with Polyaxon server.\n\n * Run and Monitor experiments.\n\n Check the help available for each command listed below.", "id": "f1033:m0"} {"signature": "def 
{"signature": "def pprint(value):", "body": "click.echo(json.dumps(value,sort_keys=True,indent=,separators=('', '')))", "docstring": "Prints as formatted JSON", "id": "f1039:m3"} {"signature": "@config.command()@click.option('', type=bool, help='')@click.option('', type=str, help='')@click.option('', type=int, help='')@click.option('', type=int, help='')@click.option('', type=bool, help='')@click.option('', type=bool,help='')@clean_outputsdef set(verbose, host,http_port,ws_port,use_https,verify_ssl):", "body": "_config = GlobalConfigManager.get_config_or_default()if verbose is not None:_config.verbose = verboseif host is not None:_config.host = hostif http_port is not None:_config.http_port = http_portif ws_port is not None:_config.ws_port = ws_portif use_https is not None:_config.use_https = use_httpsif verify_ssl is False:_config.verify_ssl = verify_sslGlobalConfigManager.set_config(_config)Printer.print_success('')CliConfigManager.purge()", "docstring": "Set the global config values.\n\n Example:\n\n \\b\n ```bash\n $ polyaxon config set --host=localhost --http_port=80\n ```", "id": "f1040:m3"} {"signature": "@click.group(invoke_without_command=True)@click.option('', '', is_flag=True, help='')@clean_outputsdef config(list): ", "body": "if list:_config = GlobalConfigManager.get_config_or_default()Printer.print_header('')dict_tabulate(_config.to_dict())", "docstring": "Set and get the global configurations.", "id": "f1040:m1"} {"signature": "@config.command()@click.argument('', type=str, nargs=-1)@clean_outputsdef get(keys):", "body": "_config = GlobalConfigManager.get_config_or_default()if not keys:returnprint_values = {}for key in keys:if hasattr(_config, key):print_values[key] = getattr(_config, key)else:click.echo(''.format(key))dict_tabulate(print_values, )", "docstring": "Get the global config values by keys.\n\n Example:\n\n \\b\n ```bash\n $ polyaxon config get host http_port\n ```", "id": "f1040:m2"} {"signature": "@click.command()@clean_outputsdef upgrade():", "body": "try:pip_upgrade(PROJECT_CLI_NAME)except Exception as e:logger.error(e)", "docstring": "Install/Upgrade polyaxon-cli.", "id": "f1042:m8"} {"signature": "@admin.command()@click.option('', '', type=click.Path(exists=True),help='')@click.option('', type=click.Path(exists=True),help='')@click.option('', is_flag=True, default=False,help='')@click.option('', is_flag=True, default=False,help='')@clean_outputsdef deploy(file, manager_path, check, dry_run): ", "body": "config = read_deployment_config(file)manager = DeployManager(config=config,filepath=file,manager_path=manager_path,dry_run=dry_run)exception = Noneif check:manager.check()Printer.print_success('')else:try:manager.install()except Exception as e:Printer.print_error('')exception = eif exception:Printer.print_error(''.format(exception))", "docstring": "Deploy polyaxon.", "id": "f1043:m2"} {"signature": "@click.group()@click.option('', '', type=str, help=\"\")@click.option('', '', type=int, help=\"\")@click.pass_context@clean_outputsdef build(ctx, project, build): ", "body": "ctx.obj = ctx.obj or {}ctx.obj[''] = projectctx.obj[''] = build", "docstring": "Commands for build jobs.", "id": "f1045:m1"} {"signature": "@build.command()@click.pass_context@clean_outputsdef unbookmark(ctx):", "body": "user, project_name, _build = get_build_or_local(ctx.obj.get(''), ctx.obj.get(''))try:PolyaxonClient().build_job.unbookmark(user, project_name, _build)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as 
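The `pprint` helper above (`f1039:m3`) lost its literals. A hedged reconstruction of the `json.dumps` pretty-printing pattern it documents; the indent width and separators are assumptions, since the original values were stripped:

```python
# Minimal sketch of a JSON pretty-printer for CLI output.
import json
import click

def pprint(value):
    """Prints as formatted JSON."""
    click.echo(json.dumps(value,
                          sort_keys=True,
                          indent=4,                # assumed width
                          separators=(',', ': ')))  # assumed separators

pprint({"host": "localhost", "http_port": 80})
```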
e:Printer.print_error(''.format(_build))Printer.print_error(''.format(e))sys.exit()Printer.print_success(\"\")", "docstring": "Unbookmark build job.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Examples:\n\n \\b\n ```bash\n $ polyaxon build unbookmark\n ```\n\n \\b\n ```bash\n $ polyaxon build -b 2 unbookmark\n ```", "id": "f1045:m7"} {"signature": "@build.command()@click.pass_context@clean_outputsdef bookmark(ctx):", "body": "user, project_name, _build = get_build_or_local(ctx.obj.get(''), ctx.obj.get(''))try:PolyaxonClient().build_job.bookmark(user, project_name, _build)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(_build))Printer.print_error(''.format(e))sys.exit()Printer.print_success(\"\")", "docstring": "Bookmark build job.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Examples:\n\n \\b\n ```bash\n $ polyaxon build bookmark\n ```\n\n \\b\n ```bash\n $ polyaxon build -b 2 bookmark\n ```", "id": "f1045:m6"} {"signature": "@build.command()@click.option('', '', is_flag=True, help='')@click.pass_context@clean_outputsdef resources(ctx, gpu):", "body": "user, project_name, _build = get_build_or_local(ctx.obj.get(''), ctx.obj.get(''))try:message_handler = Printer.gpu_resources if gpu else Printer.resourcesPolyaxonClient().build_job.resources(user,project_name,_build,message_handler=message_handler)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(_build))Printer.print_error(''.format(e))sys.exit()", "docstring": "Get build job resources.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Examples:\n\n \\b\n ```bash\n $ polyaxon build -b 2 resources\n ```\n\n For GPU resources\n\n \\b\n ```bash\n $ polyaxon build -b 2 resources --gpu\n ```", "id": "f1045:m9"} {"signature": "@build.command()@click.option('', '', is_flag=True, default=False,help=\"\"\"\")@click.pass_context@clean_outputsdef stop(ctx, yes):", "body": "user, project_name, _build = get_build_or_local(ctx.obj.get(''), ctx.obj.get(''))if not yes and not click.confirm(\"\"\"\".format(_build)):click.echo('')sys.exit()try:PolyaxonClient().build_job.stop(user, project_name, _build)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(_build))Printer.print_error(''.format(e))sys.exit()Printer.print_success(\"\")", "docstring": "Stop build job.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Examples:\n\n \\b\n ```bash\n $ polyaxon build stop\n ```\n\n \\b\n ```bash\n $ polyaxon build -b 2 stop\n ```", "id": "f1045:m5"} {"signature": "@build.command()@click.pass_context@clean_outputsdef delete(ctx):", "body": "user, project_name, _build = get_build_or_local(ctx.obj.get(''), ctx.obj.get(''))if not click.confirm(\"\".format(_build)):click.echo('')sys.exit()try:response = PolyaxonClient().build_job.delete_build(user, project_name, _build)BuildJobManager.purge()except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(_build))Printer.print_error(''.format(e))sys.exit()if response.status_code == :Printer.print_success(\"\".format(_build))", "docstring": "Delete build job.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Example:\n\n \\b\n ```bash\n $ polyaxon build delete\n ```\n\n \\b\n ```bash\n $ polyaxon build -b 2 delete\n ```", "id": "f1045:m3"} {"signature": "@build.command()@click.option('', '', is_flag=True, help=\"\")@click.option('', '', 
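The build commands in this block all repeat one shape: call the client, and on any Polyaxon error print a message and `sys.exit`. A sketch of factoring that shape into a decorator; the exception classes are stubs standing in for the real imports, and `exit_on_client_error`/`stop_build` are invented names:

```python
# Shared error-handling shape of the CLI commands above, as a decorator.
import sys
from functools import wraps

# Stand-ins for the real client exceptions imported by the CLI.
class PolyaxonHTTPError(Exception): pass
class PolyaxonShouldExitError(Exception): pass
class PolyaxonClientException(Exception): pass

def exit_on_client_error(message):
    """Print `message` and the error, then exit, on any client failure."""
    def decorator(fn):
        @wraps(fn)
        def wrapped(*args, **kwargs):
            try:
                return fn(*args, **kwargs)
            except (PolyaxonHTTPError, PolyaxonShouldExitError,
                    PolyaxonClientException) as e:
                print('{} Error: {}'.format(message, e), file=sys.stderr)
                sys.exit(1)
        return wrapped
    return decorator

@exit_on_client_error('Could not stop the build job.')
def stop_build(client, user, project_name, build):
    client.build_job.stop(user, project_name, build)
```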
is_flag=True, default=False,help=\"\")@click.option('', is_flag=True, default=False,help=\"\")@click.pass_context@clean_outputsdef logs(ctx, past, follow, hide_time):", "body": "user, project_name, _build = get_build_or_local(ctx.obj.get(''), ctx.obj.get(''))if past:try:response = PolyaxonClient().build_job.logs(user, project_name, _build, stream=False)get_logs_handler(handle_job_info=False,show_timestamp=not hide_time,stream=False)(response.content.decode().split(''))print()if not follow:returnexcept (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:if not follow:Printer.print_error(''.format(_build))Printer.print_error(''.format(e))sys.exit()try:PolyaxonClient().build_job.logs(user,project_name,_build,message_handler=get_logs_handler(handle_job_info=False, show_timestamp=not hide_time))except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(_build))Printer.print_error(''.format(e))sys.exit()", "docstring": "Get build logs.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Examples:\n\n \\b\n ```bash\n $ polyaxon build -b 2 logs\n ```\n\n \\b\n ```bash\n $ polyaxon build logs\n ```", "id": "f1045:m10"} {"signature": "@click.group()@click.option('', '', type=str, help=\"\")@click.option('', '', type=int, help=\"\")@click.pass_context@clean_outputsdef job(ctx, project, job): ", "body": "ctx.obj = ctx.obj or {}ctx.obj[''] = projectctx.obj[''] = job", "docstring": "Commands for jobs.", "id": "f1046:m1"} {"signature": "@job.command()@click.pass_context@clean_outputsdef get(ctx):", "body": "user, project_name, _job = get_job_or_local(ctx.obj.get(''), ctx.obj.get(''))try:response = PolyaxonClient().job.get_job(user, project_name, _job)cache.cache(config_manager=JobManager, response=response)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(_job))Printer.print_error(''.format(e))sys.exit()get_job_details(response)", "docstring": "Get job.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Examples:\n\n \\b\n ```bash\n $ polyaxon job --job=1 get\n ```\n\n \\b\n ```bash\n $ polyaxon job --job=1 --project=project_name get\n ```", "id": "f1046:m2"} {"signature": "@job.command()@click.pass_context@clean_outputsdef bookmark(ctx):", "body": "user, project_name, _job = get_job_or_local(ctx.obj.get(''), ctx.obj.get(''))try:PolyaxonClient().job.bookmark(user, project_name, _job)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(_job))Printer.print_error(''.format(e))sys.exit()Printer.print_success(\"\")", "docstring": "Bookmark job.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Examples:\n\n \\b\n ```bash\n $ polyaxon job bookmark\n ```\n\n \\b\n ```bash\n $ polyaxon job -xp 2 bookmark\n ```", "id": "f1046:m12"} {"signature": "@job.command()@click.option('', '', is_flag=True, help='')@click.pass_context@clean_outputsdef resources(ctx, gpu):", "body": "user, project_name, _job = get_job_or_local(ctx.obj.get(''), ctx.obj.get(''))try:message_handler = Printer.gpu_resources if gpu else Printer.resourcesPolyaxonClient().job.resources(user,project_name,_job,message_handler=message_handler)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(_job))Printer.print_error(''.format(e))sys.exit()", "docstring": "Get job resources.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Examples:\n\n \\b\n ```bash\n $ polyaxon job -j 
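The `job` and `build` groups above lean on Click's context object: the group callback stashes the shared `--project`/`--job` options in `ctx.obj` so every subcommand can read them back. A self-contained sketch of that pattern; option names mirror the code, the `echo` output is illustrative:

```python
# Click group storing shared options in ctx.obj for its subcommands.
import click

@click.group()
@click.option('--project', '-p', type=str, help="The project name.")
@click.option('--job', '-j', type=int, help="The job id.")
@click.pass_context
def job(ctx, project, job):
    """Commands for jobs."""
    ctx.obj = ctx.obj or {}
    ctx.obj['project'] = project
    ctx.obj['job'] = job

@job.command()
@click.pass_context
def get(ctx):
    # Subcommands read the shared options back out of ctx.obj.
    click.echo('project={project} job={job}'.format(**ctx.obj))

if __name__ == '__main__':
    job()  # e.g. `python cli.py -p mnist -j 2 get`
```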
2 resources\n ```\n\n For GPU resources\n\n \\b\n ```bash\n $ polyaxon job -j 2 resources --gpu\n ```", "id": "f1046:m9"} {"signature": "@job.command()@click.pass_context@clean_outputsdef unbookmark(ctx):", "body": "user, project_name, _job = get_job_or_local(ctx.obj.get(''), ctx.obj.get(''))try:PolyaxonClient().job.unbookmark(user, project_name, _job)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(_job))Printer.print_error(''.format(e))sys.exit()Printer.print_success(\"\")", "docstring": "Unbookmark job.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Examples:\n\n \\b\n ```bash\n $ polyaxon job unbookmark\n ```\n\n \\b\n ```bash\n $ polyaxon job -xp 2 unbookmark\n ```", "id": "f1046:m13"} {"signature": "@job.command()@click.option('', '', is_flag=True, default=False,help=\"\")@click.option('', '', multiple=True, type=click.Path(exists=True),help=\"\")@click.option('', is_flag=True, default=False,help=\"\")@click.pass_context@clean_outputsdef restart(ctx, copy, file, u): ", "body": "config = Noneupdate_code = Noneif file:config = rhea.read(file)if u:ctx.invoke(upload, sync=False)update_code = Trueuser, project_name, _job = get_job_or_local(ctx.obj.get(''), ctx.obj.get(''))try:if copy:response = PolyaxonClient().job.copy(user, project_name, _job, config=config, update_code=update_code)else:response = PolyaxonClient().job.restart(user, project_name, _job, config=config, update_code=update_code)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(_job))Printer.print_error(''.format(e))sys.exit()get_job_details(response)", "docstring": "Restart job.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Examples:\n\n \\b\n ```bash\n $ polyaxon job --job=1 restart\n ```", "id": "f1046:m6"} {"signature": "@click.command()@clean_outputsdef whoami():", "body": "try:user = PolyaxonClient().auth.get_user()except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error('')Printer.print_error(''.format(e))sys.exit()click.echo(\"\".format(**user.to_dict()))", "docstring": "Show current logged Polyaxon user.", "id": "f1053:m2"} {"signature": "@tensorboard.command()@click.option('', '', multiple=True, type=click.Path(exists=True),help='')@click.pass_context@clean_outputsdef start(ctx, file): ", "body": "specification = Nonejob_config = Noneif file:specification = check_polyaxonfile(file, log=False).specificationif specification:check_polyaxonfile_kind(specification=specification, kind=specification._TENSORBOARD)job_config = specification.parsed_datauser, project_name = get_project_or_local(ctx.obj.get(''))group = ctx.obj.get('')experiment = ctx.obj.get('')if experiment:try:response = PolyaxonClient().experiment.start_tensorboard(username=user,project_name=project_name,experiment_id=experiment,job_config=job_config)obj = ''.format(experiment)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(experiment))Printer.print_error(''.format(e))sys.exit()elif group:try:response = PolyaxonClient().experiment_group.start_tensorboard(username=user,project_name=project_name,group_id=group,job_config=job_config)obj = ''.format(group)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(group))Printer.print_error(''.format(e))sys.exit()else:try:response = 
PolyaxonClient().project.start_tensorboard(username=user,project_name=project_name,job_config=job_config)obj = ''.format(project_name)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(project_name))Printer.print_error(''.format(e))sys.exit()if response.status_code == :Printer.print_header(\"\".format(obj))click.echo(get_tensorboard_url(user=user,project_name=project_name,experiment=experiment,group=group))sys.exit()if response.status_code != :Printer.print_error('')sys.exit()Printer.print_success(''.format(obj))clint.textui.puts(\"\")clint.textui.puts(\"\")with clint.textui.indent():clint.textui.puts(get_tensorboard_url(user, project_name, experiment, group))", "docstring": "Start a tensorboard deployment for project/experiment/experiment group.\n\n Project tensorboard will aggregate all experiments under the project.\n\n Experiment group tensorboard will aggregate all experiments under the group.\n\n Experiment tensorboard will show all metrics for an experiment.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Example: using the default tensorflow image 1.4.1.\n\n \\b\n ```bash\n $ polyaxon tensorboard start\n ```\n\n Example: with custom image and resources\n\n \\b\n ```bash\n $ polyaxon tensorboard start -f file -f file_override ...\n ```\n\n Example: starting a tensorboard for an experiment group\n\n \\b\n ```bash\n $ polyaxon tensorboard -g 1 start -f file\n ```\n\n Example: starting a tensorboard for an experiment\n\n \\b\n ```bash\n $ polyaxon tensorboard -xp 112 start -f file\n ```", "id": "f1055:m3"} {"signature": "@click.command()@click.option('', '', type=str)@click.option('', '', multiple=True, type=click.Path(exists=True),help='')@click.option('', type=str,help='')@click.option('', type=str, help='')@click.option('', type=str,help='')@click.option('', type=int,help=\"\")@click.option('', is_flag=True, default=False,help='')@click.option('', is_flag=True, default=False,help='')@click.pass_context@clean_outputsdef run(ctx, project, file, name, tags, description, ttl, u, l): ", "body": "if not file:file = PolyaxonFile.check_default_path(path='')if not file:file = ''specification = check_polyaxonfile(file, log=False).specificationspec_cond = (specification.is_experiment orspecification.is_group orspecification.is_job orspecification.is_build)if not spec_cond:Printer.print_error(''''.format(specification.kind))if specification.is_notebook:click.echo('')elif specification.is_tensorboard:click.echo('')sys.exit()if u:if project:Printer.print_error('')click.echo('')sys.exit()ctx.invoke(upload, sync=False)user, project_name = get_project_or_local(project)project_client = PolyaxonClient().projecttags = validate_tags(tags)def run_experiment():click.echo('')experiment = ExperimentConfig(name=name,description=description,tags=tags,config=specification.parsed_data,ttl=ttl)try:response = PolyaxonClient().project.create_experiment(user,project_name,experiment)cache.cache(config_manager=ExperimentManager, response=response)Printer.print_success(''.format(response.id))except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error('')Printer.print_error(''.format(e))sys.exit()def run_group():click.echo('')experiments_def = specification.experiments_defget_group_experiments_info(**experiments_def)experiment_group = ExperimentGroupConfig(name=name,description=description,tags=tags,content=specification._data) try:response = 
project_client.create_experiment_group(user,project_name,experiment_group)cache.cache(config_manager=GroupManager, response=response)Printer.print_success(''.format(response.id))except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error('')Printer.print_error(''.format(e))sys.exit()def run_job():click.echo('')job = JobConfig(name=name,description=description,tags=tags,config=specification.parsed_data,ttl=ttl)try:response = project_client.create_job(user,project_name,job)cache.cache(config_manager=JobManager, response=response)Printer.print_success(''.format(response.id))except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error('')Printer.print_error(''.format(e))sys.exit()def run_build():click.echo('')job = JobConfig(name=name,description=description,tags=tags,config=specification.parsed_data,ttl=ttl)try:response = project_client.create_build(user,project_name,job)cache.cache(config_manager=BuildJobManager, response=response)Printer.print_success(''.format(response.id))except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error('')Printer.print_error(''.format(e))sys.exit()logs = Noneif specification.is_experiment:run_experiment()logs = experiment_logselif specification.is_group:run_group()elif specification.is_job:run_job()logs = job_logselif specification.is_build:run_build()logs = build_logsif l and logs:ctx.obj = {'': ''.format(user, project_name)}ctx.invoke(logs)", "docstring": "Run polyaxonfile specification.\n\n Examples:\n\n \\b\n ```bash\n $ polyaxon run -f file -f file_override ...\n ```\n\n Upload before running\n\n \\b\n ```bash\n $ polyaxon run -f file -u\n ```\n\n Run and set description and tags for this run\n\n \\b\n ```bash\n $ polyaxon run -f file -u --description=\"Description of the current run\" --tags=\"foo, bar, moo\"\n ```\n Run and set a unique name for this run\n\n \\b\n ```bash\n polyaxon run --name=foo\n ```\n\n Run for a specific project\n\n \\b\n ```bash\n $ polyaxon run -p project1 -f file.yaml\n ```", "id": "f1057:m0"} {"signature": "@user.command()@click.argument('', type=str)@clean_outputsdef delete(username):", "body": "try:PolyaxonClient().user.delete_user(username)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(username))Printer.print_error(''.format(e))sys.exit()Printer.print_success(\"\".format(username))", "docstring": "Delete a user.\n\n Example:\n\n \\b\n ```bash\n $ polyaxon user delete david\n ```", "id": "f1058:m2"} {"signature": "@click.group()@click.option('', '', type=str, help=\"\")@click.option('', '', type=int, help=\"\")@click.pass_context@clean_outputsdef experiment(ctx, project, experiment): ", "body": "ctx.obj = ctx.obj or {}ctx.obj[''] = projectctx.obj[''] = experiment", "docstring": "Commands for experiments.", "id": "f1059:m1"} {"signature": "@experiment.command()@click.option('', type=str,help='')@click.option('', type=str, help='')@click.option('', type=str, help='')@click.pass_context@clean_outputsdef update(ctx, name, description, tags):", "body": "user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get(''),ctx.obj.get(''))update_dict = {}if name:update_dict[''] = nameif description:update_dict[''] = descriptiontags = validate_tags(tags)if tags:update_dict[''] = tagsif not update_dict:Printer.print_warning('')sys.exit()try:response = PolyaxonClient().experiment.update_experiment(user, project_name, _experiment, 
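The `run` command above (`f1057:m0`) is essentially a dispatcher: it inspects the polyaxonfile specification's kind and forwards to a matching runner, refusing kinds it cannot run. A hedged sketch of that dispatch using a plain dict; the real code tests `is_experiment`/`is_group`/`is_job`/`is_build` flags instead, and the runner bodies here are placeholders:

```python
# Dispatch a specification to a runner based on its kind.
from types import SimpleNamespace

def run_experiment(spec): print('Creating an experiment.')
def run_group(spec): print('Creating an experiment group.')
def run_job(spec): print('Creating a job.')
def run_build(spec): print('Creating a build.')

RUNNERS = {
    'experiment': run_experiment,
    'group': run_group,
    'job': run_job,
    'build': run_build,
}

def run_spec(specification):
    runner = RUNNERS.get(specification.kind)
    if runner is None:
        raise ValueError('Cannot run a polyaxonfile of kind {!r}.'
                         .format(specification.kind))
    return runner(specification)

run_spec(SimpleNamespace(kind='job'))  # Creating a job.
```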
update_dict)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(_experiment))Printer.print_error(''.format(e))sys.exit()Printer.print_success(\"\")get_experiment_details(response)", "docstring": "Update experiment.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Examples:\n\n \\b\n ```bash\n $ polyaxon experiment -xp 2 update --description=\"new description for my experiments\"\n ```\n\n \\b\n ```bash\n $ polyaxon experiment -xp 2 update --tags=\"foo, bar\" --name=\"unique-name\"\n ```", "id": "f1059:m4"} {"signature": "@experiment.command()@click.pass_context@clean_outputsdef outputs(ctx):", "body": "user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get(''),ctx.obj.get(''))try:PolyaxonClient().experiment.download_outputs(user, project_name, _experiment)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(_experiment))Printer.print_error(''.format(e))sys.exit()Printer.print_success('')", "docstring": "Download outputs for experiment.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Examples:\n\n \\b\n ```bash\n $ polyaxon experiment -xp 1 outputs\n ```", "id": "f1059:m12"} {"signature": "@experiment.command()@click.option('', '', is_flag=True, default=False,help=\"\"\"\")@click.pass_context@clean_outputsdef stop(ctx, yes):", "body": "user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get(''),ctx.obj.get(''))if not yes and not click.confirm(\"\"\"\".format(_experiment)):click.echo('')sys.exit()try:PolyaxonClient().experiment.stop(user, project_name, _experiment)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(_experiment))Printer.print_error(''.format(e))sys.exit()Printer.print_success(\"\")", "docstring": "Stop experiment.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Examples:\n\n \\b\n ```bash\n $ polyaxon experiment stop\n ```\n\n \\b\n ```bash\n $ polyaxon experiment -xp 2 stop\n ```", "id": "f1059:m5"} {"signature": "@experiment.command()@click.pass_context@clean_outputsdef unbookmark(ctx):", "body": "user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get(''),ctx.obj.get(''))try:PolyaxonClient().experiment.unbookmark(user, project_name, _experiment)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(_experiment))Printer.print_error(''.format(e))sys.exit()Printer.print_success(\"\")", "docstring": "Unbookmark experiment.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Examples:\n\n \\b\n ```bash\n $ polyaxon experiment unbookmark\n ```\n\n \\b\n ```bash\n $ polyaxon experiment -xp 2 unbookmark\n ```", "id": "f1059:m14"} {"signature": "@experiment.command()@click.option('', '', type=int, help=\"\")@click.option('', '', is_flag=True, help=\"\")@click.option('', '', is_flag=True, default=False,help=\"\")@click.option('', is_flag=True, default=False,help=\"\")@click.pass_context@clean_outputsdef logs(ctx, job, past, follow, hide_time):", "body": "def get_experiment_logs():if past:try:response = PolyaxonClient().experiment.logs(user, project_name, _experiment, stream=False)get_logs_handler(handle_job_info=True,show_timestamp=not hide_time,stream=False)(response.content.decode().split(''))print()if not follow:returnexcept (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:if not 
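The `update` commands above (`f1059:m4`, and the group variant later in this block) build a partial-update payload from whichever flags the user actually passed, and bail out when nothing was provided. A small runnable sketch; `validate_tags` here is an assumed stand-in for the helper the code calls:

```python
# Build a partial-update dict from optional CLI flags.
def validate_tags(tags):
    # Assumed behavior: split a comma-separated string into a list.
    return [t.strip() for t in tags.split(',')] if tags else None

def build_update_dict(name=None, description=None, tags=None):
    update_dict = {}
    if name:
        update_dict['name'] = name
    if description:
        update_dict['description'] = description
    tags = validate_tags(tags)
    if tags:
        update_dict['tags'] = tags
    if not update_dict:
        raise ValueError('No argument was provided to update.')
    return update_dict

print(build_update_dict(description='new description', tags='foo, bar'))
```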
follow:Printer.print_error(''.format(_experiment))Printer.print_error(''.format(e))sys.exit()try:PolyaxonClient().experiment.logs(user,project_name,_experiment,message_handler=get_logs_handler(handle_job_info=True,show_timestamp=not hide_time))except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(_experiment))Printer.print_error(''.format(e))sys.exit()def get_experiment_job_logs():if past:try:response = PolyaxonClient().experiment_job.logs(user,project_name,_experiment,_job,stream=False)get_logs_handler(handle_job_info=True,show_timestamp=not hide_time,stream=False)(response.content.decode().split(''))print()if not follow:returnexcept (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:if not follow:Printer.print_error(''.format(_experiment))Printer.print_error(''.format(e))sys.exit()try:PolyaxonClient().experiment_job.logs(user,project_name,_experiment,_job,message_handler=get_logs_handler(handle_job_info=True,show_timestamp=not hide_time))except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(_job))Printer.print_error(''.format(e))sys.exit()user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get(''),ctx.obj.get(''))if job:_job = get_experiment_job_or_local(job)get_experiment_job_logs()else:get_experiment_logs()", "docstring": "Get experiment or experiment job logs.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Examples for getting experiment logs:\n\n \\b\n ```bash\n $ polyaxon experiment logs\n ```\n\n \\b\n ```bash\n $ polyaxon experiment -xp 10 -p mnist logs\n ```\n\n Examples for getting experiment job logs:\n\n \\b\n ```bash\n $ polyaxon experiment -xp 1 -j 1 logs\n ```", "id": "f1059:m11"} {"signature": "@experiment.command()@click.option('', '', is_flag=True, default=False,help=\"\")@click.option('', '', multiple=True, type=click.Path(exists=True),help=\"\")@click.option('', is_flag=True, default=False,help=\"\")@click.pass_context@clean_outputsdef restart(ctx, copy, file, u): ", "body": "config = Noneupdate_code = Noneif file:config = rhea.read(file)if u:ctx.invoke(upload, sync=False)update_code = Trueuser, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get(''),ctx.obj.get(''))try:if copy:response = PolyaxonClient().experiment.copy(user, project_name, _experiment, config=config, update_code=update_code)Printer.print_success(''.format(response.id))else:response = PolyaxonClient().experiment.restart(user, project_name, _experiment, config=config, update_code=update_code)Printer.print_success(''.format(response.id))except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(_experiment))Printer.print_error(''.format(e))sys.exit()", "docstring": "Restart experiment.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Examples:\n\n \\b\n ```bash\n $ polyaxon experiment --experiment=1 restart\n ```", "id": "f1059:m6"} {"signature": "@experiment.command()@click.pass_context@clean_outputsdef delete(ctx):", "body": "user, project_name, _experiment = get_project_experiment_or_local(ctx.obj.get(''),ctx.obj.get(''))if not click.confirm(\"\".format(_experiment)):click.echo('')sys.exit()try:response = PolyaxonClient().experiment.delete_experiment(user, project_name, _experiment)ExperimentManager.purge()except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as 
e:Printer.print_error(''.format(_experiment))Printer.print_error(''.format(e))sys.exit()if response.status_code == :Printer.print_success(\"\".format(_experiment))", "docstring": "Delete experiment.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Example:\n\n \\b\n ```bash\n $ polyaxon experiment delete\n ```", "id": "f1059:m3"} {"signature": "@notebook.command()@click.option('', type=bool,help='')@click.option('', '', is_flag=True, default=False,help='''')@click.pass_context@clean_outputsdef stop(ctx, commit, yes):", "body": "user, project_name = get_project_or_local(ctx.obj.get(''))if not yes and not click.confirm(\"\"\"\".format(user, project_name)):click.echo('')sys.exit()if commit is None:commit = Truetry:PolyaxonClient().project.stop_notebook(user, project_name, commit)Printer.print_success('')except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(project_name))Printer.print_error(''.format(e))sys.exit()", "docstring": "Stops the notebook deployment for this project if it exists.\n\n Uses [Caching](/references/polyaxon-cli/#caching)", "id": "f1060:m4"} {"signature": "@notebook.command()@click.pass_context@clean_outputsdef url(ctx):", "body": "user, project_name = get_project_or_local(ctx.obj.get(''))try:response = PolyaxonClient().project.get_project(user, project_name)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(project_name))Printer.print_error(''.format(e))sys.exit()if response.has_notebook:click.echo(get_notebook_url(user, project_name))else:Printer.print_warning(''.format(project_name))click.echo('')", "docstring": "Prints the notebook url for this project.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Example:\n\n \\b\n ```bash\n $ polyaxon notebook url\n ```", "id": "f1060:m2"} {"signature": "@notebook.command()@click.option('', '', multiple=True, type=click.Path(exists=True),help='')@click.option('', is_flag=True, default=False,help='')@click.pass_context@clean_outputsdef start(ctx, file, u): ", "body": "specification = Nonejob_config = Noneif file:specification = check_polyaxonfile(file, log=False).specificationif u:ctx.invoke(upload, sync=False)if specification:check_polyaxonfile_kind(specification=specification, kind=specification._NOTEBOOK)job_config = specification.parsed_datauser, project_name = get_project_or_local(ctx.obj.get(''))try:response = PolyaxonClient().project.start_notebook(user, project_name, job_config)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(project_name))Printer.print_error(''.format(e))sys.exit()if response.status_code == :Printer.print_header(\"\")click.echo(get_notebook_url(user, project_name))sys.exit()if response.status_code != :Printer.print_error('')sys.exit()Printer.print_success(''.format(project_name))clint.textui.puts(\"\")clint.textui.puts(\"\")with clint.textui.indent():clint.textui.puts(get_notebook_url(user, project_name))", "docstring": "Start a notebook deployment for this project.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Example:\n\n \\b\n ```bash\n $ polyaxon notebook start -f file -f file_override ...\n ```\n\n Example: upload before running\n\n \\b\n ```bash\n $ polyaxon -p user12/mnist notebook start -f file -u\n ```", "id": "f1060:m3"} {"signature": "@project.command()@click.option('', type=int, help='')@click.option('', '', type=str,help='')@click.option('', '', type=str, 
help='')@click.pass_context@clean_outputsdef tensorboards(ctx, query, sort, page):", "body": "user, project_name = get_project_or_local(ctx.obj.get(''))page = page or try:response = PolyaxonClient().project.list_tensorboards(username=user,project_name=project_name,query=query,sort=sort,page=page)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(project_name))Printer.print_error(''.format(e))sys.exit()meta = get_meta_response(response)if meta:Printer.print_header(''.format(user, project_name))Printer.print_header('')dict_tabulate(meta)else:Printer.print_header(''.format(user,project_name))objects = [Printer.add_status_color(o.to_light_dict(humanize_values=True))for o in response['']]objects = list_dicts_to_tabulate(objects)if objects:Printer.print_header(\"\")objects.pop('', None)dict_tabulate(objects, is_list_dict=True)", "docstring": "List tensorboard jobs for this project.\n\n Uses [Caching](/references/polyaxon-cli/#caching)", "id": "f1061:m11"} {"signature": "@project.command()@click.pass_context@clean_outputsdef unbookmark(ctx):", "body": "user, project_name = get_project_or_local(ctx.obj.get(''))try:PolyaxonClient().project.unbookmark(user, project_name)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(user, project_name))Printer.print_error(''.format(e))sys.exit()Printer.print_success(\"\".format(user, project_name))", "docstring": "Unbookmark project.\n\n Uses [Caching](/references/polyaxon-cli/#caching)", "id": "f1061:m16"} {"signature": "@project.command()@click.pass_context@clean_outputsdef get(ctx):", "body": "user, project_name = get_project_or_local(ctx.obj.get(''))try:response = PolyaxonClient().project.get_project(user, project_name)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(project_name))Printer.print_error(''.format(e))sys.exit()get_project_details(response)", "docstring": "Get info for current project, by project_name, or user/project_name.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Examples:\n\n To get current project:\n\n \\b\n ```bash\n $ polyaxon project get\n ```\n\n To get a project by name\n\n \\b\n ```bash\n $ polyaxon project get user/project\n ```", "id": "f1061:m4"} {"signature": "@project.command()@click.option('', required=True, type=str,help='')@click.option('', type=str, help='')@click.option('', type=str, help='')@click.option('', is_flag=True, help='')@click.option('', is_flag=True, help='')@click.pass_context@clean_outputsdef create(ctx, name, description, tags, private, init):", "body": "try:tags = tags.split('') if tags else Noneproject_dict = dict(name=name, description=description, is_public=not private, tags=tags)project_config = ProjectConfig.from_dict(project_dict)except ValidationError:Printer.print_error('')sys.exit()try:_project = PolyaxonClient().project.create_project(project_config)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(name))Printer.print_error(''.format(e))sys.exit()Printer.print_success(\"\".format(_project.name))if init:ctx.obj = {}ctx.invoke(init_project, project=name)", "docstring": "Create a new project.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Example:\n\n \\b\n ```bash\n $ polyaxon project create --name=cats-vs-dogs --description=\"Image Classification with DL\"\n ```", "id": "f1061:m2"} {"signature": "@bookmark.command()@click.option('', 
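The list-style commands here (`tensorboards`, and the bookmark listings that follow) share a flow: default the page, fetch, print any pagination metadata, then tabulate the results. A sketch with a stubbed response; the stripped page default is presumably 1, and the field names are illustrative:

```python
# Paginated listing flow: default the page, show meta, then the rows.
def show_page(response, page=None):
    page = page or 1  # stripped literal; presumably page 1 by default
    meta = response.get('meta')
    if meta:
        print('Navigation, page {}:'.format(page))
        for key, value in meta.items():
            print('  {}: {}'.format(key, value))
    for obj in response.get('results', []):
        print(obj)

show_page({'meta': {'count': 12, 'next': 2}, 'results': [{'id': 1}]})
```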
type=int, help='')@click.pass_context@clean_outputsdef builds(ctx, page):", "body": "user = get_username_or_local(ctx.obj.get(''))page = page or try:response = PolyaxonClient().bookmark.builds(username=user, page=page)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(user))Printer.print_error(''.format(e))sys.exit()meta = get_meta_response(response)if meta:Printer.print_header(''.format(user))Printer.print_header('')dict_tabulate(meta)else:Printer.print_header(''.format(user))objects = [Printer.add_status_color(o.to_light_dict(humanize_values=True))for o in response['']]objects = list_dicts_to_tabulate(objects)if objects:Printer.print_header(\"\")dict_tabulate(objects, is_list_dict=True)", "docstring": "List bookmarked builds for user.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Examples:\n\n \\b\n ```bash\n $ polyaxon bookmark builds\n ```\n\n \\b\n ```bash\n $ polyaxon bookmark -u adam builds\n ```", "id": "f1062:m5"} {"signature": "@click.group()@click.option('', '', type=str)@click.pass_context@clean_outputsdef bookmark(ctx, username): ", "body": "ctx.obj = ctx.obj or {}ctx.obj[''] = username", "docstring": "Commands for bookmarks.", "id": "f1062:m0"} {"signature": "@bookmark.command()@click.option('', type=int, help='')@click.pass_context@clean_outputsdef groups(ctx, page):", "body": "user = get_username_or_local(ctx.obj.get(''))page = page or try:response = PolyaxonClient().bookmark.groups(username=user, page=page)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(user))Printer.print_error(''.format(e))sys.exit()meta = get_meta_response(response)if meta:Printer.print_header(''.format(user))Printer.print_header('')dict_tabulate(meta)else:Printer.print_header(''.format(user))objects = [Printer.add_status_color(o.to_light_dict(humanize_values=True))for o in response['']]objects = list_dicts_to_tabulate(objects)if objects:Printer.print_header(\"\")dict_tabulate(objects, is_list_dict=True)", "docstring": "List bookmarked experiment groups for user.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Examples:\n\n \\b\n ```bash\n $ polyaxon bookmark groups\n ```\n\n \\b\n ```bash\n $ polyaxon bookmark -u adam groups\n ```", "id": "f1062:m2"} {"signature": "@group.command()@click.pass_context@clean_outputsdef get(ctx):", "body": "user, project_name, _group = get_project_group_or_local(ctx.obj.get(''),ctx.obj.get(''))try:response = PolyaxonClient().experiment_group.get_experiment_group(user, project_name, _group)cache.cache(config_manager=GroupManager, response=response)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(_group))Printer.print_error(''.format(e))sys.exit()get_group_details(response)", "docstring": "Get experiment group by uuid.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Examples:\n\n \\b\n ```bash\n $ polyaxon group -g 13 get\n ```", "id": "f1064:m2"} {"signature": "@group.command()@click.option('', '', is_flag=True, default=False,help='''')@click.option('', is_flag=True, default=False,help='')@click.pass_context@clean_outputsdef stop(ctx, yes, pending):", "body": "user, project_name, _group = get_project_group_or_local(ctx.obj.get(''),ctx.obj.get(''))if not yes and not click.confirm(\"\"\"\".format(_group)):click.echo('')sys.exit()try:PolyaxonClient().experiment_group.stop(user, project_name, _group, pending=pending)except (PolyaxonHTTPError, 
PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(_group))Printer.print_error(''.format(e))sys.exit()Printer.print_success(\"\")", "docstring": "Stop experiments in the group.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Examples: stop only pending experiments\n\n \\b\n ```bash\n $ polyaxon group stop --pending\n ```\n\n Examples: stop all unfinished\n\n \\b\n ```bash\n $ polyaxon group stop\n ```\n\n \\b\n ```bash\n $ polyaxon group -g 2 stop\n ```", "id": "f1064:m6"} {"signature": "@group.command()@click.pass_context@clean_outputsdef unbookmark(ctx):", "body": "user, project_name, _group = get_project_group_or_local(ctx.obj.get(''),ctx.obj.get(''))try:PolyaxonClient().experiment_group.unbookmark(user, project_name, _group)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(_group))Printer.print_error(''.format(e))sys.exit()Printer.print_success(\"\")", "docstring": "Unbookmark group.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Examples:\n\n \\b\n ```bash\n $ polyaxon group unbookmark\n ```\n\n \\b\n ```bash\n $ polyaxon group -g 2 unbookmark\n ```", "id": "f1064:m9"} {"signature": "@group.command()@click.pass_context@clean_outputsdef delete(ctx):", "body": "user, project_name, _group = get_project_group_or_local(ctx.obj.get(''),ctx.obj.get(''))if not click.confirm(\"\".format(_group)):click.echo('')sys.exit()try:response = PolyaxonClient().experiment_group.delete_experiment_group(user, project_name, _group)GroupManager.purge()except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(_group))Printer.print_error(''.format(e))sys.exit()if response.status_code == :Printer.print_success(\"\".format(_group))", "docstring": "Delete experiment group.\n\n Uses [Caching](/references/polyaxon-cli/#caching)", "id": "f1064:m3"} {"signature": "@group.command()@click.option('', type=str,help='')@click.option('', type=str, help='')@click.option('', type=str, help='')@click.pass_context@clean_outputsdef update(ctx, name, description, tags):", "body": "user, project_name, _group = get_project_group_or_local(ctx.obj.get(''),ctx.obj.get(''))update_dict = {}if name:update_dict[''] = nameif description:update_dict[''] = descriptiontags = validate_tags(tags)if tags:update_dict[''] = tagsif not update_dict:Printer.print_warning('')sys.exit()try:response = PolyaxonClient().experiment_group.update_experiment_group(user, project_name, _group, update_dict)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(_group))Printer.print_error(''.format(e))sys.exit()Printer.print_success(\"\")get_group_details(response)", "docstring": "Update experiment group.\n\n Uses [Caching](/references/polyaxon-cli/#caching)\n\n Example:\n\n \\b\n ```bash\n $ polyaxon group -g 2 update --description=\"new description for this group\"\n ```\n\n \\b\n ```bash\n $ polyaxon update --tags=\"foo, bar\"\n ```", "id": "f1064:m4"} {"signature": "@click.group()@clean_outputsdef superuser():", "body": "", "docstring": "Commands for superuser role management.", "id": "f1065:m0"} {"signature": "@superuser.command()@click.argument('', type=str)@clean_outputsdef grant(username):", "body": "try:PolyaxonClient().user.grant_superuser(username)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as 
e:Printer.print_error(''.format(username))Printer.print_error(''.format(e))sys.exit()Printer.print_success(\"\".format(username))", "docstring": "Grant superuser role to a user.\n\n Example:\n\n \\b\n ```bash\n $ polyaxon superuser grant david\n ```", "id": "f1065:m1"} {"signature": "@click.command()@click.option('', '', type=int, help='')@clean_outputsdef cluster(node):", "body": "cluster_client = PolyaxonClient().clusterif node:try:node_config = cluster_client.get_node(node)except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error(''.format(node))Printer.print_error(''.format(e))sys.exit()get_node_info(node_config)else:try:cluster_config = cluster_client.get_cluster()except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:Printer.print_error('')Printer.print_error(''.format(e))sys.exit()get_cluster_info(cluster_config)", "docstring": "Get cluster and nodes info.", "id": "f1066:m2"} {"signature": "def export(defn):", "body": "globals()[defn.__name__] = defn__all__.append(defn.__name__)return defn", "docstring": "Decorator to explicitly mark functions that are exposed in a lib.", "id": "f1072:m0"} {"signature": "def __load_linktype__(link_type):", "body": "try:filep, pathname, description = imp.find_module(link_type, sys.path)link_type_module = imp.load_module(link_type, filep, pathname,description)except ImportError:return Nonefinally:if filep:filep.close()return link_type_module", "docstring": "Given a string for a given module, attempt to load it.", "id": "f1075:m4"} {"signature": "def clookup(ll_type):", "body": "res = __get_ll_type__(ll_type)if res:return res[]else:return res", "docstring": "Given an ll_type, retrieve the linklayer constructor to decode\nthe packets.", "id": "f1075:m3"} {"signature": "def __get_ll_type__(ll_type):", "body": "res = [llt for llt in __LL_TYPES__if llt[] == ll_type]assert len(res) < , ''if res:return res[]else:return None", "docstring": "Given an lltype value, retrieve its definition.", "id": "f1075:m0"} {"signature": "def slookup(ll_type):", "body": "res = __get_ll_type__(ll_type)if res:return res[]else:return res", "docstring": "Given an ll_type, retrieve the short name for the link layer.", "id": "f1075:m2"} {"signature": "def _generate_packets(file_h, header, layers=):", "body": "hdrp = ctypes.pointer(header)while True:pkt = _read_a_packet(file_h, hdrp, layers)if pkt:yield pktelse:break", "docstring": "Read packets one by one from the capture file. 
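`__get_ll_type__`, `slookup` and `clookup` above read like a classic lookup table of (linktype, short name, constructor) triples with the numeric literals stripped. A hedged reconstruction of that trio; LINKTYPE_ETHERNET really is 1 in pcap, but the decoder and the tuple indexes here are assumptions:

```python
# Link-layer lookup table: map a pcap linktype number to a short name
# and a decoder constructor.
def ethernet_decoder(raw):
    return ('ethernet frame', raw)  # illustrative stand-in decoder

__LL_TYPES__ = [
    # (ll_type, short name, constructor)
    (1, 'ethernet', ethernet_decoder),
]

def __get_ll_type__(ll_type):
    res = [llt for llt in __LL_TYPES__ if llt[0] == ll_type]
    assert len(res) < 2, 'duplicate linklayer type in table'
    return res[0] if res else None

def slookup(ll_type):
    res = __get_ll_type__(ll_type)
    return res[1] if res else None

def clookup(ll_type):
    res = __get_ll_type__(ll_type)
    return res[2] if res else None

print(slookup(1))           # ethernet
print(clookup(1) is ethernet_decoder)  # True
```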
Expects the file\nhandle to point to the location immediately after the header (24\nbytes).", "id": "f1076:m6"} {"signature": "def init_capfile(self, layers=):", "body": "self.capfile = savefile.load_savefile(open('', ''),layers=layers)", "docstring": "Initialise capture file.", "id": "f1078:c0:m0"} {"signature": "@classmethoddef setUpClass(cls):", "body": "print('')", "docstring": "Print a start message when loading the test suite.", "id": "f1081:c0:m0"} {"signature": "@classmethoddef setUpClass(cls):", "body": "print('')", "docstring": "Print a start message when loading the test suite.", "id": "f1082:c0:m0"} {"signature": "def init_capfile(self, layers=):", "body": "tfile = create_pcap()self.capfile = savefile.load_savefile(tfile, layers=layers)tfile.close()if os.path.exists(tfile.name):os.unlink(tfile.name)", "docstring": "Initialise the capture file.", "id": "f1083:c0:m0"} {"signature": "def create_pcap():", "body": "tfile = tempfile.NamedTemporaryFile()if sys.version_info[] >= : capture = pickle.loads(base64.b64decode(fixture.TESTPCAP3))else: capture = pickle.loads(fixture.TESTPCAP2.decode(''))with open(tfile.name, '') as f:f.write(capture)return tfile", "docstring": "Create a capture file from the test fixtures.", "id": "f1083:m0"} {"signature": "def setUp(self):", "body": "if not self.capfile:self.init_capfile()", "docstring": "Set up a default capture file.", "id": "f1083:c0:m2"} {"signature": "def parse_ipv4(address):", "body": "raw = struct.pack('', address)octets = struct.unpack('', raw)[::-]ipv4 = b''.join([('' % o).encode('') for o in bytearray(octets)])return ipv4", "docstring": "Given a raw IPv4 address (i.e. as an unsigned integer), return it in\ndotted quad notation.", "id": "f1087:m0"} {"signature": "def payload_type(ethertype):", "body": "if ethertype == :from pcapfile.protocols.network.ip import IPreturn (IP, '')", "docstring": "Returns the appropriate payload constructor based on the supplied\nEtherType.", "id": "f1088:m1"} {"signature": "def strip_llc(self, idx):", "body": "llc = {}snap = llc_dsap = struct.unpack('', self._packet[idx:idx + ])[]llc[''] = llc_dsap >> llc[''] = llc_dsap & idx += llc_ssap = struct.unpack('', self._packet[idx:idx + ])[]llc[''] = llc_ssap >> llc[''] = llc_ssap & idx += if llc_dsap == snap and llc_ssap == snap:llc_control = struct.unpack('', self._packet[idx:idx + ])[]llc[''] = llc_control >> llc[''] = llc_control & idx += llc[''] = self._packet[idx:idx + ]idx += llc[''] = self._packet[idx:idx + ]return , llcelse:return , llc", "docstring": "strip(4 or 8 byte) logical link control headers\n :return: int\n number of processed bytes\n :return: dict\n llc information\n see -> http://www.wildpackets.com/resources/compendium/ethernet/frame_snap_iee8023\n ABBRVS.\n ssap: source service access point\n dsap: destination service access point\n SNAP(Subnetwork Acess Protocol)", "id": "f1089:c4:m4"} {"signature": "def strip_dbm_tx_power(self, idx):", "body": "idx = Radiotap.align(idx, )dbm_tx_power, = struct.unpack_from('', self._rtap, idx)return idx + , dbm_tx_power", "docstring": "strip(1 byte) dbm_tx_power\n :return: int\n idx\n :return: int", "id": "f1089:c1:m15"} {"signature": "def __init__(self, frame, no_rtap=False):", "body": "Wifi.__init__(self, frame, no_rtap)self.tagged_params = []self._raw_tagged_params = Noneself.timestamp = Noneself.interval = Noneself.fixed_capabils = None", "docstring": "Constructor Method\n :frame: ctypes.Structure\n :subtype: int", "id": "f1089:c5:m0"} {"signature": "def strip_rx_flags(self, idx):", "body": "rx_flags 
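`parse_ipv4` above lost its struct format strings. A hedged reconstruction of the dotted-quad conversion it documents; the `'I'`/`'4B'` formats and the byte order are assumptions, so the example value is chosen for a little-endian host:

```python
# Convert a raw IPv4 address (unsigned integer) to dotted-quad bytes.
import struct

def parse_ipv4(address):
    raw = struct.pack('I', address)            # assumed native format
    octets = struct.unpack('4B', raw)[::-1]    # assumed octet unpack
    return b'.'.join([('%d' % o).encode('ascii')
                      for o in bytearray(octets)])

# On a little-endian host this prints b'127.0.0.1'.
print(parse_ipv4(0x7f000001))
```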
= collections.namedtuple('', ['', ''])idx = Radiotap.align(idx, )flags, = struct.unpack_from('', self._rtap, idx)flag_bits = format(flags, '')[::-]rx_flags.reserved = int(flag_bits[])rx_flags.badplcp = int(flag_bits[])return idx + , rx_flags", "docstring": "strip(2 byte) radiotap.rxflags\n :idx: int\n :return: int\n idx\n :return: collections.namedtuple", "id": "f1089:c1:m19"} {"signature": "def strip_mac_addrs(self):", "body": "qos_idx, seq_idx = , sa, ta, ra, da, bssid = None, None, None, None, Noneif self.to_ds == and self.from_ds == :(ra, ta, da) = struct.unpack('', self._packet[:])sa = struct.unpack('', self._packet[:])[]qos_idx = seq_idx = elif self.to_ds == and self.from_ds == :(ra, ta, sa) = struct.unpack('', self._packet[:])qos_idx = seq_idx = elif self.to_ds == and self.from_ds == :(ra, ta, da) = struct.unpack('', self._packet[:])qos_idx = seq_idx = elif self.to_ds == and self.from_ds == :(ra, ta, bssid) = struct.unpack('', self._packet[:])qos_idx = seq_idx = if ta is not None:ta = Wifi.get_mac_addr(ta)if ra is not None:ra = Wifi.get_mac_addr(ra)if sa is not None:sa = Wifi.get_mac_addr(sa)if da is not None:da = Wifi.get_mac_addr(da)if bssid is not None:bssid = Wifi.get_mac_addr(bssid)return seq_idx, qos_idx, sa, ta, ra, da, bssid", "docstring": "strip mac address(each 6 byte) information.\n (wlan.ta, wlan.ra, wlan.sa, wlan.da)\n (transmitter, receiver, source, destination)\n :return: int\n index of sequence control\n :return: int\n index after mac addresses\n :return: str\n source address (sa)\n :return: str\n transmitter address (ta)\n :return: str\n receiver address (ra)\n :return: str\n destination address (da)\n :return: str\n basic service sed identifier (bssid)", "id": "f1089:c2:m4"} {"signature": "def __init__(self, rtap_bytes):", "body": "super(Radiotap, self).__init__()self._raw = {} self._bits = {} idx = self._rtap = rtap_bytesself.vers = Radiotap.strip_vers(self._rtap[idx:idx + ])idx += self.pad = Radiotap.strip_pad(self._rtap[idx:idx + ])idx += self.len = Radiotap.strip_len(self._rtap[idx:idx + ])idx += self.present, self.present_bits = Radiotap.strip_present(self._rtap[idx:idx + ])idx += if self.present.tsft: idx, self.mactime = self.strip_tsft(idx)if self.present.flags: idx, self.flags = self.strip_flags(idx)if self.present.rate: idx, self.rate = self.strip_rate(idx)if self.present.channel: idx, self.chan = self.strip_chan(idx)if self.present.fhss: idx, self.fhss = self.strip_fhss(idx)if self.present.dbm_antsignal: idx, self.dbm_antsignal = self.strip_dbm_antsignal(idx)if self.present.dbm_antnoise: idx, self.dbm_antnoise = self.strip_dbm_antnoise(idx)if self.present.lock_quality: idx, self.lock_quality = self.strip_lock_quality(idx)if self.present.tx_attenuation: idx, self.tx_attenuation = self.strip_tx_attenuation(idx)if self.present.db_tx_attenuation: idx, self.db_tx_attenuation = self.strip_db_tx_attenuation(idx)if self.present.dbm_tx_power: idx, self.dbm_tx_power = self.strip_dbm_tx_power(idx)if self.present.antenna: idx, self.antenna = self.strip_antenna(idx)if self.present.db_antsignal: idx, self.db_antsignal = self.strip_db_antsignal(idx)if self.present.db_antnoise: idx, self.db_antnoise = self.strip_db_antnoise(idx)if self.present.rxflags: idx, self.rxflags = self.strip_rx_flags(idx)if self.present.txflags: idx, self.txflags = self.strip_tx_flags(idx)if self.present.rts_retries: idx, self.rts_retries = self.strip_rts_retries(idx)if self.present.data_retries: idx, self.data_retries = self.strip_data_retries(idx)if self.present.xchannel: idx, self.xchannel = 
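The 802.11 classes in this block repeatedly pass raw 6-byte fields through `Wifi.get_mac_addr` before use (`strip_mac_addrs`, the block-ack and control frames). The original helper isn't shown here; a minimal sketch of what such a conversion can look like, with the colon-hex formatting being an assumption:

```python
# Render 6 raw bytes as a printable MAC address.
def get_mac_addr(mac_bytes):
    return ':'.join('{:02x}'.format(b) for b in bytearray(mac_bytes))

print(get_mac_addr(b'\x00\x11\x22\xaa\xbb\xcc'))  # 00:11:22:aa:bb:cc
```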
self.strip_xchannel(idx)if self.present.mcs: idx, self.mcs = self.strip_mcs(idx)if self.present.ampdu: idx, self.ampdu = self.strip_ampdu(idx)if self.present.vht: idx, self.vht = self.strip_vht(idx)self.prot_type = self.extract_protocol()", "docstring": "Constructor method.\n :rtap_bytes: ctypes.Structure", "id": "f1089:c1:m0"} {"signature": "@staticmethoddef strip_cntrl(payload):", "body": "cntrl = struct.unpack('', payload)[]cntrl_bits = format(cntrl, '')[::-]ackpolicy = int(cntrl_bits[])multitid = int(cntrl_bits[])return ackpolicy, multitid", "docstring": "strip(2 byte) wlan.ba.control\n :payload: ctypes.structure\n :return: int\n multitid (tid: traffic indicator)\n :return: int\n ackpolicy", "id": "f1089:c12:m2"} {"signature": "def strip_rts_retries(self, idx):", "body": "rts_retries, = struct.unpack_from('', self._rtap, idx)return idx + , rts_retries", "docstring": "strip(1 byte) rts_retries\n :idx: int\n :return: int\n idx\n :return: int", "id": "f1089:c1:m21"} {"signature": "def __init__(self, frame, no_rtap=False):", "body": "Control.__init__(self, frame, no_rtap)(ra_mac, ta_mac) = struct.unpack('', self._packet[:])self.ra = self.ta = Noneself.ackpolicy = self.multitid = Noneself.ssc_frag = self.ssc_seq = Noneself.bitmap_str = Noneself.acked_seqs = []self.ra = Wifi.get_mac_addr(ra_mac)self.ta = Wifi.get_mac_addr(ta_mac)idx = payload = self._packet[idx:idx + ]self.ackpolicy, self.multitid = BACK.strip_cntrl(payload)idx += payload = self._packet[idx:idx + ]self.ssc_seq, self.ssc_frag = BACK.strip_ssc(payload)idx += payload = self._packet[idx:idx + ]self.bitmap_str = BACK.strip_bitmap_str(payload)idx += self.acked_seqs = BACK.extract_acked_seqs(self.bitmap_str, self.ssc_seq)", "docstring": "Constructor method.\n :frame: ctypes.Structure", "id": "f1089:c12:m0"} {"signature": "def strip_ccmp(self, idx):", "body": "ccmp_extiv = Noneif len(self._packet[idx:]) >= :raw_bytes = self._packet[idx:idx + ]ccmp_extiv, = struct.unpack_from('', raw_bytes, )return , ccmp_extiv", "docstring": "strip(8 byte) wlan.ccmp.extiv\n CCMP Extended Initialization Vector\n :return: int\n number of processed bytes\n :return: ctypes.raw\n ccmp vector", "id": "f1089:c4:m2"} {"signature": "def strip_xchannel(self, idx):", "body": "xchannel = collections.namedtuple('', ['', '', '', ''])flags = collections.namedtuple('', ['', '', '', '', '', '','', '', '', '', '', '','', '', ''])idx = Radiotap.align(idx, )flag_val, freq, channel, max_power = struct.unpack_from('', self._rtap, idx)xchannel.freq = freqxchannel.channel = channelxchannel.max_power = max_powerbits = format(flag_val, '')[::-]flags.turbo = int(bits[])flags.cck = int(bits[])flags.ofdm = int(bits[])flags.two_g = int(bits[])flags.five_g = int(bits[])flags.passive = int(bits[])flags.dynamic = int(bits[])flags.gfsk = int(bits[])flags.gsm = int(bits[])flags.sturbo = int(bits[])flags.half = int(bits[])flags.quarter = int(bits[])flags.ht_20 = int(bits[])flags.ht_40u = int(bits[])flags.ht_40d = int(bits[])xchannel.flags = flagsreturn idx + , xchannel", "docstring": "strip(7 bytes) radiotap.xchannel.channel(1 byte),\n radiotap.xchannel.freq(2 bytes) and radiotap.xchannel.flags(4 bytes)\n :idx: int\n :return: int\n idx\n :return: collections.namedtuple", "id": "f1089:c1:m23"} {"signature": "def strip_flags(self, idx):", "body": "flags = collections.namedtuple('', ['', '', '', '', '','', '', ''])val, = struct.unpack_from('', self._rtap, idx)bits = format(val, '')[::-]flags.cfp = int(bits[])flags.preamble = int(bits[])flags.wep = int(bits[])flags.fragmentation = 
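`strip_flags`, `strip_present` and `strip_cntrl` above all use the same trick: unpack an integer, format it as a fixed-width binary string, reverse it so that index `i` addresses bit `i`, then read flags by position. A runnable sketch against the radiotap flags byte; the bit positions follow the radiotap spec, the sample value is made up:

```python
# Bit-position parsing via a reversed fixed-width binary string.
import struct

def parse_flag_bits(payload):
    val, = struct.unpack('<B', payload)  # 1-byte radiotap flags field
    bits = format(val, '08b')[::-1]      # after reversal, bits[i] == bit i
    return {
        'cfp': int(bits[0]),
        'preamble': int(bits[1]),
        'wep': int(bits[2]),
        'fragmentation': int(bits[3]),
        'fcs': int(bits[4]),
    }

# 0x14 == 0b00010100: the wep and fcs bits are set.
print(parse_flag_bits(b'\x14'))
```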
int(bits[])flags.fcs = int(bits[])flags.datapad = int(bits[])flags.badfcs = int(bits[])flags.shortgi = int(bits[])return idx + , flags", "docstring": "strip(1 byte) radiotap.flags\n :idx: int\n :return: int\n idx\n :return: collections.namedtuple", "id": "f1089:c1:m6"} {"signature": "@staticmethoddef align(val, align):", "body": "return (val + align - ) & ~(align - )", "docstring": ":val: int\n:align: int\n:return: int", "id": "f1089:c1:m28"} {"signature": "@staticmethoddef strip_present(payload):", "body": "present = collections.namedtuple('', ['', '', '', '', '','', '', '','', '', '','', '', '', '','', '', '', '','', '', '', '', '', ''])val = struct.unpack('', payload)[]bits = format(val, '')[::-]present.tsft = int(bits[]) present.flags = int(bits[]) present.rate = int(bits[]) present.channel = int(bits[]) present.fhss = int(bits[]) present.dbm_antsignal = int(bits[]) present.dbm_antnoise = int(bits[]) present.lock_quality = int(bits[]) present.tx_attenuation = int(bits[]) present.db_tx_attenuation = int(bits[]) present.dbm_tx_power = int(bits[]) present.antenna = int(bits[]) present.db_antsignal = int(bits[]) present.db_antnoise = int(bits[]) present.rxflags = int(bits[]) present.txflags = int(bits[]) present.rts_retries = int(bits[]) present.data_retries = int(bits[]) present.xchannel = int(bits[]) present.mcs = int(bits[]) present.ampdu = int(bits[]) present.vht = int(bits[]) present.rtap_ns = int(bits[]) present.ven_ns = int(bits[]) present.ext = int(bits[]) return present, bits", "docstring": "strip(4 byte) radiotap.present. Those are flags that\n identify existence of incoming radiotap meta-data.\n :idx: int\n :return: str\n :return: namedtuple", "id": "f1089:c1:m4"} {"signature": "def __init__(self, frame, no_rtap=False):", "body": "Management.__init__(self, frame, no_rtap)idx = self.timestamp = self.interval = Noneself.ta = self.ra = self.bssid = Noneself.seq_num = self.frag_num = Noneself.ess = self.ibss = Noneself.privacy = Noneself.priv = self.short_preamble = self.pbcc = self.chan_agility = Noneself.spec_man = self.short_slot = self.apsd = self.radio_meas = Noneself.dss_ofdm = self.del_back = self.imm_back = Noneseq_idx, _, _, self.ta, self.ra, _, self.bssid = self.strip_mac_addrs()idx = seq_idxself.seq_num, self.frag_num = self.strip_seq_cntrl(idx)idx += payload = self._packet[idx:idx + ]timestamp, interval, fixed_capabils = self.strip_fixed_params(payload)if all([timestamp, interval, fixed_capabils]):self.timestamp = timestampself.interval = intervalself.set_fixed_capabils(fixed_capabils)idx += else:logging.warning(\"\")returnif idx < len(self._packet):self._raw_tagged_params = self._packet[idx:]is_out_bound, tagged_params = self.parse_tagged_params(self._raw_tagged_params)if len(tagged_params):self.tagged_params = tagged_paramsif is_out_bound:logging.warning(\"\")", "docstring": "Constructor method.\n :frame: ctypes.Structure", "id": "f1089:c8:m0"} {"signature": "def get_vendor_ies(self, mac_block=None, oui_type=None):", "body": "vendor_ies = []if mac_block is not None:if Management.is_valid_mac_oui(mac_block):mac_block = mac_block.upper()if '' in mac_block:mac_block.replace('', '')else:logging.warning(\"\")return Nonefor elem in self.tagged_params:tag_num = elem['']if MNGMT_TAGS[tag_num] == '':if mac_block is None:vendor_ies.append(elem)elif elem[''][''] == mac_block.encode(''):if oui_type is None:vendor_ies.append(elem)elif elem[''][''] == oui_type:vendor_ies.append(elem)return vendor_ies", "docstring": "vendor information element querying\n :mac_block: str\n first 3 
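`Radiotap.align` above is the standard round-up-to-an-alignment-boundary bit trick with its constants stripped; both stripped literals are 1. Reconstructed:

```python
# Round `val` up to the next multiple of a power-of-two `alignment`.
def align(val, alignment):
    return (val + alignment - 1) & ~(alignment - 1)

print(align(5, 4), align(8, 4))  # 8 8
```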
bytes of mac addresses in format of\n 00-11-22 or 00:11:22 or 001122\n :oui_type: int\n vendor's IE type\n :return: int\n is valid mac_block format\n -1 is unknown\n :return: dict[]\n list of oui information elements\n -1 on error (invalid v", "id": "f1089:c5:m10"} {"signature": "def strip_msdu(self, idx):", "body": "padding = len_payload = msdu = {'': {},'': None,'': None,'': None,'': }(da_mac, sa_mac) = struct.unpack('', self._packet[idx:idx + ])msdu[''] = Wifi.get_mac_addr(da_mac)msdu[''] = Wifi.get_mac_addr(sa_mac)idx += msdu[''] = struct.unpack('', self._packet[idx:idx + ])[]idx += offset, msdu[''] = self.strip_llc(idx)idx += offsetlen_payload = msdu[''] - offsetmsdu[''] = self._packet[idx:idx + len_payload]padding = - (len_payload % )return msdu, msdu[''] + padding + ", "docstring": "strip single mac service data unit(msdu)\n see -> https://mrncciew.com/2014/11/01/cwap-802-11-data-frame-aggregation/\n :idx: int\n :return: dict\n msdu\n :return: int\n number of processed bytes", "id": "f1089:c4:m3"} {"signature": "def strip_fhss(self, idx):", "body": "fhss = collections.namedtuple('', ['', ''])fhss.hopset, fhss.pattern, = struct.unpack_from('', self._rtap, idx)return idx + , fhss", "docstring": "strip (2 byte) radiotap.fhss.hopset(1 byte) and\n radiotap.fhss.pattern(1 byte)\n :idx: int\n :return: int\n idx\n :return: collections.namedtuple", "id": "f1089:c1:m9"} {"signature": "def strip_tx_attenuation(self, idx):", "body": "idx = Radiotap.align(idx, )tx_attenuation, = struct.unpack_from('', self._rtap, idx)return idx + , tx_attenuation", "docstring": "strip(1 byte) tx_attenuation\n :idx: int\n :return: int\n idx\n :return: int", "id": "f1089:c1:m13"} {"signature": "@staticmethoddef get_interval(payload):", "body": "if len(payload) != :return Noneinterval = struct.unpack('', payload)[]return interval", "docstring": "strip wlan_mgt.fixed.beacon(2 bytes)\n beacon interval\n :payload: ctypes.structure\n :return: int\n None on error", "id": "f1089:c5:m6"} {"signature": "def extract_protocol(self):", "body": "if self.present.mcs:return ''if self.present.vht:return ''if self.present.channel and hasattr(self, ''):if self.chan.five_g:if self.chan.ofdm:return ''elif self.chan.two_g:if self.chan.cck:return ''elif self.chan.ofdm or self.chan.dynamic:return ''return ''", "docstring": "extract 802.11 protocol from radiotap.channel.flags\n :return: str\n protocol name\n one of the below on success\n [.11a, .11b, .11g, .11n, .11ac]\n None on failure", "id": "f1089:c1:m27"} {"signature": "@staticmethoddef extract_acked_seqs(bitmap, ssc_seq):", "body": "acked_seqs = []for idx, val in enumerate(bitmap):if int(val) == :seq = (ssc_seq + idx) % acked_seqs.append(seq)return acked_seqs", "docstring": "extracts acknowledged sequences from bitmap and\n starting sequence number.\n :bitmap: str\n :ssc_seq: int\n :return: int[]\n acknowledged sequence numbers", "id": "f1089:c12:m5"} {"signature": "def strip_data_retries(self, idx):", "body": "data_retries, = struct.unpack_from('', self._rtap, idx)return idx + , data_retries", "docstring": "strip(1 byte) data_retries\n :idx: int\n :return: int\n idx\n :return: int", "id": "f1089:c1:m22"} {"signature": "def __init__(self, frame, no_rtap=False):", "body": "Wifi.__init__(self, frame, no_rtap)", "docstring": "Constructor method.\n :packet: ctypes.Structure\n :no_rtap: Bool\n shall parse radiotap headers", "id": "f1089:c3:m0"} {"signature": "def strip_db_tx_attenuation(self, idx):", "body": "idx = Radiotap.align(idx, )db_tx_attenuation, = struct.unpack_from('', self._rtap, 
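The extract_acked_seqs record above walks a block-ack bitmap string and converts set bits to sequence numbers. A minimal sketch of that technique; the modulo 4096 wrap is an assumption (802.11 sequence numbers are 12-bit), since the dataset elides the literal:

```python
def extract_acked_seqs(bitmap, ssc_seq):
    # Each '1' character at offset i in the (already reversed) bitmap
    # string acknowledges sequence ssc_seq + i, wrapping at 4096.
    acked = []
    for offset, bit in enumerate(bitmap):
        if int(bit) == 1:
            acked.append((ssc_seq + offset) % 4096)
    return acked

# extract_acked_seqs('1101', 4094) -> [4094, 4095, 1]
```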
idx)return idx + , db_tx_attenuation", "docstring": "strip(1 byte) db_tx_attenuation\n :return: int\n idx\n :return: int", "id": "f1089:c1:m14"} {"signature": "def strip_qos_cntrl(self, idx, prot_type):", "body": "qos_cntrl, = struct.unpack('', self._packet[idx:idx + ])qos_cntrl_bits = format(qos_cntrl, '')[::-]qos_pri = qos_cntrl & qos_bit = int(qos_cntrl_bits[])qos_ack = int(qos_cntrl_bits[:], )amsdupresent = if prot_type == '':amsdupresent = int(qos_cntrl_bits[])return , qos_pri, qos_bit, qos_ack, amsdupresent", "docstring": "strip(2 byte) wlan.qos\n :idx: int\n :prot_type: string\n 802.11 protocol type(.11ac, .11a, .11n, etc)\n :return: int\n number of processed bytes\n :return: int\n qos priority\n :return: int\n qos bit\n :return: int\n qos acknowledgement\n :return: int\n amsdupresent(aggregated mac service data unit)", "id": "f1089:c4:m1"} {"signature": "def WIFI(frame, no_rtap=False):", "body": "pack = Nonetry:pack = WiHelper.get_wifi_packet(frame, no_rtap)except Exception as e:logging.exception(e)return pack", "docstring": "calls wifi packet discriminator and constructor.\n :frame: ctypes.Structure\n :no_rtap: Bool\n :return: packet object in success\n :return: int\n -1 on known error\n :return: int\n -2 on unknown error", "id": "f1089:m0"} {"signature": "def strip_vht(self, idx):", "body": "vht = collections.namedtuple('', ['', '', '', '','', '', '','', '', '', '', '', '','', '', '', '','', '', '', '', ''])user = collections.namedtuple('', ['', '', ''])idx = Radiotap.align(idx, )known, flags, bw = struct.unpack_from('', self._rtap, idx)mcs_nss_0, mcs_nss_1, mcs_nss_2, mcs_nss_3 = struct.unpack_from('', self._rtap, idx + )coding, group_id, partial_id = struct.unpack_from('', self._rtap, idx + )known_bits = format(known, '')[::-]vht.known_bits = known_bitsvht.have_stbc = int(known_bits[]) vht.have_txop_ps = int(known_bits[]) vht.have_gi = int(known_bits[]) vht.have_sgi_nsym_da = int(known_bits[]) vht.have_ldpc_extra = int(known_bits[]) vht.have_beamformed = int(known_bits[]) vht.have_bw = int(known_bits[]) vht.have_gid = int(known_bits[]) vht.have_paid = int(known_bits[]) flag_bits = format(flags, '')[::-]vht.flag_bits = flag_bitsvht.stbc = int(flag_bits[])vht.txop_ps = int(flag_bits[])vht.gi = int(flag_bits[])vht.sgi_nysm_da = int(flag_bits[])vht.ldpc_extra = int(flag_bits[])vht.beamformed = int(flag_bits[])vht.group_id = group_idvht.partial_id = partial_idvht.bw = bwvht.user_0 = user(None, None, None)vht.user_1 = user(None, None, None)vht.user_2 = user(None, None, None)vht.user_3 = user(None, None, None)for (i, mcs_nss) in enumerate([mcs_nss_0, mcs_nss_1, mcs_nss_2, mcs_nss_3]):if mcs_nss:nss = mcs_nss & >> mcs = (mcs_nss & ) >> coding = (coding & **i) >> iif i == :vht.user_0 = user(nss, mcs, coding)elif i == :vht.user_1 = user(nss, mcs, coding)elif i == :vht.user_2 = user(nss, mcs, coding)elif i == :vht.user_3 = user(nss, mcs, coding)return idx + , vht", "docstring": "strip(12 byte) radiotap.vht\n :idx: int\n :return: int\n idx\n :return: collections.namedtuple", "id": "f1089:c1:m26"} {"signature": "def __init__(self, frame, no_rtap=False):", "body": "Control.__init__(self, frame, no_rtap)ra_mac = struct.unpack('', self._packet[:])[]self.ra = Wifi.get_mac_addr(ra_mac)", "docstring": "Constructor method.\n :frame: ctypes.Structure", "id": "f1089:c11:m0"} {"signature": "def set_fixed_capabils(self, capabils):", "body": "self.ess = capabils['']self.ibss = capabils['']self.priv = capabils['']self.short_preamble = capabils['']self.pbcc = capabils['']self.chan_agility = 
capabils['']self.spec_man = capabils['']self.short_slot = capabils['']self.apsd = capabils['']self.radio_meas = capabils['']self.dss_ofdm = capabils['']self.del_back = capabils['']self.imm_back = capabils['']", "docstring": "set keys of capabils into fields of object\n :capabils: dict", "id": "f1089:c5:m9"} {"signature": "def __init__(self, frame, no_rtap=False, parse_amsdu=True):", "body": "Data.__init__(self, frame, no_rtap)idx = self.sa = self.ta = self.ra = self.da = Noneself.seq_num = self.frag_num = Noneself.qos_pri = self.qos_bit = self.qos_ack = Noneself.ccmp_extiv = Noneself.payload = []seq_idx, qos_idx, self.sa, self.ta, self.ra, self.da, _ = self.strip_mac_addrs()self.seq_num, self.frag_num = self.strip_seq_cntrl(seq_idx)idx = qos_idxincr, self.qos_pri, self.qos_bit, self.qos_ack, self.amsdupresent =self.strip_qos_cntrl(idx, self.radiotap.prot_type)idx += incrif self.wep == :incr, self.ccmp_extiv = self.strip_ccmp(idx)idx += incrif parse_amsdu:if self.amsdupresent != and self.wep == :while idx < len(self._packet):msdu, offset = self.strip_msdu(idx)self.payload.append(msdu)idx += offsetelse:if self.wep == :msdu = {}offset, llc = self.strip_llc(idx)msdu[''] = llcmsdu[''] = self._packet[idx + offset:]self.payload.append(msdu)else:self.payload.append({'': self._packet[idx:]})", "docstring": "Constructor method.\n :frame: ctypes.Structure\n :parse_amsdu: Bool\n shall parse aggregated mac service data unit", "id": "f1089:c4:m0"} {"signature": "@staticmethoddef strip_len(payload):", "body": "return struct.unpack('', payload)[]", "docstring": "strip(2 byte) radiotap.length\n :payload: ctypes.Structure\n :return: int", "id": "f1089:c1:m3"} {"signature": "def strip_mcs(self, idx):", "body": "mcs = collections.namedtuple('', ['', '', '', '', '','', '', '', '',''])idx = Radiotap.align(idx, )known, flags, index = struct.unpack_from('', self._rtap, idx)bits = format(flags, '')[::-]mcs.known = known mcs.index = index mcs.have_bw = int(bits[]) mcs.have_mcs = int(bits[]) mcs.have_gi = int(bits[]) mcs.have_format = int(bits[]) mcs.have_fec = int(bits[]) mcs.have_stbc = int(bits[]) mcs.have_ness = int(bits[]) mcs.ness_bit1 = int(bits[]) return idx + , mcs", "docstring": "strip(3 byte) radiotap.mcs which contains 802.11n bandwidth,\n mcs(modulation and coding scheme) and stbc(space time block coding)\n information.\n :idx: int\n :return: int\n idx\n :return: collections.namedtuple", "id": "f1089:c1:m24"} {"signature": "@staticmethoddef get_mac_addr(mac_addr):", "body": "mac_addr = bytearray(mac_addr)mac = b''.join([('' % o).encode('') for o in mac_addr])return mac", "docstring": "converts bytes to mac addr format\n :mac_addr: ctypes.structure\n :return: str\n mac addr in format\n 11:22:33:aa:bb:cc", "id": "f1089:c2:m2"} {"signature": "def print_loading(self, wait, message):", "body": "tags = ['', '', '', '']for i in range(wait):time.sleep()sys.stdout.write(\"\" % {'': message,'': tags[i % ]})sys.stdout.flush()passsys.stdout.write(\"\" % message)sys.stdout.flush()pass", "docstring": "print loading message on screen\n\n.. 
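The get_mac_addr record above documents the output format (11:22:33:aa:bb:cc) but the dataset elides the format string and separator; a minimal sketch under those assumptions:

```python
def get_mac_addr(mac_addr):
    # Convert six raw bytes to a colon-separated lowercase-hex MAC string.
    # '%02x' and the b':' separator are assumed from the documented format.
    mac_addr = bytearray(mac_addr)
    return b':'.join(('%02x' % octet).encode('ascii') for octet in mac_addr)

# get_mac_addr(b'\x11\x22\x33\xaa\xbb\xcc') -> b'11:22:33:aa:bb:cc'
```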
note::\n\n loading message is only written to `sys.stdout`\n\n\n:param int wait: seconds to wait\n:param str message: message to print\n:return: None", "id": "f1105:c0:m2"} {"signature": "def warn(self, *args, **kwargs):", "body": "return self.warn_message(*args, **kwargs)", "docstring": "alias for `warn_message`", "id": "f1105:c0:m4"} {"signature": "def warn_message(self, message, fh=None, prefix=\"\", suffix=\"\"):", "body": "msg = prefix + message + suffixfh = fh or sys.stdoutif fh is sys.stdout:termcolor.cprint(msg, color=\"\")else:fh.write(msg)pass", "docstring": "print a warn-type message;\nif the file handle is `sys.stdout`, print a colored message\n\n\n:param str message: message to print\n:param file fh: file handle, default is `sys.stdout`\n:param str prefix: message prefix, default is `[warn]`\n:param str suffix: message suffix, default is `...`\n:return: None", "id": "f1105:c0:m3"} {"signature": "def print_message(self, message, fh=None):", "body": "return self.parser._print_message(message + \"\", fh)", "docstring": "print message on screen\n\n:param str message: message to print\n:param file fh: file handle, default is None\n:return: None", "id": "f1105:c0:m1"} {"signature": "@classmethoddef add_arguments(cls):", "body": "return [(('',), dict(help='')),(('',), dict(nargs='', default='', help='')),(('',),dict(action='', help='')),(('',),dict(action='', help='')),]pass", "docstring": "Create project.\n\nBy default cliez searches GitHub first;\nif nothing is found, it then searches Bitbucket.\n\nIf the user passes the `--local` option, the local path is searched first.\n\nIf the user passes `--bitbucket`, Bitbucket is searched first,\nthen GitHub.", "id": "f1106:c0:m1"} {"signature": "def render(self, match_string, new_string):", "body": "current_dir = self.options.dirif os.path.expanduser(current_dir) in ['', os.path.expanduser(\"\")]:self.error(\"\", -)passdef match_directory(path):\"\"\"\"\"\"skip = Falsefor include_dir in ['' % s for s inself.exclude_directories]:if path.find(include_dir) > -:skip = Truebreakpassreturn skipfor v in os.walk(current_dir):if os.path.basename(v[]) in self.exclude_directories:continueif match_directory(v[]):continuefor base_name in v[]:file_name = os.path.join(v[], base_name)try:with open(file_name, '') as fh:buffer = fh.read()buffer = buffer.replace(match_string, new_string)passwith open(file_name, '') as fh:fh.write(buffer)passexcept UnicodeDecodeError:continuepasspassredo_directories = []redo_files = []for v in os.walk(current_dir):if os.path.basename(v[]) in self.exclude_directories:continueif match_directory(v[]):continuefor sub_dir in v[]:if match_string in sub_dir:redo_directories.append(os.path.join(v[], sub_dir))passfor f in v[]:if match_string in f:redo_files.append(os.path.join(v[], f))passpassredo_directories.reverse()redo_files.reverse()for v in redo_files:dir_name = os.path.dirname(v)file_name = os.path.basename(v)shutil.move(v, os.path.join(dir_name,file_name.replace(match_string, new_string)))passfor v in redo_directories:shutil.move(v, v.replace(match_string, new_string))passpass", "docstring": "render a template string to a user string\n:param str match_string: template string, syntax: '___VAR___'\n:param str new_string: user string\n:return:", "id": "f1107:c0:m1"} {"signature": "def run(self, options):", "body": "self.logger.debug(\"\")depends = ['']nil_tools = []self.logger.info(\"\", depends)for v in depends:real_path = shutil.which(v)if real_path:self.print_message(\"\"\"\".format(v,real_path,termcolor.colored('',color='')))else:nil_tools.append(v)self.error_message(''.format(v, ''), 
prefix='',suffix='')passif nil_tools:self.print_message('')self.error(\"\")else:self.print_message(\"\"\"\")self.logger.debug(\"\")passpass", "docstring": ".. todo::\n\n check network connection\n\n:param Namespace options: parse result from argparse\n:return:", "id": "f1108:c0:m0"} {"signature": "def hump_to_underscore(name):", "body": "new_name = ''pos = for c in name:if pos == :new_name = c.lower()elif <= ord(c) <= :new_name += '' + c.lower()passelse:new_name += cpos += passreturn new_name", "docstring": "Convert hump (CamelCase) style to underscore style\n\n:param name: hump-style name\n:return: str", "id": "f1110:m1"} {"signature": "def include_file(filename, global_vars=None, local_vars=None):", "body": "if global_vars is None:global_vars = sys._getframe().f_globalsif local_vars is None:local_vars = sys._getframe().f_localswith open(filename, '') as f:code = compile(f.read(), os.path.basename(filename), '')exec(code, global_vars, local_vars)pass", "docstring": ".. deprecated 2.1::\n Don't use this any more.\n\n It's not pythonic.\n\n\ninclude a file, like PHP's include.\n\ninclude is very useful when we need to split a large config file", "id": "f1110:m0"} {"signature": "def settings(path=None, with_path=None):", "body": "if path:Settings.bind(path, with_path=with_path)return Settings._wrapped", "docstring": "Get or set `Settings._wrapped`\n\n:param str path: a python module file;\n if set, write its config to `Settings._wrapped`\n:param str with_path: search path\n:return: An instance of `Settings`", "id": "f1111:m0"} {"signature": "def check_exclusive_mode(self):", "body": "if self.options.exclusive_mode:import psutilcurrent_pid = os.getpid()current = psutil.Process(current_pid).cmdline()for pid in psutil.pids():p = psutil.Process(pid)try:if current_pid != pid and current == p.cmdline():self.error_message(\"\".format(p.pid))sys.exit(-)passexcept psutil.ZombieProcess:passexcept psutil.AccessDenied:passpasspasspass", "docstring": "Check whether the service runs in exclusive mode.\n\nThe argument order must be identical; that is, if the argument order differs,\nthe two are judged to be different processes.\nThis design takes into account that:\n\n- in general, services in exclusive mode are started by crontab or similar scripts,\n so the argument order never changes\n- during debugging, this lets us keep debugging without having to kill\n the existing process first\n\n:return:", "id": "f1112:c0:m1"} {"signature": "def run(self, options):", "body": "self.set_signal()self.check_exclusive_mode()slot = self.Handle(self)i = while i < options.threads:t = threading.Thread(target=self.worker, args=[slot])if options.once is True or options.no_daemon is True:t.daemon = Falseelse:t.daemon = Truet.start()i += if options.once is False:while True:if threading.active_count() > :sleep()else:if threading.current_thread().name == \"\":sys.exit()pass", "docstring": "In general, you don't need to override this method.\n\n:param options:\n:return:", "id": "f1112:c0:m2"} {"signature": "def message_mentions_me(self, data):", "body": "return (data.get('') == '' andself.full_name in data.get('', ''))", "docstring": "If you send a message that mentions me", "id": "f1114:c0:m3"} {"signature": "@staticmethoddef _unpack_message(msg):", "body": "return json.loads(msg.data)", "docstring": 
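The print_loading record above cycles a small set of spinner tags once per second. A minimal sketch of that loop; the tag characters and the carriage-return format are assumptions, since the dataset elides the literals:

```python
import sys
import time

def print_loading(wait, message):
    # Redraw the same line each second with the next spinner tag,
    # then finish with a newline. Writes only to sys.stdout.
    tags = ['\\', '|', '/', '-']
    for i in range(wait):
        time.sleep(1)
        sys.stdout.write("%s... %s\r" % (message, tags[i % 4]))
        sys.stdout.flush()
    sys.stdout.write("%s... done\n" % message)
    sys.stdout.flush()
```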
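The hump_to_underscore record above lower-cases the first character and prefixes each later upper-case ASCII letter with an underscore. A sketch with the elided literals filled in under that reading (the 65..90 ord range and '_' are assumptions):

```python
def hump_to_underscore(name):
    # CamelCase -> snake_case: first char lowered, every later
    # upper-case ASCII letter (ord 65..90) prefixed with '_'.
    new_name = ''
    for pos, c in enumerate(name):
        if pos == 0:
            new_name = c.lower()
        elif 65 <= ord(c) <= 90:
            new_name += '_' + c.lower()
        else:
            new_name += c
    return new_name

# hump_to_underscore('HumpToUnderscore') -> 'hump_to_underscore'
```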
"Unpack the data from the message.\n\n Arguments:\n msg (:py:class:`aiohttp.websocket.Message`): The message to\n unpack.\n\n Returns:\n :py:class:`dict`: The loaded data.\n\n Raises:\n :py:class:`AttributeError`: If there is no data attribute.\n :py:class:`json.JSONDecodeError`: If the data isn't valid\n JSON.", "id": "f1114:c0:m10"} {"signature": "@classmethoddef _validate_first_message(cls, msg):", "body": "data = cls._unpack_message(msg)logger.debug(data)if data != cls.RTM_HANDSHAKE:raise SlackApiError(''.format(data))logger.info('')", "docstring": "Check the first message matches the expected handshake.\n\n Note:\n The handshake is provided as :py:attr:`RTM_HANDSHAKE`.\n\n Arguments:\n msg (:py:class:`aiohttp.Message`): The message to validate.\n\n Raises:\n :py:class:`SlackApiError`: If the data doesn't match the\n expected handshake.", "id": "f1114:c0:m11"} {"signature": "def message_is_to_me(self, data):", "body": "return (data.get('') == '' anddata.get('', '').startswith(self.address_as))", "docstring": "If you send a message directly to me", "id": "f1114:c0:m4"} {"signature": "def matches(self, data):", "body": "self.text = data.get('')return True", "docstring": "Whether the handler should handle the current message.\n\n Args:\n data: The data representing the current message.\n\n Returns:\n :py:class:`bool`: Whether it should be handled.", "id": "f1116:c0:m4"} {"signature": "async def execute_method(self, method, **params):", "body": "url = self.url_builder(method, url_params=params)logger.info('', method)response = await aiohttp.get(url)logger.info('', response.status)if response.status == :json = await response.json()logger.debug('', json)if json.get(''):return jsonraise SlackApiError(json[''])else:raise_for_status(response)", "docstring": "Execute a specified Slack Web API method.\n\n Arguments:\n method (:py:class:`str`): The name of the method.\n **params (:py:class:`dict`): Any additional parameters\n required.\n\n Returns:\n :py:class:`dict`: The JSON data from the response.\n\n Raises:\n :py:class:`aiohttp.web_exceptions.HTTPException`: If the HTTP\n request returns a code other than 200 (OK).\n SlackApiError: If the Slack API is reached but the response\n contains an error message.", "id": "f1117:c1:m0"} {"signature": "@classmethoddef method_exists(cls, method):", "body": "methods = cls.API_METHODSfor key in method.split(''):methods = methods.get(key)if methods is None:breakif isinstance(methods, str):logger.debug('', method, methods)return Truereturn False", "docstring": "Whether a given method exists in the known API.\n\n Arguments:\n method (:py:class:`str`): The name of the method.\n\n Returns:\n :py:class:`bool`: Whether the method is in the known API.", "id": "f1117:c1:m1"} {"signature": "def url_builder(self, endpoint, *, root=None, params=None, url_params=None):", "body": "if root is None:root = self.ROOTscheme, netloc, path, _, _ = urlsplit(root)return urlunsplit((scheme,netloc,urljoin(path, endpoint),urlencode(url_params or {}),'',)).format(**params or {})", "docstring": "Create a URL for the specified endpoint.\n\n Arguments:\n endpoint (:py:class:`str`): The API endpoint to access.\n root: (:py:class:`str`, optional): The root URL for the\n service API.\n params: (:py:class:`dict`, optional): The values for format\n into the created URL (defaults to ``None``).\n url_params: (:py:class:`dict`, optional): Parameters to add\n to the end of the URL (defaults to ``None``).\n\n Returns:\n :py:class:`str`: The resulting URL.", "id": "f1119:c0:m2"} {"signature": 
"@propertydef headers(self):", "body": "return {}", "docstring": "Get the headers for the service requests.\n\n Returns:\n :py:class:`dict`: The header mapping.", "id": "f1119:c0:m1"} {"signature": "def truncate(text, max_len=, end=''):", "body": "if len(text) <= max_len:return textreturn text[:max_len].rsplit('', maxsplit=)[] + end", "docstring": "Truncate the supplied text for display.\n\n Arguments:\n text (:py:class:`str`): The text to truncate.\n max_len (:py:class:`int`, optional): The maximum length of the\n text before truncation (defaults to 350 characters).\n end (:py:class:`str`, optional): The ending to use to show that\n the text was truncated (defaults to ``'...'``).\n\n Returns:\n :py:class:`str`: The truncated text.", "id": "f1120:m1"} {"signature": "def raise_for_status(response):", "body": "for err_name in web_exceptions.__all__:err = getattr(web_exceptions, err_name)if err.status_code == response.status:payload = dict(headers=response.headers,reason=response.reason,)if issubclass(err, web_exceptions._HTTPMove): raise err(response.headers[''], **payload)raise err(**payload)", "docstring": "Raise an appropriate error for a given response.\n\n Arguments:\n response (:py:class:`aiohttp.ClientResponse`): The API response.\n\n Raises:\n :py:class:`aiohttp.web_exceptions.HTTPException`: The appropriate\n error for the response's status.", "id": "f1120:m0"} {"signature": "def resorted(values):", "body": "if not values:return valuesvalues = sorted(values)first_word = next((cnt for cnt, val in enumerate(values)if val and not val[].isdigit()),None)if first_word is None:return valueswords = values[first_word:]numbers = values[:first_word]return words + numbers", "docstring": "Sort values, but put numbers after alphabetically sorted words.\n\nThis function is here to make outputs diff-compatible with Aleph.\n\nExample::\n >>> sorted([\"b\", \"1\", \"a\"])\n ['1', 'a', 'b']\n >>> resorted([\"b\", \"1\", \"a\"])\n ['a', 'b', '1']\n\nArgs:\n values (iterable): any iterable object/list/tuple/whatever.\n\nReturns:\n list of sorted values, but with numbers after words", "id": "f1159:m0"} {"signature": "def get_subfields(self, datafield, subfield, i1=None, i2=None,exception=False):", "body": "if len(datafield) != :raise ValueError(\"\")if len(subfield) != :raise ValueError(\"\")if datafield not in self.datafields:if exception:raise KeyError(datafield + \"\")return []output = []for datafield in self.datafields[datafield]:if subfield not in datafield:continuefor sfield in datafield[subfield]:if i1 and sfield.i1 != i1:continueif i2 and sfield.i2 != i2:continueoutput.append(sfield)if not output and exception:raise KeyError(subfield + \"\")return output", "docstring": "Return content of given `subfield` in `datafield`.\n\nArgs:\n datafield (str): Section name (for example \"001\", \"100\", \"700\").\n subfield (str): Subfield name (for example \"a\", \"1\", etc..).\n i1 (str, default None): Optional i1/ind1 parameter value, which\n will be used for search.\n i2 (str, default None): Optional i2/ind2 parameter value, which\n will be used for search.\n exception (bool): If ``True``, :exc:`~exceptions.KeyError` is\n raised when method couldn't found given `datafield` /\n `subfield`. 
If ``False``, blank array ``[]`` is returned.\n\nReturns:\n list: of :class:`.MARCSubrecord`.\n\nRaises:\n KeyError: If the subfield or datafield couldn't be found.\n\nNote:\n MARCSubrecord is practically same thing as string, but has defined\n :meth:`.MARCSubrecord.i1` and :attr:`.MARCSubrecord.i2`\n methods.\n\n You may need to be able to get this, because MARC XML depends on\n i/ind parameters from time to time (names of authors for example).", "id": "f1161:c0:m11"} {"signature": "def _parse_control_fields(self, fields, tag_id=\"\"):", "body": "for field in fields:params = field.paramsif tag_id not in params:continueself.controlfields[params[tag_id]] = field.getContent().strip()", "docstring": "Parse control fields.\n\nArgs:\n fields (list): list of HTMLElements\n tag_id (str): parameter name, which holds the information, about\n field name this is normally \"tag\", but in case of\n oai_marc \"id\".", "id": "f1161:c0:m2"} {"signature": "def add_data_field(self, name, i1, i2, subfields_dict):", "body": "if i1 not in self.valid_i_chars:raise ValueError(\"\" + i1 + \"\")if i2 not in self.valid_i_chars:raise ValueError(\"\" + i2 + \"\")if len(name) != :raise ValueError(\"\")if not subfields_dict:raise ValueError(\"\")if not isinstance(subfields_dict, dict):raise ValueError(\"\")subrecords = []for key, val in subfields_dict.items():if len(key) > :raise KeyError(\"\")if not isinstance(val, list):val = [val]subfields = map(lambda x: MARCSubrecord(x, i1, i2, None),val)subfields_dict[key] = subfieldssubrecords.extend(subfields)subfields_dict[self.i1_name] = i1subfields_dict[self.i2_name] = i2if name in self.datafields:self.datafields[name].append(subfields_dict)else:self.datafields[name] = [subfields_dict]other_subfields = self.datafields[name]for record in subrecords:record.other_subfields = other_subfields", "docstring": "Add new datafield into :attr:`datafields` and take care of OAI MARC\ndifferencies.\n\nArgs:\n name (str): Name of datafield.\n i1 (char): Value of i1/ind1 parameter.\n i2 (char): Value of i2/ind2 parameter.\n subfields_dict (dict): Dictionary containing subfields (as list).\n\n`subfields_dict` is expected to be in this format::\n\n {\n \"field_id\": [\"subfield data\",],\n ...\n \"z\": [\"X0456b\"]\n }\n\nWarning:\n For your own good, use OrderedDict for `subfields_dict`, or\n constructor's `resort` parameter set to ``True`` (it is by\n default).\n\nWarning:\n ``field_id`` can be only one character long!", "id": "f1161:c0:m5"} {"signature": "def _parse_string(self, xml):", "body": "if not isinstance(xml, HTMLElement):xml = dhtmlparser.parseString(str(xml))record = xml.find(\"\")if not record:raise ValueError(\"\")record = record[]self.oai_marc = len(record.find(\"\")) > if not self.oai_marc:leader = record.find(\"\")if len(leader) >= :self.leader = leader[].getContent()if self.oai_marc:self._parse_control_fields(record.find(\"\"), \"\")self._parse_data_fields(record.find(\"\"), \"\", \"\")else:self._parse_control_fields(record.find(\"\"), \"\")self._parse_data_fields(record.find(\"\"), \"\", \"\")if self.oai_marc and \"\" in self.controlfields:self.leader = self.controlfields[\"\"]", "docstring": "Parse MARC XML document to dicts, which are contained in\nself.controlfields and self.datafields.\n\nArgs:\n xml (str or HTMLElement): input data\n\nAlso detect if this is oai marc format or not (see elf.oai_marc).", "id": "f1161:c0:m1"} {"signature": "def get_ctl_field(self, controlfield, alt=None):", "body": "if not alt:return self.controlfields[controlfield]return 
self.controlfields.get(controlfield, alt)", "docstring": "Method wrapper over :attr:`.controlfields` dictionary.\n\nArgs:\n controlfield (str): Name of the controlfield.\n alt (object, default None): Alternative value of the `controlfield`\n when `controlfield` couldn't be found.\n\nReturns:\n str: record from given `controlfield`", "id": "f1161:c0:m9"} {"signature": "def get_internal_urls(self):", "body": "internal_urls = self.get_subfields(\"\", \"\", i1=\"\", i2=\"\")internal_urls.extend(self.get_subfields(\"\", \"\"))internal_urls.extend(self.get_subfields(\"\", \"\"))return map(lambda x: x.replace(\"\", \"\"), internal_urls)", "docstring": "URL's, which may point to edeposit, aleph, kramerius and so on.\n\nFields ``856u40``, ``998a`` and ``URLu``.\n\nReturns:\n list: List of internal URLs.", "id": "f1163:c0:m26"} {"signature": "def get_authors(self):", "body": "authors = self._parse_persons(\"\", \"\")authors += self._parse_persons(\"\", \"\")authors += self._parse_persons(\"\", \"\")authors += self._parse_persons(\"\", \"\")return authors", "docstring": "Returns:\n list: Authors represented as :class:`.Person` objects.", "id": "f1163:c0:m13"} {"signature": "def get_ISSNs(self):", "body": "invalid_issns = set(self.get_invalid_ISSNs())return [self._clean_isbn(issn)for issn in self[\"\"]if self._clean_isbn(issn) not in invalid_issns]", "docstring": "Get list of VALID ISSNs (``022a``).\n\nReturns:\n list: List with *valid* ISSN strings.", "id": "f1163:c0:m20"} {"signature": "def get_pub_date(self, undefined=\"\"):", "body": "dates = self[\"\"] + self[\"\"]def clean_date(date):\"\"\"\"\"\"out = \"\"was_digit = Falsefor c in date:if c.isdigit() or (c == \"\" and was_digit) or c == \"\":out += cwas_digit = c.isdigit()return outdates = set([clean_date(date)for date in self[\"\"] + self[\"\"]])return _undefined_pattern(\"\".join(dates),lambda x: x.strip() == \"\",undefined)", "docstring": "Args:\n undefined (optional): Argument, which will be returned if the\n `pub_date` record is not found.\n\nReturns:\n str: Date of publication (month and year usually) or `undefined` \\\n if `pub_date` is not found.", "id": "f1163:c0:m9"} {"signature": "def _clean_isbn(self, isbn):", "body": "return isbn.strip().split(\"\", )[]", "docstring": "Clean ISBN from other information (binding).", "id": "f1163:c0:m16"} {"signature": "def _undefined_pattern(value, fn, undefined):", "body": "if fn(value):return undefinedreturn value", "docstring": "If ``fn(value) == True``, return `undefined`, else `value`.", "id": "f1163:m0"} {"signature": "def is_continuing(self):", "body": "return self.get_pub_type() == PublicationType.continuing", "docstring": "Returns:\n bool: True if the record is continuing.", "id": "f1163:c0:m30"} {"signature": "def get_pub_type(self):", "body": "INFO_CHAR_INDEX = SECOND_INFO_CHAR_I = if not len(self.leader) >= INFO_CHAR_INDEX + :return PublicationType.monographicif self.controlfields.get(\"\") == \"\":return PublicationType.continuinginfo_char = self.leader[INFO_CHAR_INDEX]multipart_n = self.get_subfields(\"\", \"\", exception=False)multipart_p = self.get_subfields(\"\", \"\", exception=False)if info_char in \"\":return PublicationType.monographicelif info_char in \"\":return PublicationType.continuingelif info_char == \"\" and (multipart_n or multipart_p):return PublicationType.multipart_monographelif info_char == \"\" and len(self.leader) >= SECOND_INFO_CHAR_I + :if self.leader[SECOND_INFO_CHAR_I] == \"\":return PublicationType.multipart_monographelif self.leader[SECOND_INFO_CHAR_I] == 
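The get_ISSNs and get_ISBNs records above share one technique: normalise each code with _clean_isbn (strip whitespace, drop trailing binding info) and keep only codes not flagged invalid. A standalone sketch; the space separator in clean() is an assumption, since the dataset elides it:

```python
def get_valid_isbns(raw_isbns, invalid_isbns):
    # Mirror of the valid/invalid set-difference filtering above.
    def clean(isbn):
        # _clean_isbn: strip, then keep only the part before the first
        # space (binding information follows it).
        return isbn.strip().split(' ', 1)[0]

    invalid = {clean(i) for i in invalid_isbns}
    return [clean(i) for i in raw_isbns if clean(i) not in invalid]

# get_valid_isbns(['80-251-0225-4 (brož.)', '80-251-9999-9'],
#                 ['80-251-9999-9']) -> ['80-251-0225-4']
```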
\"\":return PublicationType.single_unitreturn PublicationType.monographic", "docstring": "Returns:\n PublicationType: :class:`.PublicationType` enum **value**.", "id": "f1163:c0:m27"} {"signature": "@remove_hairs_decoratordef get_pub_order(self, undefined=\"\"):", "body": "return _undefined_pattern(\"\".join(self.get_subfields(\"\", \"\")),lambda x: x.strip() == \"\",undefined)", "docstring": "Args:\n undefined (optional): Argument, which will be returned if the\n `pub_order` record is not found.\n\nReturns:\n str: Information about order in which was the book published or \\\n `undefined` if `pub_order` is not found.", "id": "f1163:c0:m10"} {"signature": "def get_distributors(self):", "body": "return self.get_corporations(roles=[\"\"])", "docstring": "Returns:\n list: Distributors represented as :class:`.Corporation` object.", "id": "f1163:c0:m15"} {"signature": "def get(self, item, alt=None):", "body": "try:val = self[item]except ValueError:return altreturn val if val is not None else alt", "docstring": "Standard dict-like .get() method.\n\nArgs:\n item (str): See :meth:`.__getitem__` for details.\n alt (default None): Alternative value, if item is not found.\n\nReturns:\n obj: `item` or `alt`, if item is not found.", "id": "f1163:c0:m33"} {"signature": "def get_ISBNs(self):", "body": "invalid_isbns = set(self.get_invalid_ISBNs())valid_isbns = [self._clean_isbn(isbn)for isbn in self[\"\"]if self._clean_isbn(isbn) not in invalid_isbns]if valid_isbns:return valid_isbnsreturn [self._clean_isbn(isbn)for isbn in self[\"\"]]", "docstring": "Get list of VALID ISBN.\n\nReturns:\n list: List with *valid* ISBN strings.", "id": "f1163:c0:m18"} {"signature": "def _parse_persons(self, datafield, subfield, roles=[\"\"]):", "body": "parsed_persons = []raw_persons = self.get_subfields(datafield, subfield)for person in raw_persons:other_subfields = person.other_subfieldsif \"\" in other_subfields and roles != [\"\"]:person_roles = other_subfields[\"\"] relevant = any(map(lambda role: role in roles, person_roles))if not relevant:continueind1 = person.i1ind2 = person.i2person = person.strip()name = \"\"second_name = \"\"surname = \"\"title = \"\"if ind1 == \"\" and ind2 == \"\":if \"\" in person:surname, name = person.split(\"\", )elif \"\" in person:surname, name = person.split(\"\", )else:surname = personif \"\" in other_subfields:title = \"\".join(other_subfields[\"\"])elif ind1 == \"\" and ind2 == \"\":name = person.strip()if \"\" in other_subfields:second_name = \"\".join(other_subfields[\"\"])if \"\" in other_subfields:surname = \"\".join(other_subfields[\"\"])elif ind1 == \"\" and ind2 == \"\" or ind1 == \"\" and ind2 == \"\":name = person.strip()if \"\" in other_subfields:title = \"\".join(other_subfields[\"\"])parsed_persons.append(Person(name.strip(),second_name.strip(),surname.strip(),title.strip()))return parsed_persons", "docstring": "Parse persons from given datafield.\n\nArgs:\n datafield (str): code of datafield (\"010\", \"730\", etc..)\n subfield (char): code of subfield (\"a\", \"z\", \"4\", etc..)\n role (list of str): set to [\"any\"] for any role, [\"aut\"] for\n authors, etc.. 
For details see\n http://www.loc.gov/marc/relators/relaterm.html\n\nMain records for persons are: \"100\", \"600\" and \"700\", subrecords \"c\".\n\nReturns:\n list: Person objects.", "id": "f1163:c0:m2"} {"signature": "@remove_hairs_decoratordef get_publisher(self, undefined=\"\"):", "body": "publishers = set([remove_hairs_fn(publisher)for publisher in self[\"\"] + self[\"\"]])return _undefined_pattern(\"\".join(publishers),lambda x: x.strip() == \"\",undefined)", "docstring": "Args:\n undefined (optional): Argument, which will be returned if the\n `publisher` record is not found.\n\nReturns:\n str: Name of the publisher (\"``Grada``\" for example) or \\\n `undefined` if `publisher` is not found.", "id": "f1163:c0:m8"} {"signature": "def _parse_corporations(self, datafield, subfield, roles=[\"\"]):", "body": "if len(datafield) != :raise ValueError(\"\")if len(subfield) != :raise ValueError(\"\")parsed_corporations = []for corporation in self.get_subfields(datafield, subfield):other_subfields = corporation.other_subfieldsif \"\" in other_subfields and roles != [\"\"]:corp_roles = other_subfields[\"\"] relevant = any(map(lambda role: role in roles, corp_roles))if not relevant:continuename = \"\"place = \"\"date = \"\"name = corporationif \"\" in other_subfields:place = \"\".join(other_subfields[\"\"])if \"\" in other_subfields:date = \"\".join(other_subfields[\"\"])parsed_corporations.append(Corporation(name, place, date))return parsed_corporations", "docstring": "Parse information about corporations from given field identified\nby `datafield` parameter.\n\nArgs:\n datafield (str): MARC field ID (\"``110``\", \"``610``\", etc..)\n subfield (str): MARC subfield ID with name, which is typically\n stored in \"``a``\" subfield.\n roles (str): specify which roles you need. Set to ``[\"any\"]`` for\n any role, ``[\"dst\"]`` for distributors, etc.. For\n details, see\n http://www.loc.gov/marc/relators/relaterm.html\n\nReturns:\n list: :class:`Corporation` objects.", "id": "f1163:c0:m1"} {"signature": "def is_single_unit(self):", "body": "return self.get_pub_type() == PublicationType.single_unit", "docstring": "Returns:\n bool: True if the record is single unit.", "id": "f1163:c0:m31"} {"signature": "def is_multi_mono(self):", "body": "return self.get_pub_type() == PublicationType.multipart_monograph", "docstring": "Returns:\n bool: True if the record is multi_mono.", "id": "f1163:c0:m29"} {"signature": "@remove_hairs_decoratordef get_part_name(self, undefined=\"\"):", "body": "return _undefined_pattern(\"\".join(self.get_subfields(\"\", \"\")),lambda x: x.strip() == \"\",undefined)", "docstring": "Args:\n undefined (optional): Argument, which will be returned if the\n `part_name` record is not found.\n\nReturns:\n str: Name of the part of the series, 
or `undefined` if `part_name`\\\n is not found.", "id": "f1163:c0:m7"} {"signature": "def get_invalid_ISSNs(self):", "body": "return [self._clean_isbn(issn)for issn in self[\"\"] + self[\"\"]]", "docstring": "Get list of invalid ISSNs (``022z`` + ``022y``).\n\nReturns:\n list: List with INVALID ISSN strings.", "id": "f1163:c0:m19"} {"signature": "@propertydef potential(self):", "body": "if self._potential is not None and self._potential:return Trueelse:return False", "docstring": "Getter for 'potential' property\n\nReturns:\n bool: potential is required?", "id": "f1181:c0:m7"} {"signature": "@details.setterdef details(self, value):", "body": "self._details = value", "docstring": "Setter for 'details' property\n\nArgs:\n value (str): Issue's details", "id": "f1181:c0:m10"} {"signature": "@checker.setterdef checker(self, value):", "body": "self._checker_name = value", "docstring": "Setter for 'checker' property\n\nArgs:\n value (str): Issue's checker", "id": "f1181:c0:m12"} {"signature": "def __todict__(self):", "body": "return {\"\": self.name,\"\": self.file,\"\": self.details,\"\": self.severity,\"\": self.potential,\"\": self.checker}", "docstring": "Returns a dictionary with the class representation\n\nReturns:\n dict: class representation", "id": "f1181:c0:m13"} {"signature": "@propertydef command(self):", "body": "return self._command", "docstring": "Getter for 'command' property\n\nReturns:\n str: Command to execute", "id": "f1184:c0:m1"} {"signature": "def execute(self, shell = True):", "body": "process = Popen(self.command, stdout=PIPE, stderr=PIPE, shell=shell)self.output, self.errors = process.communicate()", "docstring": "Executes the command set on the class\n\nArgs:\n shell (boolean): Set True if command is a shell command. Default: True", "id": "f1184:c0:m9"} {"signature": "@propertydef errors(self):", "body": "return self._errors", "docstring": "Getter for 'errors' property\n\nReturns:\n str: Stderr content", "id": "f1184:c0:m5"} {"signature": "def __init__(self, command = None):", "body": "self._output = Noneself._errors = Noneself._command = Noneself.command = command", "docstring": "Class constructor. \n\nArgs:\n command (str): Command to execute", "id": "f1184:c0:m0"} {"signature": "@classmethoddef parseConfig(cls, value):", "body": "if '' in value:value[''] = bool(value[''])if '' in value:value[''] = [n.strip() for n in ast.literal_eval(value[''])]return value", "docstring": "Parse the config values\n\nArgs:\n value (dict): Dictionary which contains the checker config\n\nReturns:\n dict: The checker config with parsed values", "id": "f1187:c0:m14"} {"signature": "def run(self):", "body": "pass", "docstring": "Abstract method. 
This method will be executed for any subclass which has not implemented its own method", "id": "f1187:c0:m12"} {"signature": "@propertydef dao(self):", "body": "return self._dao", "docstring": "Getter for 'dao' property\n\nReturns:\n atomshield.helpers.DAO: Instance of DAO class", "id": "f1187:c0:m1"} {"signature": "@propertydef issues(self):", "body": "return self._issues", "docstring": "Getter for 'issues' property\n\nReturns:\n list: List of instances of Issue class", "id": "f1187:c0:m7"} {"signature": "@propertydef path(self):", "body": "return self._path", "docstring": "Getter for 'path' property\n\nReturns:\n str: Absolute path to scan", "id": "f1187:c0:m3"} {"signature": "@project.setterdef project(self, value):", "body": "self._project = value", "docstring": "Setter for 'project' property\n\nArgs:\n value (str): Project's name", "id": "f1187:c0:m6"} {"signature": "@staticmethoddef isInstalled(value):", "body": "function = \"\"\"\"\"\"bash -c ''", "docstring": "Check if a piece of software is installed on the machine.\n\nArgs:\n value (str): Software's name\n\nReturns:\n bool: True if the software is installed. False else", "id": "f1187:c0:m15"} {"signature": "def report(func):", "body": "def execute(self, *args, **kwargs):try:print(\"\".format(n=self.__class__.NAME))if hasattr(self, ''):if self.test():return func(self, *args, **kwargs)else:print(colored(\"\".format(c=self.__class__.__name__), \"\"))else:return func(self, *args, **kwargs)except Exception as e:print(colored(\"\".format(n=self.__class__.NAME, e = e), \"\"))return execute", "docstring": "Decorator for the run method. The wrapper will be executed before the\nexecution of the method carrying this decorator.", "id": "f1190:m0"} {"signature": "@propertydef issues(self):", "body": "if self._issues is None:return []return self._issues", "docstring": "Getter for 'issues' property\n\nReturns:\n list: List of `Issue` instances", "id": "f1190:c0:m1"} {"signature": "@issues.setterdef issues(self, value):", "body": "self._issues = value", "docstring": "Setter for 'issues' property\n\nArgs:\n value (list): List of `Issue` instances", "id": "f1190:c0:m2"} {"signature": "@propertydef config(self):", "body": "return self._config", "docstring": "Getter for 'config' property\n\nReturns:\n dict: Dictionary which contains the current values for this report config", "id": "f1190:c0:m3"} {"signature": "@staticmethoddef setup():", "body": "if not os.path.isdir(AtomShieldsScanner.CHECKERS_DIR):os.makedirs(AtomShieldsScanner.CHECKERS_DIR)if not os.path.isdir(AtomShieldsScanner.REPORTS_DIR):os.makedirs(AtomShieldsScanner.REPORTS_DIR)for f in AtomShieldsScanner._getFiles(os.path.join(os.path.dirname(os.path.realpath(__file__)), \"\"), \"\"):AtomShieldsScanner.installChecker(f)for f in AtomShieldsScanner._getFiles(os.path.join(os.path.dirname(os.path.realpath(__file__)), \"\"), \"\"):AtomShieldsScanner.installReport(f)AtomShieldsScanner._executeMassiveMethod(path=AtomShieldsScanner.CHECKERS_DIR, method=\"\", args={})config_dir = os.path.dirname(AtomShieldsScanner.CONFIG_PATH)if not os.path.isdir(config_dir):os.makedirs(config_dir)", "docstring": "Creates required directories and copies checkers and reports.", "id": "f1194:c0:m13"} {"signature": "@configFile.setterdef configFile(self, value):", "body": "self._config_file = os.path.abspath(value)", "docstring": "Setter for 'configFile' property\n\nArgs:\n value (str): Path to config file", "id": "f1194:c0:m8"} {"signature": "@propertydef issues(self):", "body": "return self._issues", "docstring": "Getter for 'issues' 
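The CommandHelper.execute record above runs the configured command with piped streams and stores both outputs. A minimal standalone sketch of the same subprocess pattern:

```python
from subprocess import Popen, PIPE

def execute(command, shell=True):
    # Spawn the command with piped stdout/stderr and block until it
    # finishes; communicate() returns both streams as bytes.
    process = Popen(command, stdout=PIPE, stderr=PIPE, shell=shell)
    output, errors = process.communicate()
    return output, errors

# output, errors = execute('ls -la')
```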
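The report decorator record above gates a report's run method behind an optional test() check. A simplified sketch of that wrapper (the original also wraps the call in try/except and prints colored status messages, with literals elided by the dataset):

```python
from functools import wraps

def report(func):
    # If the report class defines test(), run it first and only invoke
    # the wrapped method when it passes; otherwise call straight through.
    @wraps(func)
    def execute(self, *args, **kwargs):
        if hasattr(self, 'test') and not self.test():
            print("{c}: test() failed, skipping".format(
                c=self.__class__.__name__))
            return None
        return func(self, *args, **kwargs)
    return execute
```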
property\n\nReturns:\n list: List of Issue instances", "id": "f1194:c0:m11"} {"signature": "@config.setterdef config(self, value):", "body": "self._config = value", "docstring": "Setter for 'config' property\n\nArgs:\n value (dict): Dictionary which contains the config", "id": "f1194:c0:m10"} {"signature": "@staticmethoddef _addConfig(instance, config, parent_section):", "body": "try:section_name = \"\".format(p = parent_section, n=instance.NAME.lower())config.add_section(section_name)for k in list(instance.CONFIG.keys()):config.set(section_name, k, instance.CONFIG[k])except Exception as e:print(\"\" % e)", "docstring": "Writes a section for a plugin.\n\nArgs:\n instance (object): Class instance for plugin\n config (object): Object (ConfigParser) which the current config\n parent_section (str): Parent section for plugin. Usually 'checkers' or 'reports'", "id": "f1194:c0:m21"} {"signature": "@issues.setterdef issues(self, value):", "body": "self._issues = value", "docstring": "Setter for 'issues' property\n\nArgs:\n value (list): List of Issue instances", "id": "f1194:c0:m12"} {"signature": "@path.setterdef path(self, value):", "body": "self._path = os.path.abspath(value)", "docstring": "Setter for 'path' property\n\nArgs:\n value (str): Path to target directory", "id": "f1194:c0:m4"} {"signature": "@staticmethoddef _debug(message, color=None, attrs=None):", "body": "if attrs is None:attrs = []if color is not None:print(colored(message, color, attrs=attrs))else:if len(attrs) > :print(colored(message, \"\", attrs=attrs))else:print(message)", "docstring": "Print a message if the class attribute 'verbose' is enabled\n\nArgs:\n message (str): Message to print", "id": "f1194:c0:m1"} {"signature": "@propertydef configFile(self):", "body": "return self._config_file", "docstring": "Getter for 'configFile' property\n\nReturns:\n str: Path to config file", "id": "f1194:c0:m7"} {"signature": "def getConfig(self, section = None):", "body": "data = {}if section is None:for s in self.config.sections():if '' in s:parent, _s = s.split('')data[parent][_s] = dict(self.config.items(s))else:data[s] = dict(self.config.items(s))else:data = dict(self.config.items(section))return data", "docstring": "Returns a dictionary which contains the current config. 
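The getConfig record that follows flattens a ConfigParser into nested dicts, splitting composite section names into parent and child. A sketch of that technique; the '/' separator is an assumption, since the dataset elides the literal:

```python
from configparser import ConfigParser

def get_config_dict(config, section=None):
    # With a section name, return just that section; otherwise walk all
    # sections and nest 'parent/child' sections under their parent key.
    if section is not None:
        return dict(config.items(section))
    data = {}
    for s in config.sections():
        if '/' in s:
            parent, child = s.split('/', 1)
            data.setdefault(parent, {})[child] = dict(config.items(s))
        else:
            data[s] = dict(config.items(s))
    return data
```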
If a section is set,\nonly that section's config will be returned\n\nArgs:\n section (str): (Optional) Section name.\n\nReturns:\n dict: Representation of current config", "id": "f1194:c0:m25"} {"signature": "def initialize(self, templates_path, global_data):", "body": "self.env = Environment(loader=FileSystemLoader(templates_path))self.env.trim_blocks = Trueself.global_data = global_data", "docstring": "initialize with the templates' path\n parameters\n templates_path str the location of the templates directory\n global_data dict global data available in any template", "id": "f1200:c0:m0"} {"signature": "def render_to(self, path, template, **data):", "body": "html = self.render(template, **data)with open(path, '') as f:f.write(html.encode(charset))", "docstring": "Render data with template and then write to path", "id": "f1200:c0:m2"} {"signature": "def block_code(self, text, lang):", "body": "if not lang:return self._code_no_lexer(text)try:lexer = get_lexer_by_name(lang, stripall=True)except ClassNotFound: return self._code_no_lexer(text)formatter = HtmlFormatter()return highlight(text, lexer, formatter)", "docstring": "text: unicode text to render", "id": "f1201:c0:m1"} {"signature": "def parse(self, source):", "body": "rt, title, title_pic, markdown = libparser.parse(source)if rt == -:raise SeparatorNotFoundelif rt == -:raise PostTitleNotFoundtitle, title_pic, markdown = map(to_unicode, (title, title_pic,markdown))html = self.markdown.render(markdown)summary = self.markdown.render(markdown[:])return {'': title,'': markdown,'': html,'': summary,'': title_pic}", "docstring": "Parse ascii post source, return dict", "id": "f1201:c1:m2"} {"signature": "def run(self, port):", "body": "self.watcher.start()self.run_server(port)", "docstring": "start web server and watcher", "id": "f1203:c2:m4"} {"signature": "def get_files_stat(self):", "body": "if not exists(Post.src_dir):logger.error(SourceDirectoryNotFound.__doc__)sys.exit(SourceDirectoryNotFound.exit_code)paths = []for fn in ls(Post.src_dir):if fn.endswith(src_ext):paths.append(join(Post.src_dir, fn))if exists(config.filepath):paths.append(config.filepath)files = dict((p, stat(p).st_mtime) for p in paths)return files", "docstring": "get source files' update time", "id": "f1203:c2:m2"} {"signature": "def watch_files(self):", "body": "try:while :sleep() try:files_stat = self.get_files_stat()except SystemExit:logger.error(\"\")self.shutdown_server()if self.files_stat != files_stat:logger.info(\"\")try:generator.re_generate()global _root_root = generator.rootexcept SystemExit: logger.error(\"\")self.shutdown_server()self.files_stat = files_stat except KeyboardInterrupt:logger.info(\"\")self.shutdown_watcher()", "docstring": "watch files for changes, if changed, rebuild blog. 
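The renderer initialize record above builds a Jinja2 environment over the templates directory and keeps global data for every render. A minimal sketch; exposing the globals via env.globals is one reasonable choice (the original stores global_data on the renderer object instead):

```python
from jinja2 import Environment, FileSystemLoader

def initialize(templates_path, global_data):
    # Build the environment, trim template block whitespace, and make
    # the global data visible to every template.
    env = Environment(loader=FileSystemLoader(templates_path))
    env.trim_blocks = True
    env.globals.update(global_data)
    return env

# env = initialize('themes/default/templates', {'root': '/'})
# html = env.get_template('post.html').render(post=post)
```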
this thread\n will quit if the main process ends", "id": "f1203:c2:m3"} {"signature": "def run_server(self, port):", "body": "try:self.server = MultiThreadedHTTPServer(('', port), Handler)except socket.error as e: logger.error(str(e))sys.exit()logger.info(\"\"% port)try:self.server.serve_forever()except KeyboardInterrupt:logger.info(\"\")self.shutdown_server()", "docstring": "run a server binding to port", "id": "f1203:c2:m1"} {"signature": "def initialize(self):", "body": "try:conf = config.parse()except ConfigSyntaxError as e:logger.error(e.__doc__)sys.exit(e.exit_code)update_nested_dict(self.config, conf)self.blog.__dict__.update(self.config[''])self.author.__dict__.update(self.config[''])self.root = self.config['']templates = join(self.blog.theme, '') jinja2_global_data = {'': self.blog,'': self.author,'': self.config,'': self.root}renderer.initialize(templates, jinja2_global_data)logger.success('')", "docstring": "Initialize configuration and renderer environment", "id": "f1206:c0:m2"} {"signature": "def join(*p):", "body": "return os.path.normpath(os.path.join(*p))", "docstring": "return normpath version of path.join", "id": "f1207:m0"} {"signature": "def parse(src):", "body": "rt = libparser.parse(byref(post), src)return (rt,string_at(post.title, post.tsz),string_at(post.tpic, post.tpsz),post.body)", "docstring": "Note: src should be ascii string", "id": "f1208:m0"} {"signature": "def replace_relative_url_to_absolute(self, content):", "body": "p = os.path.join(os.getcwd(), '', '')return content.replace('', p)", "docstring": "Replace '../' leaded url with absolute uri.", "id": "f1210:c0:m3"} {"signature": "def render(template, **data):", "body": "try:return renderer.render(template, **data)except JinjaTemplateNotFound as e:logger.error(e.__doc__ + '' % template)sys.exit(e.exit_code)", "docstring": "shortcut to render data with `template`. 
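The libparser parse record above calls into a C parser via ctypes, passing a struct by reference and reading strings back with string_at. A heavily hedged sketch of that bridge; the field names and types here are assumptions inferred from the attributes the wrapper reads (title/tsz, tpic/tpsz, body):

```python
from ctypes import Structure, c_char_p, c_int, byref, string_at

class Post(Structure):
    # Hypothetical mirror of the C struct the parser fills in.
    _fields_ = [('title', c_char_p), ('tsz', c_int),
                ('tpic', c_char_p), ('tpsz', c_int),
                ('body', c_char_p)]

def parse(libparser, src):
    # libparser is assumed to be a ctypes.CDLL whose parse() fills the
    # struct and returns a status code; negative codes signal errors.
    post = Post()
    rt = libparser.parse(byref(post), src)
    return (rt,
            string_at(post.title, post.tsz),
            string_at(post.tpic, post.tpsz),
            post.body)
```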
Just add exception\n catch to `renderer.render`", "id": "f1210:m0"} {"signature": "def load_fixture(filename):", "body": "path = os.path.join(os.path.dirname(__file__), \"\", filename)with open(path) as json_data:return json.load(json_data)", "docstring": "Load some fixture JSON", "id": "f1216:m0"} {"signature": "def fahrenheit_to_nuheat(fahrenheit):", "body": "return int(round_half(((fahrenheit - ) * ) + ))", "docstring": "Convert Fahrenheit to a temperature value that NuHeat understands\nFormula f(x) = ((x - 33) * 56) + 33\n\n:param fahrenheit: The temperature to convert to NuHeat", "id": "f1219:m2"} {"signature": "def nuheat_to_celsius(nuheat_temperature):", "body": "fahrenheit = nuheat_to_fahrenheit(nuheat_temperature)return fahrenheit_to_celsius(fahrenheit)", "docstring": "Convert the NuHeat temp value to Celsius\n\n:param nuheat_temperature: The temperature to convert to Celsius", "id": "f1219:m6"} {"signature": "def celsius_to_nuheat(celsius):", "body": "fahrenheit = celsius_to_fahrenheit(celsius)return int(round_half(((fahrenheit - ) * ) + ))", "docstring": "Convert Celsius to a temperature value that NuHeat understands\nFormula f(x) = ((x - 33) * 56) + 33\n\n:param celsius: The temperature to convert to NuHeat", "id": "f1219:m4"} {"signature": "def celsius_to_fahrenheit(celsius):", "body": "return int(round_half(celsius * + ))", "docstring": "Convert Celsius to Fahrenheit\n\n:param celsius: The temperature to convert to Fahrenheit", "id": "f1219:m3"} {"signature": "@propertydef schedule_mode(self):", "body": "return self._schedule_mode", "docstring": "Return the mode that the thermostat is currently using", "id": "f1220:c0:m13"} {"signature": "@propertydef max_fahrenheit(self):", "body": "if not self.max_temperature:return Nonereturn nuheat_to_fahrenheit(self.max_temperature)", "docstring": "Return the thermostat's maximum temperature in Fahrenheit", "id": "f1220:c0:m6"} {"signature": "@propertydef fahrenheit(self):", "body": "if not self.temperature:return Nonereturn nuheat_to_fahrenheit(self.temperature)", "docstring": "Return the current temperature in Fahrenheit", "id": "f1220:c0:m2"} {"signature": "def set_target_celsius(self, celsius, mode=config.SCHEDULE_HOLD):", "body": "temperature = celsius_to_nuheat(celsius)self.set_target_temperature(temperature, mode)", "docstring": "Set the target temperature to the desired celsius, with more granular control of the hold\nmode\n\n:param celsius: The desired temperature in C\n:param mode: The desired mode to operate in", "id": "f1220:c0:m17"} {"signature": "def set_data(self, post_data):", "body": "params = {\"\": self.serial_number}self._session.request(config.THERMOSTAT_URL, method=\"\", data=post_data, params=params)", "docstring": "Update (patch) the current instance's data on the NuHeat API", "id": "f1220:c0:m19"} {"signature": "@schedule_mode.setterdef schedule_mode(self, mode):", "body": "modes = [config.SCHEDULE_RUN, config.SCHEDULE_TEMPORARY_HOLD, config.SCHEDULE_HOLD]if mode not in modes:raise Exception(\"\".format(modes))self.set_data({\"\": mode})", "docstring": "Set the thermostat mode\n\n:param mode: The desired mode integer value.\n Auto = 1\n Temporary hold = 2\n Permanent hold = 3", "id": "f1220:c0:m14"} {"signature": "@propertydef celsius(self):", "body": "if not self.temperature:return Nonereturn nuheat_to_celsius(self.temperature)", "docstring": "Return the current temperature in Celsius", "id": "f1220:c0:m3"} {"signature": "@propertydef min_celsius(self):", "body": "if not self.min_temperature:return Nonereturn 
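The NuHeat conversion records above document the formula f(x) = ((x - 33) * 56) + 33 but the dataset elides the literals from the bodies. A sketch applying the documented formula directly; round_half is a hypothetical helper matching the name used above (assumed to round to the nearest 0.5):

```python
def round_half(value):
    # Hypothetical: round to the nearest 0.5.
    return round(value * 2) / 2

def fahrenheit_to_nuheat(fahrenheit):
    # f(x) = ((x - 33) * 56) + 33, per the docstrings above.
    return int(round_half(((fahrenheit - 33) * 56) + 33))

# fahrenheit_to_nuheat(72) -> 2217
```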
nuheat_to_celsius(self.min_temperature)", "docstring": "Return the thermostat's minimum temperature in Celsius", "id": "f1220:c0:m5"} {"signature": "@target_fahrenheit.setterdef target_fahrenheit(self, fahrenheit):", "body": "self.set_target_fahrenheit(fahrenheit)", "docstring": "Helper to set and HOLD the target temperature to the desired fahrenheit\n\n:param fahrenheit: The desired temperature in F", "id": "f1220:c0:m10"} {"signature": "def set_target_fahrenheit(self, fahrenheit, mode=config.SCHEDULE_HOLD):", "body": "temperature = fahrenheit_to_nuheat(fahrenheit)self.set_target_temperature(temperature, mode)", "docstring": "Set the target temperature to the desired fahrenheit, with more granular control of the\nhold mode\n\n:param fahrenheit: The desired temperature in F\n:param mode: The desired mode to operate in", "id": "f1220:c0:m16"} {"signature": "def get_thermostat(self, serial_number):", "body": "return NuHeatThermostat(self, serial_number)", "docstring": "Get a thermostat object by serial number\n\n:param serial_number: The serial number / ID of the desired thermostat", "id": "f1222:c0:m3"} {"signature": "def request(self, url, method=\"\", data=None, params=None, retry=True):", "body": "headers = config.REQUEST_HEADERSif params and self._session_id:params[''] = self._session_idif method == \"\":response = requests.get(url, headers=headers, params=params)elif method == \"\":response = requests.post(url, headers=headers, params=params, data=data)if response.status_code == and retry:_LOGGER.warn(\"\")self._session_id = Noneself.authenticate()return self.request(url, method=method, data=data, params=params, retry=False)response.raise_for_status()try:return response.json()except ValueError:return response", "docstring": "Make a request to the NuHeat API\n\n:param url: The URL to request\n:param method: The type of request to make (GET, POST)\n:param data: Data to be sent along with POST requests\n:param params: Querystring parameters\n:param retry: Attempt to re-authenticate and retry request if necessary", "id": "f1222:c0:m4"} {"signature": "@lockdown(until_date=YESTERDAY, after_date=TOMORROW)def locked_view_until_and_after(request):", "body": "return HttpResponse('')", "docstring": "View, only not looked between yesterday and tomorrow.", "id": "f1225:m10"} {"signature": "@lockdown(extra_context={'': ''})def locked_view_with_extra_context(request):", "body": "return HttpResponse('')", "docstring": "View, locked by the decorator with extra context.", "id": "f1225:m5"} {"signature": "@lockdown(form=AuthForm, staff_only=False)def user_locked_view(request):", "body": "return HttpResponse('')", "docstring": "View, locked by the decorator with access for known users only.", "id": "f1225:m11"} {"signature": "@lockdown(after_date=YESTERDAY)def locked_view_after_yesterday(request):", "body": "return HttpResponse('')", "docstring": "View, locked since yesterday.", "id": "f1225:m8"} {"signature": "@lockdown(remote_addr_exceptions=[''])def locked_view_with_ip_exception(request):", "body": "return HttpResponse('')", "docstring": "View, locked except for the configured IP-address.", "id": "f1225:m4"} {"signature": "def clean_answer(self):", "body": "if self.cleaned_data[''] == :return raise forms.ValidationError('')", "docstring": "Clean the answer field, by checking its value.", "id": "f1226:c0:m0"} {"signature": "def setUp(self):", "body": "super(MiddlewareTests, self).setUp()self._old_middleware_classes = django_settings.MIDDLEWAREdjango_settings.MIDDLEWARE.append('',)", "docstring": 
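The NuHeat request record above retries exactly once after a 401 by dropping the cached session and re-authenticating. A standalone sketch of that pattern; `session` is any object exposing _session_id and authenticate(), hypothetical here:

```python
import requests

def request_with_reauth(session, url, method="GET", data=None,
                        params=None, retry=True):
    # On 401, clear the cached session id, re-authenticate, and replay
    # the request a single time; retry=False prevents loops.
    if method == "GET":
        response = requests.get(url, params=params)
    else:
        response = requests.post(url, params=params, data=data)
    if response.status_code == 401 and retry:
        session._session_id = None
        session.authenticate()
        return request_with_reauth(session, url, method=method, data=data,
                                   params=params, retry=False)
    response.raise_for_status()
    return response.json()
```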
"Additional setup for middleware tests.", "id": "f1229:c2:m0"} {"signature": "def clean(self):", "body": "cleaned_data = super(AuthForm, self).clean()user = self.get_user()if self.staff_only and (not user or not user.is_staff):raise forms.ValidationError('')if self.superusers_only and (not user or not user.is_superuser):raise forms.ValidationError('')return cleaned_data", "docstring": "When receiving the filled out form, check for valid access.", "id": "f1230:c1:m1"} {"signature": "def __init__(self, passwords=None, *args, **kwargs):", "body": "super(LockdownForm, self).__init__(*args, **kwargs)if passwords is None:passwords = settings.PASSWORDSself.valid_passwords = passwords", "docstring": "Initialize the form by setting the valid passwords.", "id": "f1230:c0:m0"} {"signature": "def clean_password(self):", "body": "value = self.cleaned_data.get('')if value not in self.valid_passwords:raise forms.ValidationError('')return value", "docstring": "Check that the password is valid.", "id": "f1230:c0:m1"} {"signature": "def show_form(self):", "body": "return bool(self.valid_passwords)", "docstring": "Show the form if there are any valid passwords.", "id": "f1230:c0:m4"} {"signature": "def generate_token(self):", "body": "return self.cleaned_data['']", "docstring": "Save the password as the authentication token.\n\n It's acceptable to store the password raw, as it is stored server-side\n in the user's session.", "id": "f1230:c0:m2"} {"signature": "def redirect(self, request):", "body": "url = request.pathquerystring = request.GET.copy()if self.logout_key and self.logout_key in request.GET:del querystring[self.logout_key]if querystring:url = '' % (url, querystring.urlencode())return HttpResponseRedirect(url)", "docstring": "Handle redirects properly.", "id": "f1231:c0:m3"} {"signature": "def __init__(self, get_response=None, form=None, until_date=None,after_date=None, logout_key=None, session_key=None,url_exceptions=None, view_exceptions=None,remote_addr_exceptions=None, trusted_proxies=None,extra_context=None, **form_kwargs):", "body": "if logout_key is None:logout_key = settings.LOGOUT_KEYif session_key is None:session_key = settings.SESSION_KEYself.get_response = get_responseself.form = formself.form_kwargs = form_kwargsself.until_date = until_dateself.after_date = after_dateself.logout_key = logout_keyself.session_key = session_keyself.url_exceptions = url_exceptionsself.remote_addr_exceptions = remote_addr_exceptionsself.trusted_proxies = trusted_proxiesself.extra_context = extra_context", "docstring": "Initialize the middleware, by setting the configuration values.", "id": "f1231:c0:m0"} {"signature": "def process_request(self, request):", "body": "try:session = request.sessionexcept AttributeError:raise ImproperlyConfigured('''')if settings.ENABLED is False:return Noneif self.remote_addr_exceptions:remote_addr_exceptions = self.remote_addr_exceptionselse:remote_addr_exceptions = settings.REMOTE_ADDR_EXCEPTIONSif remote_addr_exceptions:trusted_proxies = self.trusted_proxies or settings.TRUSTED_PROXIESremote_addr = request.META.get('')if remote_addr in remote_addr_exceptions:return Noneif remote_addr in trusted_proxies:x_forwarded_for = request.META.get('')if x_forwarded_for:remote_addr = x_forwarded_for.split('')[-].strip()if remote_addr in remote_addr_exceptions:return Noneif self.url_exceptions:url_exceptions = compile_url_exceptions(self.url_exceptions)else:url_exceptions = compile_url_exceptions(settings.URL_EXCEPTIONS)for pattern in url_exceptions:if pattern.search(request.path):return 
Nonetry:resolved_path = resolve(request.path)except Resolver404:passelse:if resolved_path.func in settings.VIEW_EXCEPTIONS:return Noneif self.until_date:until_date = self.until_dateelse:until_date = settings.UNTIL_DATEif self.after_date:after_date = self.after_dateelse:after_date = settings.AFTER_DATEif until_date or after_date:locked_date = Falseif until_date and datetime.datetime.now() < until_date:locked_date = Trueif after_date and datetime.datetime.now() > after_date:locked_date = Trueif not locked_date:return Noneform_data = request.POST if request.method == '' else Noneif self.form:form_class = self.formelse:form_class = get_lockdown_form(settings.FORM)form = form_class(data=form_data, **self.form_kwargs)authorized = Falsetoken = session.get(self.session_key)if hasattr(form, ''):if form.authenticate(token):authorized = Trueelif token is True:authorized = Trueif authorized and self.logout_key and self.logout_key in request.GET:if self.session_key in session:del session[self.session_key]querystring = request.GET.copy()del querystring[self.logout_key]return self.redirect(request)if authorized:return Noneif form.is_valid():if hasattr(form, ''):token = form.generate_token()else:token = Truesession[self.session_key] = tokenreturn self.redirect(request)page_data = {'': until_date, '': after_date}if not hasattr(form, '') or form.show_form():page_data[''] = formif self.extra_context:page_data.update(self.extra_context)return render(request, '', page_data)", "docstring": "Check if each request is allowed to access the current resource.", "id": "f1231:c0:m2"} {"signature": "def lockdown(*args, **kwargs):", "body": "return decorator_from_middleware_with_args(LockdownMiddleware)(*args,**kwargs)", "docstring": "Define a decorator based on the LockdownMiddleware.\n\n This decorator takes the same arguments as the middleware, but allows a\n more granular locking than the middleware.", "id": "f1233:m0"} {"signature": "@add.command()@proto_dataset_uri_argument@click.argument(\"\")@click.argument(\"\")@click.argument(\"\")def metadata(proto_dataset_uri, relpath_in_dataset, key, value):", "body": "proto_dataset = dtoolcore.ProtoDataSet.from_uri(uri=proto_dataset_uri,config_path=CONFIG_PATH)proto_dataset.add_item_metadata(handle=relpath_in_dataset,key=key,value=value)", "docstring": "Add metadata to a file in the proto dataset.", "id": "f1243:m13"} {"signature": "@click.group()def readme():", "body": "", "docstring": "Edit / show readme content.\n\n The readme content is descriptive metadata describing the dataset.", "id": "f1243:m5"} {"signature": "@click.group()def add():", "body": "", "docstring": "Add items and item metadata to a proto dataset.", "id": "f1243:m11"} {"signature": "@click.command()@base_dataset_uri_argument@click.argument(\"\", default=\"\")def name(dataset_uri, new_name):", "body": "if new_name != \"\":_validate_name(new_name)try:dataset = dtoolcore.ProtoDataSet.from_uri(uri=dataset_uri,config_path=CONFIG_PATH)except dtoolcore.DtoolCoreTypeError:dataset = dtoolcore.DataSet.from_uri(uri=dataset_uri,config_path=CONFIG_PATH)dataset.update_name(new_name)admin_metadata = dtoolcore._admin_metadata_from_uri(uri=dataset_uri,config_path=CONFIG_PATH)click.secho(admin_metadata[\"\"])", "docstring": "Report / update the name of the dataset.\n\nIt is only possible to update the name of a proto dataset,\ni.e. 
a dataset that has not yet been frozen.", "id": "f1243:m4"} {"signature": "@click.command()@click.option(\"\", is_flag=True, help=\"\")@click.option(\"\", \"\", is_flag=True, help=\"\")@dataset_uri_argument@click.argument(\"\")def copy(resume, quiet, dataset_uri, dest_base_uri):", "body": "click.secho(\"\",fg=\"\",err=True)click.secho(\"\",fg=\"\",err=True)_copy(resume, quiet, dataset_uri, dest_base_uri)", "docstring": "DEPRECATED: Copy a dataset to a different location.", "id": "f1243:m16"} {"signature": "@readme.command()@proto_dataset_uri_argument@click.argument('', type=click.File(''))def write(proto_dataset_uri, input):", "body": "proto_dataset = dtoolcore.ProtoDataSet.from_uri(uri=proto_dataset_uri)_validate_and_put_readme(proto_dataset, input.read())", "docstring": "Use YAML from a file or stdin to populate the readme.\n\n To stream content from stdin use \"-\", e.g.\n\n echo \"desc: my data\" | dtool readme write -", "id": "f1243:m10"} {"signature": "@click.command()@click.option(\"\", \"\", is_flag=True, help=\"\")@click.argument(\"\")@click.argument(\"\", default=\"\")@click.option(\"\", \"\", type=click.Path(exists=True))def create(quiet, name, base_uri, symlink_path):", "body": "_validate_name(name)admin_metadata = dtoolcore.generate_admin_metadata(name)parsed_base_uri = dtoolcore.utils.generous_parse_uri(base_uri)if parsed_base_uri.scheme == \"\":if symlink_path is None:raise click.UsageError(\"\") if symlink_path:base_uri = dtoolcore.utils.sanitise_uri(\"\" + parsed_base_uri.path)parsed_base_uri = dtoolcore.utils.generous_parse_uri(base_uri)proto_dataset = dtoolcore.generate_proto_dataset(admin_metadata=admin_metadata,base_uri=dtoolcore.utils.urlunparse(parsed_base_uri),config_path=CONFIG_PATH)if symlink_path:symlink_abspath = os.path.abspath(symlink_path)proto_dataset._storage_broker.symlink_path = symlink_abspathtry:proto_dataset.create()except dtoolcore.storagebroker.StorageBrokerOSError as err:raise click.UsageError(str(err))proto_dataset.put_readme(\"\")if quiet:click.secho(proto_dataset.uri)else:click.secho(\"\", nl=False, fg=\"\")click.secho(proto_dataset.uri)click.secho(\"\")step = if parsed_base_uri.scheme != \"\":click.secho(\"\".format(step))click.secho(\"\".format(proto_dataset.uri),fg=\"\")if parsed_base_uri.scheme == \"\":data_path = proto_dataset._storage_broker._data_abspathclick.secho(\"\")click.secho(\"\".format(data_path),fg=\"\")step = step + click.secho(\"\".format(step))click.secho(\"\".format(proto_dataset.uri),fg=\"\")step = step + click.secho(\"\".format(step))click.secho(\"\".format(proto_dataset.uri), fg=\"\")", "docstring": "Create a proto dataset.", "id": "f1243:m3"} {"signature": "def valid_handle(handle):", "body": "if handle.find(\"\") != -:return Falsereturn True", "docstring": "Return false if the handle is invalid.\n\n For example if the handle contains a newline.", "id": "f1246:m0"} {"signature": "def logsigmoid(a):", "body": "return -tf.nn.softplus(-a)", "docstring": "Equivalent to tf.log(tf.sigmoid(a))", "id": "f1258:m0"} {"signature": "def computeStatsEigen(self):", "body": "with tf.device(''):def removeNone(tensor_list):local_list = []for item in tensor_list:if item is not None:local_list.append(item)return local_listdef copyStats(var_list):print(\"\")redundant_stats = {}copied_list = []for item in var_list:if item is not None:if item not in redundant_stats:if self._use_float64:redundant_stats[item] = tf.cast(tf.identity(item), tf.float64)else:redundant_stats[item] = 
tf.identity(item)copied_list.append(redundant_stats[item])else:copied_list.append(None)return copied_liststats_eigen = self.stats_eigencomputedEigen = {}eigen_reverse_lookup = {}updateOps = []with tf.control_dependencies([]):for stats_var in stats_eigen:if stats_var not in computedEigen:eigens = tf.self_adjoint_eig(stats_var)e = eigens[0]Q = eigens[1]if self._use_float64:e = tf.cast(e, tf.float32)Q = tf.cast(Q, tf.float32)updateOps.append(e)updateOps.append(Q)computedEigen[stats_var] = {'': e, '': Q}eigen_reverse_lookup[e] = stats_eigen[stats_var]['']eigen_reverse_lookup[Q] = stats_eigen[stats_var]['']self.eigen_reverse_lookup = eigen_reverse_lookupself.eigen_update_list = updateOpsif KFAC_DEBUG:self.eigen_update_list = [item for item in updateOps]with tf.control_dependencies(updateOps):updateOps.append(tf.Print(tf.constant(0.), [tf.convert_to_tensor('')]))return updateOps", "docstring": "compute the eigen decomp using copied var stats to avoid concurrent read/write from other queue", "id": "f1267:c0:m8"} {"signature": "def convert_episode_to_batch_major(episode):", "body": "episode_batch = {}for key in episode.keys():val = np.array(episode[key]).copy()episode_batch[key] = val.swapaxes(0, 1)return episode_batch", "docstring": "Converts an episode to have the batch dimension in the major (first)\n dimension.", "id": "f1268:m6"} {"signature": "def store_args(method):", "body": "argspec = inspect.getfullargspec(method)defaults = {}if argspec.defaults is not None:defaults = dict(zip(argspec.args[-len(argspec.defaults):], argspec.defaults))if argspec.kwonlydefaults is not None:defaults.update(argspec.kwonlydefaults)arg_names = argspec.args[1:]@functools.wraps(method)def wrapper(*positional_args, **keyword_args):self = positional_args[0]args = defaults.copy()for name, value in zip(arg_names, positional_args[1:]):args[name] = valueargs.update(keyword_args)self.__dict__.update(args)return method(*positional_args, **keyword_args)return wrapper", "docstring": "Stores provided method args as instance attributes.", "id": "f1268:m0"} {"signature": "def transitions_in_episode_batch(episode_batch):", "body": "shape = episode_batch[''].shapereturn shape[0] * shape[1]", "docstring": "Number of transitions in a given episode batch.", "id": "f1268:m7"} {"signature": "def import_function(spec):", "body": "mod_name, fn_name = spec.split(':')module = importlib.import_module(mod_name)fn = getattr(module, fn_name)return fn", "docstring": "Import a function identified by a string like \"pkg.module:fn_name\".", "id": "f1268:m1"} {"signature": "def __init__(self, buffer_shapes, size_in_transitions, T, sample_transitions):", "body": "self.buffer_shapes = buffer_shapesself.size = size_in_transitions // Tself.T = Tself.sample_transitions = sample_transitionsself.buffers = {key: np.empty([self.size, *shape])for key, shape in buffer_shapes.items()}self.current_size = 0self.n_transitions_stored = 0self.lock = threading.Lock()", "docstring": "Creates a replay buffer.\n\n Args:\n buffer_shapes (dict of ints): the shape for all buffers that are used in the replay\n buffer\n size_in_transitions (int): the size of the buffer, measured in transitions\n T (int): the time horizon for episodes\n sample_transitions (function): a function that samples from the replay buffer", "id": "f1273:c0:m0"} {"signature": "def sample(self, batch_size):", "body": "buffers = {}with self.lock:assert self.current_size > 0for key in self.buffers.keys():buffers[key] = self.buffers[key][:self.current_size]buffers[''] = buffers[''][:, :, :]buffers[''] = 
buffers[''][:, :, :]transitions = self.sample_transitions(buffers, batch_size)for key in (['', '', ''] + list(self.buffers.keys())):assert key in transitions, \"\" % keyreturn transitions", "docstring": "Returns a dict {key: array(batch_size x shapes[key])}", "id": "f1273:c0:m2"} {"signature": "def __init__(self, size, eps=, default_clip_range=np.inf, sess=None):", "body": "self.size = sizeself.eps = epsself.default_clip_range = default_clip_rangeself.sess = sess if sess is not None else tf.get_default_session()self.local_sum = np.zeros(self.size, np.float32)self.local_sumsq = np.zeros(self.size, np.float32)self.local_count = np.zeros(, np.float32)self.sum_tf = tf.get_variable(initializer=tf.zeros_initializer(), shape=self.local_sum.shape, name='',trainable=False, dtype=tf.float32)self.sumsq_tf = tf.get_variable(initializer=tf.zeros_initializer(), shape=self.local_sumsq.shape, name='',trainable=False, dtype=tf.float32)self.count_tf = tf.get_variable(initializer=tf.ones_initializer(), shape=self.local_count.shape, name='',trainable=False, dtype=tf.float32)self.mean = tf.get_variable(initializer=tf.zeros_initializer(), shape=(self.size,), name='',trainable=False, dtype=tf.float32)self.std = tf.get_variable(initializer=tf.ones_initializer(), shape=(self.size,), name='',trainable=False, dtype=tf.float32)self.count_pl = tf.placeholder(name='', shape=(,), dtype=tf.float32)self.sum_pl = tf.placeholder(name='', shape=(self.size,), dtype=tf.float32)self.sumsq_pl = tf.placeholder(name='', shape=(self.size,), dtype=tf.float32)self.update_op = tf.group(self.count_tf.assign_add(self.count_pl),self.sum_tf.assign_add(self.sum_pl),self.sumsq_tf.assign_add(self.sumsq_pl))self.recompute_op = tf.group(tf.assign(self.mean, self.sum_tf / self.count_tf),tf.assign(self.std, tf.sqrt(tf.maximum(tf.square(self.eps),self.sumsq_tf / self.count_tf - tf.square(self.sum_tf / self.count_tf)))),)self.lock = threading.Lock()", "docstring": "A normalizer that ensures that observations are approximately distributed according to\n a standard Normal distribution (i.e. have mean zero and variance one).\n\n Args:\n size (int): the size of the observation to be normalized\n eps (float): a small constant that avoids underflows\n default_clip_range (float): normalized observations are clipped to be in\n [-default_clip_range, default_clip_range]\n sess (object): the TensorFlow session to be used", "id": "f1274:c0:m0"} {"signature": "@store_argsdef __init__(self, venv, policy, dims, logger, T, rollout_batch_size=,exploit=False, use_target_net=False, compute_Q=False, noise_eps=,random_eps=, history_len=, render=False, monitor=False, **kwargs):", "body": "assert self.T > self.info_keys = [key.replace('', '') for key in dims.keys() if key.startswith('')]self.success_history = deque(maxlen=history_len)self.Q_history = deque(maxlen=history_len)self.n_episodes = self.reset_all_rollouts()self.clear_history()", "docstring": "Rollout worker generates experience by interacting with one or many environments.\n\n Args:\n make_env (function): a factory function that creates a new instance of the environment\n when called\n policy (object): the policy that is used to act\n dims (dict of ints): the dimensions for observations (o), goals (g), and actions (u)\n logger (object): the logger that is used by the rollout worker\n rollout_batch_size (int): the number of parallel rollouts that should be used\n exploit (boolean): whether or not to exploit, i.e. 
to act optimally according to the\n current policy without any exploration\n use_target_net (boolean): whether or not to use the target net for rollouts\n compute_Q (boolean): whether or not to compute the Q values alongside the actions\n noise_eps (float): scale of the additive Gaussian noise\n random_eps (float): probability of selecting a completely random action\n history_len (int): length of history for statistics smoothing\n render (boolean): whether or not to render the rollouts", "id": "f1277:c0:m0"} {"signature": "def clear_history(self):", "body": "self.success_history.clear()self.Q_history.clear()", "docstring": "Clears all histories that are used for statistics", "id": "f1277:c0:m3"} {"signature": "def generate_rollouts(self):", "body": "self.reset_all_rollouts()o = np.empty((self.rollout_batch_size, self.dims['']), np.float32) ag = np.empty((self.rollout_batch_size, self.dims['']), np.float32) o[:] = self.initial_oag[:] = self.initial_agobs, achieved_goals, acts, goals, successes = [], [], [], [], []dones = []info_values = [np.empty((self.T - , self.rollout_batch_size, self.dims['' + key]), np.float32) for key in self.info_keys]Qs = []for t in range(self.T):policy_output = self.policy.get_actions(o, ag, self.g,compute_Q=self.compute_Q,noise_eps=self.noise_eps if not self.exploit else ,random_eps=self.random_eps if not self.exploit else ,use_target_net=self.use_target_net)if self.compute_Q:u, Q = policy_outputQs.append(Q)else:u = policy_outputif u.ndim == :u = u.reshape(, -)o_new = np.empty((self.rollout_batch_size, self.dims['']))ag_new = np.empty((self.rollout_batch_size, self.dims['']))success = np.zeros(self.rollout_batch_size)obs_dict_new, _, done, info = self.venv.step(u)o_new = obs_dict_new['']ag_new = obs_dict_new['']success = np.array([i.get('', ) for i in info])if any(done):breakfor i, info_dict in enumerate(info):for idx, key in enumerate(self.info_keys):info_values[idx][t, i] = info[i][key]if np.isnan(o_new).any():self.logger.warn('')self.reset_all_rollouts()return self.generate_rollouts()dones.append(done)obs.append(o.copy())achieved_goals.append(ag.copy())successes.append(success.copy())acts.append(u.copy())goals.append(self.g.copy())o[...] = o_newag[...] 
= ag_newobs.append(o.copy())achieved_goals.append(ag.copy())episode = dict(o=obs,u=acts,g=goals,ag=achieved_goals)for key, value in zip(self.info_keys, info_values):episode[''.format(key)] = valuesuccessful = np.array(successes)[-1, :]assert successful.shape == (self.rollout_batch_size,)success_rate = np.mean(successful)self.success_history.append(success_rate)if self.compute_Q:self.Q_history.append(np.mean(Qs))self.n_episodes += self.rollout_batch_sizereturn convert_episode_to_batch_major(episode)", "docstring": "Performs `rollout_batch_size` rollouts in parallel for time horizon `T` with the current\n policy acting on it accordingly.", "id": "f1277:c0:m2"} {"signature": "def logs(self, prefix=''):", "body": "logs = []logs += [('', np.mean(self.success_history))]if self.compute_Q:logs += [('', np.mean(self.Q_history))]logs += [('', self.n_episodes)]if prefix != '' and not prefix.endswith(''):return [(prefix + '' + key, val) for key, val in logs]else:return logs", "docstring": "Generates a dictionary that contains all collected statistics.", "id": "f1277:c0:m7"} {"signature": "def save_policy(self, path):", "body": "with open(path, 'wb') as f:pickle.dump(self.policy, f)", "docstring": "Pickles the current policy for later inspection.", "id": "f1277:c0:m6"} {"signature": "def __init__(self, size, alpha):", "body": "super(PrioritizedReplayBuffer, self).__init__(size)assert alpha >= 0self._alpha = alphait_capacity = 1while it_capacity < size:it_capacity *= 2self._it_sum = SumSegmentTree(it_capacity)self._it_min = MinSegmentTree(it_capacity)self._max_priority = 1.0", "docstring": "Create Prioritized Replay buffer.\n\n Parameters\n ----------\n size: int\n Max number of transitions to store in the buffer. When the buffer\n overflows the old memories are dropped.\n alpha: float\n how much prioritization is used\n (0 - no prioritization, 1 - full prioritization)\n\n See Also\n --------\n ReplayBuffer.__init__", "id": "f1281:c1:m0"} {"signature": "def __init__(self, size):", "body": "self._storage = []self._maxsize = sizeself._next_idx = 0", "docstring": "Create Replay buffer.\n\n Parameters\n ----------\n size: int\n Max number of transitions to store in the buffer. When the buffer\n overflows the old memories are dropped.", "id": "f1281:c0:m0"}
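The `alpha` parameter documented in `f1281:c1:m0` controls how strongly sampling concentrates on high-priority transitions: stored priorities are raised to the power `alpha`, so `alpha=0` degenerates to uniform sampling and `alpha=1` samples proportionally to priority. The real buffer answers the required prefix-sum queries with the `SumSegmentTree`/`MinSegmentTree` pair in O(log n); the plain-numpy sketch below only illustrates the probability math, not that data structure:

```python
import numpy as np

def sample_indices(priorities, batch_size, alpha, rng=np.random):
    # P(i) = p_i ** alpha / sum_k p_k ** alpha
    scaled = np.asarray(priorities, dtype=np.float64) ** alpha
    probs = scaled / scaled.sum()
    return rng.choice(len(priorities), size=batch_size, p=probs)

priorities = [1.0, 1.0, 8.0]   # e.g. |TD error| of three stored transitions
counts = np.bincount(sample_indices(priorities, 10000, alpha=1.0))
print(counts / counts.sum())   # roughly [0.1, 0.1, 0.8]
print(np.bincount(sample_indices(priorities, 10000, alpha=0.0)) / 10000.0)  # roughly uniform
```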
{"signature": "def update_priorities(self, idxes, priorities):", "body": "assert len(idxes) == len(priorities)for idx, priority in zip(idxes, priorities):assert priority > 0assert 0 <= idx < len(self._storage)self._it_sum[idx] = priority ** self._alphaself._it_min[idx] = priority ** self._alphaself._max_priority = max(self._max_priority, priority)", "docstring": "Update priorities of sampled transitions.\n\n sets priority of transition at index idxes[i] in buffer\n to priorities[i].\n\n Parameters\n ----------\n idxes: [int]\n List of idxes of sampled transitions\n priorities: [float]\n List of updated priorities corresponding to\n transitions at the sampled idxes denoted by\n variable `idxes`.", "id": "f1281:c1:m4"} {"signature": "def make_feed_dict(self, data):", "body": "raise NotImplementedError", "docstring": "Given data input it to the placeholder(s).", "id": "f1284:c0:m2"} {"signature": "def __init__(self, placeholder):", "body": "super().__init__(placeholder.name)self._placeholder = placeholder", "docstring": "Wrapper for regular tensorflow placeholder.", "id": "f1284:c1:m0"} {"signature": "def build_train(make_obs_ph, q_func, num_actions, optimizer, grad_norm_clipping=None, gamma=1.0,double_q=True, scope=\"\", reuse=None, param_noise=False, param_noise_filter_func=None):", "body": "if param_noise:act_f = build_act_with_param_noise(make_obs_ph, q_func, num_actions, scope=scope, reuse=reuse,param_noise_filter_func=param_noise_filter_func)else:act_f = build_act(make_obs_ph, q_func, num_actions, scope=scope, reuse=reuse)with tf.variable_scope(scope, reuse=reuse):obs_t_input = make_obs_ph(\"\")act_t_ph = tf.placeholder(tf.int32, [None], name=\"\")rew_t_ph = tf.placeholder(tf.float32, [None], name=\"\")obs_tp1_input = make_obs_ph(\"\")done_mask_ph = tf.placeholder(tf.float32, [None], name=\"\")importance_weights_ph = tf.placeholder(tf.float32, [None], name=\"\")q_t = q_func(obs_t_input.get(), num_actions, scope=\"\", reuse=True) q_func_vars = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope=tf.get_variable_scope().name + \"\")q_tp1 = q_func(obs_tp1_input.get(), num_actions, scope=\"\")target_q_func_vars = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope=tf.get_variable_scope().name + \"\")q_t_selected = tf.reduce_sum(q_t * tf.one_hot(act_t_ph, num_actions), 1)if double_q:q_tp1_using_online_net = q_func(obs_tp1_input.get(), num_actions, scope=\"\", reuse=True)q_tp1_best_using_online_net = tf.argmax(q_tp1_using_online_net, 1)q_tp1_best = tf.reduce_sum(q_tp1 * tf.one_hot(q_tp1_best_using_online_net, num_actions), 1)else:q_tp1_best = tf.reduce_max(q_tp1, 1)q_tp1_best_masked = (1.0 - done_mask_ph) * q_tp1_bestq_t_selected_target = rew_t_ph + gamma * q_tp1_best_maskedtd_error = q_t_selected - tf.stop_gradient(q_t_selected_target)errors = U.huber_loss(td_error)weighted_error = tf.reduce_mean(importance_weights_ph * errors)if grad_norm_clipping is not None:gradients = optimizer.compute_gradients(weighted_error, var_list=q_func_vars)for i, (grad, var) in enumerate(gradients):if grad is not None:gradients[i] = (tf.clip_by_norm(grad, grad_norm_clipping), var)optimize_expr = optimizer.apply_gradients(gradients)else:optimize_expr = optimizer.minimize(weighted_error, var_list=q_func_vars)update_target_expr = []for var, var_target in zip(sorted(q_func_vars, key=lambda v: v.name),sorted(target_q_func_vars, key=lambda v: v.name)):update_target_expr.append(var_target.assign(var))update_target_expr = 
tf.group(*update_target_expr)train = U.function(inputs=[obs_t_input,act_t_ph,rew_t_ph,obs_tp1_input,done_mask_ph,importance_weights_ph],outputs=td_error,updates=[optimize_expr])update_target = U.function([], [], updates=[update_target_expr])q_values = U.function([obs_t_input], q_t)return act_f, train, update_target, {'': q_values}", "docstring": "Creates the train function:\n\n Parameters\n ----------\n make_obs_ph: str -> tf.placeholder or TfInput\n a function that takes a name and creates a placeholder of input with that name\n q_func: (tf.Variable, int, str, bool) -> tf.Variable\n the model that takes the following inputs:\n observation_in: object\n the output of observation placeholder\n num_actions: int\n number of actions\n scope: str\n reuse: bool\n should be passed to outer variable scope\n and returns a tensor of shape (batch_size, num_actions) with values of every action.\n num_actions: int\n number of actions\n reuse: bool\n whether or not to reuse the graph variables\n optimizer: tf.train.Optimizer\n optimizer to use for the Q-learning objective.\n grad_norm_clipping: float or None\n clip gradient norms to this value. If None no clipping is performed.\n gamma: float\n discount rate.\n double_q: bool\n if true will use Double Q Learning (https://arxiv.org/abs/1509.06461).\n In general it is a good idea to keep it enabled.\n scope: str or VariableScope\n optional scope for variable_scope.\n reuse: bool or None\n whether or not the variables should be reused. To be able to reuse the scope must be given.\n param_noise: bool\n whether or not to use parameter space noise (https://arxiv.org/abs/1706.01905)\n param_noise_filter_func: tf.Variable -> bool\n function that decides whether or not a variable should be perturbed. Only applicable\n if param_noise is True. If set to None, default_param_noise_filter is used by default.\n\n Returns\n -------\n act: (tf.Variable, bool, float) -> tf.Variable\n function to select and action given observation.\n` See the top of the file for details.\n train: (object, np.array, np.array, object, np.array, np.array) -> np.array\n optimize the error in Bellman's equation.\n` See the top of the file for details.\n update_target: () -> ()\n copy the parameters from optimized Q function to the target Q function.\n` See the top of the file for details.\n debug: {str: function}\n a bunch of functions to print debug data like q_values.", "id": "f1285:m6"} {"signature": "def scope_name():", "body": "return tf.get_variable_scope().name", "docstring": "Returns the name of current scope as a string, e.g. 
deepq/q_func", "id": "f1285:m1"} {"signature": "def build_act_with_param_noise(make_obs_ph, q_func, num_actions, scope=\"\", reuse=None, param_noise_filter_func=None):", "body": "if param_noise_filter_func is None:param_noise_filter_func = default_param_noise_filterwith tf.variable_scope(scope, reuse=reuse):observations_ph = make_obs_ph(\"\")stochastic_ph = tf.placeholder(tf.bool, (), name=\"\")update_eps_ph = tf.placeholder(tf.float32, (), name=\"\")update_param_noise_threshold_ph = tf.placeholder(tf.float32, (), name=\"\")update_param_noise_scale_ph = tf.placeholder(tf.bool, (), name=\"\")reset_ph = tf.placeholder(tf.bool, (), name=\"\")eps = tf.get_variable(\"\", (), initializer=tf.constant_initializer())param_noise_scale = tf.get_variable(\"\", (), initializer=tf.constant_initializer(), trainable=False)param_noise_threshold = tf.get_variable(\"\", (), initializer=tf.constant_initializer(), trainable=False)q_values = q_func(observations_ph.get(), num_actions, scope=\"\")q_values_perturbed = q_func(observations_ph.get(), num_actions, scope=\"\")def perturb_vars(original_scope, perturbed_scope):all_vars = scope_vars(absolute_scope_name(original_scope))all_perturbed_vars = scope_vars(absolute_scope_name(perturbed_scope))assert len(all_vars) == len(all_perturbed_vars)perturb_ops = []for var, perturbed_var in zip(all_vars, all_perturbed_vars):if param_noise_filter_func(perturbed_var):op = tf.assign(perturbed_var, var + tf.random_normal(shape=tf.shape(var), mean=, stddev=param_noise_scale))else:op = tf.assign(perturbed_var, var)perturb_ops.append(op)assert len(perturb_ops) == len(all_vars)return tf.group(*perturb_ops)q_values_adaptive = q_func(observations_ph.get(), num_actions, scope=\"\")perturb_for_adaption = perturb_vars(original_scope=\"\", perturbed_scope=\"\")kl = tf.reduce_sum(tf.nn.softmax(q_values) * (tf.log(tf.nn.softmax(q_values)) - tf.log(tf.nn.softmax(q_values_adaptive))), axis=-)mean_kl = tf.reduce_mean(kl)def update_scale():with tf.control_dependencies([perturb_for_adaption]):update_scale_expr = tf.cond(mean_kl < param_noise_threshold,lambda: param_noise_scale.assign(param_noise_scale * ),lambda: param_noise_scale.assign(param_noise_scale / ),)return update_scale_exprupdate_param_noise_threshold_expr = param_noise_threshold.assign(tf.cond(update_param_noise_threshold_ph >= ,lambda: update_param_noise_threshold_ph, lambda: param_noise_threshold))deterministic_actions = tf.argmax(q_values_perturbed, axis=)batch_size = tf.shape(observations_ph.get())[]random_actions = tf.random_uniform(tf.stack([batch_size]), minval=, maxval=num_actions, dtype=tf.int64)chose_random = tf.random_uniform(tf.stack([batch_size]), minval=, maxval=, dtype=tf.float32) < epsstochastic_actions = tf.where(chose_random, random_actions, deterministic_actions)output_actions = tf.cond(stochastic_ph, lambda: stochastic_actions, lambda: deterministic_actions)update_eps_expr = eps.assign(tf.cond(update_eps_ph >= , lambda: update_eps_ph, lambda: eps))updates = [update_eps_expr,tf.cond(reset_ph, lambda: perturb_vars(original_scope=\"\", perturbed_scope=\"\"), lambda: tf.group(*[])),tf.cond(update_param_noise_scale_ph, lambda: update_scale(), lambda: tf.Variable(, trainable=False)),update_param_noise_threshold_expr,]_act = U.function(inputs=[observations_ph, stochastic_ph, update_eps_ph, reset_ph, update_param_noise_threshold_ph, update_param_noise_scale_ph],outputs=output_actions,givens={update_eps_ph: -, stochastic_ph: True, reset_ph: False, update_param_noise_threshold_ph: False, update_param_noise_scale_ph: 
False},updates=updates)def act(ob, reset=False, update_param_noise_threshold=False, update_param_noise_scale=False, stochastic=True, update_eps=-):return _act(ob, stochastic, update_eps, reset, update_param_noise_threshold, update_param_noise_scale)return act", "docstring": "Creates the act function with support for parameter space noise exploration (https://arxiv.org/abs/1706.01905):\n\n Parameters\n ----------\n make_obs_ph: str -> tf.placeholder or TfInput\n a function that take a name and creates a placeholder of input with that name\n q_func: (tf.Variable, int, str, bool) -> tf.Variable\n the model that takes the following inputs:\n observation_in: object\n the output of observation placeholder\n num_actions: int\n number of actions\n scope: str\n reuse: bool\n should be passed to outer variable scope\n and returns a tensor of shape (batch_size, num_actions) with values of every action.\n num_actions: int\n number of actions.\n scope: str or VariableScope\n optional scope for variable_scope.\n reuse: bool or None\n whether or not the variables should be reused. To be able to reuse the scope must be given.\n param_noise_filter_func: tf.Variable -> bool\n function that decides whether or not a variable should be perturbed. Only applicable\n if param_noise is True. If set to None, default_param_noise_filter is used by default.\n\n Returns\n -------\n act: (tf.Variable, bool, float, bool, float, bool) -> tf.Variable\n function to select and action given observation.\n` See the top of the file for details.", "id": "f1285:m5"} {"signature": "def build_act(make_obs_ph, q_func, num_actions, scope=\"\", reuse=None):", "body": "with tf.variable_scope(scope, reuse=reuse):observations_ph = make_obs_ph(\"\")stochastic_ph = tf.placeholder(tf.bool, (), name=\"\")update_eps_ph = tf.placeholder(tf.float32, (), name=\"\")eps = tf.get_variable(\"\", (), initializer=tf.constant_initializer())q_values = q_func(observations_ph.get(), num_actions, scope=\"\")deterministic_actions = tf.argmax(q_values, axis=)batch_size = tf.shape(observations_ph.get())[]random_actions = tf.random_uniform(tf.stack([batch_size]), minval=, maxval=num_actions, dtype=tf.int64)chose_random = tf.random_uniform(tf.stack([batch_size]), minval=, maxval=, dtype=tf.float32) < epsstochastic_actions = tf.where(chose_random, random_actions, deterministic_actions)output_actions = tf.cond(stochastic_ph, lambda: stochastic_actions, lambda: deterministic_actions)update_eps_expr = eps.assign(tf.cond(update_eps_ph >= , lambda: update_eps_ph, lambda: eps))_act = U.function(inputs=[observations_ph, stochastic_ph, update_eps_ph],outputs=output_actions,givens={update_eps_ph: -, stochastic_ph: True},updates=[update_eps_expr])def act(ob, stochastic=True, update_eps=-):return _act(ob, stochastic, update_eps)return act", "docstring": "Creates the act function:\n\n Parameters\n ----------\n make_obs_ph: str -> tf.placeholder or TfInput\n a function that take a name and creates a placeholder of input with that name\n q_func: (tf.Variable, int, str, bool) -> tf.Variable\n the model that takes the following inputs:\n observation_in: object\n the output of observation placeholder\n num_actions: int\n number of actions\n scope: str\n reuse: bool\n should be passed to outer variable scope\n and returns a tensor of shape (batch_size, num_actions) with values of every action.\n num_actions: int\n number of actions.\n scope: str or VariableScope\n optional scope for variable_scope.\n reuse: bool or None\n whether or not the variables should be reused. 
To be able to reuse the scope must be given.\n\n Returns\n -------\n act: (tf.Variable, bool, float) -> tf.Variable\n function to select and action given observation.\n` See the top of the file for details.", "id": "f1285:m4"} {"signature": "def mlp(hiddens=[], layer_norm=False):", "body": "return lambda *args, **kwargs: _mlp(hiddens, layer_norm=layer_norm, *args, **kwargs)", "docstring": "This model takes as input an observation and returns values of all actions.\n\n Parameters\n ----------\n hiddens: [int]\n list of sizes of hidden layers\n layer_norm: bool\n if true applies layer normalization for every layer\n as described in https://arxiv.org/abs/1607.06450\n\n Returns\n -------\n q_func: function\n q_function for DQN algorithm.", "id": "f1286:m1"} {"signature": "def get_task(benchmark, env_id):", "body": "return next(filter(lambda task: task[''] == env_id, benchmark['']), None)", "docstring": "Get a task by env_id. Return None if the benchmark doesn't have the env", "id": "f1300:m3"} {"signature": "def learn(*,network,env,total_timesteps,timesteps_per_batch=, max_kl=,cg_iters=,gamma=,lam=, seed=None,ent_coef=,cg_damping=,vf_stepsize=,vf_iters =,max_episodes=, max_iters=, callback=None,load_path=None,**network_kwargs):", "body": "if MPI is not None:nworkers = MPI.COMM_WORLD.Get_size()rank = MPI.COMM_WORLD.Get_rank()else:nworkers = rank = cpus_per_worker = U.get_session(config=tf.ConfigProto(allow_soft_placement=True,inter_op_parallelism_threads=cpus_per_worker,intra_op_parallelism_threads=cpus_per_worker))policy = build_policy(env, network, value_network='', **network_kwargs)set_global_seeds(seed)np.set_printoptions(precision=)ob_space = env.observation_spaceac_space = env.action_spaceob = observation_placeholder(ob_space)with tf.variable_scope(\"\"):pi = policy(observ_placeholder=ob)with tf.variable_scope(\"\"):oldpi = policy(observ_placeholder=ob)atarg = tf.placeholder(dtype=tf.float32, shape=[None]) ret = tf.placeholder(dtype=tf.float32, shape=[None]) ac = pi.pdtype.sample_placeholder([None])kloldnew = oldpi.pd.kl(pi.pd)ent = pi.pd.entropy()meankl = tf.reduce_mean(kloldnew)meanent = tf.reduce_mean(ent)entbonus = ent_coef * meanentvferr = tf.reduce_mean(tf.square(pi.vf - ret))ratio = tf.exp(pi.pd.logp(ac) - oldpi.pd.logp(ac)) surrgain = tf.reduce_mean(ratio * atarg)optimgain = surrgain + entbonuslosses = [optimgain, meankl, entbonus, surrgain, meanent]loss_names = [\"\", \"\", \"\", \"\", \"\"]dist = meanklall_var_list = get_trainable_variables(\"\")var_list = get_pi_trainable_variables(\"\")vf_var_list = get_vf_trainable_variables(\"\")vfadam = MpiAdam(vf_var_list)get_flat = U.GetFlat(var_list)set_from_flat = U.SetFromFlat(var_list)klgrads = tf.gradients(dist, var_list)flat_tangent = tf.placeholder(dtype=tf.float32, shape=[None], name=\"\")shapes = [var.get_shape().as_list() for var in var_list]start = tangents = []for shape in shapes:sz = U.intprod(shape)tangents.append(tf.reshape(flat_tangent[start:start+sz], shape))start += szgvp = tf.add_n([tf.reduce_sum(g*tangent) for (g, tangent) in zipsame(klgrads, tangents)]) fvp = U.flatgrad(gvp, var_list)assign_old_eq_new = U.function([],[], updates=[tf.assign(oldv, newv)for (oldv, newv) in zipsame(get_variables(\"\"), get_variables(\"\"))])compute_losses = U.function([ob, ac, atarg], losses)compute_lossandgrad = U.function([ob, ac, atarg], losses + [U.flatgrad(optimgain, var_list)])compute_fvp = U.function([flat_tangent, ob, ac, atarg], fvp)compute_vflossandgrad = U.function([ob, ret], U.flatgrad(vferr, vf_var_list))@contextmanagerdef 
timed(msg):if rank == :print(colorize(msg, color=''))tstart = time.time()yieldprint(colorize(\"\"%(time.time() - tstart), color=''))else:yielddef allmean(x):assert isinstance(x, np.ndarray)if MPI is not None:out = np.empty_like(x)MPI.COMM_WORLD.Allreduce(x, out, op=MPI.SUM)out /= nworkerselse:out = np.copy(x)return outU.initialize()if load_path is not None:pi.load(load_path)th_init = get_flat()if MPI is not None:MPI.COMM_WORLD.Bcast(th_init, root=)set_from_flat(th_init)vfadam.sync()print(\"\", th_init.sum(), flush=True)seg_gen = traj_segment_generator(pi, env, timesteps_per_batch, stochastic=True)episodes_so_far = timesteps_so_far = iters_so_far = tstart = time.time()lenbuffer = deque(maxlen=) rewbuffer = deque(maxlen=) if sum([max_iters>, total_timesteps>, max_episodes>])==:return piassert sum([max_iters>, total_timesteps>, max_episodes>]) < ,''while True:if callback: callback(locals(), globals())if total_timesteps and timesteps_so_far >= total_timesteps:breakelif max_episodes and episodes_so_far >= max_episodes:breakelif max_iters and iters_so_far >= max_iters:breaklogger.log(\"\"%iters_so_far)with timed(\"\"):seg = seg_gen.__next__()add_vtarg_and_adv(seg, gamma, lam)ob, ac, atarg, tdlamret = seg[\"\"], seg[\"\"], seg[\"\"], seg[\"\"]vpredbefore = seg[\"\"] atarg = (atarg - atarg.mean()) / atarg.std() if hasattr(pi, \"\"): pi.ret_rms.update(tdlamret)if hasattr(pi, \"\"): pi.ob_rms.update(ob) args = seg[\"\"], seg[\"\"], atargfvpargs = [arr[::] for arr in args]def fisher_vector_product(p):return allmean(compute_fvp(p, *fvpargs)) + cg_damping * passign_old_eq_new() with timed(\"\"):*lossbefore, g = compute_lossandgrad(*args)lossbefore = allmean(np.array(lossbefore))g = allmean(g)if np.allclose(g, ):logger.log(\"\")else:with timed(\"\"):stepdir = cg(fisher_vector_product, g, cg_iters=cg_iters, verbose=rank==)assert np.isfinite(stepdir).all()shs = *stepdir.dot(fisher_vector_product(stepdir))lm = np.sqrt(shs / max_kl)fullstep = stepdir / lmexpectedimprove = g.dot(fullstep)surrbefore = lossbefore[]stepsize = thbefore = get_flat()for _ in range():thnew = thbefore + fullstep * stepsizeset_from_flat(thnew)meanlosses = surr, kl, *_ = allmean(np.array(compute_losses(*args)))improve = surr - surrbeforelogger.log(\"\"%(expectedimprove, improve))if not np.isfinite(meanlosses).all():logger.log(\"\")elif kl > max_kl * :logger.log(\"\")elif improve < :logger.log(\"\")else:logger.log(\"\")breakstepsize *= else:logger.log(\"\")set_from_flat(thbefore)if nworkers > and iters_so_far % == :paramsums = MPI.COMM_WORLD.allgather((thnew.sum(), vfadam.getflat().sum())) assert all(np.allclose(ps, paramsums[]) for ps in paramsums[:])for (lossname, lossval) in zip(loss_names, meanlosses):logger.record_tabular(lossname, lossval)with timed(\"\"):for _ in range(vf_iters):for (mbob, mbret) in dataset.iterbatches((seg[\"\"], seg[\"\"]),include_final_partial_batch=False, batch_size=):g = allmean(compute_vflossandgrad(mbob, mbret))vfadam.update(g, vf_stepsize)logger.record_tabular(\"\", explained_variance(vpredbefore, tdlamret))lrlocal = (seg[\"\"], seg[\"\"]) if MPI is not None:listoflrpairs = MPI.COMM_WORLD.allgather(lrlocal) else:listoflrpairs = [lrlocal]lens, rews = map(flatten_lists, zip(*listoflrpairs))lenbuffer.extend(lens)rewbuffer.extend(rews)logger.record_tabular(\"\", np.mean(lenbuffer))logger.record_tabular(\"\", np.mean(rewbuffer))logger.record_tabular(\"\", len(lens))episodes_so_far += len(lens)timesteps_so_far += sum(lens)iters_so_far += logger.record_tabular(\"\", episodes_so_far)logger.record_tabular(\"\", 
timesteps_so_far)logger.record_tabular("", time.time() - tstart)if rank==0:logger.dump_tabular()return pi", "docstring": "learn a policy function with TRPO algorithm\n\nParameters:\n----------\n\nnetwork neural network to learn. Can be either string ('mlp', 'cnn', 'lstm', 'lnlstm' for basic types)\n or function that takes input placeholder and returns tuple (output, None) for feedforward nets\n or (output, (state_placeholder, state_output, mask_placeholder)) for recurrent nets\n\nenv environment (one of the gym environments or wrapped via baselines.common.vec_env.VecEnv-type class\n\ntimesteps_per_batch timesteps per gradient estimation batch\n\nmax_kl max KL divergence between old policy and new policy ( KL(pi_old || pi) )\n\nent_coef coefficient of policy entropy term in the optimization objective\n\ncg_iters number of iterations of conjugate gradient algorithm\n\ncg_damping conjugate gradient damping\n\nvf_stepsize learning rate for adam optimizer used to optimize value function loss\n\nvf_iters number of iterations of value function optimization iterations per each policy optimization step\n\ntotal_timesteps max number of timesteps\n\nmax_episodes max number of episodes\n\nmax_iters maximum number of policy optimization iterations\n\ncallback function to be called with (locals(), globals()) each policy optimization step\n\nload_path str, path to load the model from (default: None, i.e. no model is loaded)\n\n**network_kwargs keyword arguments to the policy / network builder. See baselines.common/policies.py/build_policy and arguments to a particular type of network\n\nReturns:\n-------\n\nlearnt model", "id": "f1305:m2"} {"signature": "def learn(network,env,seed=None,nsteps=5,total_timesteps=int(80e6),vf_coef=0.5,ent_coef=0.01,max_grad_norm=0.5,lr=7e-4,lrschedule='linear',epsilon=1e-5,alpha=0.99,gamma=0.99,log_interval=100,load_path=None,**network_kwargs):", "body": "set_global_seeds(seed)nenvs = env.num_envspolicy = build_policy(env, network, **network_kwargs)model = Model(policy=policy, env=env, nsteps=nsteps, ent_coef=ent_coef, vf_coef=vf_coef,max_grad_norm=max_grad_norm, lr=lr, alpha=alpha, epsilon=epsilon, total_timesteps=total_timesteps, lrschedule=lrschedule)if load_path is not None:model.load(load_path)runner = Runner(env, model, nsteps=nsteps, gamma=gamma)epinfobuf = deque(maxlen=100)nbatch = nenvs*nstepststart = time.time()for update in range(1, total_timesteps//nbatch+1):obs, states, rewards, masks, actions, values, epinfos = runner.run()epinfobuf.extend(epinfos)policy_loss, value_loss, policy_entropy = model.train(obs, states, rewards, masks, actions, values)nseconds = time.time()-tstartfps = int((update*nbatch)/nseconds)if update % log_interval == 0 or update == 1:ev = explained_variance(values, rewards)logger.record_tabular("", update)logger.record_tabular("", update*nbatch)logger.record_tabular("", fps)logger.record_tabular("", float(policy_entropy))logger.record_tabular("", float(value_loss))logger.record_tabular("", float(ev))logger.record_tabular("", safemean([epinfo[''] for epinfo in epinfobuf]))logger.record_tabular("", safemean([epinfo[''] for epinfo in epinfobuf]))logger.dump_tabular()return model", "docstring": "Main entrypoint for A2C algorithm. Train a policy with given network architecture on a given environment using a2c algorithm.\n\nParameters:\n-----------\n\nnetwork: policy network architecture. 
Either string (mlp, lstm, lnlstm, cnn_lstm, cnn, cnn_small, conv_only - see baselines.common/models.py for full list)\n specifying the standard network architecture, or a function that takes tensorflow tensor as input and returns\n tuple (output_tensor, extra_feed) where output tensor is the last network layer output, extra_feed is None for feed-forward\n neural nets, and extra_feed is a dictionary describing how to feed state into the network for recurrent neural nets.\n See baselines.common/policies.py/lstm for more details on using recurrent nets in policies\n\n\nenv: RL environment. Should implement interface similar to VecEnv (baselines.common/vec_env) or be wrapped with DummyVecEnv (baselines.common/vec_env/dummy_vec_env.py)\n\n\nseed: seed to make random number sequence in the algorithm reproducible. By default is None which means seed from system noise generator (not reproducible)\n\nnsteps: int, number of steps of the vectorized environment per update (i.e. batch size is nsteps * nenv where\n nenv is number of environment copies simulated in parallel)\n\ntotal_timesteps: int, total number of timesteps to train on (default: 80M)\n\nvf_coef: float, coefficient in front of value function loss in the total loss function (default: 0.5)\n\nent_coef: float, coefficient in front of the policy entropy in the total loss function (default: 0.01)\n\nmax_gradient_norm: float, gradient is clipped to have global L2 norm no more than this value (default: 0.5)\n\nlr: float, learning rate for RMSProp (current implementation has RMSProp hardcoded in) (default: 7e-4)\n\nlrschedule: schedule of learning rate. Can be 'linear', 'constant', or a function [0..1] -> [0..1] that takes fraction of the training progress as input and\n returns fraction of the learning rate (specified as lr) as output\n\nepsilon: float, RMSProp epsilon (stabilizes square root computation in denominator of RMSProp update) (default: 1e-5)\n\nalpha: float, RMSProp decay parameter (default: 0.99)\n\ngamma: float, reward discounting parameter (default: 0.99)\n\nlog_interval: int, specifies how frequently the logs are printed out (default: 100)\n\n**network_kwargs: keyword arguments to the policy / network builder. See baselines.common/policies.py/build_policy and arguments to a particular type of network\n For instance, 'mlp' network architecture has arguments num_hidden and num_layers.", "id": "f1319:m0"} {"signature": "def __init__(self, env):", "body": "gym.Wrapper.__init__(self, env)self.lives = 0self.was_real_done = True", "docstring": "Make end-of-life == end-of-episode, but only reset on true game over.\n Done by DeepMind for the DQN and co. 
since it helps value estimation.", "id": "f1340:c2:m0"} {"signature": "def reset(self, **kwargs):", "body": "self.env.reset(**kwargs)if self.override_num_noops is not None:noops = self.override_num_noopselse:noops = self.unwrapped.np_random.randint(1, self.noop_max + 1) assert noops > 0obs = Nonefor _ in range(noops):obs, _, done, _ = self.env.step(self.noop_action)if done:obs = self.env.reset(**kwargs)return obs", "docstring": "Do no-op action for a number of steps in [1, noop_max].", "id": "f1340:c0:m1"} {"signature": "def __init__(self, env, k):", "body": "gym.Wrapper.__init__(self, env)self.k = kself.frames = deque([], maxlen=k)shp = env.observation_space.shapeself.observation_space = spaces.Box(low=0, high=255, shape=(shp[:-1] + (shp[-1] * k,)), dtype=env.observation_space.dtype)", "docstring": "Stack k last frames.\n\n Returns lazy array, which is much more memory efficient.\n\n See Also\n --------\n baselines.common.atari_wrappers.LazyFrames", "id": "f1340:c6:m0"} {"signature": "def __init__(self, env):", "body": "gym.Wrapper.__init__(self, env)assert env.unwrapped.get_action_meanings()[1] == 'FIRE'assert len(env.unwrapped.get_action_meanings()) >= 3", "docstring": "Take action on reset for environments that are fixed until firing.", "id": "f1340:c1:m0"} {"signature": "def explained_variance(ypred,y):", "body": "assert y.ndim == 1 and ypred.ndim == 1vary = np.var(y)return np.nan if vary == 0 else 1 - np.var(y-ypred)/vary", "docstring": "Computes fraction of variance that ypred explains about y.\nReturns 1 - Var[y-ypred] / Var[y]\n\ninterpretation:\n ev=0 => might as well have predicted zero\n ev=1 => perfect prediction\n ev<0 => worse than just predicting zero", "id": "f1341:m1"} {"signature": "def make_robotics_env(env_id, seed, rank=0):", "body": "set_global_seeds(seed)env = gym.make(env_id)env = FlattenDictWrapper(env, ['', ''])env = Monitor(env, logger.get_dir() and os.path.join(logger.get_dir(), str(rank)),info_keywords=('',))env.seed(seed)return env", "docstring": "Create a wrapped, monitored gym.Env for MuJoCo.", "id": "f1342:m3"} {"signature": "def robotics_arg_parser():", "body": "parser = arg_parser()parser.add_argument('', help='', type=str, default='')parser.add_argument('', help='', type=int, default=None)parser.add_argument('', type=int, default=int())return parser", "docstring": "Create an argparse.ArgumentParser for run_mujoco.py.", "id": "f1342:m8"} {"signature": "def parse_unknown_args(args):", "body": "retval = {}preceded_by_key = Falsefor arg in args:if arg.startswith('--'):if '=' in arg:key = arg.split('=')[0][2:]value = arg.split('=')[1]retval[key] = valueelse:key = arg[2:]preceded_by_key = Trueelif preceded_by_key:retval[key] = argpreceded_by_key = Falsereturn retval", "docstring": "Parse arguments not consumed by arg parser into a dictionary", "id": "f1342:m9"} {"signature": "def make_vec_env(env_id, env_type, num_env, seed,wrapper_kwargs=None,start_index=0,reward_scale=1.0,flatten_dict_observations=True,gamestate=None):", "body": "wrapper_kwargs = wrapper_kwargs or {}mpi_rank = MPI.COMM_WORLD.Get_rank() if MPI else 0seed = seed + 10000 * mpi_rank if seed is not None else Nonelogger_dir = logger.get_dir()def make_thunk(rank):return lambda: make_env(env_id=env_id,env_type=env_type,mpi_rank=mpi_rank,subrank=rank,seed=seed,reward_scale=reward_scale,gamestate=gamestate,flatten_dict_observations=flatten_dict_observations,wrapper_kwargs=wrapper_kwargs,logger_dir=logger_dir)set_global_seeds(seed)if num_env > 1:return SubprocVecEnv([make_thunk(i + start_index) for i in range(num_env)])else:return DummyVecEnv([make_thunk(start_index)])", "docstring": "Create a wrapped, monitored SubprocVecEnv for Atari and MuJoCo.", "id": "f1342:m0"}
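The `f1341:m1` docstring spells out how to read explained variance; plugging small vectors into the `1 - Var[y - ypred] / Var[y]` formula reproduces each interpretation line:

```python
import numpy as np

def explained_variance(ypred, y):
    assert y.ndim == 1 and ypred.ndim == 1
    vary = np.var(y)
    return np.nan if vary == 0 else 1 - np.var(y - ypred) / vary

y = np.array([1.0, 2.0, 3.0, 4.0])
print(explained_variance(y, y))            # 1.0  -> perfect prediction
print(explained_variance(np.zeros(4), y))  # 0.0  -> might as well have predicted zero
print(explained_variance(-y, y))           # -3.0 -> worse than predicting zero
```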
{"signature": "def atari_arg_parser():", "body": "print('')return common_arg_parser()", "docstring": "Create an argparse.ArgumentParser for run_atari.py.", "id": "f1342:m5"} {"signature": "def common_arg_parser():", "body": "parser = arg_parser()parser.add_argument('', help='', type=str, default='')parser.add_argument('', help='', type=str)parser.add_argument('', help='', type=int, default=None)parser.add_argument('', help='', type=str, default='')parser.add_argument('', type=float, default=),parser.add_argument('', help='', default=None)parser.add_argument('', help='', default=None)parser.add_argument('', help='', default=None, type=int)parser.add_argument('', help='', default=, type=float)parser.add_argument('', help='', default=None, type=str)parser.add_argument('', help='', default=, type=int)parser.add_argument('', help='', default=, type=int)parser.add_argument('', default=False, action='')return parser", "docstring": "Create an argparse.ArgumentParser for run_mujoco.py.", "id": "f1342:m7"} {"signature": "def make_mujoco_env(env_id, seed, reward_scale=1.0):", "body": "rank = MPI.COMM_WORLD.Get_rank()myseed = seed + 1000 * rank if seed is not None else Noneset_global_seeds(myseed)env = gym.make(env_id)logger_path = None if logger.get_dir() is None else os.path.join(logger.get_dir(), str(rank))env = Monitor(env, logger_path, allow_early_resets=True)env.seed(seed)if reward_scale != 1.0:from baselines.common.retro_wrappers import RewardScalerenv = RewardScaler(env, reward_scale)return env", "docstring": "Create a wrapped, monitored gym.Env for MuJoCo.", "id": "f1342:m2"} {"signature": "def arg_parser():", "body": "import argparsereturn argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)", "docstring": "Create an empty argparse.ArgumentParser.", "id": "f1342:m4"} {"signature": "def obs_to_dict(obs):", "body": "if isinstance(obs, dict):return obsreturn {None: obs}", "docstring": "Convert an observation into a dict.", "id": "f1343:m3"} {"signature": "def copy_obs_dict(obs):", "body": "return {k: np.copy(v) for k, v in obs.items()}", "docstring": "Deep-copy an observation dict.", "id": "f1343:m0"} {"signature": "def __init__(self, env_fns, spaces=None, context=''):", "body": "self.waiting = Falseself.closed = Falsenenvs = len(env_fns)ctx = mp.get_context(context)self.remotes, self.work_remotes = zip(*[ctx.Pipe() for _ in range(nenvs)])self.ps = [ctx.Process(target=worker, args=(work_remote, remote, CloudpickleWrapper(env_fn)))for (work_remote, remote, env_fn) in zip(self.work_remotes, self.remotes, env_fns)]for p in self.ps:p.daemon = True with clear_mpi_env_vars():p.start()for remote in self.work_remotes:remote.close()self.remotes[0].send(('', None))observation_space, action_space, self.spec = self.remotes[0].recv()self.viewer = NoneVecEnv.__init__(self, len(env_fns), observation_space, action_space)", "docstring": "Arguments:\n\nenv_fns: iterable of callables - functions that create environments to run in subprocesses. 
Need to be cloud-pickleable", "id": "f1344:c0:m0"} {"signature": "def close_extras(self):", "body": "pass", "docstring": "Clean up the extra resources, beyond what's in this base class.\nOnly runs when not self.closed.", "id": "f1352:c2:m4"} {"signature": "def get_images(self):", "body": "raise NotImplementedError", "docstring": "Return RGB images from each environment", "id": "f1352:c2:m8"} {"signature": "def step(self, actions):", "body": "self.step_async(actions)return self.step_wait()", "docstring": "Step the environments synchronously.\n\nThis is available for backwards compatibility.", "id": "f1352:c2:m6"} {"signature": "def encode_observation(ob_space, placeholder):", "body": "if isinstance(ob_space, Discrete):return tf.to_float(tf.one_hot(placeholder, ob_space.n))elif isinstance(ob_space, Box):return tf.to_float(placeholder)elif isinstance(ob_space, MultiDiscrete):placeholder = tf.cast(placeholder, tf.int32)one_hots = [tf.to_float(tf.one_hot(placeholder[..., i], ob_space.nvec[i])) for i in range(placeholder.shape[-1])]return tf.concat(one_hots, axis=-1)else:raise NotImplementedError", "docstring": "Encode input in the way that is appropriate to the observation space\n\nParameters:\n----------\n\nob_space: gym.Space observation space\n\nplaceholder: tf.placeholder observation input placeholder", "id": "f1356:m2"} {"signature": "def observation_input(ob_space, batch_size=None, name=''):", "body": "placeholder = observation_placeholder(ob_space, batch_size, name)return placeholder, encode_observation(ob_space, placeholder)", "docstring": "Create placeholder to feed observations into of the size appropriate to the observation space, and add input\nencoder of the appropriate type.", "id": "f1356:m1"} {"signature": "def min(self, start=0, end=None):", "body": "return super(MinSegmentTree, self).reduce(start, end)", "docstring": "Returns min(arr[start], ..., arr[end])", "id": "f1357:c2:m1"} {"signature": "def adjust_shape(placeholder, data):", "body": "if not isinstance(data, np.ndarray) and not isinstance(data, list):return dataif isinstance(data, list):data = np.array(data)placeholder_shape = [x or -1 for x in placeholder.shape.as_list()]assert _check_shape(placeholder_shape, data.shape),''.format(data.shape, placeholder_shape)return np.reshape(data, placeholder_shape)", "docstring": "adjust shape of the data to the shape of the placeholder if possible.\nIf shape is incompatible, AssertionError is thrown\n\nParameters:\n placeholder tensorflow input placeholder\n\n data input data to be (potentially) reshaped to be fed into placeholder\n\nReturns:\n reshaped data", "id": "f1363:m24"} {"signature": "def launch_tensorboard_in_background(log_dir):", "body": "import subprocesssubprocess.Popen(['tensorboard', '--logdir', log_dir])", "docstring": "To log the Tensorflow graph when using rl-algs\nalgorithms, you can run the following code\nin your main script:\n import threading, time\n def start_tensorboard(session):\n time.sleep(10) # Wait until graph is setup\n tb_path = osp.join(logger.get_dir(), 'tb')\n summary_writer = tf.summary.FileWriter(tb_path, graph=session.graph)\n summary_op = tf.summary.merge_all()\n launch_tensorboard_in_background(tb_path)\n session = tf.get_default_session()\n t = threading.Thread(target=start_tensorboard, args=([session]))\n t.start()", "id": "f1363:m27"} {"signature": "def get_session(config=None):", "body": "sess = tf.get_default_session()if sess is None:sess = make_session(config=config, make_default=True)return sess", "docstring": "Get default session or create one with a given config", "id": "f1363:m3"}
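`adjust_shape` (`f1363:m24`) maps each `None` dimension of the placeholder to `-1` and then relies on `np.reshape`, which is what lets a single observation be fed where a batch is expected. A numpy-only illustration of that rule (the placeholder is simulated by its shape tuple, an assumption made so the snippet runs without TensorFlow):

```python
import numpy as np

def adjust_shape_like(placeholder_shape, data):
    # None (unknown) dims become -1, exactly as in the record above.
    target = [x if x is not None else -1 for x in placeholder_shape]
    return np.reshape(np.asarray(data), target)

batch = [[0, 1, 2, 3]] * 3
print(adjust_shape_like((None, 4), batch).shape)         # (3, 4)
print(adjust_shape_like((None, 4), [0, 1, 2, 3]).shape)  # (1, 4) -- a batch of one
```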
config", "id": "f1363:m3"} {"signature": "def _check_shape(placeholder_shape, data_shape):", "body": "return Truesqueezed_placeholder_shape = _squeeze_shape(placeholder_shape)squeezed_data_shape = _squeeze_shape(data_shape)for i, s_data in enumerate(squeezed_data_shape):s_placeholder = squeezed_placeholder_shape[i]if s_placeholder != - and s_data != s_placeholder:return Falsereturn True", "docstring": "check if two shapes are compatible (i.e. differ only by dimensions of size 1, or by the batch dimension)", "id": "f1363:m25"} {"signature": "def function(inputs, outputs, updates=None, givens=None):", "body": "if isinstance(outputs, list):return _Function(inputs, outputs, updates, givens=givens)elif isinstance(outputs, (dict, collections.OrderedDict)):f = _Function(inputs, outputs.values(), updates, givens=givens)return lambda *args, **kwargs: type(outputs)(zip(outputs.keys(), f(*args, **kwargs)))else:f = _Function(inputs, [outputs], updates, givens=givens)return lambda *args, **kwargs: f(*args, **kwargs)[]", "docstring": "Just like Theano function. Take a bunch of tensorflow placeholders and expressions\n computed based on those placeholders and produces f(inputs) -> outputs. Function f takes\n values to be fed to the input's placeholders and produces the values of the expressions\n in outputs.\n\n Input values can be passed in the same order as inputs or can be provided as kwargs based\n on placeholder name (passed to constructor or accessible via placeholder.op.name).\n\n Example:\n x = tf.placeholder(tf.int32, (), name=\"x\")\n y = tf.placeholder(tf.int32, (), name=\"y\")\n z = 3 * x + 2 * y\n lin = function([x, y], z, givens={y: 0})\n\n with single_threaded_session():\n initialize()\n\n assert lin(2) == 6\n assert lin(x=3) == 9\n assert lin(2, 2) == 10\n assert lin(x=2, y=3) == 12\n\n Parameters\n ----------\n inputs: [tf.placeholder, tf.constant, or object with make_feed_dict method]\n list of input arguments\n outputs: [tf.Variable] or tf.Variable\n list of outputs or a single output to be returned from function. Returned\n value will also have the same shape.\n updates: [tf.Operation] or tf.Operation\n list of update functions or single update function that will be run whenever\n the function is called. The return is ignored.", "id": "f1363:m10"} {"signature": "def symmetric_ema(xolds, yolds, low=None, high=None, n=, decay_steps=, low_counts_threshold=):", "body": "xs, ys1, count_ys1 = one_sided_ema(xolds, yolds, low, high, n, decay_steps, low_counts_threshold=)_, ys2, count_ys2 = one_sided_ema(-xolds[::-], yolds[::-], -high, -low, n, decay_steps, low_counts_threshold=)ys2 = ys2[::-]count_ys2 = count_ys2[::-]count_ys = count_ys1 + count_ys2ys = (ys1 * count_ys1 + ys2 * count_ys2) / count_ysys[count_ys < low_counts_threshold] = np.nanreturn xs, ys, count_ys", "docstring": "perform symmetric EMA (exponential moving average)\nsmoothing and resampling to an even grid with n points.\nDoes not do extrapolation, so we assume\nxolds[0] <= low && high <= xolds[-1]\n\nArguments:\n\nxolds: array or list - x values of data. Needs to be sorted in ascending order\nyolds: array of list - y values of data. Has to have the same length as xolds\n\nlow: float - min value of the new x grid. By default equals to xolds[0]\nhigh: float - max value of the new x grid. 
By default equals to xolds[-1]\n\nn: int - number of points in new x grid\n\ndecay_steps: float - EMA decay factor, expressed in new x grid steps.\n\nlow_counts_threshold: float or int\n - y values with counts less than this value will be set to NaN\n\nReturns:\n tuple sum_ys, count_ys where\n xs - array with new x grid\n ys - array of EMA of y at each point of the new x grid\n count_ys - array of EMA of y counts at each point of the new x grid", "id": "f1364:m2"} {"signature": "def one_sided_ema(xolds, yolds, low=None, high=None, n=, decay_steps=, low_counts_threshold=):", "body": "low = xolds[] if low is None else lowhigh = xolds[-] if high is None else highassert xolds[] <= low, ''.format(low, xolds[])assert xolds[-] >= high, ''.format(high, xolds[-])assert len(xolds) == len(yolds), ''.format(len(xolds), len(yolds))xolds = xolds.astype('')yolds = yolds.astype('')luoi = sum_y = count_y = xnews = np.linspace(low, high, n)decay_period = (high - low) / (n - ) * decay_stepsinterstep_decay = np.exp(- / decay_steps)sum_ys = np.zeros_like(xnews)count_ys = np.zeros_like(xnews)for i in range(n):xnew = xnews[i]sum_y *= interstep_decaycount_y *= interstep_decaywhile True:xold = xolds[luoi]if xold <= xnew:decay = np.exp(- (xnew - xold) / decay_period)sum_y += decay * yolds[luoi]count_y += decayluoi += else:breakif luoi >= len(xolds):breaksum_ys[i] = sum_ycount_ys[i] = count_yys = sum_ys / count_ysys[count_ys < low_counts_threshold] = np.nanreturn xnews, ys, count_ys", "docstring": "perform one-sided (causal) EMA (exponential moving average)\nsmoothing and resampling to an even grid with n points.\nDoes not do extrapolation, so we assume\nxolds[0] <= low && high <= xolds[-1]\n\nArguments:\n\nxolds: array or list - x values of data. Needs to be sorted in ascending order\nyolds: array of list - y values of data. Has to have the same length as xolds\n\nlow: float - min value of the new x grid. By default equals to xolds[0]\nhigh: float - max value of the new x grid. 
By default equals to xolds[-1]\n\nn: int - number of points in new x grid\n\ndecay_steps: float - EMA decay factor, expressed in new x grid steps.\n\nlow_counts_threshold: float or int\n - y values with counts less than this value will be set to NaN\n\nReturns:\n tuple sum_ys, count_ys where\n xs - array with new x grid\n ys - array of EMA of y at each point of the new x grid\n count_ys - array of EMA of y counts at each point of the new x grid", "id": "f1364:m1"} {"signature": "def plot_results(allresults, *,xy_fn=default_xy_fn,split_fn=default_split_fn,group_fn=default_split_fn,average_group=False,shaded_std=True,shaded_err=True,figsize=None,legend_outside=False,resample=,smooth_step=):", "body": "if split_fn is None: split_fn = lambda _ : ''if group_fn is None: group_fn = lambda _ : ''sk2r = defaultdict(list) for result in allresults:splitkey = split_fn(result)sk2r[splitkey].append(result)assert len(sk2r) > assert isinstance(resample, int), \"\"nrows = len(sk2r)ncols = figsize = figsize or (, * nrows)f, axarr = plt.subplots(nrows, ncols, sharex=False, squeeze=False, figsize=figsize)groups = list(set(group_fn(result) for result in allresults))default_samples = if average_group:resample = resample or default_samplesfor (isplit, sk) in enumerate(sorted(sk2r.keys())):g2l = {}g2c = defaultdict(int)sresults = sk2r[sk]gresults = defaultdict(list)ax = axarr[isplit][]for result in sresults:group = group_fn(result)g2c[group] += x, y = xy_fn(result)if x is None: x = np.arange(len(y))x, y = map(np.asarray, (x, y))if average_group:gresults[group].append((x,y))else:if resample:x, y, counts = symmetric_ema(x, y, x[], x[-], resample, decay_steps=smooth_step)l, = ax.plot(x, y, color=COLORS[groups.index(group) % len(COLORS)])g2l[group] = lif average_group:for group in sorted(groups):xys = gresults[group]if not any(xys):continuecolor = COLORS[groups.index(group) % len(COLORS)]origxs = [xy[] for xy in xys]minxlen = min(map(len, origxs))def allequal(qs):return all((q==qs[]).all() for q in qs[:])if resample:low = max(x[] for x in origxs)high = min(x[-] for x in origxs)usex = np.linspace(low, high, resample)ys = []for (x, y) in xys:ys.append(symmetric_ema(x, y, low, high, resample, decay_steps=smooth_step)[])else:assert allequal([x[:minxlen] for x in origxs]),''usex = origxs[]ys = [xy[][:minxlen] for xy in xys]ymean = np.mean(ys, axis=)ystd = np.std(ys, axis=)ystderr = ystd / np.sqrt(len(ys))l, = axarr[isplit][].plot(usex, ymean, color=color)g2l[group] = lif shaded_err:ax.fill_between(usex, ymean - ystderr, ymean + ystderr, color=color, alpha=)if shaded_std:ax.fill_between(usex, ymean - ystd, ymean + ystd, color=color, alpha=)plt.tight_layout()if any(g2l.keys()):ax.legend(g2l.values(),[''%(g, g2c[g]) for g in g2l] if average_group else g2l.keys(),loc= if legend_outside else None,bbox_to_anchor=(,) if legend_outside else None)ax.set_title(sk)return f, axarr", "docstring": "Plot multiple Results objects\n\nxy_fn: function Result -> x,y - function that converts results objects into tuple of x and y values.\n By default, x is cumsum of episode lengths, and y is episode rewards\n\nsplit_fn: function Result -> hashable - function that converts results objects into keys to split curves into sub-panels by.\n That is, the results r for which split_fn(r) is different will be put on different sub-panels.\n By default, the portion of r.dirname between last / and - is returned. 
The sub-panels are\n stacked vertically in the figure.\n\ngroup_fn: function Result -> hashable - function that converts results objects into keys to group curves by.\n That is, the results r for which group_fn(r) is the same will be put into the same group.\n Curves in the same group have the same color (if average_group is False), or are averaged over\n (if average_group is True). The default value is the same as default value for split_fn\n\naverage_group: bool - if True, will average the curves in the same group and plot the mean. Enables resampling\n (if resample = 0, will use 512 steps)\n\nshaded_std: bool - if True (default), the shaded region corresponding to standard deviation of the group of curves will be\n shown (only applicable if average_group = True)\n\nshaded_err: bool - if True (default), the shaded region corresponding to error in mean estimate of the group of curves\n (that is, standard deviation divided by square root of number of curves) will be\n shown (only applicable if average_group = True)\n\nfigsize: tuple or None - size of the resulting figure (including sub-panels). By default, width is 6 and height is 6 times number of\n sub-panels.\n\n\nlegend_outside: bool - if True, will place the legend outside of the sub-panels.\n\nresample: int - if not zero, size of the uniform grid in x direction to resample onto. Resampling is performed via symmetric\n EMA smoothing (see the docstring for symmetric_ema).\n Default is zero (no resampling). Note that if average_group is True, resampling is necessary; in that case, default\n value is 512.\n\nsmooth_step: float - when resampling (i.e. when resample > 0 or average_group is True), use this EMA decay parameter (in units of the new grid step).\n See docstrings for decay_steps in symmetric_ema or one_sided_ema functions.", "id": "f1364:m6"} {"signature": "def cg(f_Ax, b, cg_iters=, callback=None, verbose=False, residual_tol=):", "body": "p = b.copy()r = b.copy()x = np.zeros_like(b)rdotr = r.dot(r)fmtstr = \"\"titlestr = \"\"if verbose: print(titlestr % (\"\", \"\", \"\"))for i in range(cg_iters):if callback is not None:callback(x)if verbose: print(fmtstr % (i, rdotr, np.linalg.norm(x)))z = f_Ax(p)v = rdotr / p.dot(z)x += v*pr -= v*znewrdotr = r.dot(r)mu = newrdotr/rdotrp = r + mu*prdotr = newrdotrif rdotr < residual_tol:breakif callback is not None:callback(x)if verbose: print(fmtstr % (i+, rdotr, np.linalg.norm(x))) return x", "docstring": "Demmel p 312", "id": "f1365:m0"} {"signature": "def setup_mpi_gpus():", "body": "if '' not in os.environ:if sys.platform == '': ids = [] else:lrank, _lsize = get_local_rank_size(MPI.COMM_WORLD)ids = [lrank]os.environ[\"\"] = \"\".join(map(str, ids))", "docstring": "Set CUDA_VISIBLE_DEVICES to MPI rank if not already set", "id": "f1366:m2"} {"signature": "def dict_gather(comm, d, op='', assert_all_have_data=True):", "body": "if comm is None: return dalldicts = comm.allgather(d)size = comm.sizek2li = defaultdict(list)for d in alldicts:for (k,v) in d.items():k2li[k].append(v)result = {}for (k,li) in k2li.items():if assert_all_have_data:assert len(li)==size, \"\" % (len(li), size, k)if op=='':result[k] = np.mean(li, axis=)elif op=='':result[k] = np.sum(li, axis=)else:assert , opreturn result", "docstring": "Perform a reduction operation over dicts", "id": "f1366:m5"}
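The `cg` record above only needs matrix-vector products, not the matrix itself, hence the `f_Ax` closure. A small usage sketch in plain numpy, assuming the stripped defaults are an iteration cap and a residual tolerance (restored explicitly here):

    import numpy as np

    A = np.array([[4., 1.], [1., 3.]])  # symmetric positive-definite
    b = np.array([1., 2.])

    # pass a closure computing A @ p instead of the matrix itself
    x = cg(f_Ax=lambda p: A.dot(p), b=b, cg_iters=10, residual_tol=1e-10)
    assert np.allclose(A.dot(x), b, atol=1e-6)

{"signature": "def sync_from_root(sess, variables, comm=None):", "body": "if comm is None: comm = MPI.COMM_WORLDimport tensorflow as tfvalues = comm.bcast(sess.run(variables))sess.run([tf.assign(var, val)for (var, val) in 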
zip(variables, values)])", "docstring": "Send the root node's parameters to every worker.\nArguments:\n sess: the TensorFlow session.\n variables: all parameter variables including optimizer's", "id": "f1366:m0"} {"signature": "def get_local_rank_size(comm):", "body": "this_node = platform.node()ranks_nodes = comm.allgather((comm.Get_rank(), this_node))node2rankssofar = defaultdict(int)local_rank = Nonefor (rank, node) in ranks_nodes:if rank == comm.Get_rank():local_rank = node2rankssofar[node]node2rankssofar[node] += assert local_rank is not Nonereturn local_rank, node2rankssofar[this_node]", "docstring": "Returns the rank of each process on its machine\nThe processes on a given machine will be assigned ranks\n 0, 1, 2, ..., N-1,\nwhere N is the number of processes on this machine.\n\nUseful if you want to assign one gpu per machine", "id": "f1366:m3"} {"signature": "def share_file(comm, path):", "body": "localrank, _ = get_local_rank_size(comm)if comm.Get_rank() == :with open(path, '') as fh:data = fh.read()comm.bcast(data)else:data = comm.bcast(None)if localrank == :os.makedirs(os.path.dirname(path), exist_ok=True)with open(path, '') as fh:fh.write(data)comm.Barrier()", "docstring": "Copies the file from rank 0 to all other ranks\nPuts it in the same place on all machines", "id": "f1366:m4"} {"signature": "def value(self, ob, *args, **kwargs):", "body": "return self._evaluate(self.vf, ob, *args, **kwargs)", "docstring": "Compute value estimate(s) given the observation(s)\n\nParameters:\n----------\n\nobservation observation data (either single or a batch)\n\n**extra_feed additional data such as state or mask (names of the arguments should match the ones in constructor, see __init__)\n\nReturns:\n-------\nvalue estimate", "id": "f1367:c0:m3"} {"signature": "def value(self, t):", "body": "return self._v", "docstring": "See Schedule.value", "id": "f1370:c1:m1"} {"signature": "def value(self, t):", "body": "raise NotImplementedError()", "docstring": "Value of the schedule at time t", "id": "f1370:c0:m0"} {"signature": "def __init__(self, value):", "body": "self._v = value", "docstring": "Value remains constant over time.\n\n Parameters\n ----------\n value: float\n Constant value of the schedule", "id": "f1370:c1:m0"} {"signature": "def tile_images(img_nhwc):", "body": "img_nhwc = np.asarray(img_nhwc)N, h, w, c = img_nhwc.shapeH = int(np.ceil(np.sqrt(N)))W = int(np.ceil(float(N)/H))img_nhwc = np.array(list(img_nhwc) + [img_nhwc[]* for _ in range(N, H*W)])img_HWhwc = img_nhwc.reshape(H, W, h, w, c)img_HhWwc = img_HWhwc.transpose(, , , , )img_Hh_Ww_c = img_HhWwc.reshape(H*h, W*w, c)return img_Hh_Ww_c", "docstring": "Tile N images into one big PxQ image\n(P,Q) are chosen to be as close as possible, and if N\nis square, then P=Q.\n\ninput: img_nhwc, list or array of images, ndim=4 once turned into array\n n = batch index, h = height, w = width, c = channel\nreturns:\n bigim_HWc, ndarray with ndim=3", "id": "f1371:m0"} {"signature": "@register(\"\")def conv_only(convs=[(, , ), (, , ), (, , )], **conv_kwargs):", "body": "def network_fn(X):out = tf.cast(X, tf.float32) / with tf.variable_scope(\"\"):for num_outputs, kernel_size, stride in convs:out = layers.convolution2d(out,num_outputs=num_outputs,kernel_size=kernel_size,stride=stride,activation_fn=tf.nn.relu,**conv_kwargs)return outreturn network_fn", "docstring": "convolutions-only net\n\nParameters:\n----------\n\nconv: list of triples (filter_number, filter_size, stride) specifying parameters for each layer.\n\nReturns:\n\nfunction that takes 
tensorflow tensor as input and returns the output of the last convolutional layer", "id": "f1373:m8"} {"signature": "@register(\"\")def lstm(nlstm=, layer_norm=False):", "body": "def network_fn(X, nenv=):nbatch = X.shape[]nsteps = nbatch // nenvh = tf.layers.flatten(X)M = tf.placeholder(tf.float32, [nbatch]) S = tf.placeholder(tf.float32, [nenv, *nlstm]) xs = batch_to_seq(h, nenv, nsteps)ms = batch_to_seq(M, nenv, nsteps)if layer_norm:h5, snew = utils.lnlstm(xs, ms, S, scope='', nh=nlstm)else:h5, snew = utils.lstm(xs, ms, S, scope='', nh=nlstm)h = seq_to_batch(h5)initial_state = np.zeros(S.shape.as_list(), dtype=float)return h, {'':S, '':M, '':snew, '':initial_state}return network_fn", "docstring": "Builds LSTM (Long Short-Term Memory) network to be used in a policy.\nNote that the resulting function returns not only the output of the LSTM\n(i.e. hidden state of lstm for each step in the sequence), but also a dictionary\nwith auxiliary tensors to be set as policy attributes.\n\nSpecifically,\n S is a placeholder to feed current state (LSTM state has to be managed outside policy)\n M is a placeholder for the mask (used to mask out observations after the end of the episode, but can be used for other purposes too)\n initial_state is a numpy array containing initial lstm state (usually zeros)\n state is the output LSTM state (to be fed into S at the next call)\n\n\nAn example of usage of lstm-based policy can be found here: common/tests/test_doc_examples.py/test_lstm_example\n\nParameters:\n----------\n\nnlstm: int LSTM hidden state size\n\nlayer_norm: bool if True, layer-normalized version of LSTM is used\n\nReturns:\n-------\n\nfunction that builds LSTM with a given input tensor / placeholder", "id": "f1373:m5"} {"signature": "def nature_cnn(unscaled_images, **conv_kwargs):", "body": "scaled_images = tf.cast(unscaled_images, tf.float32) / activ = tf.nn.reluh = activ(conv(scaled_images, '', nf=, rf=, stride=, init_scale=np.sqrt(),**conv_kwargs))h2 = activ(conv(h, '', nf=, rf=, stride=, init_scale=np.sqrt(), **conv_kwargs))h3 = activ(conv(h2, '', nf=, rf=, stride=, init_scale=np.sqrt(), **conv_kwargs))h3 = conv_to_fc(h3)return activ(fc(h3, '', nh=, init_scale=np.sqrt()))", "docstring": "CNN from Nature paper.", "id": "f1373:m1"} {"signature": "def get_network_builder(name):", "body": "if callable(name):return nameelif name in mapping:return mapping[name]else:raise ValueError(''.format(name))", "docstring": "If you want to register your own network outside models.py, you just need:\n\nUsage Example:\n-------------\nfrom baselines.common.models import register\n@register(\"your_network_name\")\ndef your_network_define(**net_kwargs):\n ...\n return network_fn", "id": "f1373:m10"} {"signature": "def boolean_flag(parser, name, default=False, help=None):", "body": "dest = name.replace('', '')parser.add_argument(\"\" + name, action=\"\", default=default, dest=dest, help=help)parser.add_argument(\"\" + name, action=\"\", dest=dest)", "docstring": "Add a boolean flag to argparse parser.\n\n Parameters\n ----------\n parser: argparse.Parser\n parser to add the flag to\n name: str\n -- will enable the flag, while --no- will disable it\n default: bool or None\n default value of the flag\n help: str\n help string for the flag", "id": "f1375:m3"} {"signature": "def __float__(self):", "body": "return self._value", "docstring": "Get the current estimate", "id": "f1375:c1:m2"}
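A usage sketch for `boolean_flag` above. With the string literals stripped from the record, the helper is assumed to register a `--<name>` store_true flag plus a `--no-<name>` store_false companion on a shared dest, as its docstring describes; the flag name `render` is illustrative:

    import argparse

    parser = argparse.ArgumentParser()
    boolean_flag(parser, 'render', default=False, help='render the environment')

    assert parser.parse_args(['--render']).render is True      # flag enables
    assert parser.parse_args(['--no-render']).render is False  # companion disables
    assert parser.parse_args([]).render is False                # default applies

{"signature": "def pretty_eta(seconds_left):", "body": "minutes_left = seconds_left // seconds_left %= hours_left = minutes_left // 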
minutes_left %= days_left = hours_left // hours_left %= def helper(cnt, name):return \"\".format(str(cnt), name, ('' if cnt > else ''))if days_left > :msg = helper(days_left, '')if hours_left > :msg += '' + helper(hours_left, '')return msgif hours_left > :msg = helper(hours_left, '')if minutes_left > :msg += '' + helper(minutes_left, '')return msgif minutes_left > :return helper(minutes_left, '')return ''", "docstring": "Print the number of seconds in human readable format.\n\n Examples:\n 2 days\n 2 hours and 37 minutes\n less than a minute\n\n Parameters\n ---------\n seconds_left: int\n Number of seconds to be converted to the ETA\n Returns\n -------\n eta: str\n String representing the pretty ETA.", "id": "f1375:m2"} {"signature": "def __init__(self, gamma, init_value=None):", "body": "self._value = init_valueself._gamma = gamma", "docstring": "Keep a running estimate of a quantity. This is a bit like mean\n but more sensitive to recent changes.\n\n Parameters\n ----------\n gamma: float\n Must be between 0 and 1, where 0 is the most sensitive to recent\n changes.\n init_value: float or None\n Initial value of the estimate. If None, it will be set on the first update.", "id": "f1375:c1:m0"} {"signature": "def wrap_deepmind_retro(env, scale=True, frame_stack=):", "body": "env = WarpFrame(env)env = ClipRewardEnv(env)if frame_stack > :env = FrameStack(env, frame_stack)if scale:env = ScaledFloatFrame(env)return env", "docstring": "Configure environment for retro games, using config similar to DeepMind-style Atari in wrap_deepmind", "id": "f1376:m1"} {"signature": "def __init__(self, env, ratio):", "body": "gym.ObservationWrapper.__init__(self, env)(oldh, oldw, oldc) = env.observation_space.shapenewshape = (oldh//ratio, oldw//ratio, oldc)self.observation_space = gym.spaces.Box(low=, high=,shape=newshape, dtype=np.uint8)", "docstring": "Downsample images by a factor of ratio", "id": "f1376:c2:m0"} {"signature": "def __init__(self, env):", "body": "gym.ObservationWrapper.__init__(self, env)(oldh, oldw, _oldc) = env.observation_space.shapeself.observation_space = gym.spaces.Box(low=, high=,shape=(oldh, oldw, ), dtype=np.uint8)", "docstring": "Convert RGB images to grayscale (the observation space drops the color channels)", "id": "f1376:c3:m0"} {"signature": "def logkv_mean(key, val):", "body": "get_current().logkv_mean(key, val)", "docstring": "The same as logkv(), but if called many times, values will be averaged.", "id": "f1379:m2"} {"signature": "def logkv(key, val):", "body": "get_current().logkv(key, val)", "docstring": "Log a value of some diagnostic\nCall this once for each diagnostic quantity, each iteration\nIf called many times, last value will be used.", "id": "f1379:m1"} {"signature": "def dumpkvs():", "body": "return get_current().dumpkvs()", "docstring": "Write all of the diagnostics from the current iteration", "id": "f1379:m4"} {"signature": "def get_dir():", "body": "return get_current().get_dir()", "docstring": "Get directory that log files are being written to.\nwill be None if there is no output directory (i.e., if you didn't call start)", "id": "f1379:m13"} {"signature": "def log(*args, level=INFO):", "body": "get_current().log(*args, level=level)", "docstring": "Write the sequence of args, with no separators, to the console and output files (if you've configured an output file).", "id": "f1379:m6"}
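A hedged sketch of the logging flow the `logkv`/`dumpkvs` records above describe: scalars are accumulated per iteration (with `logkv_mean` averaging repeated values) and flushed once per iteration. The loop below is illustrative, not code from the source:

    # hypothetical training loop against the logger API above
    for epoch in range(3):
        logkv('epoch', epoch)
        for batch_loss in (0.9, 0.7, 0.5):
            logkv_mean('loss', batch_loss)  # repeated calls are averaged
        dumpkvs()                           # write this iteration's diagnostics

{"signature": "def profile(n):", "body": "def decorator_with_name(func):def func_wrapper(*args, **kwargs):with profile_kv(n):return func(*args, **kwargs)return func_wrapperreturn decorator_with_name", "docstring": 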
"Usage:\n@profile(\"my_func\")\ndef my_func(): code", "id": "f1379:m15"} {"signature": "@staticmethoddef create_datapoint(name, columns, points):", "body": "return {\"\": \"\",\"\": name,\"\": columns,\"\": points,}", "docstring": "Create datastructure in InfluxDB 0.8 data format\n:param name:\n:param columns:\n:param points:\n:return:", "id": "f1381:c0:m1"} {"signature": "def batches(iterable, n=):", "body": "l = len(iterable)for ndx in range(, l, n):yield iterable[ndx:min(ndx + n, l)]", "docstring": "From http://stackoverflow.com/a/8290508/270334\n:param n:\n:param iterable:", "id": "f1382:m0"} {"signature": "def __str__(self):", "body": "statements = [(Keyword.SELECT, self.select_stmt),(Keyword.FROM, self.from_stmt)]if self.where_stmt:statements.append((Keyword.WHERE, self.where_stmt))if self.limit_stmt:statements.append((Keyword.LIMIT, self.limit_stmt))if self.group_by_stmt:statements.append((Keyword.GROUP_BY, ['', self.group_by_stmt]))return self._format_statements(statements)", "docstring": "Standard string representation of select query", "id": "f1390:c0:m6"} {"signature": "def handle_error(self, request, client_address):", "body": "cls, e = sys.exc_info()[:]if cls is socket.error or cls is ssl.SSLError:passelse:return HTTPServer.handle_error(self, request, client_address)", "docstring": "Overwrite error handling to suppress socket/ssl related errors\n:param client_address: Address of client\n:param request: Request causing an error", "id": "f1396:c0:m1"} {"signature": "@staticmethoddef get_queries(parameters):", "body": "parsed_params = urlparse.parse_qs(parameters)if '' not in parsed_params:return []queries = parsed_params['']if not isinstance(queries, list):queries = [queries]return queries", "docstring": "Get a list of all queries (q=... 
parameters) from an URL parameter string\n:param parameters: The url parameter list", "id": "f1397:c0:m6"} {"signature": "def send_error(self, code, message=None):", "body": "message = message.strip()self.log_error(\"\", code, message)self.send_response(code)self.send_header(\"\", \"\")self.send_header('', '')self.end_headers()if message:self.wfile.write(message)", "docstring": "Send and log plain text error reply.\n:param code:\n:param message:", "id": "f1397:c0:m11"} {"signature": "def _return_response(self, response):", "body": "self.filter_headers(response.msg)if \"\" in response.msg:del response.msg[\"\"]self.send_response(response.status, response.reason)for header_key, header_value in response.msg.items():self.send_header(header_key, header_value)body = response.read()self.send_header('', str(len(body)))self.end_headers()self.wfile.write(body)", "docstring": ":type result: HTTPResponse", "id": "f1397:c0:m12"} {"signature": "def naughty_strings(filepath=FILEPATH):", "body": "strings = []with open(filepath, '') as f:strings = f.readlines()strings = [x.strip(u'') for x in strings]strings = [x for x in strings if x and not x.startswith(u'')]strings.insert(, u\"\")return strings", "docstring": "Get the list of naughty_strings.\n\n By default this will get the strings from the blns.txt file\n\n Code is a simple port of what is already in the /scripts directory\n\n :param filepath: Optional filepath the the blns.txt file\n :returns: The list of naughty strings", "id": "f1421:m0"} {"signature": "def flatten(d, parent_key='', sep=''):", "body": "items = []for k, v in d.items():new_key = parent_key + sep + k if parent_key else kif isinstance(v, collections.MutableMapping):items.extend(flatten(v, new_key, sep=sep).items())else:items.append((new_key, v))return dict(items)", "docstring": "Flatten keys in a dictionary\nExample:\nflatten({'a': 1, 'c': {'a': 2, 'b': {'x': 5, 'y' : 10}}, 'd': [1, 2, 3]})\n=> {'a': 1, 'c_a': 2, 'c_b_x': 5, 'd': [1, 2, 3], 'c_b_y': 10}\n:param d: Dictionary to flatten\n:param sep: Separator between keys\n:param parent_key: Key to merge with", "id": "f1424:m3"} {"signature": "def load_config():", "body": "config = flatten(default_config.DEFAULT_CONFIG)cli_config = flatten(parse_args())if \"\" in cli_config:logging.info(\"\".format(cli_config['']))configfile = parse_configfile(cli_config[''])config = overwrite_config(config, configfile)config = overwrite_config(config, cli_config)if '' in config:if config[''] == :logging.getLogger().setLevel(logging.INFO)elif config[''] > :logging.getLogger().setLevel(logging.DEBUG)return ObjectView(config)", "docstring": "Load settings from default config and optionally\noverwrite with config file and commandline parameters\n(in that order).", "id": "f1424:m0"} {"signature": "def parse_configfile(configfile):", "body": "with open(configfile) as f:try:return yaml.safe_load(f)except Exception as e:logging.fatal(\"\", e)exit(-)", "docstring": "Read settings from file\n:param configfile:", "id": "f1424:m2"} {"signature": "def check_write_permissions(file):", "body": "try:open(file, '')except IOError:print(\"\"\"\".format(file))sys.exit()", "docstring": "Check if we can write to the given file\n\nOtherwise since we might detach the process to run in the background\nwe might never find out that writing failed and get an ugly\nexit message on startup. 
For example:\nERROR: Child exited immediately with non-zero exit code 127\n\nSo we catch this error upfront and print a nicer error message\nwith a hint on how to fix it.", "id": "f1428:m1"} {"signature": "def show_rules():", "body": "from rules.loader import import_rulesfrom rules.rule_list import all_rulesrules = import_rules(all_rules)print(\"\")for name, rule in rules.iteritems():heading = \"\".format(rule.description(), name)print(\"\".format(heading))for line in rule.reason():print(line)print(\"\")sys.exit()", "docstring": "Show the list of available rules and quit\n:return:", "id": "f1428:m2"} {"signature": "def show_version():", "body": "from version import __version__print(\"\".format(__package__, __version__))sys.exit()", "docstring": "Show program version and quit\n:return:", "id": "f1428:m3"} {"signature": "def __init__(self, rules, whitelist=[], safe_mode=True):", "body": "self.parser = QueryParser()self.guard = Guard(rules)self.sanitizer = Sanitizer()self.whitelist = whitelistself.safe_mode = safe_mode", "docstring": ":param rules: A list of rules to evaluate\n:param safe_mode: If set to True, allow the query in case it can not be parsed\n:return:", "id": "f1429:c0:m0"} {"signature": "def check(self, query):", "body": "if query.get_type() in {Keyword.LIST, Keyword.DROP}:series = query.series_stmtelse:series = query.from_stmtif len(series) >= self.min_series_name_length:return Ok(True)return Err(\"\")", "docstring": ":param query:", "id": "f1432:c0:m3"} {"signature": "def check(self, query):", "body": "if query.get_type() not in {Keyword.SELECT}:return Ok(True)earliest_date = query.get_earliest_date()if earliest_date >= self.min_start_date:return Ok(True)if query.limit_stmt:return Ok(True)return Err((\"\"\"\").format(self.min_start_date.strftime(\"\"),earliest_date))", "docstring": ":param query:", "id": "f1433:c0:m3"} {"signature": "def check(self, query):", "body": "if query.get_type() not in {Keyword.SELECT, Keyword.DELETE}:return Ok(True)datapoints = query.get_datapoints()if datapoints <= self.max_datapoints:return Ok(True)return Err((\"\"\"\"\"\"\"\").format(datapoints))", "docstring": ":param query:", "id": "f1434:c0:m3"} {"signature": "@staticmethoddef reason():", "body": "pass", "docstring": "When and why the rule is useful.\n\nThis should return a list of lines instead of a long string.\nIt's easier to format line breaks this way.\n\n:return: The reason for the rule", "id": "f1436:c0:m1"} {"signature": "def check(self, query):", "body": "pass", "docstring": "Check if a given query is permitted\n:param query:\n:return: result.Ok() if permitted, result.Err() if not.", "id": "f1436:c0:m2"} {"signature": "def import_rule(path):", "body": "rule = importlib.import_module(path)return rule", "docstring": "Load the given rule\n:param path: Import path to rule", "id": "f1439:m1"} {"signature": "def check(self, query):", "body": "if query.get_type() not in {Keyword.SELECT}:return Ok(True)if query.get_resolution() > :return Ok(True)return Err(\"\")", "docstring": ":param query:", "id": "f1441:c0:m2"} {"signature": "def create_list_query(self, tokens):", "body": "if not tokens[Keyword.SERIES]:tokens[Keyword.SERIES] = ''return ListQuery(self.parse_keyword(Keyword.SERIES, tokens))", "docstring": "Parse tokens of list query\n:param tokens: A list of InfluxDB query tokens", "id": "f1443:c0:m10"}
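The `create_query_object` record that follows dispatches on the parsed query type via `getattr`, falling back to an invalid-query handler. A minimal sketch of that pattern; the token key `'type'` and the `create_%s_query` format are assumptions, since the literals are stripped:

    class ParserSketch:
        def create_list_query(self, tokens):
            return ('list', tokens)

        def invalid_query(self, tokens):
            return None

        def create_query_object(self, tokens):
            try:
                query_type = tokens['type']  # assumed token key
                return getattr(self, 'create_%s_query' % query_type)(tokens)
            except (KeyError, TypeError):
                return self.invalid_query(tokens)

    assert ParserSketch().create_query_object({'type': 'list'})[0] == 'list'
    assert ParserSketch().create_query_object({}) is None

{"signature": "def create_query_object(self, tokens):", "body": "try:query_type = tokens['']return getattr(self, '' % query_type)(tokens)except (KeyError, TypeError):return 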
self.invalid_query(tokens)", "docstring": "Analyze query tokens and create an InfluxDBStatement from them\nReturn None on error\n:param tokens: A list of InfluxDB query tokens", "id": "f1443:c0:m8"} {"signature": "def _parse_datapoints(self, parsed_duration, parsed_resolution, limit):", "body": "return self.datapoints_parser.parse(parsed_duration, parsed_resolution, limit)", "docstring": "Parse the number of datapoints of a query.\nThis can be calculated from the given duration and resolution of the query.\nE.g. if the query has a duration of 2*60*60 = 7200 seconds and a resolution of 10 seconds\nthen the number of datapoints would be 7200/10 => 720 datapoints.\n\n:param parsed_duration:\n:param parsed_resolution:\n:param limit:\n:return:", "id": "f1443:c0:m17"} {"signature": "def create_drop_query(self, tokens):", "body": "if not tokens[Keyword.SERIES]:return Nonereturn DropQuery(self.parse_keyword(Keyword.SERIES, tokens))", "docstring": "Parse tokens of drop query\n:param tokens: A list of InfluxDB query tokens", "id": "f1443:c0:m11"} {"signature": "def _parse_resolution(self, tokens):", "body": "return self.resolution_parser.parse(self.parse_keyword(Keyword.GROUP_BY, tokens))", "docstring": "Parse resolution from the GROUP BY statement.\nE.g. GROUP BY time(10s) would mean a 10 second resolution\n:param tokens:\n:return:", "id": "f1443:c0:m15"} {"signature": "def parse(self, group_by_stmt):", "body": "if not group_by_stmt:return Resolution.MAX_RESOLUTIONm = self.GROUP_BY_TIME_PATTERN.match(group_by_stmt)if not m:return Nonevalue = int(m.group())unit = m.group()resolution = self.convert_to_seconds(value, unit)return max(resolution, Resolution.MAX_RESOLUTION)", "docstring": "Extract the data resolution of a query in seconds\nE.g. \"group by time(99s)\" => 99\n\n:param group_by_stmt: A raw InfluxDB group by statement", "id": "f1445:c0:m1"} {"signature": "@classmethoddef get_object(cls, api_token, cert_id):", "body": "certificate = cls(token=api_token, id=cert_id)certificate.load()return certificate", "docstring": "Class method that will return a Certificate object by its ID.", "id": "f1448:c0:m1"} {"signature": "def destroy(self):", "body": "return self.get_data(\"\" % self.id, type=DELETE)", "docstring": "Delete the Certificate", "id": "f1448:c0:m4"} {"signature": "def load(self):", "body": "data = self.get_data(\"\" % self.id)certificate = data[\"\"]for attr in certificate.keys():setattr(self, attr, certificate[attr])return self", "docstring": "Load the Certificate object from DigitalOcean.\n\nRequires self.id to be set.", "id": "f1448:c0:m2"}
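A runnable sketch of the resolution `parse` logic above. The actual GROUP_BY_TIME_PATTERN regex is stripped from the record, so the pattern and unit table here are assumptions that reproduce the documented behavior ("time(99s)" => 99 seconds):

    import re

    GROUP_BY_TIME_PATTERN = re.compile(r'time\((\d+)([smhdw])\)')  # assumed pattern
    UNIT_SECONDS = {'s': 1, 'm': 60, 'h': 3600, 'd': 86400, 'w': 604800}

    def parse_resolution(group_by_stmt):
        m = GROUP_BY_TIME_PATTERN.match(group_by_stmt)
        if not m:
            return None
        return int(m.group(1)) * UNIT_SECONDS[m.group(2)]

    assert parse_resolution('time(99s)') == 99
    assert parse_resolution('time(10m)') == 600

{"signature": "def assert_url_query_equal(self, url1, url2):", "body": "base1, qlist1 = self.split_url(url1)base2, qlist2 = self.split_url(url2)self.assertEqual(base1, base2)self.assertEqual(qlist1, qlist2)", "docstring": "Test if two URL queries are equal\n\n The key=value pairs after the ? 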
in a URL can occur in any order\n (especially since dicts in python 3 are not deterministic across runs).\n The method sorts the key=value pairs and then compares the URLs.", "id": "f1460:c0:m3"} {"signature": "def create(self):", "body": "data = {\"\": self.name,\"\": self.ip_address,}domain = self.get_data(\"\", type=POST, params=data)return domain", "docstring": "Create new domain", "id": "f1464:c0:m5"} {"signature": "@classmethoddef get_object(cls, api_token, domain_name):", "body": "domain = cls(token=api_token, name=domain_name)domain.load()return domain", "docstring": "Class method that will return a Domain object by ID.", "id": "f1464:c0:m1"} {"signature": "def assign(self, droplet_id):", "body": "return self.get_data(\"\" % self.ip,type=POST,params={\"\": \"\", \"\": droplet_id})", "docstring": "Assign a FloatingIP to a Droplet.\n\nArgs:\n droplet_id: int - droplet id", "id": "f1465:c0:m6"} {"signature": "def unassign(self):", "body": "return self.get_data(\"\" % self.ip,type=POST,params={\"\": \"\"})", "docstring": "Unassign a FloatingIP.", "id": "f1465:c0:m7"} {"signature": "def create(self, *args, **kwargs):", "body": "data = self.get_data('',type=POST,params={'': self.droplet_id})if data:self.ip = data['']['']self.region = data['']['']return self", "docstring": "Creates a FloatingIP and assigns it to a Droplet.\n\nNote: Every argument and parameter given to this method will be\nassigned to the object.\n\nArgs:\n droplet_id: int - droplet id", "id": "f1465:c0:m3"} {"signature": "def create(self, *args, **kwargs):", "body": "for attr in kwargs.keys():setattr(self, attr, kwargs[attr])if not self.size_slug and self.size:self.size_slug = self.sizessh_keys_id = Droplet.__get_ssh_keys_id_or_fingerprint(self.ssh_keys,self.token,self.name)data = {\"\": self.name,\"\": self.size_slug,\"\": self.image,\"\": self.region,\"\": ssh_keys_id,\"\": bool(self.backups),\"\": bool(self.ipv6),\"\": bool(self.private_networking),\"\": self.volumes,\"\": self.tags,\"\": bool(self.monitoring),}if self.user_data:data[\"\"] = self.user_datadata = self.get_data(\"\", type=POST, params=data)if data:self.id = data['']['']action_id = data[''][''][]['']self.action_ids = []self.action_ids.append(action_id)", "docstring": "Create the droplet with object properties.\n\nNote: Every argument and parameter given to this method will be\nassigned to the object.", "id": "f1466:c3:m25"} {"signature": "def change_kernel(self, kernel, return_dict=True):", "body": "if type(kernel) != Kernel:raise BadKernelObject(\"\")return self._perform_action({'': '', '': kernel.id},return_dict)", "docstring": "Change the kernel to a new one\n\n Args:\n kernel : instance of digitalocean.Kernel.Kernel\n\n Optional Args:\n return_dict (bool): Return a dict when True (default),\n otherwise return an Action.\n\n Returns dict or Action", "id": "f1466:c3:m23"} {"signature": "def get_snapshots(self):", "body": "snapshots = list()for id in self.snapshot_ids:snapshot = Image()snapshot.id = idsnapshot.token = self.tokensnapshots.append(snapshot)return snapshots", "docstring": "This method will return the snapshots/images connected to that\nspecific droplet.", "id": "f1466:c3:m29"}
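A hedged usage sketch tying the droplet records above together: `create` stores the pending action ids, and each resulting action can then be polled (the `wait` helper appears in a later record). The `droplet` instance and the overall flow are illustrative, not code from the source:

    # hypothetical flow: create a droplet, then block until its actions settle
    droplet.create()
    for action in droplet.get_actions():
        finished_ok = action.wait(update_every_seconds=5)
        assert finished_ok, "droplet action ended with an error"

{"signature": "def get_actions(self):", "body": "answer = self.get_data(\"\" % self.id, type=GET)actions = []for action_dict in answer['']:action = Action(**action_dict)action.token = self.tokenaction.droplet_id = self.idaction.load()actions.append(action)return actions", "docstring": "Returns a list of Action objects\nThese actions can be used to check the droplet's 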
status", "id": "f1466:c3:m27"} {"signature": "def enable_backups(self, return_dict=True):", "body": "return self._perform_action({'': ''}, return_dict)", "docstring": "Enable automatic backups\n\nOptional Args:\n return_dict (bool): Return a dict when True (default),\n otherwise return an Action.\n\nReturns dict or Action", "id": "f1466:c3:m17"} {"signature": "def _perform_action(self, params, return_dict=True):", "body": "action = self.get_data(\"\" % self.id,type=POST,params=params)if return_dict:return actionelse:action = action[u'']return_action = Action(token=self.token)for attr in action.keys():setattr(return_action, attr, action[attr])return return_action", "docstring": "Perform a droplet action.\n\nArgs:\n params (dict): parameters of the action\n\nOptional Args:\n return_dict (bool): Return a dict when True (default),\n otherwise return an Action.\n\nReturns dict or Action", "id": "f1466:c3:m6"} {"signature": "def resize(self, new_size_slug, return_dict=True, disk=True):", "body": "options = {\"\": \"\", \"\": new_size_slug}if disk: options[\"\"] = \"\"return self._perform_action(options, return_dict)", "docstring": "Resize the droplet to a new size slug.\n https://developers.digitalocean.com/documentation/v2/#resize-a-droplet\n\n Args:\n new_size_slug (str): name of new size\n\n Optional Args:\n return_dict (bool): Return a dict when True (default),\n otherwise return an Action.\n disk (bool): If a permanent resize, with disk changes included.\n\n Returns dict or Action", "id": "f1466:c3:m13"} {"signature": "def disable_backups(self, return_dict=True):", "body": "return self._perform_action({'': ''}, return_dict)", "docstring": "Disable automatic backups\n\nOptional Args:\n return_dict (bool): Return a dict when True (default),\n otherwise return an Action.\n\nReturns dict or Action", "id": "f1466:c3:m18"} {"signature": "def reset_root_password(self, return_dict=True):", "body": "return self._perform_action({'': ''}, return_dict)", "docstring": "reset the root password\n\nOptional Args:\n return_dict (bool): Return a dict when True (default),\n otherwise return an Action.\n\nReturns dict or Action", "id": "f1466:c3:m12"} {"signature": "def destroy(self):", "body": "return self.get_data(\"\" % self.id, type=DELETE)", "docstring": "Destroy the Firewall", "id": "f1468:c5:m9"} {"signature": "def add_droplets(self, droplet_ids):", "body": "return self.get_data(\"\" % self.id,type=POST,params={\"\": droplet_ids})", "docstring": "Add droplets to this Firewall.", "id": "f1468:c5:m5"} {"signature": "def add_tags(self, tags):", "body": "return self.get_data(\"\" % self.id,type=POST,params={\"\": tags})", "docstring": "Add tags to this Firewall.", "id": "f1468:c5:m7"} {"signature": "def load(self):", "body": "identifier = Noneif self.id:identifier = self.idelif self.fingerprint is not None:identifier = self.fingerprintdata = self.get_data(\"\" % identifier, type=GET)ssh_key = data['']for attr in ssh_key.keys():setattr(self, attr, ssh_key[attr])self.id = ssh_key['']", "docstring": "Load the SSHKey object from DigitalOcean.\n\nRequires either self.id or self.fingerprint to be set.", "id": "f1469:c0:m2"} {"signature": "@classmethoddef get_object(cls, api_token, ssh_key_id):", "body": "ssh_key = cls(token=api_token, id=ssh_key_id)ssh_key.load()return ssh_key", "docstring": "Class method that will return a SSHKey object by ID.", "id": "f1469:c0:m1"} {"signature": "def destroy(self):", "body": "return self.get_data(\"\" % self.id, type=DELETE)", "docstring": "Destroy the SSH Key", "id": 
"f1469:c0:m6"} {"signature": "def wait(self, update_every_seconds=):", "body": "while self.status == u'':sleep(update_every_seconds)self.load()return self.status == u''", "docstring": "Wait until the action is marked as completed or with an error.\nIt will return True in case of success, otherwise False.\n\nOptional Args:\n update_every_seconds - int : number of seconds to wait before\n checking if the action is completed.", "id": "f1470:c0:m4"} {"signature": "def get_images(self, private=False, type=None):", "body": "params = {}if private:params[''] = ''if type:params[''] = typedata = self.get_data(\"\", params=params)images = list()for jsoned in data['']:image = Image(**jsoned)image.token = self.tokenimages.append(image)return images", "docstring": "This function returns a list of Image object.", "id": "f1473:c0:m6"} {"signature": "def get_droplet_snapshots(self):", "body": "data = self.get_data(\"\")return [Snapshot(token=self.token, **snapshot)for snapshot in data['']]", "docstring": "This method returns a list of all Snapshots based on Droplets.", "id": "f1473:c0:m27"} {"signature": "def get_load_balancer(self, id):", "body": "return LoadBalancer.get_object(api_token=self.token, id=id)", "docstring": "Returns a Load Balancer object by its ID.\n\nArgs:\n id (str): Load Balancer ID", "id": "f1473:c0:m22"} {"signature": "def get_droplet(self, droplet_id):", "body": "return Droplet.get_object(api_token=self.token, droplet_id=droplet_id)", "docstring": "Return a Droplet by its ID.", "id": "f1473:c0:m4"} {"signature": "def get_all_certificates(self):", "body": "data = self.get_data(\"\")certificates = list()for jsoned in data['']:cert = Certificate(**jsoned)cert.token = self.tokencertificates.append(cert)return certificates", "docstring": "This function returns a list of Certificate objects.", "id": "f1473:c0:m24"} {"signature": "def get_volume_snapshots(self):", "body": "data = self.get_data(\"\")return [Snapshot(token=self.token, **snapshot)for snapshot in data['']]", "docstring": "This method returns a list of all Snapshots based on volumes.", "id": "f1473:c0:m28"} {"signature": "def get_all_load_balancers(self):", "body": "data = self.get_data(\"\")load_balancers = list()for jsoned in data['']:load_balancer = LoadBalancer(**jsoned)load_balancer.token = self.tokenload_balancer.health_check = HealthCheck(**jsoned[''])load_balancer.sticky_sessions = StickySesions(**jsoned[''])forwarding_rules = list()for rule in jsoned['']:forwarding_rules.append(ForwardingRule(**rule))load_balancer.forwarding_rules = forwarding_rulesload_balancers.append(load_balancer)return load_balancers", "docstring": "Returns a list of Load Balancer objects.", "id": "f1473:c0:m21"} {"signature": "def get_ssh_key(self, ssh_key_id):", "body": "return SSHKey.get_object(api_token=self.token, ssh_key_id=ssh_key_id)", "docstring": "Return a SSHKey object by its ID.", "id": "f1473:c0:m16"} {"signature": "def get_certificate(self, id):", "body": "return Certificate.get_object(api_token=self.token, cert_id=id)", "docstring": "Returns a Certificate object by its ID.\n\nArgs:\n id (str): Certificate ID", "id": "f1473:c0:m23"} {"signature": "def get_distro_images(self):", "body": "images = self.get_images(type='')return images", "docstring": "This function returns a list of Image objects representing\npublic base distribution images.", "id": "f1473:c0:m11"} {"signature": "def get_app_images(self):", "body": "images = self.get_images(type='')return images", "docstring": "This function returns a list of Image objectobjects 
representing\npublic DigitalOcean 'One-Click' application images.", "id": "f1473:c0:m12"} {"signature": "def get_all_snapshots(self):", "body": "data = self.get_data(\"\")return [Snapshot(token=self.token, **snapshot)for snapshot in data['']]", "docstring": "This method returns a list of all Snapshots.", "id": "f1473:c0:m26"} {"signature": "def get_account(self):", "body": "return Account.get_object(api_token=self.token)", "docstring": "Returns an Account object.", "id": "f1473:c0:m1"} {"signature": "def get_global_images(self):", "body": "data = self.get_images()images = list()for i in data:if i.public:i.token = self.tokenimages.append(i)return images", "docstring": "This function returns a list of Image objects representing\npublic DigitalOcean images (e.g. base distribution images\nand 'One-Click' applications).", "id": "f1473:c0:m10"} {"signature": "def get_floating_ip(self, ip):", "body": "return FloatingIP.get_object(api_token=self.token, ip=ip)", "docstring": "Returns a of FloatingIP object by its IP address.", "id": "f1473:c0:m20"} {"signature": "def get_all_floating_ips(self):", "body": "data = self.get_data(\"\")floating_ips = list()for jsoned in data['']:floating_ip = FloatingIP(**jsoned)floating_ip.token = self.tokenfloating_ips.append(floating_ip)return floating_ips", "docstring": "This function returns a list of FloatingIP objects.", "id": "f1473:c0:m19"} {"signature": "def destroy(self):", "body": "return self.get_data(\"\" % self.id, type=DELETE)", "docstring": "Destroy the image", "id": "f1474:c0:m3"} {"signature": "def get_data(self, url, type=GET, params=None):", "body": "if params is None:params = dict()if type is GET:params.setdefault(\"\", )req = self.__perform_request(url, type, params)if req.status_code == :return Trueif req.status_code == :raise NotFoundError()try:data = req.json()except ValueError as e:raise JSONReadError('' % str(e))if not req.ok:msg = [data[m] for m in (\"\", \"\") if m in data][]raise DataReadError(msg)self.__init_ratelimit(req.headers)pages = data.get(\"\", {}).get(\"\", {})if pages.get(\"\") and \"\" not in params:return self.__deal_with_pagination(url, type, params, data)else:return data", "docstring": "This method is a basic implementation of __call_api that checks\nerrors too. 
In case of success the method will return True or the\ncontent of the response to the request.\n\nPagination is automatically detected and handled accordingly", "id": "f1475:c5:m7"} {"signature": "def get_timeout(self):", "body": "timeout_str = os.environ.get(REQUEST_TIMEOUT_ENV_VAR)if timeout_str:try:return float(timeout_str)except:self._log.error('''' %timeout_str)return None", "docstring": "Checks if any timeout for the requests to DigitalOcean is required.\nTo set a timeout, use the REQUEST_TIMEOUT_ENV_VAR environment\nvariable.", "id": "f1475:c5:m6"} {"signature": "def create(self):", "body": "params = {'': self.name,'': self.region,'': self.url,'': self.distribution,'': self.description,'': self.tags}data = self.get_data('', type=POST, params=params)if data:for attr in data[''].keys():setattr(self, attr, data[''][attr])return self", "docstring": "Creates a new custom DigitalOcean Image from the Linux virtual machine\nimage located at the provided `url`.", "id": "f1476:c0:m3"} {"signature": "@staticmethoddef _is_string(value):", "body": "if type(value) in [type(u''), type('')]:return Trueelif type(value) in [int, type( ** )]:return Falseelse:return None", "docstring": "Checks if the value provided is a string (True) or not integer\n(False) or something else (None).", "id": "f1476:c0:m2"} {"signature": "def transfer(self, new_region_slug):", "body": "return self.get_data(\"\" % self.id,type=POST,params={\"\": \"\", \"\": new_region_slug})", "docstring": "Transfer the image", "id": "f1476:c0:m6"} {"signature": "def rename(self, new_name):", "body": "return self.get_data(\"\" % self.id,type=PUT,params={\"\": new_name})", "docstring": "Rename an image", "id": "f1476:c0:m7"} {"signature": "@classmethoddef get_object(cls, api_token, image_id_or_slug):", "body": "if cls._is_string(image_id_or_slug):image = cls(token=api_token, slug=image_id_or_slug)image.load(use_slug=True)else:image = cls(token=api_token, id=image_id_or_slug)image.load()return image", "docstring": "Class method that will return an Image object by ID or slug.\n\nThis method is used to validate the type of the image. 
If it is a\nnumber, it will be considered as an Image ID; if it is a string, it will be considered as a slug.", "id": "f1476:c0:m1"} {"signature": "def detach(self, droplet_id, region):", "body": "return self.get_data(\"\" % self.id,type=POST,params={\"\": \"\",\"\": droplet_id,\"\": region})", "docstring": "Detach a Volume from a Droplet.\n\nArgs:\n droplet_id: int - droplet id\n region: string - slug identifier for the region", "id": "f1478:c0:m7"} {"signature": "def create(self, *args, **kwargs):", "body": "data = self.get_data('',type=POST,params={'': self.name,'': self.region,'': self.size_gigabytes,'': self.description,'': self.filesystem_type,'': self.filesystem_label})if data:self.id = data['']['']self.created_at = data['']['']return self", "docstring": "Creates a Block Storage volume\n\nNote: Every argument and parameter given to this method will be\nassigned to the object.\n\nArgs:\n name: string - a name for the volume\n region: string - slug identifier for the region\n size_gigabytes: int - size of the Block Storage volume in GiB\n filesystem_type: string, optional - name of the filesystem type the\n volume will be formatted with ('ext4' or 'xfs')\n filesystem_label: string, optional - the label to be applied to the\n filesystem, only used in conjunction with filesystem_type\n\nOptional Args:\n description: string - text field to describe a volume", "id": "f1478:c0:m3"} {"signature": "def destroy(self):", "body": "return self.get_data(\"\" % (self.domain, self.id),type=DELETE,)", "docstring": "Destroy the record", "id": "f1479:c0:m3"} {"signature": "def create(self):", "body": "input_params = {\"\": self.type,\"\": self.data,\"\": self.name,\"\": self.priority,\"\": self.port,\"\": self.ttl,\"\": self.weight,\"\": self.flags,\"\": self.tags}data = self.get_data(\"\" % (self.domain),type=POST,params=input_params,)if data:self.id = data['']['']", "docstring": "Creates a new record for a domain.\n\nArgs:\n type (str): The type of the DNS record (e.g. A, CNAME, TXT).\n name (str): The host name, alias, or service being defined by the\n record.\n data (int): Variable data depending on record type.\n priority (int): The priority for SRV and MX records.\n port (int): The port for SRV records.\n ttl (int): The time to live for the record, in seconds.\n weight (int): The weight for SRV records.\n flags (int): An unsigned integer between 0-255 used for CAA records.\n tags (string): The parameter tag for CAA records. 
Valid values are\n \"issue\", \"issuewild\", or \"iodef\"", "id": "f1479:c0:m2"} {"signature": "def save(self):", "body": "data = {\"\": self.type,\"\": self.data,\"\": self.name,\"\": self.priority,\"\": self.port,\"\": self.ttl,\"\": self.weight,\"\": self.flags,\"\": self.tags}return self.get_data(\"\" % (self.domain, self.id),type=PUT,params=data)", "docstring": "Save existing record", "id": "f1479:c0:m4"} {"signature": "def load(self):", "body": "tags = self.get_data(\"\" % self.name)tag = tags['']for attr in tag.keys():setattr(self, attr, tag[attr])return self", "docstring": "Fetch data about tag", "id": "f1481:c0:m2"} {"signature": "def __extract_resources_from_droplets(self, data):", "body": "resources = []if not isinstance(data, list): return datafor a_droplet in data:res = {}try:if isinstance(a_droplet, unicode):res = {\"\": a_droplet, \"\": \"\"}except NameError:passif isinstance(a_droplet, str) or isinstance(a_droplet, int):res = {\"\": str(a_droplet), \"\": \"\"}elif isinstance(a_droplet, Droplet):res = {\"\": str(a_droplet.id), \"\": \"\"}if len(res) > :resources.append(res)return resources", "docstring": "Private method to extract the resources from a value.\nIt will check the type of object in the array provided and build\nthe right structure for the API.", "id": "f1481:c0:m8"} {"signature": "def __remove_resources(self, resources):", "body": "return self.__get_resources(resources, method='')", "docstring": "Remove resources from this tag.\n\nAttributes accepted at creation time:\n resources: array - See API.", "id": "f1481:c0:m7"} {"signature": "def __add_resources(self, resources):", "body": "return self.__get_resources(resources, method='')", "docstring": "Add the resources to this tag.\n\nAttributes accepted at creation time:\n resources: array - See API.", "id": "f1481:c0:m6"} {"signature": "def remove_droplets(self, droplet):", "body": "droplets = dropletif not isinstance(droplets, list):droplets = [droplet]resources = self.__extract_resources_from_droplets(droplets)if len(resources) > :return self.__remove_resources(resources)return False", "docstring": "Remove the Tag from the Droplet.\n\nAttributes accepted at creation time:\n droplet: array of string or array of int, or array of Droplets.", "id": "f1481:c0:m10"} {"signature": "def add_droplets(self, droplet):", "body": "droplets = dropletif not isinstance(droplets, list):droplets = [droplet]resources = self.__extract_resources_from_droplets(droplets)if len(resources) > :return self.__add_resources(resources)return False", "docstring": "Add the Tag to a Droplet.\n\nAttributes accepted at creation time:\n droplet: array of string or array of int, or array of Droplets.", "id": "f1481:c0:m9"} {"signature": "def create(self, **kwargs):", "body": "for attr in kwargs.keys():setattr(self, attr, kwargs[attr])params = {\"\": self.name}output = self.get_data(\"\", type=\"\", params=params)if output:self.name = output['']['']self.resources = output['']['']", "docstring": "Create the tag.", "id": "f1481:c0:m3"} {"signature": "def remove_droplets(self, droplet_ids):", "body": "return self.get_data(\"\" % self.id,type=DELETE,params={\"\": droplet_ids})", "docstring": "Unassign a LoadBalancer.\n\nArgs:\n droplet_ids (obj:`list` of `int`): A list of Droplet IDs", "id": "f1482:c3:m7"}
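A sketch of the droplet-to-resource mapping done by `__extract_resources_from_droplets` above: ids, strings, and Droplet objects all collapse to the same resource dict. The key names `resource_id`/`resource_type` are stripped from the record and assumed here from the DigitalOcean tags API:

    def extract_resources(droplets):
        resources = []
        for d in droplets:
            if isinstance(d, (str, int)):
                resources.append({"resource_id": str(d), "resource_type": "droplet"})
            elif hasattr(d, 'id'):  # stands in for isinstance(d, Droplet)
                resources.append({"resource_id": str(d.id), "resource_type": "droplet"})
        return resources

    assert extract_resources([42, "13"]) == [
        {"resource_id": "42", "resource_type": "droplet"},
        {"resource_id": "13", "resource_type": "droplet"},
    ]

{"signature": "def add_droplets(self, droplet_ids):", "body": "return self.get_data(\"\" % self.id,type=POST,params={\"\": droplet_ids})", "docstring": "Assign a LoadBalancer to a Droplet.\n\nArgs:\n droplet_ids (obj:`list` of `int`): A list of 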
Droplet IDs", "id": "f1482:c3:m6"} {"signature": "def destroy(self):", "body": "return self.get_data('' % self.id, type=DELETE)", "docstring": "Destroy the LoadBalancer", "id": "f1482:c3:m5"} {"signature": "def add_forwarding_rules(self, forwarding_rules):", "body": "rules_dict = [rule.__dict__ for rule in forwarding_rules]return self.get_data(\"\" % self.id,type=POST,params={\"\": rules_dict})", "docstring": "Adds new forwarding rules to a LoadBalancer.\n\nArgs:\n forwarding_rules (obj:`list`): A list of `ForwardingRule` objects", "id": "f1482:c3:m8"} {"signature": "@classmethoddef get_object(cls, api_token, id):", "body": "load_balancer = cls(token=api_token, id=id)load_balancer.load()return load_balancer", "docstring": "Class method that will return a LoadBalancer object by its ID.\n\nArgs:\n api_token (str): DigitalOcean API token\n id (str): Load Balancer ID", "id": "f1482:c3:m1"} {"signature": "def parse_type_comment(type_comment):", "body": "try:result = ast3.parse(type_comment, '', '')except SyntaxError:raise ValueError(f\"\") from Noneassert isinstance(result, ast3.Expression)return result.body", "docstring": "Parse a type comment string into AST nodes.", "id": "f1486:m50"} {"signature": "def parse_signature_type_comment(type_comment):", "body": "try:result = ast3.parse(type_comment, '', '')except SyntaxError:raise ValueError(f\"\")assert isinstance(result, ast3.FunctionType)if len(result.argtypes) == :argtypes = result.argtypes[]else:argtypes = result.argtypesreturn argtypes, result.returns", "docstring": "Parse the fugly signature type comment into AST nodes.\n\n Caveats: ASTifying **kwargs is impossible with the current grammar so we\n hack it into unary subtraction (to differentiate from Starred in vararg).\n\n For example from:\n \"(str, int, *int, **Any) -> 'SomeReturnType'\"\n\n To:\n ([ast3.Name, ast.Name, ast3.Name, ast.Name], ast3.Str)", "id": "f1486:m49"} {"signature": "def new(n, prefix=None):", "body": "if isinstance(n, Leaf):return Leaf(n.type, n.value, prefix=n.prefix if prefix is None else prefix)n.parent = Noneif prefix is not None:n.prefix = prefixreturn n", "docstring": "lib2to3's AST requires unique objects as children.", "id": "f1486:m70"}
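A usage sketch for `parse_type_comment` above. The stripped `ast3.parse` arguments are assumed to be a filename placeholder and the `'eval'` mode, which yields an `ast3.Expression` wrapper whose `.body` is the annotation node:

    from typed_ast import ast3

    tree = ast3.parse('List[int]', '<type_comment>', 'eval')  # assumed args
    assert isinstance(tree, ast3.Expression)
    assert isinstance(tree.body, ast3.Subscript)              # the List[int] node

{"signature": "def retype_path(src, pyi_dir, targets, *, src_explicitly_given=False, quiet=False, hg=False):", "body": "if src.is_dir():for child in src.iterdir():if child == pyi_dir or child == targets:continueyield from retype_path(child, pyi_dir / src.name, targets / src.name, quiet=quiet, hg=hg,)elif src.suffix == '' or src_explicitly_given:try:retype_file(src, pyi_dir, targets, quiet=quiet, hg=hg)except Exception as e:yield (src,str(e),type(e),traceback.format_tb(e.__traceback__),)", "docstring": "Recursively retype files or directories given. 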
Generate errors.", "id": "f1486:m1"} {"signature": "@singledispatchdef convert_annotation(ann):", "body": "raise NotImplementedError(f\"\")", "docstring": "Converts an AST object into its lib2to3 equivalent.", "id": "f1486:m18"} {"signature": "def copy_type_comments_to_annotations(args):", "body": "for arg in args.args:copy_type_comment_to_annotation(arg)if args.vararg:copy_type_comment_to_annotation(args.vararg)for arg in args.kwonlyargs:copy_type_comment_to_annotation(arg)if args.kwarg:copy_type_comment_to_annotation(args.kwarg)", "docstring": "Copies argument type comments from the legacy long form to annotations\n in the entire function signature.", "id": "f1486:m53"} {"signature": "def copy_arguments_to_annotations(args, type_comment, *, is_method=False):", "body": "if isinstance(type_comment, ast3.Ellipsis):returnexpected = len(args.args)if args.vararg:expected += expected += len(args.kwonlyargs)if args.kwarg:expected += actual = len(type_comment) if isinstance(type_comment, list) else if expected != actual:if is_method and expected - actual == :pass else:raise ValueError(f\"\" +f\"\")if isinstance(type_comment, list):next_value = type_comment.popelse:_tc = type_commentdef next_value(index: int = ) -> ast3.expr:return _tcfor arg in args.args[expected - actual:]:ensure_no_annotation(arg.annotation)arg.annotation = next_value()if args.vararg:ensure_no_annotation(args.vararg.annotation)args.vararg.annotation = next_value()for arg in args.kwonlyargs:ensure_no_annotation(arg.annotation)arg.annotation = next_value()if args.kwarg:ensure_no_annotation(args.kwarg.annotation)args.kwarg.annotation = next_value()", "docstring": "Copies AST nodes from `type_comment` into the ast3.arguments in `args`.\n\n Does validation of argument count (allowing for untyped self/cls)\n and type (vararg and kwarg).", "id": "f1486:m52"} {"signature": "def fix_remaining_type_comments(node):", "body": "assert node.type == syms.file_inputlast_n = Nonefor n in node.post_order():if last_n is not None:if n.type == token.NEWLINE and is_assignment(last_n):fix_variable_annotation_type_comment(n, last_n)elif n.type == syms.funcdef and last_n.type == syms.suite:fix_signature_annotation_type_comment(n, last_n, offset=)elif n.type == syms.async_funcdef and last_n.type == syms.suite:fix_signature_annotation_type_comment(n, last_n, offset=)last_n = n", "docstring": "Converts type comments in `node` to proper annotated assignments.", "id": "f1486:m39"} {"signature": "def retype_file(src, pyi_dir, targets, *, quiet=False, hg=False):", "body": "with tokenize.open(src) as src_buffer:src_encoding = src_buffer.encodingsrc_node = lib2to3_parse(src_buffer.read())try:with open((pyi_dir / src.name).with_suffix('')) as pyi_file:pyi_txt = pyi_file.read()except FileNotFoundError:if not quiet:print(f'',file=sys.stderr,)else:pyi_ast = ast3.parse(pyi_txt)assert isinstance(pyi_ast, ast3.Module)reapply_all(pyi_ast.body, src_node)fix_remaining_type_comments(src_node)targets.mkdir(parents=True, exist_ok=True)with open(targets / src.name, '', encoding=src_encoding) as target_file:target_file.write(lib2to3_unparse(src_node, hg=hg))return targets / src.name", "docstring": "Retype `src`, finding types in `pyi_dir`. 
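`parse_type_comment` and `parse_signature_type_comment` above call `ast3.parse` with their mode strings elided. A small sketch of what the signature variant plausibly does, assuming `typed_ast`'s `func_type` parsing mode:

    from typed_ast import ast3

    def parse_func_type_comment(type_comment):
        # 'func_type' mode parses "(int, str) -> bool" into an
        # ast3.FunctionType node exposing .argtypes and .returns;
        # '<func_type>' is only a filename label for error messages.
        result = ast3.parse(type_comment, '<func_type>', 'func_type')
        assert isinstance(result, ast3.FunctionType)
        return result.argtypes, result.returns

    # argtypes, returns = parse_func_type_comment("(str, int) -> bool")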
Save in `targets`.\n\n The file should remain formatted exactly as it was before, save for:\n - annotations\n - additional imports needed to satisfy annotations\n - additional module-level names needed to satisfy annotations\n\n Type comments in sources are normalized to type annotations.", "id": "f1486:m2"} {"signature": "def ensure_annotations_equal(name, expected, actual):", "body": "maybe_replace_any_if_equal(name, expected, actual)", "docstring": "Raise ValueError if `expected` isn't equal to `actual`.\n\n If --replace-any is used, the Any type in `actual` is considered equal.", "id": "f1486:m57"} {"signature": "def __getitem__(self, slice_obj):", "body": "start, stop, step = normalize_slice(slice_obj, self.index.line_count)if isinstance(slice_obj, slice):if step == :return self._get_lines(start, stop)return [self._get_lines(i)[] for i in range(start, stop, step)]if isinstance(slice_obj, int):return self._get_lines(start)[]return None", "docstring": "Supports slice operations on the file\n\nFor example:\n\n with IndexedOpen(filename) as f:\n print f[6:-2]", "id": "f1492:c0:m4"} {"signature": "@propertydef index_path(self):", "body": "return Path(str(self.filepath) + \"\")", "docstring": "the path to the index file", "id": "f1492:c0:m7"} {"signature": "def create_index(file_path, index_path, index_ratio, index_width):", "body": "i = with file_path.open() as f:with index_path.open(\"\") as idx:idx.write(index_ratio.to_bytes(, byteorder=\"\"))idx.write(index_width.to_bytes(, byteorder=\"\"))idx.write(().to_bytes(, byteorder=\"\")) idx.write(().to_bytes(index_width, byteorder=\"\"))while f.readline():i += if (i % index_ratio) == :pointer = f.tell()b = pointer.to_bytes(index_width, byteorder=\"\")idx.write(b)idx.seek()idx.write(i.to_bytes(, byteorder=\"\"))t = file_path.stat().st_mtimeos.utime(str(index_path), (t, t))", "docstring": "Index format:\n 1st byte: index_ratio\n 2nd byte: index_width\n 3rd byte: line_count", "id": "f1492:m0"} {"signature": "def normalize_slice(slice_obj, length):", "body": "if isinstance(slice_obj, slice):start, stop, step = slice_obj.start, slice_obj.stop, slice_obj.stepif start is None:start = if stop is None:stop = lengthif step is None:step = if start < :start += lengthif stop < :stop += lengthelif isinstance(slice_obj, int):start = slice_objif start < :start += lengthstop = start + step = else:raise TypeErrorif ( <= start <= length) and ( <= stop <= length):return start, stop, stepraise IndexError", "docstring": "Given a slice object, return appropriate values for use in the range function\n\n:param slice_obj: The slice object or integer provided in the `[]` notation\n:param length: For negative indexing we need to know the max length of the object.", "id": "f1494:m0"} {"signature": "def setUp(self):", "body": "global engineengine = create_engine('', echo=False)global SessionSession = sessionmaker(bind=engine)global sessionsession = Session()session._model_changes = {}Base.metadata.create_all(bind=engine)session.add_all([User(name='', lastname='', uid='', city_id=),User(name='', lastname='', uid='', city_id=),User(name='', lastname='', uid='', city_id=),City(name=''),City(name='')])session.commit()", "docstring": "Initial setup for the test", "id": "f1495:c2:m0"} {"signature": "def elastic_query(model, query, session=None, enabled_fields=None):", "body": "instance = ElasticQuery(model, query, session, enabled_fields)return instance.search()", "docstring": "Public method for init the class ElasticQuery\n :model: SQLAlchemy model\n :query: valid string like a 
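The `create_index` record above documents a small binary header (index ratio, pointer width, line count) followed by file offsets sampled every `index_ratio` lines, but the byte widths and byte order are elided. A hedged reader sketch, assuming a 1/1/8-byte header layout and little-endian order:

    def read_index(index_path):
        # Assumed layout: 1-byte ratio, 1-byte pointer width, 8-byte line
        # count, then line-start offsets of `width` bytes each.
        with open(index_path, 'rb') as idx:
            ratio = int.from_bytes(idx.read(1), byteorder='little')
            width = int.from_bytes(idx.read(1), byteorder='little')
            line_count = int.from_bytes(idx.read(8), byteorder='little')
            pointers = []
            chunk = idx.read(width)
            while chunk:
                pointers.append(int.from_bytes(chunk, byteorder='little'))
                chunk = idx.read(width)
        return ratio, width, line_count, pointers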
ElasticSearch\n :session: SQLAlchemy session *optional\n :enabled_fields: Fields allowed for making a query *optional", "id": "f1497:m0"} {"signature": "def search(self):", "body": "try:filters = json.loads(self.query)except ValueError:return Falseresult = self.model_queryif ''in filters.keys():result = self.parse_filter(filters[''])if ''in filters.keys():result = result.order_by(*self.sort(filters['']))return result", "docstring": "This is the most important method", "id": "f1497:c0:m1"} {"signature": "def create_query(self, attr):", "body": "field = attr[]operator = attr[]value = attr[]model = self.modelif '' in field:field_items = field.split('')field_name = getattr(model, field_items[], None)class_name = field_name.property.mapper.class_new_model = getattr(class_name, field_items[])return field_name.has(OPERATORS[operator](new_model, value))return OPERATORS[operator](getattr(model, field, None), value)", "docstring": "Mix all values and make the query", "id": "f1497:c0:m6"} {"signature": "def make_literal(s):", "body": "return partial(s, tri(string), s)", "docstring": "returns a literal parser", "id": "f1511:m4"} {"signature": "def caseless_literal(s):", "body": "return make_caseless_literal(s)()", "docstring": "A literal string, case independent.", "id": "f1511:m7"} {"signature": "def make_caseless_literal(s):", "body": "return partial(s, tri(caseless_string), s)", "docstring": "returns a literal string, case independent parser.", "id": "f1511:m6"} {"signature": "def literal(s):", "body": "return make_literal(s)()", "docstring": "A literal string.", "id": "f1511:m5"} {"signature": "def quoted(parser=any_token):", "body": "quote_char = quote()value, _ = many_until(parser, partial(one_of, quote_char))return build_string(value)", "docstring": "Parses as much as possible until it encounters a matching closing quote.\n\n By default matches any_token, but can be provided with a more specific parser if required.\n Returns a string", "id": "f1511:m3"} {"signature": "def satisfies(guard):", "body": "i = peek()if (i is EndOfFile) or (not guard(i)):fail([\"\" + _fun_to_str(guard) + \">\"])next()return i", "docstring": "Returns the current token if it satisfies the guard function provided.\n\n Fails otherwise.\n This is a generalisation of one_of.", "id": "f1512:m10"} {"signature": "def seq(*sequence):", "body": "results = {}for p in sequence:if callable(p): p()continuek, v = presults[k] = v()return results", "docstring": "Runs a series of parsers in sequence optionally storing results in a returned dictionary.\n\n For example:\n seq(whitespace, ('phone', digits), whitespace, ('name', remaining))", "id": "f1512:m25"} {"signature": "def optional(parser, default=None):", "body": "return choice(parser, lambda: default)", "docstring": "Tries to apply the provided parser, returning default if the parser fails.", "id": "f1512:m11"} {"signature": "def string(string):", "body": "found = []for c in string:found.append(one_of(c))return found", "docstring": "Iterates over string, matching input to the items provided.\n\n The most obvious usage of this is to accept an entire string of characters,\n However this function is more general than that. It takes an iterable\n and for each item, it tries one_of for that set. For example, \n string(['aA','bB','cC'])\n will accept 'abc' in either case. 
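Since the numeric literals in the `normalize_slice` record above are elided, here is a reconstructed, runnable sketch of the same normalization: defaults filled in, negative indices wrapped against `length`, and bounds checked before returning:

    def normalize_slice(slice_obj, length):
        # Map a slice or int index onto concrete (start, stop, step) values.
        if isinstance(slice_obj, slice):
            start = 0 if slice_obj.start is None else slice_obj.start
            stop = length if slice_obj.stop is None else slice_obj.stop
            step = 1 if slice_obj.step is None else slice_obj.step
            if start < 0:
                start += length
            if stop < 0:
                stop += length
        elif isinstance(slice_obj, int):
            start = slice_obj + length if slice_obj < 0 else slice_obj
            stop, step = start + 1, 1
        else:
            raise TypeError
        if 0 <= start <= length and 0 <= stop <= length:
            return start, stop, step
        raise IndexError

    # normalize_slice(slice(None, -2), 10) -> (0, 8, 1)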
\n\n Note: if you wish to match caseless strings as in the example, use \n picoparse.text.caseless_string.", "id": "f1512:m21"} {"signature": "def many1(parser):", "body": "return [parser()] + many(parser)", "docstring": "Like many, but must consume at least one of parser", "id": "f1512:m14"} {"signature": "def compose(f, g):", "body": "return lambda *args, **kwargs: f(g(*args, **kwargs))", "docstring": "Compose returns the two functions composed as a new function.\n\n The first is called with the result of the second as its argument. Any arguments \n are passed to the second.", "id": "f1512:m4"} {"signature": "def not_followed_by(parser):", "body": "@tridef not_followed_by_block():failed = object()result = optional(tri(parser), failed)if result != failed:fail([\"\" + _fun_to_str(parser)])choice(not_followed_by_block)", "docstring": "Succeeds if the given parser cannot consume input", "id": "f1512:m12"} {"signature": "def allele_expectation(bgen, variant_idx):", "body": "geno = bgen[\"\"][variant_idx].compute()if geno[\"\"]:raise ValueError(\"\")nalleles = bgen[\"\"].loc[variant_idx, \"\"].compute().item()genotypes = get_genotypes(geno[\"\"], nalleles)expec = []for i in range(len(genotypes)):count = asarray(genotypes_to_allele_counts(genotypes[i]), float)n = count.shape[]expec.append((count.T * geno[\"\"][i, :n]).sum())return stack(expec, axis=)", "docstring": "r\"\"\" Allele expectation.\n\n Compute the expectation of each allele from the genotype probabilities.\n\n Parameters\n ----------\n bgen : bgen_file\n Bgen file handler.\n variant_idx : int\n Variant index.\n\n Returns\n -------\n :class:`numpy.ndarray`\n Samples-by-alleles matrix of allele expectations.\n\n Note\n ----\n This function supports unphased genotypes only.\n\n Examples\n --------\n .. doctest::\n\n >>> from bgen_reader import allele_expectation, example_files, read_bgen\n >>>\n >>> from texttable import Texttable\n >>>\n >>> # Download an example.\n >>> example = example_files(\"example.32bits.bgen\")\n >>> filepath = example.filepath\n >>>\n >>> # Read the example.\n >>> bgen = read_bgen(filepath, verbose=False)\n >>>\n >>> variants = bgen[\"variants\"]\n >>> samples = bgen[\"samples\"]\n >>> genotype = bgen[\"genotype\"]\n >>>\n >>> genotype = bgen[\"genotype\"]\n >>> # This `compute` call will return a pandas data frame,\n >>> variant = variants[variants[\"rsid\"] == \"RSID_6\"].compute()\n >>> # from which we retrieve the variant index.\n >>> variant_idx = variant.index.item()\n >>> print(variant)\n id rsid chrom pos nalleles allele_ids vaddr\n 4 SNPID_6 RSID_6 01 6000 2 A,G 19377\n >>> genotype = bgen[\"genotype\"]\n >>> # Samples is a pandas series, and we retrieve the\n >>> # sample index from the sample name.\n >>> sample_idx = samples[samples == \"sample_005\"].index.item()\n >>>\n >>> genotype = bgen[\"genotype\"]\n >>> # This `compute` call will return a dictionary from which\n >>> # we can get the probability matrix of the corresponding\n >>> # variant.\n >>> p = genotype[variant_idx].compute()[\"probs\"][sample_idx]\n >>>\n >>> genotype = bgen[\"genotype\"]\n >>> # Allele expectation makes sense for unphased genotypes only,\n >>> # which is the case here.\n >>> e = allele_expectation(bgen, variant_idx)[sample_idx]\n >>>\n >>> genotype = bgen[\"genotype\"]\n >>> alleles = variant[\"allele_ids\"].item().split(\",\")\n >>>\n >>> genotype = bgen[\"genotype\"]\n >>>\n >>> # Print what we have got in a nice format.\n >>> table = Texttable()\n >>> table = table.add_rows(\n ... [\n ... 
[\"\", \"AA\", \"AG\", \"GG\", \"E[.]\"],\n ... [\"p\"] + list(p) + [\"na\"],\n ... [\"#\" + alleles[0], 2, 1, 0, e[0]],\n ... [\"#\" + alleles[1], 0, 1, 2, e[1]],\n ... ]\n ... )\n >>> print(table.draw())\n +----+-------+-------+-------+-------+\n | | AA | AG | GG | E[.] |\n +====+=======+=======+=======+=======+\n | p | 0.012 | 0.987 | 0.001 | na |\n +----+-------+-------+-------+-------+\n | #A | 2 | 1 | 0 | 1.011 |\n +----+-------+-------+-------+-------+\n | #G | 0 | 1 | 2 | 0.989 |\n +----+-------+-------+-------+-------+\n >>>\n >>> # Clean-up.\n >>> example.close()", "id": "f1519:m2"} {"signature": "def allele_frequency(expec):", "body": "expec = asarray(expec, float)if expec.ndim != :raise ValueError(\"\")ploidy = expec.shape[-]return expec.sum(-) / ploidy", "docstring": "r\"\"\" Compute allele frequency from its expectation.\n\n Parameters\n ----------\n expec : array_like\n Allele expectations encoded as a samples-by-alleles matrix.\n\n Returns\n -------\n :class:`numpy.ndarray`\n Allele frequencies encoded as a variants-by-alleles matrix.\n\n Examples\n --------\n .. doctest::\n\n >>> from bgen_reader import read_bgen, example_files\n >>> from bgen_reader import allele_expectation, allele_frequency\n >>>\n >>> # Download an example\n >>> example = example_files(\"example.32bits.bgen\")\n >>> filepath = example.filepath\n >>>\n >>> bgen = read_bgen(filepath, verbose=False)\n >>>\n >>> variants = bgen[\"variants\"]\n >>> samples = bgen[\"samples\"]\n >>> genotype = bgen[\"genotype\"]\n >>>\n >>> variant = variants[variants[\"rsid\"] == \"RSID_6\"].compute()\n >>> variant_idx = variant.index.item()\n >>>\n >>> p = genotype[variant_idx].compute()[\"probs\"]\n >>> # For unphased genotypes only.\n >>> e = allele_expectation(bgen, variant_idx)\n >>> f = allele_frequency(e)\n >>>\n >>> alleles = variant[\"allele_ids\"].item().split(\",\")\n >>> print(alleles[0] + \": {}\".format(f[0]))\n A: 229.23103218810434\n >>> print(alleles[1] + \": {}\".format(f[1]))\n G: 270.7689678118956\n >>> print(variant)\n id rsid chrom pos nalleles allele_ids vaddr\n 4 SNPID_6 RSID_6 01 6000 2 A,G 19377\n >>>\n >>> # Clean-up the example\n >>> example.close()", "id": "f1519:m0"} {"signature": "def _touch(fname, mode=, dir_fd=None, **kwargs):", "body": "flags = os.O_CREAT | os.O_APPENDwith os.fdopen(os.open(fname, flags=flags, mode=mode, dir_fd=dir_fd)) as f:os.utime(f.fileno() if os.utime in os.supports_fd else fname,dir_fd=None if os.supports_fd else dir_fd,**kwargs,)", "docstring": "Touch a file.\n\n Credits to .", "id": "f1521:m4"} {"signature": "def point_distance(point1, point2):", "body": "return ((point1[] - point2[]) ** + (point1[] - point2[]) ** ) ** ", "docstring": "Computes the distance beteen two points on a plane.\n\nArgs:\n point1: Tuple or list, the x and y coordinate of the first point.\n\n point2: Tuple or list, the x and y coordinate of the second point.\n\nReturns:\n The distance between the two points as a floating point number.", "id": "f1541:m8"} {"signature": "def rectangle_area(width, height):", "body": "return width * height", "docstring": "Returns the area of a rectangle with the given width and height.\n\n Args:\n width:\n Integer or float, width of the rectangle.\n\n height: Integer or float, height of the rectangle.\n\n Returns:\n The area of a rectangle as an integer or float.", "id": "f1541:m3"} {"signature": "def average(numbers, numtype=''):", "body": "if type == '':return Decimal(sum(numbers)) / len(numbers)else:return float(sum(numbers)) / len(numbers)", "docstring": 
"Calculates the average or mean of a list of numbers\n\nArgs:\n numbers: a list of integers or floating point numbers.\n\n numtype: string, 'decimal' or 'float'; the type of number to return.\n\nReturns:\n The average (mean) of the numbers as a floating point number\n or a Decimal object.\n\nRequires:\n The math module", "id": "f1541:m13"} {"signature": "def miles_to_feet(miles):", "body": "return miles * float()", "docstring": "Converts a number of miles to feet.\n\nArgs:\n miles: Number of miles we want to convert.\n\nReturns:\n Floating point number as the number of\n feet in the given miles.", "id": "f1541:m0"} {"signature": "def get_percentage(a, b, i=False, r=False):", "body": "if i is False and r is True:percentage = round( * (float(a) / b), )elif (i is True and r is True) or (i is True and r is False):percentage = int(round( * (float(a) / b)))if r is False:warnings.warn(\"\")else:percentage = * (float(a) / b)return percentage", "docstring": "Finds the percentage of one number over another.\n\nArgs:\n a: The number that is a percent, int or float.\n\n b: The base number that a is a percent of, int or float.\n\n i: Optional boolean integer. True if the user wants the result returned as\n a whole number. Assumes False.\n\n r: Optional boolean round. True if the user wants the result rounded.\n Rounds to the second decimal point on floating point numbers. Assumes False.\n\nReturns:\n The argument a as a percentage of b. Throws a warning if integer is set to True\n and round is set to False.", "id": "f1541:m16"} {"signature": "def future_value(present_value, annual_rate, periods_per_year, years):", "body": "rate_per_period = annual_rate / float(periods_per_year)periods = periods_per_year * yearsreturn present_value * ( + rate_per_period) ** periods", "docstring": "Calculates the future value of money invested at an anual interest rate,\nx times per year, for a given number of years.\n\nArgs:\n present_value: int or float, the current value of the money (principal).\n\n annual_rate: float 0 to 1 e.g., .5 = 50%), the interest rate paid out.\n\n periods_per_year: int, the number of times money is invested per year.\n\n years: int, the number of years invested.\n\nReturns:\n Float, the future value of the money invested with compound interest.", "id": "f1541:m7"} {"signature": "def total_seconds(hours, minutes, seconds):", "body": "return (hours * + minutes) * + seconds", "docstring": "Returns the number of seconds in the given number of hours,\nminutes, and seconds.\n\nArgs:\n hours:\n Integer, number of hours.\n\n minutes:\n Integer, number of minutes.\n\n seconds:\n Integer, number of seconds.\n\nReturns:\n Integer, time in seconds.", "id": "f1541:m1"} {"signature": "def get_full_binary_tree_nodes(height):", "body": "return ** (height + ) - ", "docstring": "Calculate the number of internal nodes in a complete binary tree in which each\ninternal node has exactly two children. A full binary tree is complete if every\nleaf in the tree has the same depth. Internal nodes include both leaves and\ninternal nodes. The root node is also included in this calculation.\n\nArgs:\n height: integer, the height of the tree. Height is defined by the number\n of edges from the furthest child to the root. 
An edge is the line segment\n that runs between and connects nodes.", "id": "f1541:m19"} {"signature": "def is_leap_year(year):", "body": "if (year % ) == :return Trueelif (year % ) == :return Falseelif (year % ) == :return Trueelse:return False", "docstring": "Checks to see if a given year is a leap year.\n\nArgs:\n Integer, the year to test.\n\nReturns:\n Boolean", "id": "f1541:m10"} {"signature": "def savings_rate(take_home_pay, spending, numtype=''):", "body": "if numtype == '':try:return ((Decimal(take_home_pay) - Decimal(spending)) / (Decimal(take_home_pay))) * Decimal()except (InvalidOperation, DivisionByZero):return Decimal()else:try:return ((float(take_home_pay) - float(spending)) / (float(take_home_pay))) * except (ZeroDivisionError):return ", "docstring": "Calculate your monthly savings rate\nusing the formula laid out by Mr. Money Mustache:\nhttp://www.mrmoneymustache.com/2015/01/26/calculating-net-worth/\n\nArgs:\n take_home_pay: float or int, monthly take-home pay\n\n spending: float or int, monthly spending\n\n numtype: string, 'decimal' or 'float'; the type of number to return.\n\nReturns:\n your monthly savings rate expressed as a percentage.", "id": "f1541:m21"} {"signature": "def get_full_binary_tree_leaves(height):", "body": "return ** height", "docstring": "Calculate the number of leaves in a complete binary tree in which each internal\nnode has exactly two children. A full binary tree is complete if every leaf\nin the tree has the same depth. A leaf is a node without children.\n\nArgs:\n height: integer, the height of the tree. Height is defined by the number\n of edges from the furthest child to the root. An edge is the line segment\n that runs between and connects nodes.", "id": "f1541:m18"} {"signature": "def get_slope(point1, point2):", "body": "return (float(point2[]) - point1[]) / (float(point2[]) - point1[])", "docstring": "Calculate the slope of the line connecting two points on a grid.\n\nArgs:\n point1: Tuple or list, the x and y coordinate of the first point.\n\n point2: Tuple or list, the x and y coordinate of the second point.\n\nReturns:\n the slope of a line connecting two points on a grid.", "id": "f1541:m17"} {"signature": "def standard_deviation(variance):", "body": "return variance ** ", "docstring": "Calculates the standard deviation.\n\nArgs:\n variance: The variance of a group of numbers.\n\nReturns:\n The standard deviation as a floating point number.", "id": "f1541:m15"} {"signature": "def take_home_pay(gross_pay, employer_match, taxes_and_fees, numtype=''):", "body": "if numtype == '':return (Decimal(gross_pay) + Decimal(employer_match)) - Decimal(sum(taxes_and_fees))else:return (float(gross_pay) + float(employer_match)) - sum(taxes_and_fees)", "docstring": "Calculate net take-home pay including employer retirement savings match\nusing the formula laid out by Mr. 
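A worked example of the Mr. Money Mustache arithmetic that `savings_rate` and `take_home_pay` implement (the numbers here are illustrative, not from the source):

    # take-home pay = gross pay + employer 401(k) match - taxes and fees
    gross, match, deductions = 5000.0, 250.0, [900.0, 350.0]
    take_home = (gross + match) - sum(deductions)    # 4000.0

    # savings rate = (take-home pay - spending) / take-home pay * 100
    spending = 2500.0
    rate = (take_home - spending) / take_home * 100  # 37.5 percent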
Money Mustache:\nhttp://www.mrmoneymustache.com/2015/01/26/calculating-net-worth/\n\nArgs:\n gross_pay: float or int, gross monthly pay.\n\n employer_match: float or int, the 401(k) match from your employer.\n\n taxes_and_fees: list, taxes and fees that are deducted from your paycheck.\n\n numtype: string, 'decimal' or 'float'; the type of number to return.\n\nReturns:\n your monthly take-home pay.", "id": "f1541:m20"} {"signature": "def triangle_area(point1, point2, point3):", "body": "\"\"\"\"\"\"a = point_distance(point1, point2)b = point_distance(point1, point3)c = point_distance(point2, point3)\"\"\"\"\"\"s = (a + b + c) / \"\"\"\"\"\"return math.sqrt(s * (s - a) * (s - b) * (s - c))", "docstring": "Uses Heron's formula to find the area of a triangle\nbased on the coordinates of three points.\n\nArgs:\n point1: list or tuple, the x y coordinate of point one.\n\n point2: list or tuple, the x y coordinate of point two.\n\n point3: list or tuple, the x y coordinate of point three.\n\nReturns:\n The area of a triangle as a floating point number.\n\nRequires:\n The math module, point_distance().", "id": "f1541:m9"} {"signature": "def compound_interest(principal, annual_rate, years):", "body": "return principal * ( + * annual_rate) ** years", "docstring": "Returns the future value of money invested at an annual\ninterest rate, compounded annually for a given number of years.\n\nArgs:\n principal: The beginning amount of money invested\n\n annual_rate: The interest rate paid out\n\n years: The number of years invested\n\nReturns:\n A basic calculation of compound interest.", "id": "f1541:m6"} {"signature": "def foo(a):", "body": "assert a < ", "docstring": "Meaningless...", "id": "f1544:m0"} {"signature": "def get_built_in(self, language, level, data):", "body": "pp = pprint.PrettyPrinter(indent=level)lookup = {'' : pp.pformat(data),'' : str(json.dumps(data, sort_keys=True, indent=level, separators=('', '')))}self.data_structure = lookup[language]", "docstring": "Gets the return string for a language that's supported by Python.\nUsed in cases when Python provides support for the conversion.\n\nArgs:\n language: string, the language to return for.\n\n level: integer, the indentation level.\n\n data: python data structure being converted (list of tuples)\n\nReturns:\n None, updates self.data_structure", "id": "f1547:c0:m3"} {"signature": "def excel_to_html(path, sheetname='', css_classes='',caption='', details=[], row_headers=False, merge=False):", "body": "def get_data_on_merged_cells():\"\"\"\"\"\"merged_cells = xls.book.sheet_by_name(sheetname).merged_cellsds = {}for crange in merged_cells:rlo, rhi, clo, chi = crangefor rowx in range(rlo, rhi):for colx in range(clo, chi):parent_cell = (rlo,clo)child_cell = (rowx,colx)if not parent_cell in ds:ds[parent_cell] = [[,], set([])]else:if parent_cell != child_cell and child_cell[] == parent_cell[]:ds[parent_cell][][] += ds[parent_cell][].add('')elif parent_cell != child_cell and child_cell[] > parent_cell[]:if child_cell[] == parent_cell[]:ds[parent_cell][][] += ds[parent_cell][].add('')else:raise RuntimeError('')return dsdef mark_cells_going_right(cell, curr_cell, merged_cells):\"\"\"\"\"\"try:xcount = merged_cells[curr_cell][][]if xcount > : cell[''] = xcountcol_count = xcount - while col_count > :cell = cell.find_next_sibling()cell[''] = ''col_count -= except:passdef mark_cells_going_down(cell, curr_cell, merged_cells):\"\"\"\"\"\"if curr_cell in merged_cells and merged_cells[curr_cell][] == set(['']):ycount = merged_cells[curr_cell][][]cell[''] 
= ycountrow_count = ycountfor child_row in cell.parent.find_next_siblings(limit=row_count - ):i = for child in child_row.find_all(''):if i == curr_cell[]:child[''] = ''i += def mark_cells_going_down_and_right(cell, curr_cell, merged_cells):\"\"\"\"\"\"if curr_cell in merged_cells and('' in merged_cells[curr_cell][] and'' in merged_cells[curr_cell][]):xcount = merged_cells[curr_cell][][]ycount = merged_cells[curr_cell][][]row_count = ycountcol_count = xcountmark_cells_going_right(cell, curr_cell, merged_cells)flag = Falsefor child_row in [cell.parent] + cell.parent.find_all_next('', limit=row_count - ):i = for child in child_row.find_all(''):if i == curr_cell[]:mark_cells_going_right(child, curr_cell, merged_cells)if not flag:child[''] = col_countchild[''] = row_countflag = Trueelse:child[''] = ''i += def is_empty_th(string):\"\"\"\"\"\"if string[:] == '':data = string.split('')if is_numeric(data[]):return Truereturn Falsedef mark_header_cells(html):\"\"\"\"\"\"th = html.find_all('')for header in th:txt = header.stringif not is_empty_th(txt):header[''] = ''count = for sibling in header.find_next_siblings():if is_empty_th(sibling.string):count += sibling[''] = ''else:breakif count > :header[''] = countheader[''] = ''def create_caption(html, caption):\"\"\"\"\"\"ctag = html.new_tag('')ctag.insert(, caption)html.table.insert(, ctag)def create_summary_and_details(html, details):\"\"\"\"\"\"if len(details) != :msg = ''+ ''+ ''+ ''raise RuntimeError(msg)summary = details[]details = details[]if not caption:create_caption(html, caption)dtag = html.new_tag('')stag = html.new_tag('')ptag = html.new_tag('')stag.insert(, summary)ptag.insert(, details)dtag.insert(, stag)dtag.append(ptag) html.table.caption.insert(, dtag) def format_properly(html):\"\"\"\"\"\"return html.replace('', '').replace('','').replace('', '').replace('','').replace('', '')def add_row_headers(html):\"\"\"\"\"\"for row in html.tbody.find_all(''):spans_rows = '' in row.td.attrsspans_columns = '' in row.td.attrsnew_tag = html.new_tag('')new_tag[''] = ''new_tag.string = row.td.stringif spans_rows:new_tag[''] = row.td.attrs['']new_tag[''] = ''if spans_columns:new_tag[''] = row.td.attrs['']row.td.replace_with(new_tag)def beautify(html):\"\"\"\"\"\"table = html.find('')first_tr = table.find('')del table['']del first_tr['']return format_properly(html.prettify(formatter=''))def parse_html(html, caption, details):\"\"\"\"\"\"new_html = BeautifulSoup(html, '')if merge:row_num = merged_cells = get_data_on_merged_cells()rows = new_html.find('').find('').find_all('')for row in rows:cell_num = cells = row.find_all('')for cell in cells:curr_cell = (row_num, cell_num)mark_cells_going_right(cell, curr_cell, merged_cells) mark_cells_going_down(cell, curr_cell, merged_cells)mark_cells_going_down_and_right(cell, curr_cell, merged_cells)cell_num += row_num += mark_header_cells(new_html)destroy = new_html.find_all(attrs={'' : '' })for item in destroy:item.extract()if row_headers:add_row_headers(new_html)if caption:create_caption(new_html, caption)if details:create_summary_and_details(new_html, details)return beautify(new_html)pd.options.display.max_colwidth = -xls = pd.ExcelFile(path)df = xls.parse(sheetname)panda_html = df.to_html(classes=css_classes, index=False, na_rep='')return parse_html(panda_html, caption, details)", "docstring": "Convert an excel spreadsheet to an html table.\nThis function supports the conversion of merged \ncells. It can be used in code or run from the \ncommand-line. 
If passed the correct arguments\nit can generate fully accessible html.\n\nArgs:\n path: string, path to the spreadsheet.\n\n sheetname: string, name of the sheet\n to convert. \n\n css_classes: string, space-separated\n class names to append to the table.\n\n caption: string, a short heading-like \n description of the table.\n\n details: list of strings, where the first\n item in the list is a string for the html \n summary element and the second item is\n a string for the details element. The \n summary should be very short, e.g. \"Help\",\n whereas the details element should be a \n long description regarding the purpose or \n how to navigate the table.\n\n row_headers: boolean, defaults to False.\n Does the table have row headers? If set\n to True, the first element in each row\n will be a element \n instead of a element.\n\n merge: boolean, whether or not to \n combine cells that were merged in the \n spreadsheet.\n\nReturns:\n string, html table", "id": "f1549:m14"} {"signature": "def add_newlines(f, output, char):", "body": "line_count = get_line_count(f)f = open(f, '')output = open(output, '')for line in range(line_count):string = f.readline()string = re.sub(char, char + '', string)output.write(string)", "docstring": "Adds line breaks after every occurrence of a given character in a file.\n\n Args:\n f: string, path to input file.\n\n output: string, path to output file.\n\n Returns:\n None.", "id": "f1549:m4"} {"signature": "def get_line_count(fname):", "body": "i = with open(fname) as f:for i, l in enumerate(f):passreturn i + ", "docstring": "Counts the number of lines in a file.\n\n Args:\n fname: string, name of the file.\n\n Returns:\n integer, the number of lines in the file.", "id": "f1549:m2"} {"signature": "def indent_css(f, output):", "body": "line_count = get_line_count(f)f = open(f, '')output = open(output, '')for line in range(line_count):string = f.readline().rstrip()if len(string) > :if string[-] == \"\":output.write(\"\" + string + \"\")else:output.write(string + \"\")output.close()f.close()", "docstring": "Indents css that has not been indented and saves it to a new file.\n A new file is created if the output destination does not already exist.\n\n Args:\n f: string, path to file.\n\n output: string, path/name of the output file (e.g. /directory/output.css).\n\n Returns:\n None.", "id": "f1549:m3"} {"signature": "def is_numeric(string):", "body": "try:float(string)return Trueexcept ValueError:return False", "docstring": "Checks if a string is numeric. If the string value is an integer\nor a float, return True, otherwise False. Can be used to test \nsolely for floats as well. \n\nArgs:\n string: a string to test.\n\nReturns: \n boolean", "id": "f1549:m7"} {"signature": "def clean_strings(iterable):", "body": "retval = []for val in iterable:try:retval.append(val.strip())except(AttributeError):retval.append(val)return retval", "docstring": "Take a list of strings and clear whitespace \non each one. If a value in the list is not a \nstring pass it through untouched.\n\nArgs:\n iterable: mixed list\n\nReturns: \n mixed list", "id": "f1549:m12"} {"signature": "def is_int(string):", "body": "try:a = float(string)b = int(a)except ValueError:return Falseelse:return a == b", "docstring": "Checks if a string is an integer. If the string value is an integer\nreturn True, otherwise return False. 
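The two string predicates above pair naturally; a self-contained sketch with the elided literals restored:

    def is_numeric(s):
        # True for anything float() accepts: '7', '3.14', '-2e3', ...
        try:
            float(s)
            return True
        except ValueError:
            return False

    def is_int(s):
        # True only when the value is numeric *and* has no fractional part.
        try:
            a = float(s)
            b = int(a)
        except ValueError:
            return False
        return a == b

    assert is_numeric("3.14") and not is_int("3.14")
    assert is_numeric("7") and is_int("7")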
\n\nArgs:\n string: a string to test.\n\nReturns: \n boolean", "id": "f1549:m10"} {"signature": "def pluralize_collection(base, local_cls, referred_cls, constraint):", "body": "\"\"referred_name = referred_cls.__name__uncamelized = re.sub(r'',lambda m: \"\" % m.group().lower(),referred_name)[:]pluralized = _pluralizer.plural(uncamelized)return pluralized", "docstring": "Produce an 'uncamelized', 'pluralized' class name, e.g.", "id": "f1564:m1"} {"signature": "def is_populated(self) -> bool:", "body": "return < self.count_model()", "docstring": "Check if the database is already populated.", "id": "f1569:c1:m5"} {"signature": "@classmethoddef populate(cls):", "body": "cls.manager.populate()", "docstring": "Populate the database.", "id": "f1571:c3:m0"} {"signature": "@classmethoddef populate(cls):", "body": "cls.manager.populate(return_true=True)", "docstring": "Populate the database.", "id": "f1571:c4:m0"} {"signature": "def setUp(self):", "body": "self.runner = CliRunner()self.main = Manager.get_cli()", "docstring": "Set up a CliRunner and an accompanying CLI for each test.", "id": "f1574:c0:m0"} {"signature": "def setUp(self):", "body": "self.runner = CliRunner()self.main = NamespaceManager.get_cli()self.manager = Manager(connection=self.connection)self.manager.populate()", "docstring": "Set up a CliRunner and an accompanying CLI for each test.", "id": "f1575:c3:m0"} {"signature": "def populate(self):", "body": "self.manager.populate()", "docstring": "Populate the manager.", "id": "f1575:c2:m0"} {"signature": "def make_df_getter(data_url: str, data_path: str, **kwargs) -> Callable[[Optional[str], bool, bool], pd.DataFrame]:", "body": "download_function = make_downloader(data_url, data_path)def get_df(url: Optional[str] = None, cache: bool = True, force_download: bool = False) -> pd.DataFrame:\"\"\"\"\"\"if url is None and cache:url = download_function(force_download=force_download)return pd.read_csv(url or data_url,**kwargs)return get_df", "docstring": "Build a function that handles downloading tabular data and parsing it into a pandas DataFrame.\n\n :param data_url: The URL of the data\n :param data_path: The path where the data should get stored\n :param kwargs: Any other arguments to pass to :func:`pandas.read_csv`", "id": "f1576:m1"} {"signature": "def build_engine_session(connection, echo=False, autoflush=None, autocommit=None, expire_on_commit=None,scopefunc=None):", "body": "if connection is None:raise ValueError('')engine = create_engine(connection, echo=echo)autoflush = autoflush if autoflush is not None else Falseautocommit = autocommit if autocommit is not None else Falseexpire_on_commit = expire_on_commit if expire_on_commit is not None else Truelog.debug('', autoflush, autocommit, expire_on_commit)session_maker = sessionmaker(bind=engine,autoflush=autoflush,autocommit=autocommit,expire_on_commit=expire_on_commit,)session = scoped_session(session_maker,scopefunc=scopefunc)return engine, session", "docstring": "Build an engine and a session.\n\n :param str connection: An RFC-1738 database connection string\n :param bool echo: Turn on echoing SQL\n :param Optional[bool] autoflush: Defaults to True if not specified in kwargs or configuration.\n :param Optional[bool] autocommit: Defaults to False if not specified in kwargs or configuration.\n :param Optional[bool] expire_on_commit: Defaults to False if not specified in kwargs or configuration.\n :param scopefunc: Scoped function to pass to :func:`sqlalchemy.orm.scoped_session`\n :rtype: tuple[Engine,Session]\n\n From the 
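The `build_engine_session` record above has its flag literals elided in the body, but the logic spells out the defaults (autoflush and autocommit default to False, expire_on_commit to True). A minimal reconstruction against the SQLAlchemy 1.x API the record targets (SQLAlchemy 2.0 removes the `autocommit` session flag, so it is omitted here):

    from sqlalchemy import create_engine
    from sqlalchemy.orm import scoped_session, sessionmaker

    def build_engine_session(connection, echo=False):
        # scoped_session hands out thread-local Session objects bound to
        # a single engine, matching the record's (engine, session) return.
        if connection is None:
            raise ValueError('connection string is required')
        engine = create_engine(connection, echo=echo)
        session = scoped_session(sessionmaker(
            bind=engine, autoflush=False, expire_on_commit=True))
        return engine, session

    # engine, session = build_engine_session('sqlite://')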
Flask-SQLAlchemy documentation:\n\n An extra key ``'scopefunc'`` can be set on the ``options`` dict to\n specify a custom scope function. If it's not provided, Flask's app\n context stack identity is used. This will ensure that sessions are\n created and removed with the request/response cycle, and should be fine\n in most cases.", "id": "f1578:m0"} {"signature": "@propertydef connection(self) -> str:", "body": "return str(self.engine.url)", "docstring": "Return this manager's connection string.", "id": "f1578:c0:m1"} {"signature": "def add_cli_write_bel_annotation(main: click.Group) -> click.Group: ", "body": "@main.command()@click.option('', '', type=click.Path(file_okay=False, dir_okay=True), default=os.getcwd(),help='')@click.pass_objdef write(manager: BELNamespaceManagerMixin, directory: str):\"\"\"\"\"\"with open(os.path.join(directory, manager.identifiers_namespace), '') as file:manager.write_bel_annotation(file)return main", "docstring": "Add a ``write_bel_annotation`` command to main :mod:`click` function.", "id": "f1579:m3"} {"signature": "def write_directory(self, directory: str) -> bool:", "body": "current_md5_hash = self.get_namespace_hash()md5_hash_path = os.path.join(directory, f'')if not os.path.exists(md5_hash_path):old_md5_hash = Noneelse:with open(md5_hash_path) as file:old_md5_hash = file.read().strip()if old_md5_hash == current_md5_hash:return Falsewith open(os.path.join(directory, f''), '') as file:self.write_bel_namespace(file, use_names=False)with open(md5_hash_path, '') as file:print(current_md5_hash, file=file)if self.has_names:with open(os.path.join(directory, f''), '') as file:self.write_bel_namespace(file, use_names=True)with open(os.path.join(directory, f''), '') as file:self.write_bel_namespace_mappings(file, desc='')return True", "docstring": "Write a BEL namespace for identifiers, names, name hash, and mappings to the given directory.", "id": "f1579:c1:m21"} {"signature": "def _iterate_namespace_models(self, **kwargs) -> Iterable:", "body": "return tqdm(self._get_query(self.namespace_model),total=self._count_model(self.namespace_model),**kwargs)", "docstring": "Return an iterator over the models to be converted to the namespace.", "id": "f1579:c1:m5"} {"signature": "@staticmethoddef _cli_add_write_bel_namespace(main: click.Group) -> click.Group:", "body": "return add_cli_write_bel_namespace(main)", "docstring": "Add the write BEL namespace command.", "id": "f1579:c1:m28"} {"signature": "def _add_annotation_to_graph(self, graph: BELGraph) -> None:", "body": "if '' not in graph.annotation_list:graph.annotation_list[''] = set()graph.annotation_list[''].add(self.module_name)", "docstring": "Add this manager as an annotation to the graph.", "id": "f1579:c1:m15"} {"signature": "@staticmethoddef _cli_add_write_bel_annotation(main: click.Group) -> click.Group:", "body": "return add_cli_write_bel_annotation(main)", "docstring": "Add the write BEL namespace command.", "id": "f1579:c1:m29"} {"signature": "@staticmethoddef _cli_add_to_bel_namespace(main: click.Group) -> click.Group:", "body": "return add_cli_to_bel_namespace(main)", "docstring": "Add the export BEL namespace command.", "id": "f1579:c1:m26"} {"signature": "def write_bel_namespace(self, file: TextIO, use_names: bool = False) -> None:", "body": "if not self.is_populated():self.populate()if use_names and not self.has_names:raise ValueErrorvalues = (self._get_namespace_name_to_encoding(desc='')if use_names 
elseself._get_namespace_identifier_to_encoding(desc=''))write_namespace(namespace_name=self._get_namespace_name(),namespace_keyword=self._get_namespace_keyword(),namespace_query_url=self.identifiers_url,values=values,file=file,)", "docstring": "Write as a BEL namespace file.", "id": "f1579:c1:m18"} {"signature": "def add_cli_clear_bel_namespace(main: click.Group) -> click.Group: ", "body": "@main.command()@click.pass_objdef drop(manager: BELNamespaceManagerMixin):\"\"\"\"\"\"namespace = manager.drop_bel_namespace()if namespace:click.echo(f'')return main", "docstring": "Add a ``clear_bel_namespace`` command to main :mod:`click` function.", "id": "f1579:m1"} {"signature": "@staticmethoddef _get_name(model) -> str:", "body": "return model.name", "docstring": "Extract the name from an instance of namespace_model.\n\n :param model: The model to convert", "id": "f1579:c1:m4"} {"signature": "@staticmethoddef _cli_add_clear_bel_namespace(main: click.Group) -> click.Group:", "body": "return add_cli_clear_bel_namespace(main)", "docstring": "Add the clear BEL namespace command.", "id": "f1579:c1:m27"} {"signature": "def upload_bel_namespace(self, update: bool = False) -> Namespace:", "body": "if not self.is_populated():self.populate()namespace = self._get_default_namespace()if namespace is None:log.info('', self._get_namespace_name())return self._make_namespace()if update:self._update_namespace(namespace)return namespace", "docstring": "Upload the namespace to the PyBEL database.\n\n :param update: Should the namespace be updated first?", "id": "f1579:c1:m16"} {"signature": "@abstractmethoddef _create_namespace_entry_from_model(self, model, namespace: Namespace) -> NamespaceEntry:", "body": "", "docstring": "Create a PyBEL NamespaceEntry model from a Bio2BEL model.\n\n :param model: The model to convert\n :param namespace: The PyBEL namespace to add to", "id": "f1579:c1:m1"} {"signature": "def add_cli_write_bel_namespace(main: click.Group) -> click.Group: ", "body": "@main.command()@click.option('', '', type=click.Path(file_okay=False, dir_okay=True), default=os.getcwd(),help='')@click.pass_objdef write(manager: BELNamespaceManagerMixin, directory: str):\"\"\"\"\"\"manager.write_directory(directory)return main", "docstring": "Add a ``write_bel_namespace`` command to main :mod:`click` function.", "id": "f1579:m2"} {"signature": "@classmethoddef _get_namespace_name(cls) -> str:", "body": "return cls.identifiers_recommended or cls.module_name", "docstring": "Get the nicely formatted name of this namespace.", "id": "f1579:c1:m6"} {"signature": "def write_bel_namespace_mappings(self, file: TextIO, **kwargs) -> None:", "body": "json.dump(self._get_namespace_identifier_to_name(**kwargs), file, indent=, sort_keys=True)", "docstring": "Write a BEL namespace mapping file.", "id": "f1579:c1:m20"} {"signature": "def write_bel_annotation(self, file: TextIO) -> None:", "body": "if not self.is_populated():self.populate()values = self._get_namespace_name_to_encoding(desc='')write_annotation(keyword=self._get_namespace_keyword(),citation_name=self._get_namespace_name(),description='',values=values,file=file,)", "docstring": "Write as a BEL annotation file.", "id": "f1579:c1:m19"} {"signature": "def add_cli_to_bel_namespace(main: click.Group) -> click.Group: ", "body": "@main.command()@click.option('', '', is_flag=True)@click.pass_objdef upload(manager: BELNamespaceManagerMixin, update):\"\"\"\"\"\"namespace = manager.upload_bel_namespace(update=update)click.echo(f'')return main", "docstring": "Add a 
``upload_bel_namespace`` command to main :mod:`click` function.", "id": "f1579:m0"} {"signature": "@classmethoddef _get_identifier(cls, model) -> str:", "body": "return getattr(model, f'')", "docstring": "Extract the identifier from an instance of namespace_model.\n\n :param model: The model to convert", "id": "f1579:c1:m2"} {"signature": "@staticmethoddef _cli_add_to_bel(main: click.Group) -> click.Group:", "body": "return add_cli_to_bel(main)", "docstring": "Add the export BEL command.", "id": "f1580:c1:m3"} {"signature": "@classmethoddef get_cli(cls) -> click.Group:", "body": "main = super().get_cli()@main.group()def bel():\"\"\"\"\"\"cls._cli_add_to_bel(bel)cls._cli_add_upload_bel(bel)return main", "docstring": "Get a :mod:`click` main function with added BEL commands.", "id": "f1580:c1:m5"} {"signature": "def to_indra_statements(self, *args, **kwargs):", "body": "graph = self.to_bel(*args, **kwargs)return to_indra_statements(graph)", "docstring": "Dump as a list of INDRA statements.\n\n :rtype: List[indra.Statement]", "id": "f1580:c1:m2"} {"signature": "def add_cli_flask(main: click.Group) -> click.Group: ", "body": "@main.command()@click.option('', '', is_flag=True)@click.option('', '')@click.option('', '')@click.option('', '', default=os.urandom())@click.pass_objdef web(manager, debug, port, host, secret_key):\"\"\"\"\"\"if not manager.is_populated():click.echo(''.format(manager.module_name))sys.exit()app = manager.get_flask_admin_app(url='', secret_key=secret_key)app.run(debug=debug, host=host, port=port)return main", "docstring": "Add a ``web`` comand main :mod:`click` function.", "id": "f1582:m0"} {"signature": "def _add_admin(self, app, **kwargs):", "body": "from flask_admin import Adminfrom flask_admin.contrib.sqla import ModelViewadmin = Admin(app, **kwargs)for flask_admin_model in self.flask_admin_models:if isinstance(flask_admin_model, tuple): if len(flask_admin_model) != :raise TypeErrormodel, view = flask_admin_modeladmin.add_view(view(model, self.session))else:admin.add_view(ModelView(flask_admin_model, self.session))return admin", "docstring": "Add a Flask Admin interface to an application.\n\n :param flask.Flask app: A Flask application\n :param kwargs: Keyword arguments are passed through to :class:`flask_admin.Admin`\n :rtype: flask_admin.Admin", "id": "f1582:c0:m1"} {"signature": "@classmethoddef get_cli(cls) -> click.Group:", "body": "main = super().get_cli()cls._cli_add_flask(main)return main", "docstring": "Add a :mod:`click` main function to use as a command line interface.", "id": "f1582:c0:m4"} {"signature": "@staticmethoddef _cli_add_flask(main: click.Group) -> click.Group:", "body": "return add_cli_flask(main)", "docstring": "Add the web command.", "id": "f1582:c0:m3"} {"signature": "@staticmethoddef _cli_add_drop(main: click.Group) -> click.Group:", "body": "return add_cli_drop(main)", "docstring": "Add the drop command.", "id": "f1583:c1:m12"} {"signature": "@abstractmethoddef summarize(self) -> Mapping[str, int]:", "body": "", "docstring": "Summarize the database.", "id": "f1583:c1:m4"} {"signature": "def create_all(self, check_first: bool = True):", "body": "self._metadata.create_all(self.engine, checkfirst=check_first)", "docstring": "Create the empty database (tables).\n\n :param bool check_first: Defaults to True, don't issue CREATEs for tables already present\n in the target database. 
Defers to :meth:`sqlalchemy.sql.schema.MetaData.create_all`", "id": "f1583:c1:m6"} {"signature": "def _get_query(self, model):", "body": "return self.session.query(model)", "docstring": "Get a query for the given model using this manager's session.\n\n :param model: A SQLAlchemy model class\n :return: a SQLAlchemy query", "id": "f1583:c1:m8"} {"signature": "@property@abstractmethoddef _base(self) -> DeclarativeMeta:", "body": "", "docstring": "Return the declarative base.\n\n It is usually sufficient to return an instance that is module-level.\n\n How to build an instance of :class:`sqlalchemy.ext.declarative.api.DeclarativeMeta` by using\n :func:`sqlalchemy.ext.declarative.declarative_base`:\n\n >>> from sqlalchemy.ext.declarative import DeclarativeMeta, declarative_base\n >>> Base: DeclarativeMeta = declarative_base()\n\n Then just override this abstract property like:\n\n >>> @property\n >>> def _base(self) -> DeclarativeMeta:\n >>> return Base\n\n Note that this property could effectively also be a static method.", "id": "f1583:c1:m0"} {"signature": "@classmethoddef get_cli(cls) -> click.Group:", "body": "main = super().get_cli()cls._cli_add_populate(main)cls._cli_add_drop(main)cls._cli_add_cache(main)cls._cli_add_summarize(main)return main", "docstring": "Get the :mod:`click` main function to use as a command line interface.", "id": "f1583:c1:m15"} {"signature": "def add_cli_cache(main: click.Group) -> click.Group: ", "body": "@main.group()def cache():\"\"\"\"\"\"@cache.command()@click.pass_objdef locate(manager):\"\"\"\"\"\"data_dir = get_data_dir(manager.module_name)click.echo(data_dir)@cache.command()@click.pass_objdef ls(manager):\"\"\"\"\"\"data_dir = get_data_dir(manager.module_name)for path in os.listdir(data_dir):click.echo(path)@cache.command()@click.pass_objdef clear(manager):\"\"\"\"\"\"clear_cache(manager.module_name)return main", "docstring": "Add several commands to main :mod:`click` function for handling the cache.", "id": "f1583:m2"} {"signature": "@staticmethoddef _cli_add_populate(main: click.Group) -> click.Group:", "body": "return add_cli_populate(main)", "docstring": "Add the populate command.", "id": "f1583:c1:m11"} {"signature": "@main.group()def bel():", "body": "", "docstring": "Manage BEL.", "id": "f1584:m10"} {"signature": "@main.command()@connection_option@click.option('', is_flag=True, help='')@click.option('', is_flag=True, help='')@click.option('', '', multiple=True, help='')def populate(connection, reset, force, skip):", "body": "for idx, name, manager in _iterate_managers(connection, skip):click.echo(click.style(f'', fg='', bold=True) +click.style(f'', fg='', bold=True))if reset:click.echo('')manager.drop_all()click.echo('')manager.create_all()elif manager.is_populated() and not force:click.echo(f'', color='')continuetry:manager.populate()except Exception:logger.exception('', name)click.secho(f'', fg='', bold=True)", "docstring": "Populate all.", "id": "f1584:m2"} {"signature": "@belns.command()@connection_option@click.option('', '', multiple=True, help='')@click.option('', '', type=click.Path(file_okay=False, dir_okay=True), default=os.getcwd(),help='')@click.option('', '', is_flag=True, help='')def write(connection, skip, directory, force):", "body": "os.makedirs(directory, exist_ok=True)from .manager.namespace_manager import BELNamespaceManagerMixinfor idx, name, manager in _iterate_managers(connection, skip):if not (isinstance(manager, AbstractManager) and isinstance(manager, BELNamespaceManagerMixin)):continueclick.secho(name, fg='', bold=True)if 
force:try:click.echo(f'')manager.drop_all()click.echo('')clear_cache(name)click.echo('')manager.populate()except Exception:click.secho(f'', fg='')continuetry:r = manager.write_directory(directory)except TypeError as e:click.secho(f''.rstrip(), fg='')else:if not r:click.echo('')", "docstring": "Write a BEL namespace names/identifiers to terminology store.", "id": "f1584:m9"} {"signature": "@main.command()@connection_optiondef actions(connection):", "body": "session = _make_session(connection=connection)for action in Action.ls(session=session):click.echo(f'')", "docstring": "List all actions.", "id": "f1584:m13"} {"signature": "@main.command()@connection_option@click.option('', '', multiple=True, help='')def summarize(connection, skip):", "body": "for idx, name, manager in _iterate_managers(connection, skip):click.secho(name, fg='', bold=True)if not manager.is_populated():click.echo('')continueif isinstance(manager, BELNamespaceManagerMixin):click.secho(f'', fg='')if isinstance(manager, BELManagerMixin):try:click.secho(f'', fg='')except TypeError as e:click.secho(str(e), fg='')for field_name, count in sorted(manager.summarize().items()):click.echo(click.style('', fg='', bold=True) + f\"\")", "docstring": "Summarize all.", "id": "f1584:m6"} {"signature": "@cache.command()@click.option('', '', multiple=True, help='')def clear(skip):", "body": "for name in sorted(MODULES):if name in skip:continueclick.secho(f'', fg='', bold=True)clear_cache(name)", "docstring": "Clear all caches.", "id": "f1584:m5"} {"signature": "@main.group()def cache():", "body": "", "docstring": "Manage caches.", "id": "f1584:m4"} {"signature": "@main.command(help='')@click.confirmation_option('')@connection_option@click.option('', '', multiple=True, help='')def drop(connection, skip):", "body": "for idx, name, manager in _iterate_managers(connection, skip):click.secho(f'', fg='', bold=True)manager.drop_all()", "docstring": "Drop all.", "id": "f1584:m3"} {"signature": "def get_global_connection() -> str:", "body": "return config.connection", "docstring": "Return the global connection string.", "id": "f1585:m0"} {"signature": "def load_module(self, fullname):", "body": "if fullname in sys.modules:return sys.modules[fullname]end_name = fullname[len(self._group_with_dot):]for entry_point in iter_entry_points(group=self.group, name=end_name):mod = entry_point.load()sys.modules[fullname] = modreturn mod", "docstring": "Load a module if its name starts with :code:`self.group` and is registered.", "id": "f1586:c0:m4"} {"signature": "def __init__(self, group):", "body": "self.group = group", "docstring": "Initialize the importer with the group name.\n\n :param str group: a string representing the package resources entry_points group that will be used", "id": "f1586:c0:m0"} {"signature": "def find_module(self, fullname, path=None):", "body": "if not fullname.startswith(self._group_with_dot):returnend_name = fullname[len(self._group_with_dot):]for entry_point in iter_entry_points(group=self.group, name=None):if entry_point.name == end_name:return self", "docstring": "Find a module if its name starts with :code:`self.group` and is registered.", "id": "f1586:c0:m3"} {"signature": "@main.command()@click.argument('')@click.option('', '', type=click.File(''))@click.option('', '')@click.option('', '', is_flag=True)def belns(keyword: str, file: TextIO, encoding: Optional[str], use_names: bool):", "body": "directory = get_data_dir(keyword)obo_url = f''obo_path = os.path.join(directory, f'')obo_cache_path = os.path.join(directory, 
f'')obo_getter = make_obo_getter(obo_url, obo_path, preparsed_path=obo_cache_path)graph = obo_getter()convert_obo_graph_to_belns(graph,file=file,encoding=encoding,use_names=use_names,)", "docstring": "Write as a BEL namespace.", "id": "f1589:m2"} {"signature": "@click.group()def main():", "body": "", "docstring": "OBO Utilities.", "id": "f1589:m1"} {"signature": "@classmethoddef tearDownClass(cls):", "body": "cls.manager.session.close()super().tearDownClass()", "docstring": "Close the connection in the manager and deletes the temporary database.", "id": "f1590:c4:m1"} {"signature": "def setUp(self):", "body": "super().setUp()def mock_connection() -> str:\"\"\"\"\"\"return self.connectionself.mock_global_connection = mock.patch('', mock_connection)self.mock_module_connection = mock.patch('', mock_connection)", "docstring": "Set up the test with a mock connection string.\n\n Add two class-level variables: ``mock_global_connection`` and ``mock_module_connection`` that can be\n used as context managers to mock the bio2bel connection getter functions.", "id": "f1590:c2:m0"} {"signature": "def tearDown(self):", "body": "os.close(self.fd)os.remove(self.path)", "docstring": "Close the connection to the database and removes the files created for it.", "id": "f1590:c0:m1"} {"signature": "def make_temporary_cache_class_mixin(manager_cls: Type[AbstractManager]) -> Type[AbstractTemporaryCacheClassMixin]: ", "body": "class TemporaryCacheClassMixin(AbstractTemporaryCacheClassMixin):Manager = manager_clsreturn TemporaryCacheClassMixin", "docstring": "Build a testing class that has a Bio2BEL manager instance ready to go.", "id": "f1590:m0"} {"signature": "@classmethoddef setUpClass(cls):", "body": "if cls.Manager is ...:raise Bio2BELTestMissingManagerError('''')if not issubclass(cls.Manager, AbstractManager):raise Bio2BELManagerTypeError('')super().setUpClass()cls.manager = cls.Manager(connection=cls.connection)cls.populate()", "docstring": "Set up the class with the given manager and allows an optional populate hook to be overridden.", "id": "f1590:c4:m0"} {"signature": "def setUp(self):", "body": "super().setUp()self.fd, self.path = tempfile.mkstemp()self.connection = '' + self.pathlog.info('', self.connection)", "docstring": "Create a temporary file to use as a persistent database throughout tests in this class.", "id": "f1590:c0:m0"} {"signature": "def get_connection(module_name: str, connection: Optional[str] = None) -> str:", "body": "if connection is not None:return connectionmodule_name = module_name.lower()module_config_cls = get_module_config_cls(module_name)module_config = module_config_cls.load()return module_config.connection or config.connection", "docstring": "Return the SQLAlchemy connection string if it is set.\n\n Order of operations:\n\n 1. Return the connection if given as a parameter\n 2. Check the environment for BIO2BEL_{module_name}_CONNECTION\n 3. Look in the bio2bel config file for module-specific connection. Create if doesn't exist. Check the\n module-specific section for ``connection``\n 4. Look in the bio2bel module folder for a config file. Don't create if doesn't exist. Check the default section\n for ``connection``\n 5. Check the environment for BIO2BEL_CONNECTION\n 6. Check the bio2bel config file for default\n 7. 
Fall back to standard default cache connection\n\n :param module_name: The name of the module to get the configuration for\n :param connection: get the SQLAlchemy connection string\n :return: The SQLAlchemy connection string based on the configuration", "id": "f1592:m2"} {"signature": "def get_module_config_cls(module_name: str) -> Type[_AbstractModuleConfig]: ", "body": "class ModuleConfig(_AbstractModuleConfig):NAME = f''FILES = DEFAULT_CONFIG_PATHS + [os.path.join(DEFAULT_CONFIG_DIRECTORY, module_name, '')]return ModuleConfig", "docstring": "Build a module configuration class.", "id": "f1592:m1"} {"signature": "def create_all(engine, checkfirst=True):", "body": "Base.metadata.create_all(bind=engine, checkfirst=checkfirst)", "docstring": "Create the tables for Bio2BEL.", "id": "f1593:m2"} {"signature": "@classmethoddef ls(cls, session: Optional[Session] = None) -> List['']:", "body": "if session is None:session = _make_session()actions = session.query(cls).order_by(cls.created.desc()).all()session.close()return actions", "docstring": "Get all actions.", "id": "f1593:c0:m7"} {"signature": "def _store_helper(model: Action, session: Optional[Session] = None) -> None:", "body": "if session is None:session = _make_session()session.add(model)session.commit()session.close()", "docstring": "Help store an action.", "id": "f1593:m0"} {"signature": "@classmethoddef store_populate_failed(cls, resource: str, session: Optional[Session] = None) -> '':", "body": "action = cls.make_populate_failed(resource)_store_helper(action, session=session)return action", "docstring": "Store a \"populate failed\" event.\n\n :param resource: The normalized name of the resource to store\n\n Example:\n\n >>> from bio2bel.models import Action\n >>> Action.store_populate_failed('hgnc')", "id": "f1593:c0:m5"} {"signature": "@staticmethoddef make_drop(resource: str) -> '':", "body": "return Action(resource=resource.lower(), action='')", "docstring": "Make a ``drop`` instance of :class:`Action`.", "id": "f1593:c0:m3"} {"signature": "def _make_session(connection: Optional[str] = None) -> Session:", "body": "if connection is None:connection = get_global_connection()engine = create_engine(connection)create_all(engine)session_cls = sessionmaker(bind=engine)session = session_cls()return session", "docstring": "Make a session.", "id": "f1593:m1"} {"signature": "def create_application(connection: Optional[str] = None) -> Flask:", "body": "app = Flask(__name__)flask_bootstrap.Bootstrap(app)Admin(app)connection = connection or DEFAULT_CACHE_CONNECTIONengine, session = build_engine_session(connection)for name, add_admin in add_admins.items():url = ''.format(name)add_admin(app, session, url=url, endpoint=name, name=name)log.debug('', name, add_admin, url)app.register_blueprint(ui)return app", "docstring": "Create a Flask application.", "id": "f1594:m1"} {"signature": "def get_long_description():", "body": "with codecs.open(os.path.join(HERE, ''), encoding='') as f:long_description = f.read()return long_description", "docstring": "Get the long_description from the README.rst file. 
Assume UTF-8 encoding.", "id": "f1597:m2"} {"signature": "def find_meta(meta):", "body": "meta_match = re.search(r''.format(meta=meta),META_FILE, re.M)if meta_match:return meta_match.group()raise RuntimeError(''.format(meta=meta))", "docstring": "Extract __*meta*__ from META_FILE.", "id": "f1597:m1"} {"signature": "def cond_replace_value_some(ol,dst_value,*some,**kwargs):", "body": "cond_func = kwargs['']if('' in kwargs):cond_func_args = kwargs['']else:cond_func_args = []if('' in kwargs):mode = kwargs[\"\"]else:mode = \"\"seqs = list(some)new = copy.deepcopy(ol)selected = find_all(new,cond_func,*cond_func_args)selected_indexes = array_map(selected,lambda ele:ele[''])selected_indexes = select_seqs(selected_indexes,seqs)new = replace_seqs(new,dst_value,selected_indexes)if(mode == \"\"):return(new)else:ol.clear()ol.extend(new)return(ol)", "docstring": "from elist.elist import *\nol = [1,'X',3,'b',5,'c',6,'A',7,'b',8,'B',9]\nid(ol)\ndef afterCH(ele,ch):\n cond = (ord(str(ele)) > ord(ch))\n return(cond)\n\nnew = cond_replace_value_some(ol,\"REPLACED\",0,2,cond_func=afterCH,cond_func_args=['B'])\nol\nnew\nid(ol)\nid(new)\n####\nol = [1,'X',3,'b',5,'c',6,'A',7,'b',8,'B',9]\nid(ol)\nrslt = cond_replace_value_some(ol,\"REPLACED\",0,2,cond_func=afterCH,cond_func_args=['B'],mode=\"original\")\nol\nrslt\nid(ol)\nid(rslt)", "id": "f1599:m198"} {"signature": "def broken_some(ol,*break_points):", "body": "bps = list(break_points)return(broken_seqs(ol,bps))", "docstring": "ol = initRange(0,20,1)\nol\nsecs = broken_some(ol,1,6,14,9)\nforEach(secs,print)", "id": "f1599:m207"} {"signature": "def remove_somenot(ol,value,*seqs,**kwargs):", "body": "if('' in kwargs):mode = kwargs[\"\"]else:mode = \"\"seqs = list(seqs)new = []length = ol.__len__()seq = -cpol = copy.deepcopy(ol)for i in range(,length):if(not(cpol[i]==value)):seq = seq + if(seq in seqs):passelse:new.append(cpol[i])else:new.append(cpol[i])if(mode == \"\"):return(new) else:ol.clear()ol.extend(new)return(ol)", "docstring": "from elist.elist import *\nol = [1,'a',3,'a',5,'a',6,'a']\nid(ol)\nnew = remove_somenot(ol,'a',1,3)\nol\nnew\nid(ol)\nid(new)\n####\nol = [1,'a',3,'a',5,'a',6,'a']\nid(ol)\nrslt = remove_somenot(ol,'a',1,3,mode=\"original\")\nol\nrslt\nid(ol)\nid(rslt)", "id": "f1599:m119"} {"signature": "def mapfio(ol,**kwargs):", "body": "diff_funcs_arr = kwargs['']diff_args_arr = kwargs['']lngth = ol.__len__()rslt = []for i in range(,lngth):index = ivalue = ol[i]func = diff_funcs_arr[i]args = diff_args_arr[i]ele = func(index,*args)rslt.append(ele)return(rslt)", "docstring": "#mapfio v\u4e0d\u4f5c\u4e3amap_func\u53c2\u6570 NOT take value as a param for map_func\n#map_func diff_func(index,*diff_args)", "id": "f1599:m2"} {"signature": "def reverse(ol,**kwargs):", "body": "if('' in kwargs):mode = kwargs[\"\"]else:mode = \"\"if(mode == \"\"):new = copy.deepcopy(ol)new.reverse()return(new) else:ol.reverse()return(ol)'',''", "docstring": "from elist.elist import *\nol = [1,2,3,4]\nid(ol)\nnew = reverse(ol)\nol\nnew\nid(ol)\nid(new)\n####\nol = [1,2,3,4]\nid(ol)\nrslt = reverse(ol,mode=\"original\")\nol\nrslt\nid(ol)\nid(rslt)", "id": "f1599:m137"} {"signature": "def divide(ol,interval):", "body": "length = ol.__len__()seqs = initRange(,length,interval)rslt = broken_seqs(ol,seqs)return(rslt)", "docstring": "ol = elel.initRange(0,20,1)\ninterval = 3\nrslt = elel.divide(ol,interval)\nrslt\nrslt = elel.divide(ol,4)\nrslt", "id": "f1599:m209"} {"signature": "def where(ol,value):", "body": "si = Noneei = Nonefor i in range(,ol.__len__()):ele = ol[i]if(value 
>ele):si = i elif(value == ele):return((i,i))else:ei = i return((si,ei))return((si,ei))", "docstring": "ol = [0, 4, 6, 8, 10, 14]\nwhere(ol,-1)\nwhere(ol,1)\nwhere(ol,2)\nwhere(ol,3)\nwhere(ol,4)\nwhere(ol,9)\nwhere(ol,14)\nwhere(ol,17)", "id": "f1599:m216"} {"signature": "def pop_some(ol,*indexes,**kwargs):", "body": "length = ol.__len__()indexes = list(map(lambda index:uniform_index(index,length),list(indexes)))if('' in kwargs):mode = kwargs[\"\"]else:mode = \"\"if(mode == \"\"):cpol = copy.deepcopy(ol)new = []popped = []for i in range(,length):if(i in indexes):popped.append(cpol[i])else:new.append(cpol[i])return({'':popped,'':new})else:tmp = []popped = []for i in range(,length):if(i in indexes):popped.append(ol[i])else:tmp.append(ol[i])ol.clear()for i in range(,tmp.__len__()):ol.append(tmp[i])return(popped)", "docstring": "from elist.jprint import pobj\nfrom elist.elist import *\nol = [1,2,3,4,5,6]\nid(ol)\nrslt = pop_some(ol,0,2,5)\nol\nid(ol)\nid(rslt['list'])\n####\nol = [1,2,3,4,5,6]\nid(ol)\nrslt = pop_some(ol,0,2,5,mode=\"original\")\nrslt\nol\nid(ol)", "id": "f1599:m109"} {"signature": "def findv(ol,cond_func,cond_func_args=[]):", "body": "rslt = []for i in range(ol.__len__()):cond = cond_func(ol[i],*cond_func_args)if(cond):rslt.append((i,ol[i]))else:passreturn(rslt)", "docstring": "#mapv i\u4e0d\u4f5c\u4e3amap_func\u53c2\u6570,\u5171\u4eab\u76f8\u540c\u7684f,\u5171\u4eab\u76f8\u540c\u7684o\n# NOT take index as a param for map_func\n# share common other_args\n# share common cond_func\n# common_func(value,*common_args)", "id": "f1599:m25"} {"signature": "def cond_pop(ol,index,**kwargs):", "body": "cond_func = kwargs['']cond_func_args = kwargs['']index = uniform_index(index,ol.__len__())if('' in kwargs):mode = kwargs[\"\"]else:mode = \"\"value = ol[index]cond = cond_func(index,value,*cond_func_args)if(mode == \"\"):new = copy.deepcopy(ol)if(cond):popped = new.pop(index)else:popped = newreturn({'':popped,'':new})else:if(cond):popped = ol.pop(index)else:popped = olreturn(popped)", "docstring": "from elist.jprint import pobj\nfrom elist.elist import *\nol = [{'data':0;'type':'number'},{'data':'x';'type':'str'},{'data':'y';'type':'str'},4]\n#cond_func_args is a array\ndef cond_func(index,value,cond_func_args):", "id": "f1599:m107"} {"signature": "def insert(ol,start_index,ele,**kwargs):", "body": "if('' in kwargs):mode = kwargs[\"\"]else:mode = \"\"if(mode == \"\"):length = ol.__len__()cpol = copy.deepcopy(ol)si = uniform_index(start_index,length)new = copy.deepcopy(cpol[:si])new.append(ele)new.extend(cpol[si:])return(new)else:ol.insert(start_index,ele)return(ol)", "docstring": "from elist.elist import *\nol = [1,2,3,4]\nele = 5\nid(ol)\ninsert(ol,2,ele,mode=\"original\")\nol\nid(ol)\n####\nol = [1,2,3,4]\nele = 5\nid(ol)\nnew = insert(ol,2,ele)\nnew\nid(new)", "id": "f1599:m67"} {"signature": "def rangize(break_points,length):", "body": "bps = array_map(break_points,uniform_index,length)bps.sort()bps = prepend(bps,)bps = append(bps,length)bps = uniqualize(bps)bpslen = bps.__len__()secs=[(,bps[])]for i in range(,bpslen-):r = (bps[i],bps[i+])secs.append(r)secs.append((bps[bpslen-],length))if(secs[][] == secs[][]):secs.pop()else:passif(secs[-][] == secs[-][]):secs.pop(-)else:passreturn(secs)", "docstring": "break_points = [1,3,9,12,-2]\nlength = 15\nsecs = rangize(break_points,length)\nforEach(secs,print)", "id": "f1599:m199"} {"signature": "def all_continuous_indexes_slices(ol,value):", "body": "rslt = []length = ol.__len__()cursor = begin = Noneslice = []while(cursor < length):cond1 = 
(ol[cursor] == value)cond2 = (begin == None)if(cond1 & cond2):begin = cursorslice.append(cursor)elif(cond1 & (not(cond2))):slice.append(cursor)elif((not(cond1)) & (not(cond2))):rslt.append(slice)begin = Noneslice = []else:passcursor = cursor + if(slice):rslt.append(slice)else:passreturn(rslt)", "docstring": "from elist.elist import *\nol = [1,\"a\",\"a\",2,3,\"a\",4,\"a\",\"a\",\"a\",5]\nall_continuous_indexes_slices(ol,\"a\")", "id": "f1599:m103"} {"signature": "def some_continuous_indexesnot_slices(ol,value,*seqs):", "body": "seqs = list(seqs)rslt = []length = ol.__len__()seq = -cursor = begin = Noneslice = []while(cursor < length):cond1 = not(ol[cursor] == value)cond2 = (begin == None)if(cond1 & cond2):begin = cursorslice.append(cursor)elif(cond1 & (not(cond2))):slice.append(cursor)elif((not(cond1)) & (not(cond2))):seq = seq + if(seq in seqs):rslt.append(slice)else:passbegin = Noneslice = []else:passcursor = cursor + if(slice):seq = seq + if(seq in seqs):rslt.append(slice)else:passelse:passreturn(rslt)", "docstring": "from elist.elist import *\nol = [1,\"a\",\"a\",2,3,\"a\",4,\"a\",\"a\",\"a\",5]\nsome_continuous_indexesnot_slices(ol,\"a\",0,2)", "id": "f1599:m100"} {"signature": "def reduce_left(ol,callback,initialValue):", "body": "length = ol.__len__()accumulator = initialValuefor i in range(,length):accumulator = callback(accumulator,ol[i])return(accumulator)", "docstring": "from elist.elist import *\ndef callback(accumulator,currentValue):\n accumulator.append(currentValue[0])\n accumulator.append(currentValue[1])\n return(accumulator)\n\nol = [(1,2),(\"a\",\"b\"),(\"x\",\"y\")]\nreduce_left(ol,callback,[])\n#array_reduce, reduceLeft ,reduce_left are the same", "id": "f1599:m175"} {"signature": "def toSource(ol):", "body": "return(ol.__repr__())", "docstring": "from elist.elist import *\nol = [1,2,3,4]\ntoSource(ol)", "id": "f1599:m142"} {"signature": "def index_first(ol,value):", "body": "return(ol.index(''))", "docstring": "from elist.elist import *\nol = [1,'a',3,'a',4,'a',5]\nindex_first(ol,'a')\n####index_first, array_index, indexOf are the same\narray_index(ol,'a')\nindexOf(ol,'a')", "id": "f1599:m81"} {"signature": "def setitem_via_pathlist(ol,value,pathlist):", "body": "this = olfor i in range(,pathlist.__len__()-):key = pathlist[i]this = this.__getitem__(key)this.__setitem__(pathlist[-],value)return(ol)", "docstring": "from elist.elist import *\ny = ['a',['b',[\"bb\"]],'c']\ny[1][1]\nsetitem_via_pathlist(y,\"500\",[1,1])\ny", "id": "f1599:m188"} {"signature": "def delitem_via_pathlist(ol,pathlist):", "body": "this = olfor i in range(,pathlist.__len__()-):key = pathlist[i]this = this.__getitem__(key)this.__delitem__(pathlist[-])return(ol)", "docstring": "from elist.elist import *\ny = ['a',['b',[\"bb\"]],'c']\ny[1][1]\ndelitem_via_pathlist(y,[1,1])\ny", "id": "f1599:m190"} {"signature": "def replace_value_some(ol,src_value,dst_value,*seqs,**kwargs):", "body": "if('' in kwargs):mode = kwargs[\"\"]else:mode = \"\"return(replace_value_seqs(ol,src_value,dst_value,list(seqs),mode=mode))", "docstring": "from elist.elist import *\nol = [1,'a',3,'a',5,'a',6,'a']\nid(ol)\nnew = replace_value_some(ol,'a','AAA',0,1)\nol\nnew\nid(ol)\nid(new)\n####\nol = [1,'a',3,'a',5,'a',6,'a']\nid(ol)\nrslt = replace_value_some(ol,'a','AAA',0,1,mode=\"original\")\nol\nrslt\nid(ol)\nid(rslt)", "id": "f1599:m195"} {"signature": "def comprise(list1,list2,**kwargs):", "body": "if('' in kwargs):mode = kwargs['']else:mode = \"\"len_1 = list1.__len__()len_2 = 
list2.__len__()if(len_2>len_1):return(False)else:if(mode==\"\"):if(list2 == list1[:len_2]):return(True)else:return(False)else:end = len_1 - len_2for i in range(,end+):if(list2 == list1[i:(i+len_2)]):return(True)else:passreturn(False)", "docstring": "from elist.elist import *\ncomprise([1,2,3,4,5],[2,3,4],mode=\"loose\")\ncomprise([1,2,3,4,5],[2,3,4])\ncomprise([1,2,3,4,5],[2,3,4],mode=\"strict\")\ncomprise([1,2,3,4,5],[1,2,3,4],mode=\"strict\")\n#not recursive ,only one level\n#please refer to ListTree.search for recursive support", "id": "f1599:m138"} {"signature": "def pop_range(ol,start_index,end_index,**kwargs):", "body": "length = ol.__len__()start_index = uniform_index(start_index,length)end_index = uniform_index(end_index,length)if('' in kwargs):mode = kwargs[\"\"]else:mode = \"\"if(mode == \"\"):cpol = copy.deepcopy(ol)new = []popped = []for i in range(,start_index):new.append(cpol[i])for i in range(start_index,end_index):popped.append(cpol[i])for i in range(end_index,length):new.append(cpol[i])return({'':popped,'':new})else:tmp = []popped = []for i in range(,start_index):tmp.append(ol[i])for i in range(start_index,end_index):popped.append(ol[i])for i in range(end_index,length):tmp.append(ol[i])ol.clear()for i in range(,tmp.__len__()):ol.append(tmp[i])return(popped)", "docstring": "from elist.jprint import pobj\nfrom elist.elist import *\nol = [1,2,3,4,5,6]\nid(ol)\nrslt = pop_range(ol,2,4)\nol\nid(ol)\nid(rslt['list'])\n####\nol = [1,2,3,4,5,6]\nid(ol)\nrslt = pop_range(ol,2,4,mode=\"original\")\nrslt\nol\nid(ol)", "id": "f1599:m108"} {"signature": "def is_lop(ch,block_op_pairs_dict=get_block_op_pairs('')):", "body": "for i in range(,block_op_pairs_dict.__len__()+):if(ch == block_op_pairs_dict[i][]):return(True)else:passreturn(False)", "docstring": "# is_lop('{',block_op_pairs_dict)\n# is_lop('[',block_op_pairs_dict)\n# is_lop('}',block_op_pairs_dict)\n# is_lop(']',block_op_pairs_dict)\n# is_lop('a',block_op_pairs_dict)", "id": "f1599:m244"} {"signature": "def is_leaf(obj):", "body": "if(is_list(obj)):length = obj.__len__()if(length == ):return(True)else:return(False)else:return(True)", "docstring": "the below is for nested-list\nany type is not list will be treated as a leaf\nempty list will be treated as a leaf\nfrom elist.elist import *\nis_leaf(1)\nis_leaf([1,2,3])\nis_leaf([])", "id": "f1599:m226"} {"signature": "def delitem_via_sibseqs(ol,*sibseqs):", "body": "pathlist = list(sibseqs)this = olfor i in range(,pathlist.__len__()-):key = pathlist[i]this = this.__getitem__(key)this.__delitem__(pathlist[-])return(ol)", "docstring": "from elist.elist import *\ny = ['a',['b',[\"bb\"]],'c']\ny[1][1]\ndelitem_via_sibseqs(y,1,1)\ny", "id": "f1599:m191"} {"signature": "def cdr(ol,**kwargs):", "body": "if('' in kwargs):mode = kwargs['']else:mode = \"\"if(mode == \"\"):cpol = copy.deepcopy(ol)return(cpol[:])else:ol.pop()return(ol)", "docstring": "from elist.elist import *\nol=[1,2,3,4]\nid(ol)\nnew = cdr(ol)\nnew\nid(new)\n####\nol=[1,2,3,4]\nid(ol)\nrslt = cdr(ol,mode=\"original\")\nrslt\nid(rslt)", "id": "f1599:m64"} {"signature": "def cond_remove_seqs(ol,seqs,**kwargs):", "body": "cond_func = kwargs['']if('' in kwargs):cond_func_args = kwargs['']else:cond_func_args = []if('' in kwargs):mode = kwargs[\"\"]else:mode = \"\"new = copy.deepcopy(ol)selected = find_all(new,cond_func,*cond_func_args)selected_indexes = array_map(selected,lambda ele:ele[''])selected_indexes = pop_indexes(selected_indexes,seqs)['']new = pop_indexes(new,selected_indexes)['']if(mode == 
\"\"):return(new)else:ol.clear()ol.extend(new)return(ol)", "docstring": "from elist.elist import *\nol = [1,'X',3,'b',5,'c',6,'A',7,'b',8,'B',9]\nid(ol)\ndef afterCH(ele,ch):\n cond = (ord(str(ele)) > ord(ch))\n return(cond)\n\nnew = cond_remove_seqs(ol,[0,2],cond_func=afterCH,cond_func_args=['B'])\nol\nnew\nid(ol)\nid(new)\n####\nol = [1,'X',3,'b',5,'c',6,'A',7,'b',8,'B',9]\nid(ol)\nrslt = cond_remove_seqs(ol,[0,2],cond_func=afterCH,cond_func_args=['B'],mode='original')\nol\nrslt\nid(ol)\nid(rslt)", "id": "f1599:m127"} {"signature": "def indexes_seqs(ol,value,seqs):", "body": "seqs = list(seqs)length = ol.__len__()indexes =[]seq = -for i in range(,length):if(value == ol[i]):seq = seq + if(seq in seqs):indexes.append(i)else:passelse:passreturn(indexes)", "docstring": "from elist.elist import *\nol = [1,'a',3,'a',4,'a',5]\nindexes_seqs(ol,'a',{0,2})\nindexes_seqs(ol,'a',{0,1})\nindexes_seqs(ol,'a',{1,2})\nindexes_seqs(ol,'a',{3,4})", "id": "f1599:m91"} {"signature": "def array_dualmap(ol,value_map_func,**kwargs):", "body": "def get_self(obj):return(obj)if('' in kwargs):index_map_func_args = kwargs['']else:index_map_func_args = []if('' in kwargs):value_map_func_args = kwargs['']else:value_map_func_args = []if('' in kwargs):index_map_func = kwargs['']else:index_map_func = get_selflength = ol.__len__()il = list(range(,length))nil = list(map(lambda ele:index_map_func(ele,*index_map_func_args),il))nvl = []for i in range(,length):ele = ol[i]v = value_map_func(nil[i],ele,*value_map_func_args)nvl.append(v)return(nvl)", "docstring": "from elist.elist import *\nol = ['a','b','c','d']\ndef index_map_func(index,prefix,suffix):\n s = prefix +str(index+97)+ suffix\n return(s)\n\ndef value_map_func(mapped_index,ele,prefix,suffix):\n s = prefix+mapped_index+': ' + str(ele) + suffix\n return(s)\n\n####\nrslt = array_dualmap2(ol,index_map_func=index_map_func,index_map_func_args=[': ',' is '],value_map_func=value_map_func,value_map_func_args=['ord',' yes?'])\npobj(rslt)", "id": "f1599:m5"} {"signature": "def split(ol,value,**kwargs):", "body": "if('' in kwargs):whiches = kwargs[''] else:whiches = Noneindexes = indexes_all(ol,value)if(whiches == None):passelse:indexes = select_indexes(indexes,whiches)rslt = []rslt.append(ol[:indexes[]])si = indexes[]+for i in range(,indexes.__len__()):ei = indexes[i]ele = ol[si:ei]rslt.append(ele)si = ei + ele = ol[si:ol.__len__()]rslt.append(ele)return(rslt)", "docstring": "ol = ['a',1,'a',2,'a',3,'a',4,'a']\nsplit(ol,'a')\nsplit(ol,'a',whiches=[0])\nsplit(ol,'a',whiches=[1])\nsplit(ol,'a',whiches=[2])\nsplit(ol,'a',whiches=[0,2])\nol = [1,'a',2,'a',3,'a',4]\nsplit(ol,'a')\nsplit('x=bcdsef=g','=',whiches=[0])", "id": "f1599:m214"} {"signature": "def indexes_allnot(ol,value):", "body": "length = ol.__len__()indexes =[]for i in range(,length):if(value == ol[i]):passelse:indexes.append(i)return(indexes)", "docstring": "from elist.elist import *\nol = [1,'a',3,'a',4,'a',5]\nindexes_allnot(ol,'a')", "id": "f1599:m88"} {"signature": "def getStr_to_pathlist(gs):", "body": "def numize(w):try:int(w)except:try:float(w)except:return(w)else:return(float(w))else:return(int(w))def strip_quote(w):if(type(w) == type('')):if(w[]==w[-]):if((w[]==\"\") |(w[]=='')):return(w[:-])else:return(w)else:return(w)else:return(w)gs = gs[:-]pl = gs.split(\"\")pl = array_map(pl,numize)pl = array_map(pl,strip_quote)return(pl)", "docstring": "gs = \"[1]['1'][2]\"\ngetStr_to_pathlist(gs)\ngs = \"['u']['u1']\"\ngetStr_to_pathlist(gs)", "id": "f1599:m242"} {"signature": "def mapio(ol,map_func,**kwargs):", 
"body": "lngth = ol.__len__()diff_args_arr = kwargs['']rslt = []for i in range(,lngth):index = ivalue = ol[i]func = map_funcargs = diff_args_arr[i]ele = func(index,*args)rslt.append(ele)return(rslt)", "docstring": "#mapvo \u5171\u4eab\u76f8\u540c\u7684f,i\u4e0d\u4f5c\u4e3amap_func\u53c2\u6570\n# share common map_func,NOT take index as a param for map_func\n# common_func(value,*priv_args)", "id": "f1599:m13"} {"signature": "def intlize(l):", "body": "return(list(map(lambda ele:int(ele),l)))", "docstring": "from elist.elist import *\nl = [\"1\",\"3\",\"4\",\"5\"]\nintlize(l)", "id": "f1599:m131"} {"signature": "def findfiv(ol,cond_func_args,**kwargs):", "body": "lngth = ol.__len__()diff_funcs_arr = kwargs['']common_args_arr = init(lngth,map_func_args)rslt = findfivo(ol,cond_funcs=diff_funcs_arr,cond_func_args_array=common_args_arr)return(rslt)", "docstring": "#findfiv \u5171\u4eab\u76f8\u540c\u7684o share common other_args\n#cond_func diff_func(index,value,*common_args)", "id": "f1599:m24"} {"signature": "def update_desc_rcin_path(desc,sibs_len,pdesc_level):", "body": "psibs_len = pdesc_level.__len__()parent_breadth = desc[''][-]if(desc['']==(sibs_len - )):if(parent_breadth==(psibs_len -)):passelse:parent_rsib_breadth = parent_breadth + prsib_desc = pdesc_level[parent_rsib_breadth]if(prsib_desc['']):passelse:rcin_path = copy.deepcopy(prsib_desc[''])rcin_path.append()desc[''] = rcin_pathelse:passreturn(desc)", "docstring": "rightCousin\nnextCousin\nrightCin\nnextCin\nrcin\nncin\n\nparents are neighbors,and on the right", "id": "f1599:m238"} {"signature": "def rangize_supplement(spans,lngth):", "body": "rslt = []si = ei = spans[][]if(si == ei):passelse:rslt.append((si,ei))prev_ei = spans[][]for i in range(,spans.__len__()):si = prev_eiei = spans[i][]rslt.append((si,ei))prev_ei = spans[i][]if(prev_ei < lngth):rslt.append((prev_ei,lngth))else:rslt.append((prev_ei,lngth+))return(rslt)", "docstring": "spans = [(0, 3), (4, 7), (8, 10), (11, 12), (13, 16), (17, 20)]\nrangize_supplement(spans,24)", "id": "f1599:m201"} {"signature": "def select_some(ol,*seqs):", "body": "seqs = list(seqs)return(select_seqs(ol,seqs))", "docstring": "from elist.elist import *\nol = ['a','b','c','d']\nselect_some(ol,1,2)", "id": "f1599:m53"} {"signature": "def remove_lastnot(ol,value,**kwargs):", "body": "if('' in kwargs):mode = kwargs[\"\"]else:mode = \"\"length = ol.__len__()if(mode == \"\"):new = copy.deepcopy(ol)for i in range(length-,-,-):if(new[i] == value):passelse:new.pop(i)return(new)return(new)else:for i in range(length-,-,-):if(ol[i] == value):passelse:ol.pop(i)return(ol)return(ol)", "docstring": "from elist.jprint import pobj\nfrom elist.elist import *\nol = [1,'a',3,'a',5,'a']\nid(ol)\nnew = remove_lastnot(ol,'a')\nol\nnew\nid(ol)\nid(new)\n####\nol = [1,'a',3,'a',5,'a']\nid(ol)\nrslt = remove_lastnot(ol,'a',mode=\"original\")\nol\nrslt\nid(ol)\nid(rslt)", "id": "f1599:m115"} {"signature": "def range_decompress(cl):", "body": "def cond_func(ele):length = ele.__len__()cond = (length == )if(cond):return(ord(ele))else:x = ord(ele[])y = ord(ele[])return((x,y))if(type(cl[])==type()):T = Trueelif(cl[].__len__() == ):T = (type(cl[]) == type())else:T = (type(cl[][]) == type())if(T):l = cl else:l = array_map(cl,cond_func)rslt = []for i in range(,l.__len__()):ele = l[i]if(type(ele) == type()):arr = [ele]elif(ele.__len__() == ):arr = [ele]else:sv = ele[]ev = ele[]arr = init_range(sv,ev+,)if(T):passelse:arr = array_map(arr,chr)rslt.extend(arr)return(rslt)", "docstring": "#only support sorted-ints or sorted-ascii\ncl = 
[1, (5, 8), (13, 14), 18, (30, 34)]\nrange_decompress(cl)\ncl = [1, (5, 8), (13, 14), 18, (30, 34), 40]\nrange_decompress(cl)\ncl = [('a', 'd'), ('j', 'n'), 'u', ('y', 'z')]\nrange_decompress(cl)", "id": "f1599:m204"} {"signature": "def remove_allnot(ol,value,**kwargs):", "body": "if('' in kwargs):mode = kwargs[\"\"]else:mode = \"\"new = []length = ol.__len__()cpol = copy.deepcopy(ol)for i in range(,length):if(cpol[i]==value):new.append(cpol[i])else:passif(mode == \"\"):return(new) else:ol.clear()ol.extend(new)return(ol)", "docstring": "from elist.elist import *\nol = [1,'a',3,'a',5,'a',6,'a']\nid(ol)\nnew = remove_allnot(ol,'a')\nol\nnew\nid(ol)\nid(new)\n####\nol = [1,'a',3,'a',5,'a',6,'a']\nid(ol)\nrslt = remove_allnot(ol,'a',mode=\"original\")\nol\nrslt\nid(ol)\nid(rslt)", "id": "f1599:m123"} {"signature": "def diff_indexes(l1,l2):", "body": "rslt = []for i in range(,l1.__len__()):if(l1[i]!=l2[i]):rslt.append(i)return(rslt)", "docstring": "from elist.elist import *\nl1 = [1,2,3,5]\nl2 = [0,2,3,4]\ndiff_indexes(l1,l2)", "id": "f1599:m177"} {"signature": "def indexes_some(ol,value,*seqs):", "body": "seqs = list(seqs)length = ol.__len__()indexes =[]seq = -for i in range(,length):if(value == ol[i]):seq = seq + if(seq in seqs):indexes.append(i)else:passelse:passreturn(indexes)", "docstring": "from elist.elist import *\nol = [1,'a',3,'a',4,'a',5]\nindexes_some(ol,'a',0,2)\nindexes_some(ol,'a',0,1)\nindexes_some(ol,'a',1,2)\nindexes_some(ol,'a',3,4)", "id": "f1599:m89"} {"signature": "def getitem_via_sibseqs(ol,*sibseqs):", "body": "pathlist = list(sibseqs)this = olfor i in range(,pathlist.__len__()):key = pathlist[i]this = this.__getitem__(key)return(this)", "docstring": "from elist.elist import *\ny = ['a',['b',[\"bb\"]],'c']\ny[1][1]\ngetitem_via_sibseqs(y,1,1)", "id": "f1599:m185"} {"signature": "def cond_replace_value_all(ol,dst_value,**kwargs):", "body": "cond_func = kwargs['']if('' in kwargs):cond_func_args = kwargs['']else:cond_func_args = []if('' in kwargs):mode = kwargs[\"\"]else:mode = \"\"new = copy.deepcopy(ol)selected = find_all(new,cond_func,*cond_func_args)selected_indexes = array_map(selected,lambda ele:ele[''])new = replace_seqs(new,dst_value,selected_indexes)if(mode == \"\"):return(new)else:ol.clear()ol.extend(new)return(ol)", "docstring": "from elist.elist import *\nol = [1,'X',3,'b',5,'c',6,'A',7,'b',8,'B',9]\nid(ol)\ndef afterCH(ele,ch):\n cond = (ord(str(ele)) > ord(ch))\n return(cond)\n\nnew = cond_replace_value_all(ol,\"REPLACED\",cond_func=afterCH,cond_func_args=['B'])\nol\nnew\nid(ol)\nid(new)\n####\nol = [1,'X',3,'b',5,'c',6,'A',7,'b',8,'B',9]\nid(ol)\nrslt = cond_replace_value_all(ol,\"REPLACED\",cond_func=afterCH,cond_func_args=['B'],mode=\"original\")\nol\nrslt\nid(ol)\nid(rslt)", "id": "f1599:m196"} {"signature": "def brkl2kvlist(arr,interval,sub_pos=,**kwargs):", "body": "lngth = arr.__len__()brkseqs1 = init_range(,lngth,interval)brkseqs2 = init_range(sub_pos,lngth,interval)brkseqs = interleave(brkseqs1,brkseqs2)l = broken_seqs(arr,brkseqs)kl = select_evens(l)vl = select_odds(l)if(\"\" in kwargs):single_key = kwargs['']else:single_key = Trueif(sub_pos == ):if(single_key):kl = mapv(kl,lambda ele:ele[])else:passelse:passreturn((kl,vl))", "docstring": "arr = [\"color1\",\"r1\",\"g1\",\"b1\",\"a1\",\"color2\",\"r2\",\"g2\",\"b2\",\"a2\"]\nbrkl2kvlist(arr,5)\n(['color1', 'color2'], [['r1', 'g1', 'b1', 'a1'], ['r2', 'g2', 'b2', 'a2']])\nbrkl2kvlist(arr,5,2)\n([['color1', 'r1'], ['color2', 'r2']], [['g1', 'b1', 'a1'], ['g2', 'b2', 'a2']])", "id": "f1599:m208"} 
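Nearly every mutating helper in these elist records (remove_*, replace_*, pop_*, sort, reverse) shares the mode convention its docstrings demonstrate: mode="new" (the default) returns a modified deep copy and leaves the input alone, while mode="original" edits the list in place and returns it. A minimal self-contained sketch of that contract:

```python
# Sketch of the mode="new"/"original" convention shared by the helpers above.
import copy

def remove_all_sketch(ol, value, mode="new"):
    # "new": return a modified deep copy; "original": mutate ol in place
    new = [ele for ele in copy.deepcopy(ol) if ele != value]
    if mode == "new":
        return new
    ol.clear()
    ol.extend(new)
    return ol

ol = [1, 'a', 3, 'a']
print(remove_all_sketch(ol, 'a'))                    # [1, 3]; ol is untouched
print(remove_all_sketch(ol, 'a', mode="original"))   # [1, 3]; ol is now [1, 3]
```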
{"signature": "def insert_sections_many(ol,secs,locs,**kwargs):", "body": "if('' in kwargs):mode = kwargs[\"\"]else:mode = \"\"secs = copy.deepcopy(secs)locs = copy.deepcopy(locs)brked = broken_seqs(ol,locs)seclen = secs.__len__()brklen = brked.__len__()if(locs[]==):new = secs[]length = seclen -if(length < brklen):for i in range(,length):new.extend(brked[i])new.extend(secs[i+])for i in range(length,brklen):new.extend(brked[i])elif(length == brklen):for i in range(,length):new.extend(brked[i])new.extend(secs[i+])else:for i in range(,brklen):new.extend(brked[i])new.extend(secs[i+])for i in range(brklen,length):new.extend(secs[i])else:new = brked[]length = brklen -if(length < seclen):for i in range(,length):new.extend(secs[i])new.extend(brked[i+])for i in range(length,seclen):new.extend(secs[i])elif(length == seclen):for i in range(,length):new.extend(secs[i])new.extend(brked[i+])else:for i in range(,seclen):new.extend(secs[i])new.extend(brked[i+])for i in range(seclen,length):new.extend(brked[i])if(mode == \"\"):return(new)else:ol.clear()ol.extend(new)return(ol)", "docstring": "ol = initRange(0,20,1)\nol\nlocs = [1,6,14,9]\nsecs = [\n ['a','a','a'],\n ['b','b'],\n ['c','c','c','c'],\n ['d','d']\n]\nrslt = insert_sections_many(ol,secs,locs)\nrslt\n####\nol\nlocs = [0,3,6,9,12,15,16]\nsecs = [\n ['a','a','a'],\n ['b','b'],\n ['c','c','c','c'],\n ['d','d']\n]\nrslt = insert_sections_many(ol,secs,locs)\nrslt\n####\nol\nlocs = [1,6,14,9]\nsecs = [\n ['a','a','a'],\n ['b','b'],\n ['c','c','c','c'],\n ['d','d'],\n ['e'],\n ['f','f','f','f'],\n [777,777,777,777]\n]\nrslt = insert_sections_many(ol,secs,locs)\nrslt", "id": "f1599:m72"} {"signature": "def deepcopy(ol):", "body": "return(copy.deepcopy(ol))", "docstring": "from elist.elist import *\nol = [1,2,3,4]\nid(ol)\nnew = deepcopy(ol)\nnew\nid(new)", "id": "f1599:m135"} {"signature": "def pop(ol,index,**kwargs):", "body": "index = uniform_index(index,ol.__len__())if('' in kwargs):mode = kwargs[\"\"]else:mode = \"\"if(mode == \"\"):new = copy.deepcopy(ol)popped = new.pop(index)return({'':popped,'':new})else:popped = ol.pop(index)return(popped)", "docstring": "from elist.jprint import pobj\nfrom elist.elist import *\nol = [1,2,3,4]\nid(ol)\nrslt = pop(ol,2)\npobj(rslt)\nol\nid(ol)\nid(rslt['list'])\n####\nol = [1,2,3,4]\nid(ol)\nrslt = pop(ol,2,mode=\"original\")\nrslt\nol\nid(ol)", "id": "f1599:m106"} {"signature": "def copy_within(ol,target, start=None, end=None):", "body": "length = ol.__len__()if(start==None):start = else:passif(end==None):end = lengthelse:passtarget = uniform_index(target,length)start = uniform_index(start,length)end = uniform_index(end,length)cplen = end - startcpend = target+cplenif(target+cplen > length):cpend = lengthelse:passshift = start - targetif(shift>=):for i in range(target,cpend):ol[i] = ol[i+shift]else:for i in range(cpend-,target-,-):ol[i] = ol[i+shift]return(ol)", "docstring": "from elist.elist import *\nol = [1, 2, 3, 4, 5]\nid(ol)\nrslt = copyWithin(ol,0,3,4)\nrslt\nid(rslt)\n####\nol = [1, 2, 3, 4, 5]\nid(ol)\nrslt = copyWithin(ol,0,3)\nrslt\nid(rslt)\n####\nol = [1, 2, 3, 4, 5]\nid(ol)\nrslt = copyWithin(ol,-2)\nrslt\nid(rslt)\n####copyWithin is the same as copy_within", "id": "f1599:m136"} {"signature": "def is_list(obj):", "body": "if(type(obj)==type([])):return(True)else:return(False)", "docstring": "from elist.elist import *\nis_list([1,2,3])\nis_list(200)", "id": "f1599:m205"} {"signature": "def rangize_supp(spans,lngth):", "body": "rslt = []si = ei = spans[][]if(si == 
ei):passelse:rslt.append((si,ei))prev_ei = spans[][]for i in range(,spans.__len__()):si = prev_eiei = spans[i][]rslt.append((si,ei))prev_ei = spans[i][]if(prev_ei < lngth):rslt.append((prev_ei,lngth))else:passreturn(rslt)", "docstring": "spans = [(0, 3), (4, 7), (8, 10), (11, 12), (13, 16), (17, 20)]\nrangize_supplement(spans,24)", "id": "f1599:m202"} {"signature": "def getitem_via_pathlist2(pathlist,ol):", "body": "this = olfor i in range(,pathlist.__len__()):key = pathlist[i]this = this.__getitem__(key)return(this)", "docstring": "from elist.elist import *\ny = ['a',['b',[\"bb\"]],'c']\ny[1][1]\ngetitem_via_pathlist2([1,1],y)", "id": "f1599:m184"} {"signature": "def value_interval(ol,value):", "body": "si,ei = where(ol,value)if(si == None):sv = Noneelse:sv = ol[si]if(ei == None):ev = Noneelse:ev = ol[ei]return((sv,ev))", "docstring": "ol = [0, 4, 6, 8, 10, 14]\nvalue_interval(ol,-1)\nvalue_interval(ol,1)\nvalue_interval(ol,2)\nvalue_interval(ol,3)\nvalue_interval(ol,4)\nvalue_interval(ol,9)\nvalue_interval(ol,14)\nvalue_interval(ol,17)", "id": "f1599:m217"} {"signature": "def remove_seqsnot(ol,value,seqs,**kwargs):", "body": "if('' in kwargs):mode = kwargs[\"\"]else:mode = \"\"seqs = list(seqs)new = []length = ol.__len__()cpol = copy.deepcopy(ol)seq = -for i in range(,length):if(not(cpol[i]==value)):seq = seq + if(seq in seqs):passelse:new.append(cpol[i])else:new.append(cpol[i])if(mode == \"\"):return(new) else:ol.clear()ol.extend(new)return(ol)", "docstring": "from elist.elist import *\nol = [1,'a',3,'a',5,'a',6,'a']\nid(ol)\nnew = remove_seqsnot(ol,'a',{1,3})\nol\nnew\nid(ol)\nid(new)\n####\nol = [1,'a',3,'a',5,'a',6,'a']\nid(ol)\nrslt = remove_seqsnot(ol,'a',{1,3},mode=\"original\")\nol\nrslt\nid(ol)\nid(rslt)", "id": "f1599:m121"} {"signature": "def value_indexes_mapping(l):", "body": "pt = copy.deepcopy(l)desc = {}vset = set({})for v in pt:vset.add(v)for v in vset:desc[v] = []for i in range(,l.__len__()):desc[l[i]].append(i)return(desc)", "docstring": "from elist.elist import *\nfrom elist.jprint import pobj\nl = ['a','b','b','a','c','b']\ndesc = value_indexes_mapping(l)\npobj(desc)", "id": "f1599:m181"} {"signature": "def update_desc_rsib_path(desc,sibs_len):", "body": "if(desc['']<(sibs_len-)):rsib_path = copy.deepcopy(desc[''])rsib_path[-] = desc['']+desc[''] = rsib_pathelse:passreturn(desc)", "docstring": "rightSibling\nnextSibling\nrightSib\nnextSib\nrsib\nnsib\n\nhave the same parent,and on the right", "id": "f1599:m236"} {"signature": "def prepend(ol,ele,**kwargs):", "body": "if('' in kwargs):mode = kwargs[\"\"]else:mode = \"\"if(mode == \"\"):new = [ele]cpol = copy.deepcopy(ol)new.extend(cpol)return(new)else:length = ol.__len__()ol.append(None)for i in range(length-,-,-):ol[i+] = ol[i]ol[] = elereturn(ol)", "docstring": "from elist.elist import *\nol = [1,2,3,4]\nele = 5\nid(ol)\nprepend(ol,ele,mode=\"original\")\nol\nid(ol)\n####\nol = [1,2,3,4]\nele = 5\nid(ol)\nnew = prepend(ol,ele)\nnew\nid(new)", "id": "f1599:m56"} {"signature": "def array_of(*eles):", "body": "return(list(eles))", "docstring": "from elist.elist import *\narray_of(1,2,4,5,6)", "id": "f1599:m134"} {"signature": "def findfivo(ol,*args,**kwargs):", "body": "args = list(args)lngth = args.__len__()if(lngth==):diff_funcs_arr = kwargs['']diff_args_arr = kwargs['']elif(lngth==):if('' in kwargs):diff_funcs_arr = args[]diff_args_arr = kwargs['']else:diff_funcs_arr = kwargs['']diff_args_arr = args[]else:diff_funcs_arr = args[]diff_args_arr = args[]lngth = ol.__len__()rslt = []for i in range(,lngth):index = ivalue 
= ol[i]func = diff_funcs_arr[i]args = diff_args_arr[i]cond = func(index,value,*args)if(cond):rslt.append((index,value))else:passreturn(rslt)", "docstring": "#findfivo f,i,v,o\u56db\u5143\u51b3\u5b9a fivo-4-tuple-engine\n#cond_func diff_func(index,value,*diff_args)", "id": "f1599:m21"} {"signature": "def get_children_handler(self,*args):", "body": "return(self.pdata)", "docstring": "list's children list is self", "id": "f1599:c2:m0"} {"signature": "def init_range(start,end,step):", "body": "return(list(range(start,end,step)))", "docstring": "init_range(1,20,2)", "id": "f1599:m130"} {"signature": "def cond_remove_all(ol,**kwargs):", "body": "cond_func = kwargs['']if('' in kwargs):cond_func_args = kwargs['']else:cond_func_args = []if('' in kwargs):mode = kwargs[\"\"]else:mode = \"\"new = copy.deepcopy(ol)selected = find_all(new,cond_func,*cond_func_args)selected_indexes = array_map(selected,lambda ele:ele[''])new = pop_indexes(new,selected_indexes)['']if(mode == \"\"):return(new)else:ol.clear()ol.extend(new)return(ol)", "docstring": "from elist.elist import *\nol = [1,'X',3,'b',5,'c',6,'A',7,'b',8,'B',9]\nid(ol)\ndef afterCH(ele,ch):\n cond = (ord(str(ele)) > ord(ch))\n return(cond)\n\nnew = cond_remove_all(ol,cond_func=afterCH,cond_func_args=['B'])\nol\nnew\nid(ol)\nid(new)\n####\nol = [1,'X',3,'b',5,'c',6,'A',7,'b',8,'B',9]\nid(ol)\nrslt = cond_remove_all(ol,cond_func=afterCH,cond_func_args=['B'],mode='original')\nol\nrslt\nid(ol)\nid(rslt)", "id": "f1599:m126"} {"signature": "def replace_seqs(ol,value,indexes,**kwargs):", "body": "if('' in kwargs):mode = kwargs[\"\"]else:mode = \"\"indexes = list(indexes)new = []length = ol.__len__()cpol = copy.deepcopy(ol)for i in range(,length):if(i in indexes):new.append(value)else:new.append(cpol[i])if(mode == \"\"):return(new)else:ol.clear()ol.extend(new)return(ol)", "docstring": "from elist.elist import *\nol = [1,'a',3,'a',5,'a',6,'a']\nid(ol)\nnew = replace_seqs(ol,'AAA',[1,3,7])\nol\nnew\nid(ol)\nid(new)\n####\nol = [1,'a',3,'a',5,'a',6,'a']\nid(ol)\nrslt = replace_seqs(ol,'AAA',[1,3,7],mode=\"original\")\nol\nrslt\nid(ol)\nid(rslt)\n#replace_indexes = replace_seqs", "id": "f1599:m192"} {"signature": "def insert_many(ol,eles,locs,**kwargs):", "body": "if('' in kwargs):mode = kwargs[\"\"]else:mode = \"\"eles = copy.deepcopy(eles)locs = copy.deepcopy(locs)new = []length = ol.__len__()cpol = copy.deepcopy(ol)for i in range(,locs.__len__()):if(locs[i]>=length):passelse:locs[i] = uniform_index(locs[i],length)tmp = sorted_refer_to(eles,locs)eles = tmp['']locs = tmp['']label = eles.__len__()si = ei = for i in range(,locs.__len__()):if(locs[i]>=length):label = ibreakelse:ei = locs[i]new.extend(cpol[si:ei])new.append(eles[i])si = eifor i in range(label,locs.__len__()):new.append(eles[i])new.extend(cpol[ei:])if(mode == \"\"):return(new)else:ol.clear()ol.extend(new)return(ol)", "docstring": "from elist.elist import *\nol = [1,2,3,4,5]\neles = [7,77,777]\nlocs = [0,2,4]\nid(ol)\nnew = insert_many(ol,eles,locs)\nol\nnew\nid(new)\n####\nol = [1,2,3,4,5]\neles = [7,77,777]\nlocs = [0,2,4]\nid(ol)\nrslt = insert_many(ol,eles,locs,mode=\"original\")\nol\nrslt\nid(rslt)", "id": "f1599:m69"} {"signature": "def first_continuous_indexesnot_slice(ol,value):", "body": "length = ol.__len__()begin = Noneslice = []for i in range(,length):if(not(ol[i]==value)):begin = ibreakelse:passif(begin == None):return(None)else:slice.append(begin)for i in range(begin+,length):if(not(ol[i]==value)):slice.append(i)else:breakreturn(slice)", "docstring": "from elist.elist import *\nol = 
[\"a\",0,1,\"a\",\"a\",2,3,\"a\",4,\"a\",\"a\",\"a\",5]\nfirst_continuous_indexesnot_slice(ol,\"a\")", "id": "f1599:m94"} {"signature": "def sort(ol,**kwargs):", "body": "if('' in kwargs):mode = kwargs[\"\"]else:mode = \"\"if(mode == \"\"):new = copy.deepcopy(ol)new.sort()return(new) else:ol.sort()return(ol)", "docstring": "from elist.elist import *\nol = [1,3,4,2]\nid(ol)\nnew = sort(ol)\nol\nnew\nid(ol)\nid(new)\n####\nol = [1,3,4,2]\nid(ol)\nrslt = sort(ol,mode=\"original\")\nol\nrslt\nid(ol)\nid(rslt)", "id": "f1599:m74"} {"signature": "def init_desc_matrix(l):", "body": "leaf = is_leaf(l)root_desc = new_ele_description(leaf=leaf,depth=,breadth_path=[],path=[],parent_path=[],parent_breadth_path=[])if(leaf):root_desc[''] = else:passdesc_matrix = [[root_desc]]return(desc_matrix)", "docstring": "from elist.elist import *\nfrom elist.jprint import pobj\nl = [1,[4],2,[3,[5,6]]]\ndesc_matrix = init_desc_matrix(l)\npobj(desc_matrix)", "id": "f1599:m232"} {"signature": "def mapfi(ol,map_func_args,**kwargs):", "body": "diff_funcs_arr = kwargs['']lngth = ol.__len__()rslt = []for i in range(,lngth):index = ivalue = ol[i]func = diff_funcs_arr[i]args = map_func_argsele = func(index,*args)rslt.append(ele)return(rslt)", "docstring": "#mapfi \u5171\u4eab\u76f8\u540c\u7684o,v\u4e0d\u4f5c\u4e3amap_func\u53c2\u6570\n# share common other_args,NOT take value as a param for map_func\n#map_func diff_func(index,*common_args)", "id": "f1599:m7"} {"signature": "def index_which(ol,value,which):", "body": "length = ol.__len__()seq = -for i in range(,length):if(value == ol[i]):seq = seq + if(seq == which):return(i)else:passelse:passreturn(None)", "docstring": "from elist.elist import *\nol = [1,'a',3,'a',4,'a',5]\nindex_which(ol,'a',0)\nindex_which(ol,'a',1)\nindex_which(ol,'a',2)\nindex_which(ol,'a',3) == None", "id": "f1599:m85"} {"signature": "def which_continuous_indexes_slice(ol,value,which):", "body": "length = ol.__len__()seq = -cursor = begin = Noneslice = []while(cursor < length):cond1 = (ol[cursor] == value)cond2 = (begin == None)if(cond1 & cond2):begin = cursorslice.append(cursor)cursor = cursor + elif(cond1 & (not(cond2))):slice.append(cursor)cursor = cursor + elif((not(cond1)) & (not(cond2))):seq = seq + if(seq == which):return(slice)else:cursor = cursor + begin = Noneslice = []else:cursor = cursor + if(slice):seq = seq + else:passif(seq == which):return(slice)else:return([])", "docstring": "from elist.elist import *\nol = [1,\"a\",\"a\",2,3,\"a\",4,\"a\",\"a\",\"a\",5]\nwhich_continuous_indexes_slice(ol,\"a\",0)\nwhich_continuous_indexes_slice(ol,\"a\",1)\nwhich_continuous_indexes_slice(ol,\"a\",2)\nwhich_continuous_indexes_slice(ol,\"a\",3)\nwhich_continuous_indexes_slice(ol,\"b\",0)", "id": "f1599:m97"} {"signature": "def getitem_via_pathlist(ol,pathlist):", "body": "this = olfor i in range(,pathlist.__len__()):key = pathlist[i]this = this.__getitem__(key)return(this)", "docstring": "from elist.elist import *\ny = ['a',['b',[\"bb\"]],'c']\ny[1][1]\ngetitem_via_pathlist(y,[1,1])", "id": "f1599:m183"} {"signature": "def last_continuous_indexes_slice(ol,value):", "body": "length = ol.__len__()end = Noneslice = []for i in range(length-,-,-):if(ol[i]==value):end = ibreakelse:passif(end == None):return(None)else:slice.append(end)for i in range(end-,-,-):if(ol[i]==value):slice.append(i)else:breakslice.reverse()return(slice)", "docstring": "from elist.elist import *\nol = [1,\"a\",\"a\",2,3,\"a\",4,\"a\",\"a\",\"a\",5]\nlast_continuous_indexes_slice(ol,\"a\")", "id": "f1599:m95"} {"signature": "def 
mapivo(ol,map_func,**kwargs):", "body": "lngth = ol.__len__()common_funcs_arr = init(lngth,map_func)diff_args_arr = kwargs['']rslt = mapfivo(ol,map_funcs=common_funcs_arr,map_func_args_array=diff_args_arr)return(rslt)", "docstring": "#mapivo \u5171\u4eab\u76f8\u540c\u7684f share common map_func\n#map_func common_func(index,value,*diff_args)", "id": "f1599:m4"} {"signature": "def join(ol,separator=\"\"):", "body": "if(ol.__len__() == ):return(\"\")else:passcond = (type(ol[])==type(b''))if(cond):rslt = b''else:rslt =\"\"length = ol.__len__()for i in range(,length-):ele = ol[i]if(cond):passelse:ele = str(ele)rslt = rslt + ele + separatorif(cond):rslt = rslt + ol[length - ]else:rslt = rslt + str(ol[length - ])return(rslt)", "docstring": "from elist.elist import *\nol = [1,2,3,4]\njoin(ol,separator=\"-\")", "id": "f1599:m145"} {"signature": "def pop_indexes(ol,indexes,**kwargs):", "body": "length = ol.__len__()indexes = list(map(lambda index:uniform_index(index,length),list(indexes)))if('' in kwargs):mode = kwargs[\"\"]else:mode = \"\"if(mode == \"\"):cpol = copy.deepcopy(ol)new = []popped = []for i in range(,length):if(i in indexes):popped.append(cpol[i])else:new.append(cpol[i])return({'':popped,'':new})else:tmp = []popped = []for i in range(,length):if(i in indexes):popped.append(ol[i])else:tmp.append(ol[i])ol.clear()for i in range(,tmp.__len__()):ol.append(tmp[i])return(popped)", "docstring": "from elist.jprint import pobj\nfrom elist.elist import *\nol = [1,2,3,4,5,6]\nid(ol)\nrslt = pop_indexes(ol,{0,-3,5})\nol\nid(ol)\nid(rslt['list'])\n####\nol = [1,2,3,4,5,6]\nid(ol)\nrslt = pop_indexes(ol,{0,-3,5},mode=\"original\")\nrslt\nol\nid(ol)", "id": "f1599:m110"} {"signature": "def remove_first(ol,value,**kwargs):", "body": "if('' in kwargs):mode = kwargs[\"\"]else:mode = \"\"if(mode == \"\"):new = copy.deepcopy(ol)new.remove(value)return(new)else:ol.remove(value)return(ol)", "docstring": "from elist.jprint import pobj\nfrom elist.elist import *\nol = [1,'a',3,'a',5,'a']\nid(ol)\nnew = remove_first(ol,'a')\nol\nnew\nid(ol)\nid(new)\n####\nol = [1,'a',3,'a',5,'a']\nid(ol)\nrslt = remove_first(ol,'a',mode=\"original\")\nol\nrslt\nid(ol)\nid(rslt)\n####array_remove is the same as remove_first", "id": "f1599:m112"} {"signature": "def all_continuous_indexesnot_slices(ol,value):", "body": "rslt = []length = ol.__len__()cursor = begin = Noneslice = []while(cursor < length):cond1 = not(ol[cursor] == value)cond2 = (begin == None)if(cond1 & cond2):begin = cursorslice.append(cursor)elif(cond1 & (not(cond2))):slice.append(cursor)elif((not(cond1)) & (not(cond2))):rslt.append(slice)begin = Noneslice = []else:passcursor = cursor + if(slice):rslt.append(slice)else:passreturn(rslt)", "docstring": "from elist.elist import *\nol = [1,\"a\",\"a\",2,3,\"a\",4,\"a\",\"a\",\"a\",5]\nall_continuous_indexesnot_slices(ol,\"a\")", "id": "f1599:m104"} {"signature": "def index_firstnot(ol,value):", "body": "length = ol.__len__()for i in range(,length):if(value == ol[i]):passelse:return(i)return(None)", "docstring": "from elist.elist import *\nol = [1,'a',3,'a',4,'a',5]\nindex_firstnot(ol,'a')\n####index_firstnot, array_indexnot, indexOfnot are the same\narray_indexnot(ol,'a')\nindexOfnot(ol,'a')", "id": "f1599:m82"} {"signature": "def get_next_char_level_in_j_str(curr_lv,curr_seq,j_str,block_op_pairs_dict=get_block_op_pairs(\"\")):", "body": "curr_ch = j_str[curr_seq]next_ch = j_str[curr_seq + ]cond = for i in range(,block_op_pairs_dict.__len__()+):if(curr_ch == block_op_pairs_dict[i][]):if(next_ch == 
block_op_pairs_dict[i][]):next_lv = curr_lv else:next_lv = curr_lv + cond = breakelif(curr_ch == block_op_pairs_dict[i][]):if(is_rop(next_ch,block_op_pairs_dict)):next_lv = curr_lv - else:next_lv = curr_lvcond = breakelse:passif(cond == ):passelif(is_rop(next_ch,block_op_pairs_dict)):next_lv = curr_lv - else: next_lv = curr_lvcurr_lv = next_lvcurr_seq = curr_seq + return(curr_lv,curr_lv,curr_seq)", "docstring": "the first-char is level-1\n when current is non-op, next-char-level = curr-level\n when current is lop, non-paired-rop-next-char-level = lop-level+1;\n when current is lop, paired-rop-next-char-level = lop-level\n when current is rop, next-char-level = rop-level - 1\n # {\"key_4_UF0aJJ6v\": \"value_1\", \"key_2_Hd0t\": [\"value_16\", \"value_8\", \"value_8\", \"value_15\", \"value_14\", \"value_19\", {......\n # 122222222222222222222222222222222222222222222333333333333333333333333333333333333333333333333333333333333333333333334......\n # {\\n\"key_4_UF0aJJ6v\": \"value_1\", \\n\"key_2_Hd0t\": [\\n\"value_16\", \\n\"value_8\", \\n\"value_8\", \\n\"value_15\", \\n\"value_14\", \\n\"value_19\",...... \n # 1 222222222222222222222222222222 2222222222222222 3333333333333 333333333333 333333333333 3333333333333 3333333333333 3333333333333......", "id": "f1599:m246"} {"signature": "def leaf_handler(self,*args):", "body": "desc = self.descpdesc = self.pdescdesc[''] = Truedesc[''] = pdesc[''].append(copy.deepcopy(desc['']))pdesc[''].append(copy.deepcopy(desc['']))", "docstring": "leaf child handler", "id": "f1599:c2:m3"} {"signature": "def index_lastnot(ol,value):", "body": "length = ol.__len__()for i in range(length-,-,-):if(value == ol[i]):passelse:return(i)return(None)", "docstring": "from elist.elist import *\nol = [1,'a',3,'a',4,'a',5]\nindex_lastnot(ol,'a')\n####lastIndexOfnot is the same as index_lastnot\nlastIndexOfnot(ol,'a')", "id": "f1599:m84"} {"signature": "def uniqualize(l,**kwargs):", "body": "if('' in kwargs):mode = kwargs['']else:mode = ''pt = copy.deepcopy(l)seqs =[]freq = {}for i in range(,pt.__len__()):v = pt[i]if(v in freq):freq[v] = freq[v] + else:freq[v] = seqs.append(i)npt = select_seqs(pt,seqs)pt = nptif(mode == ''):return(npt)else:l.clear()l.extend(npt)return(l)", "docstring": "from elist.elist import *\nl = [1, 2, 2]\nnew = uniqualize(l)\nnew\nid(l)\nid(new)\n####\nl = [1, 2, 2]\nrslt = uniqualize(l,mode=\"original\")\nrslt\nid(l)\nid(rslt)", "id": "f1599:m148"} {"signature": "def interleave(*arrays,**kwargs):", "body": "anum = arrays.__len__()rslt = []length = arrays[].__len__()for j in range(,length):for i in range(,anum):array = arrays[i]rslt.append(array[j])return(rslt)", "docstring": "arr1 = [1,2,3,4]\narr2 = ['a','b','c','d']\narr3 = ['@','#','%','*']\ninterleave(arr1,arr2,arr3)", "id": "f1599:m150"} {"signature": "def mapfvo(ol,**kwargs):", "body": "diff_funcs_arr = kwargs['']diff_args_arr = kwargs['']lngth = ol.__len__()rslt = []for i in range(,lngth):index = ivalue = ol[i]func = diff_funcs_arr[i]args = diff_args_arr[i]ele = func(value,*args)rslt.append(ele)return(rslt)", "docstring": "#mapfvo i\u4e0d\u4f5c\u4e3amap_func\u53c2\u6570 NOT take index as a param for map_func\n#map_func diff_func(value,*diff_args)", "id": "f1599:m3"} {"signature": "def same_indexes(l1,l2):", "body": "rslt = []for i in range(,l1.__len__()):if(l1[i]==l2[i]):rslt.append(i)return(rslt)", "docstring": "from elist.elist import *\nl1 = [1,2,3,5]\nl2 = [0,2,3,4]\nsame_indexes(l1,l2)", "id": "f1599:m179"} {"signature": "def select_regex_in(pl,regex):", "body": "def 
cond_func(ele,index,regex):if(type(ele)==type([])):cond = regex_in(ele,regex)else:m = regex.search(ele)if(m == None):cond = Falseelse:cond = Truereturn(cond)arr = cond_select_values_all2(pl,cond_func=cond_func, cond_func_args =[regex])return(arr)", "docstring": "regex = re.compile(\"^x.*x$\")\npl = ['bcd','xabcxx','xx','y']\nselect_regex_in(pl,regex)", "id": "f1599:m159"} {"signature": "def car(ol):", "body": "return(ol[0])", "docstring": "from elist.elist import *\nol=[1,2,3,4]\ncar(ol)", "id": "f1599:m63"} {"signature": "def pipe_shell_cmds(shell_CMDs):", "body": "len = shell_CMDs.__len__()p = {}p[1] = subprocess.Popen(shlex.split(shell_CMDs[1]), stdout=subprocess.PIPE,stderr=subprocess.PIPE)for i in range(2,len):p[i] = subprocess.Popen(shlex.split(shell_CMDs[i]), stdin=p[i-1].stdout, stdout=subprocess.PIPE,stderr=subprocess.PIPE)if(len > 1):p[len] = subprocess.Popen(shlex.split(shell_CMDs[len]), stdin=p[len-1].stdout, stdout=subprocess.PIPE,stderr=subprocess.PIPE)result = p[len].communicate()if(len > 1):for i in range(1,len+1):returncode = p[i].wait()else:returncode = p[len].wait()return(result)", "docstring": "shell_CMDs = {}\nshell_CMDs[1] = 'netstat -n'\nshell_CMDs[2] = \"awk {'print $6'}\"", "id": "f1601:m0"} {"signature": "def stage(self, pipeline_name, stage_name, pipeline_counter=None):", "body": "return Stage(self, pipeline_name, stage_name, pipeline_counter=pipeline_counter)", "docstring": "Returns an instance of :class:`Stage`\n\n Args:\n pipeline_name (str): Name of the pipeline the stage belongs to\n stage_name (str): Name of the stage to act on\n pipeline_counter (int): The pipeline instance the stage is for.\n\n Returns:\n Stage: an instantiated :class:`Stage`.", "id": "f1613:c1:m8"} {"signature": "def pipeline(self, name):", "body": "return Pipeline(self, name)", "docstring": "Instantiates a :class:`Pipeline` with the given name.\n\n Args:\n name: The name of the pipeline you want to interact with\n\n Returns:\n Pipeline: an instantiated :class:`Pipeline`.", "id": "f1613:c1:m6"} {"signature": "def add_logged_in_session(self, response=None):", "body": "if not response:response = self.get('go/api/pipelines.xml')self._set_session_cookie(response)if not self._session_id:raise AuthenticationFailed('')response = self.get('go/pipelines')match = re.search(r'',response.read().decode('utf-8'))if match:self._authenticity_token = match.group(1)else:raise AuthenticationFailed('')", "docstring": "Make the request appear to be coming from a browser\n\n This is to interact with older parts of Go that don't have a\n proper API call to be made. What will be done:\n\n 1. If no response is passed in, a call to `go/api/pipelines.xml` is\n made to get a valid session\n 2. `JSESSIONID` will be populated from this request\n 3. A request to `go/pipelines` will be made so the\n `authenticity_token` (CSRF) can be extracted. It will then\n silently be injected into `post_args` on any POST calls that\n don't start with `go/api` from this point.\n\n Args:\n response: a :class:`Response` object from a previously successful\n API call. So we won't have to query `go/api/pipelines.xml`\n unnecessarily.\n\n Raises:\n HTTPError: when the HTTP request fails.\n AuthenticationFailed: when failing to get the `session_id`\n or the `authenticity_token`.", "id": "f1613:c1:m4"}
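The Server.pipeline/Server.stage factories and the session helper above compose into the usual py-gocd flow; a hypothetical usage sketch, where the host, credentials and pipeline name are placeholders rather than values from this dump:

```python
# Hypothetical end-to-end usage of the wrappers documented above; the host,
# credentials and pipeline name are placeholders, not values from this dump.
from gocd import Server

go = Server('http://localhost:8153', user='admin', password='badger')
pipeline = go.pipeline('Example-Pipeline')
response = pipeline.history()
if response.is_ok:
    print(response.payload)
```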
So we won't have to query `go/api/pipelines.xml`\n unnecessarily.\n\n Raises:\n HTTPError: when the HTTP request fails.\n AuthenticationFailed: when failing to get the `session_id`\n or the `authenticity_token`.", "id": "f1613:c1:m4"} {"signature": "def get(self, path):", "body": "return self.request(path)", "docstring": "Performs a HTTP GET request to the Go server\n\n Args:\n path (str): The full path on the Go server to request.\n This includes any query string attributes.\n\n Raises:\n HTTPError: when the HTTP request fails.\n\n Returns:\n file like object: The response from a\n :func:`urllib2.urlopen` call", "id": "f1613:c1:m1"} {"signature": "def list(self):", "body": "return self._get('')", "docstring": "Lists all available artifacts in this job.\n\n See the `Go artifact list documentation`__ for example responses.\n\n .. __: http://api.go.cd/current/#get-all-artifacts\n\n Returns:\n Response: :class:`gocd.api.response.Response` object", "id": "f1618:c0:m1"} {"signature": "def get_directory(self, path_to_directory, timeout=, backoff=, max_wait=):", "body": "response = Nonestarted_at = Nonetime_elapsed = i = while time_elapsed < timeout:response = self._get(''.format(path_to_directory))if response:breakelse:if started_at is None:started_at = time.time()time.sleep(min(backoff * ( ** i), max_wait))i += time_elapsed = time.time() - started_atreturn response", "docstring": "Gets an artifact directory by its path.\n\n See the `Go artifact directory documentation`__ for example responses.\n\n .. __: http://api.go.cd/current/#get-artifact-directory\n\n .. note::\n Getting a directory relies on Go creating a zip file of the\n directory in question. Because of this Go will zip the file in\n the background and return a 202 Accepted response. It's then up\n to the client to check again later and get the final file.\n\n To work with normal assumptions this :meth:`get_directory` will\n retry itself up to ``timeout`` seconds to get a 200 response to\n return. At that point it will then return the response as is, no\n matter whether it's still 202 or 200. The retry is done with an\n exponential backoff with a max value between retries. See the\n ``backoff`` and ``max_wait`` variables.\n\n If you want to handle the retry logic yourself then use :meth:`get`\n and add '.zip' as a suffix on the directory.\n\n Args:\n path_to_directory (str): The path to the directory to get.\n It can be nested eg ``target/dist.zip``\n timeout (int): How many seconds we will wait in total for a\n successful response from Go when we're receiving 202\n backoff (float): The initial value used for backoff, raises\n exponentially until it reaches ``max_wait``\n max_wait (int): The max time between retries\n\n Returns:\n Response: :class:`gocd.api.response.Response` object\n A successful response is a zip-file.", "id": "f1618:c0:m3"} {"signature": "def get(self, path_to_file):", "body": "return self._get(path_to_file)", "docstring": "Gets an artifact directory by its path.\n\n See the `Go artifact file documentation`__ for example responses.\n\n .. __: http://api.go.cd/current/#get-artifact-file\n\n Args:\n path_to_file (str): The path to file to get. 
It can be nested eg\n ``dist/foobar-widgets-1.2.0.jar``\n\n Returns:\n Response: :class:`gocd.api.response.Response` object", "id": "f1618:c0:m2"} {"signature": "def stage(self, name, pipeline_counter=None):", "body": "return Stage(self.server,pipeline_name=self.name,stage_name=name,pipeline_counter=pipeline_counter,)", "docstring": "Helper to instantiate a :class:`gocd.api.stage.Stage` object\n\n Args:\n name: The name of the stage\n pipeline_counter:\n\n Returns:", "id": "f1619:c0:m10"} {"signature": "def unpause(self):", "body": "return self._post('', headers={\"\": True})", "docstring": "Unpauses the pipeline\n\n See the `Go pipeline unpause documentation`__ for example responses.\n\n .. __: http://api.go.cd/current/#unpause-a-pipeline\n\n Returns:\n Response: :class:`gocd.api.response.Response` object", "id": "f1619:c0:m4"} {"signature": "def __init__(self, server, name):", "body": "self.server = serverself.name = name", "docstring": "A wrapper for the `Go pipeline API`__\n\n .. __: http://api.go.cd/current/#pipelines\n\n Args:\n server (Server): A configured instance of\n :class:gocd.server.Server\n name (str): The name of the pipeline we're working on", "id": "f1619:c0:m0"} {"signature": "def console_output(self, instance=None):", "body": "if instance is None:instance = self.instance()for stage in instance['']:for job in stage['']:if job[''] not in self.final_results:continueartifact = self.artifact(instance[''],stage[''],job[''],stage[''])output = artifact.get('')yield ({'': self.name,'': instance[''],'': stage[''],'': stage[''],'': job[''],'': job[''],},output.body)", "docstring": "Yields the output and metadata from all jobs in the pipeline\n\n Args:\n instance: The result of a :meth:`instance` call, if not supplied\n the latest of the pipeline will be used.\n\n Yields:\n tuple: (metadata (dict), output (str)).\n\n metadata contains:\n - pipeline\n - pipeline_counter\n - stage\n - stage_counter\n - job\n - job_result", "id": "f1619:c0:m9"} {"signature": "def status(self):", "body": "return self._get('')", "docstring": "Returns the current status of this pipeline\n\n See the `Go pipeline status documentation`__ for example responses.\n\n .. __: http://api.go.cd/current/#get-pipeline-status\n\n Returns:\n Response: :class:`gocd.api.response.Response` object", "id": "f1619:c0:m5"} {"signature": "def artifact(self, counter, stage, job, stage_counter=):", "body": "return Artifact(self.server, self.name, counter, stage, job, stage_counter)", "docstring": "Helper to instantiate an :class:`gocd.api.artifact.Artifact` object\n\n Args:\n counter (int): The pipeline counter to get the artifact for\n stage: Stage name\n job: Job name\n stage_counter: Defaults to 1\n\n Returns:\n Artifact: :class:`gocd.api.artifact.Artifact` object", "id": "f1619:c0:m8"} {"signature": "def get(self):", "body": "return self._get(self.name, headers={\"\": self._accept_header_value})", "docstring": "Get template config for specified template name.\n\n See `The template config object`__ for example responses.\n\n .. __: https://api.go.cd/current/#the-template-config-object\n\n Returns:\n Response: :class:`gocd.api.response.Response` object", "id": "f1620:c0:m1"} {"signature": "def __init__(self, server, name, api_version=):", "body": "self.server = serverself.name = nameself.api_version = api_version", "docstring": "A wrapper for the `Go template config API`__\n\n .. 
{"signature": "def __init__(self, server, name, api_version=):", "body": "self.server = serverself.name = nameself.api_version = api_version", "docstring": "A wrapper for the `Go template config API`__\n\n .. __: https://api.go.cd/current/#template-config\n\n Args:\n server (Server): A configured instance of\n :class:gocd.server.Server\n name (str): The name of the template we're working on", "id": "f1620:c0:m0"} {"signature": "@propertydef pipelines(self):", "body": "if not self.response:return set()elif self._pipelines is None and self.response:self._pipelines = set()for group in self.response.payload:for pipeline in group['']:self._pipelines.add(pipeline[''])return self._pipelines", "docstring": "Returns a set of all pipelines from the last response\n\n Returns:\n set: Response success: all the pipelines available in the response\n Response failure: an empty set", "id": "f1622:c0:m3"} {"signature": "def get(self):", "body": "return self._get(self.name, headers={\"\": self._accept_header_value})", "docstring": "Gets the SCM material for the specified material name\n\n See `The global scm config object`__ for example responses.\n\n .. __: https://api.go.cd/current/#the-global-scm-config-object\n\n Returns:\n Response: :class:`gocd.api.response.Response` object", "id": "f1623:c0:m2"} {"signature": "def __init__(self, server, name=\"\"):", "body": "self.server = serverself.name = name", "docstring": "A wrapper for the `Go pluggable SCM API`__\n\n .. __: https://api.go.cd/current/#scms\n\n Args:\n server (Server): A configured instance of\n :class:gocd.server.Server\n name (str): The name of the SCM material", "id": "f1623:c0:m0"} {"signature": "def history(self, offset=):", "body": "return self._get(''.format(offset=offset or ))", "docstring": "Lists previous instances/runs of the stage\n\n See the `Go stage history documentation`__ for example responses.\n\n .. __: http://api.go.cd/current/#get-stage-history\n\n Args:\n offset (int, optional): How many instances to skip for this response.\n\n Returns:\n Response: :class:`gocd.api.response.Response` object", "id": "f1625:c0:m3"} {"signature": "def instance(self, counter=None, pipeline_counter=None):", "body": "pipeline_counter = pipeline_counter or self.pipeline_counterpipeline_instance = Noneif not pipeline_counter:pipeline_instance = self.server.pipeline(self.pipeline_name).instance()self.pipeline_counter = int(pipeline_instance[''])if not counter:if pipeline_instance is None:pipeline_instance = (self.server.pipeline(self.pipeline_name).instance(pipeline_counter))for stages in pipeline_instance['']:if stages[''] == self.stage_name:return self.instance(counter=int(stages['']),pipeline_counter=pipeline_counter)return self._get(''.format(pipeline_counter=pipeline_counter, counter=counter))", "docstring": "Returns all the information regarding a specific stage run\n\n See the `Go stage instance documentation`__ for examples.\n\n .. __: http://api.go.cd/current/#get-stage-instance\n\n Args:\n counter (int): The stage instance to fetch.\n If falsey, returns the latest stage instance from :meth:`history`.\n pipeline_counter (int): The pipeline instance for which to fetch\n the stage.
If falsey, returns the latest pipeline instance.\n\n Returns:\n Response: :class:`gocd.api.response.Response` object", "id": "f1625:c0:m4"} {"signature": "def cancel(self):", "body": "return self._post('', headers={\"\": True})", "docstring": "Cancels a currently running stage\n\n Returns:\n Response: :class:`gocd.api.response.Response` object", "id": "f1625:c0:m2"} {"signature": "@propertydef is_ok(self):", "body": "return self.status_code == self.ok_status", "docstring": "Whether this response is considered successful\n\n Returns:\n bool: True if `status_code` is `ok_status`", "id": "f1626:c0:m1"} {"signature": "@propertydef payload(self):", "body": "if self.is_json:if not self._body_parsed:if hasattr(self._body, ''):body = self._body.decode('')else:body = self._bodyself._body_parsed = json.loads(body)return self._body_parsedelse:return self._body", "docstring": "Returns:\n `str` when not json.\n `dict` when json.", "id": "f1626:c0:m6"} {"signature": "def make_formatter(format_name):", "body": "if \"\" in format_name:from json import dumpsimport datetimedef jsonhandler(obj): return obj.isoformat() if isinstance(obj, (datetime.datetime, datetime.date)) else objif format_name == \"\":def jsondumps(data): return dumps(data, default=jsonhandler, indent=, separators=('', ''))else:def jsondumps(data): return dumps(data, default=jsonhandler)def jsonify(data):if isinstance(data, dict):print(jsondumps(data))elif isinstance(data, list):print(jsondumps([device._asdict() for device in data]))else:print(dumps({'': data}))return jsonifyelse:def printer(data):if isinstance(data, dict):print(data)else:for row in data:print(row)return printer", "docstring": "Returns a callable that outputs the data. Defaults to print.", "id": "f1630:m0"} {"signature": "def main():", "body": "args = argparser().parse_args(sys.argv[:])password = os.environ.get('') or args.passwordnetgear = Netgear(password, args.host, args.user, args.port, args.ssl, args.url, args.force_login_v2)results = run_subcommand(netgear, args)formatter = make_formatter(args.format)if results is None:print(\"\")else:formatter(results)", "docstring": "Scan for devices and print results.", "id": "f1630:m3"}
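`make_formatter` above chooses an output strategy once and returns a closure, so the CLI never re-checks the format per row. A trimmed sketch of the same pattern; the format names and dump arguments are assumptions, since the record's literals are elided:

    import json

    def make_formatter(format_name):
        # Decide once whether to emit JSON or plain text.
        if 'json' in format_name:
            def jsonify(data):
                print(json.dumps(data, default=str, indent=4))
            return jsonify
        def printer(data):
            if isinstance(data, dict):
                print(data)
            else:
                for row in data:
                    print(row)
        return printer

    make_formatter('json')({'devices': 3})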
{"signature": "def allow_block_device(self, mac_addr, device_status=BLOCK):", "body": "_LOGGER.info(\"\")if self.config_started:_LOGGER.error(\"\")return Falseif not self.config_start():_LOGGER.error(\"\")return Falsesuccess, _ = self._make_request(SERVICE_DEVICE_CONFIG, \"\",{\"\": device_status, \"\": mac_addr})if not success:_LOGGER.error(\"\")return Falseif not self.config_finish():_LOGGER.error(\"\")return Falsereturn True", "docstring": "Allow or block a device via its MAC address.\nPass in the MAC address for the device that you want to set. Pass in the\ndevice_status you wish to set the device to: Allow (allow device to access the\nnetwork) or Block (block the device from accessing the network).", "id": "f1631:c0:m9"} {"signature": "def _convert(value, to_type, default=None):", "body": "try:return default if value is None else to_type(value)except ValueError:return default", "docstring": "Convert value to to_type; return default if the conversion fails.", "id": "f1631:m6"} {"signature": "def login(self):", "body": "if not self.force_login_v2:v1_result = self.login_v1()if v1_result:return v1_resultreturn self.login_v2()", "docstring": "Login to the router.\n\nWill be called automatically by other actions.", "id": "f1631:c0:m1"} {"signature": "def _xml_get(e, name):", "body": "r = e.find(name)if r is not None:return r.textreturn None", "docstring": "Returns the value of the subnode \"name\" of element e.\n\nReturns None if the subnode doesn't exist", "id": "f1631:m2"} {"signature": "def _make_request(self, service, method, params=None, body=\"\",need_auth=True):", "body": "if need_auth and not self.cookie:if not self.login():return False, Noneheaders = self._get_headers(service, method, need_auth)if not body:if not params:params = \"\"if isinstance(params, dict):_map = paramsparams = \"\"for k in _map:params += \"\" + k + \">\" + _map[k] + \"\" + k + \"\"body = CALL_BODY.format(service=SERVICE_PREFIX + service,method=method, params=params)message = SOAP_REQUEST.format(session_id=SESSION_ID, body=body)try:response = requests.post(self.soap_url, headers=headers,data=message, timeout=, verify=False)if need_auth and _is_unauthorized_response(response):self.cookie = None_LOGGER.warning(\"\")if self.login():headers = self._get_headers(service, method, need_auth)response = requests.post(self.soap_url, headers=headers,data=message, timeout=, verify=False)success = _is_valid_response(response)if not success:_LOGGER.error(\"\")_LOGGER.debug(\"\", response.status_code, str(response.headers), response.text)return success, responseexcept requests.exceptions.RequestException:_LOGGER.exception(\"\")return False, None", "docstring": "Make an API request to the router.", "id": "f1631:c0:m11"} {"signature": "def send(self, url, http_method, **client_args):", "body": "response = super(Resource, self).send(url, http_method, **client_args)if response.status_code in (requests.codes.ok, requests.codes.created):try:self.update_from_dict(self.client.get_response_data(response, self.Meta.parse_json))except ValueError:passreturn response if response is not None else None", "docstring": "Make the actual request to the API, updating the resource if necessary\n:param url: Endpoint URL\n:param http_method: The method used to make the request to the API\n:param client_args: Arguments to be sent to the auth client\n:return:", "id": "f1650:c2:m5"} {"signature": "def save(self, force_create=False, fields=None):", "body": "values = {}fields = fields or self.fieldsfor field_name in fields:value = getattr(self, field_name)if isinstance(value, Resource):value = value.get_id()if isinstance(value, list):if len(value) > and isinstance(value[], Resource):value = Noneelse:final_value_list = []for item in value:final_value_list.append(item.isoformat() if isinstance(item, datetime) else item)value = final_value_listif isinstance(value, datetime):value = value.isoformat()if value is not None:values[field_name] = valuehttp_headers = {'': ''} if self.Meta.json_data is True else Nonejson = values if self.Meta.json_data is True else Nonedata = values if self.Meta.json_data is False else
Noneif self.get_resource_endpoint() is not None and force_create is False:return self.send(self.get_resource_endpoint(), \"\", headers=http_headers, json=json, data=data)else:return self.send(self.get_collection_endpoint(), \"\", headers=http_headers, json=json, data=data)", "docstring": "Saves (creates or updates) resource on the server\n:param force_create: If True, forces resource creation even if it already has an Id.\n:param fields: List of fields to be saved. If None, all fields will be saved.\n:return:", "id": "f1650:c2:m6"} {"signature": "def filter(self, **search_args):", "body": "search_args = search_args or {}raw_resources = []for url, paginator_params in self.paginator.get_urls(self.get_collection_endpoint()):search_args.update(paginator_params)response = self.paginator.process_response(self.send(url, \"\", params=search_args))raw_resources += self.client.get_response_data(response, self.Meta.parse_json)[self.json_collection_attribute] if self.json_collection_attribute is not None else self.client.get_response_data(response, self.Meta.parse_json)resources = []for raw_resource in raw_resources:try:resource = self.resource_class(self.client)except (ValueError, TypeError):continueelse:resource.update_from_dict(raw_resource)resources.append(resource)return resources", "docstring": "Get a filtered list of resources\n:param search_args: To be translated into ?arg1=value1&arg2=value2...\n:return: A list of resources", "id": "f1650:c3:m3"} {"signature": "@classmethoddef get_collection_endpoint(cls):", "body": "return cls.Meta.collection_endpoint if cls.Meta.collection_endpoint is not None else cls.__name__.lower() + \"\"", "docstring": "Get the relative path to the API resource collection\n\nIf self.collection_endpoint is not set, it will default to the lowercase name of the resource class plus an \"s\" and the terminating \"/\"\n:param cls: Resource class\n:return: Relative path to the resource collection", "id": "f1650:c0:m2"} {"signature": "def __init__(self, auth_client, **kwargs):", "body": "for name, value in iteritems(kwargs):setattr(self, name, value)super(Resource, self).__init__(auth_client)", "docstring": "Initializes the resource\n:param auth_client: Client to make (non)authorized requests\n:param kwargs: Initial value for attributes\n:return:", "id": "f1650:c2:m0"} {"signature": "def get_resource_endpoint(self):", "body": "return super(Resource, self).get_resource_endpoint(self.get_id())", "docstring": "Get the relative path to the specific API resource\n:return: Relative path to the resource", "id": "f1650:c2:m3"} {"signature": "def get(self, resource_id):", "body": "response = self.send(self.get_resource_endpoint(resource_id), \"\")try:resource = self.resource_class(self.client)except (ValueError, TypeError):return Noneelse:resource.update_from_dict(self.client.get_response_data(response, self.Meta.parse_json))return resource", "docstring": "Get one single resource from the API\n:param resource_id: Id of the resource to be retrieved\n:return: Retrieved resource", "id": "f1650:c3:m2"} {"signature": "def refresh(self):", "body": "if self.get_resource_endpoint() is not None:return self.send(self.get_resource_endpoint(), \"\")", "docstring": "Refreshes a resource by checking against the API\n:return:", "id": "f1650:c2:m7"} {"signature": "def __init__(self, auth_client):", "body": "self.client = auth_client", "docstring": "Initializes the instance\n:param auth_client: Client to make (non)authorized requests\n:return:", "id": "f1650:c0:m0"}
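Taken together, the records above describe a conventional REST-client split: a Resource knows its endpoints and how to save/refresh itself, while a manager-style class fetches and filters collections. A purely hypothetical usage sketch; the Pet class, its endpoint, and the manager wiring are invented for illustration, and only get/save/filter mirror the documented methods:

    class Pet(Resource):
        class Meta(Resource.Meta):
            collection_endpoint = 'pets/'   # invented endpoint

    manager = ResourceManager(Pet, client)  # hypothetical wiring
    pet = manager.get(1)                    # GET pets/1/, parsed into a Pet
    pet.name = 'Bob'
    pet.save()                              # PUT to the resource endpoint
    bobs = manager.filter(name='Bob')       # GET pets/?name=Bob -> [Pet, ...]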
{"signature": "def send(self, relative_path, http_method, **requests_args):", "body": "url = urljoin(self.base_url, relative_path)return self.session.request(http_method, url, **requests_args)", "docstring": "Subclasses must implement this method, which will be used to send API requests with proper auth\n:param relative_path: URL path relative to self.base_url\n:param http_method: HTTP method\n:param requests_args: kwargs to be sent to requests\n:return:", "id": "f1651:c0:m1"} {"signature": "def __init__(self, many=False):", "body": "self.many = manyself.name = None", "docstring": "Initialize the field\n:param many: Set to True if this field will host a list of items", "id": "f1652:c0:m0"} {"signature": "def __get__(self, instance, owner):", "body": "if instance is not None and self.name is not None:return instance.__dict__.get(self.name)else:return self", "docstring": "Normal descriptor get method\n:param instance: Resource instance where the field lives\n:param owner: Resource class where the field lives\n:return: Value stored in instance.name (TODO: maybe change this in the future to instance.Cache.name)", "id": "f1652:c0:m1"} {"signature": "def __set__(self, instance, value):", "body": "if instance is not None and self.name is not None:instance.__dict__[self.name] = value", "docstring": "Normal descriptor set method\n:param instance: Resource instance where the field lives\n:param value: Value to store in instance.name (TODO: maybe change this in the future to instance.Cache.name)", "id": "f1652:c0:m2"} {"signature": "def __set__(self, instance, value):", "body": "if self.many is False:if isinstance(value, str):value = parse(value)else:datetime_list = []for datetime_value in value:if isinstance(datetime_value, str):datetime_value = parse(datetime_value)datetime_list.append(datetime_value)value = datetime_listsuper(DateTimeField, self).__set__(instance, value)", "docstring": "Normal descriptor set method\n:param instance: Resource instance where the field lives\n:param value: Might be a datetime object or a string to be parsed", "id": "f1652:c5:m0"} {"signature": "def get_version():", "body": "contents = read_file(os.path.join('', ''))version = re.search('', contents)version = version.group().replace('', '').strip()return version", "docstring": "Returns version number, without module import (which can lead to ImportError\n if some dependencies are unavailable before install).", "id": "f1655:m1"} {"signature": "def __init__(self, name, default, category=None, field=None, verbose_name=None, help_text='', static=True,readonly=False):", "body": "self.name = nameself.category = categoryself.default = defaultself.static = staticself.help_text = help_textif static:readonly = Trueself.readonly = readonlyif verbose_name is None:verbose_name = name.replace('', '').capitalize()self.verbose_name = verbose_nameif field is None:self.field = get_field_for_proxy(self)else:self.field = fieldupdate_field_from_proxy(self.field, self)", "docstring": ":param str|unicode name: Preference name.\n\n:param default: Default (initial) value.\n\n:param str|unicode category: Category name the preference belongs to.\n\n:param Field field: Django model field to represent this preference.\n\n:param str|unicode verbose_name: Field verbose name.\n\n:param str|unicode help_text: Field help text.\n\n:param bool static: Leave this preference static (do not store in DB).\n\n:param bool readonly: Make this field read only.", "id": "f1668:c3:m0"}
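The Field descriptors above store each value in the owning instance's __dict__ under the field's name, so attribute access round-trips through __get__/__set__. A self-contained sketch of the same protocol; the library presumably assigns `name` during resource setup, so setting it by hand here is an assumption:

    class Field(object):
        def __init__(self):
            self.name = None
        def __get__(self, instance, owner):
            # Instance access returns the stored value; class access
            # returns the descriptor itself.
            if instance is not None and self.name is not None:
                return instance.__dict__.get(self.name)
            return self
        def __set__(self, instance, value):
            if instance is not None and self.name is not None:
                instance.__dict__[self.name] = value

    class Event(object):
        when = Field()

    Event.when.name = 'when'  # stand-in for the library's field-naming step

    e = Event()
    e.when = '2020-01-02'
    print(e.when)  # '2020-01-02'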
{"signature": "def update_field_from_proxy(field_obj, pref_proxy):", "body": "attr_names = ('', '', '')for attr_name in attr_names:setattr(field_obj, attr_name, getattr(pref_proxy, attr_name))", "docstring": "Updates field object with data from a PrefProxy object.\n\n :param models.Field field_obj:\n\n :param PrefProxy pref_proxy:", "id": "f1668:m1"} {"signature": "def traverse_local_prefs(stepback=):", "body": "locals_dict = get_frame_locals(stepback+)for k in locals_dict:if not k.startswith('') and k.upper() == k:yield k, locals_dict", "docstring": "Generator to walk through variables considered as preferences\n in locals dict of a given frame.\n\n :param int stepback:\n\n :rtype: tuple", "id": "f1668:m5"} {"signature": "@classmethoddef read_prefs(cls, mem_prefs):", "body": "db_prefs = {'' % (pref[''], pref['']): pref for pref incls.objects.values().order_by('', '')}new_prefs = []for app, prefs in mem_prefs.items():for pref_name, pref_proxy in prefs.items():if not pref_proxy.static: key = '' % (app, pref_name)if key in db_prefs:pref_proxy.db_value = db_prefs[key]['']else:new_prefs.append(cls(app=app, name=pref_name, text=pref_proxy.default))if new_prefs:try:cls.objects.bulk_create(new_prefs)except IntegrityError: pass", "docstring": "Initializes preferences entries in DB according to currently discovered prefs.\n\n :param dict mem_prefs:", "id": "f1669:c0:m1"} {"signature": "def bind_proxy(values, category=None, field=None, verbose_name=None, help_text='', static=True, readonly=False):", "body": "addrs = OrderedDict()depth = for local_name, locals_dict in traverse_local_prefs(depth):addrs[id(locals_dict[local_name])] = local_nameproxies = []locals_dict = get_frame_locals(depth)for value in values: id_val = id(value)if id_val in addrs:local_name = addrs[id_val]local_val = locals_dict[local_name]if isinstance(local_val, PatchedLocal) and not isinstance(local_val, PrefProxy):proxy = PrefProxy(local_name, value.val,category=category,field=field,verbose_name=verbose_name,help_text=help_text,static=static,readonly=readonly,)app_name = locals_dict[''].split('')[-] prefs = get_prefs()if app_name not in prefs:prefs[app_name] = OrderedDict()prefs[app_name][local_name.lower()] = proxylocals_dict[local_name] = proxyproxies.append(proxy)return proxies", "docstring": "Binds PrefProxy objects to module variables used by apps as preferences.\n\n :param list|tuple values: Preference values.\n\n :param str|unicode category: Category name the preference belongs to.\n\n :param Field field: Django model field to represent this preference.\n\n :param str|unicode verbose_name: Field verbose name.\n\n :param str|unicode help_text: Field help text.\n\n :param bool static: Leave this preference static (do not store in DB).\n\n :param bool readonly: Make this field read only.\n\n :rtype: list", "id": "f1670:m4"} {"signature": "def register_prefs(*args, **kwargs):", "body": "swap_settings_module = bool(kwargs.get('', True))if __PATCHED_LOCALS_SENTINEL not in get_frame_locals():raise SitePrefsException('')bind_proxy(args, **kwargs)unpatch_locals()swap_settings_module and proxy_settings_module()", "docstring": "Registers preferences that should be handled by siteprefs.\n\n Expects preferences as *args.\n\n Use keyword arguments to batch apply params supported by\n ``PrefProxy`` to all preferences not constructed by ``pref`` and ``pref_group``.\n\n Batch kwargs:\n\n :param str|unicode help_text: Field help text.\n\n :param bool static: Leave this preference static (do not store in DB).\n\n :param bool readonly: Make this field read only.\n\n :param bool swap_settings_module: Whether to automatically replace settings
module\n with a special ``ProxyModule`` object to access dynamic values of settings\n transparently (so not to bother with calling ``.value`` of ``PrefProxy`` object).", "id": "f1670:m10"} {"signature": "def autodiscover_siteprefs(admin_site=None):", "body": "if admin_site is None:admin_site = admin.siteif '' not in sys.argv[] or (len(sys.argv) > and sys.argv[] in MANAGE_SAFE_COMMANDS):import_prefs()Preference.read_prefs(get_prefs())register_admin_models(admin_site)", "docstring": "Automatically discovers and registers all preferences available in all apps.\n\n :param admin.AdminSite admin_site: Custom AdminSite object.", "id": "f1670:m6"} {"signature": "def on_pref_update(*args, **kwargs):", "body": "Preference.update_prefs(*args, **kwargs)Preference.read_prefs(get_prefs())", "docstring": "Triggered on dynamic preferences model save.\n Issues DB save and reread.", "id": "f1670:m0"} {"signature": "def patch_locals(depth=):", "body": "for name, locals_dict in traverse_local_prefs(depth):locals_dict[name] = PatchedLocal(name, locals_dict[name])get_frame_locals(depth)[__PATCHED_LOCALS_SENTINEL] = True", "docstring": "Temporarily (see unpatch_locals()) replaces all module variables\n considered preferences with PatchedLocal objects, so that every\n variable has a different hash returned by id().", "id": "f1670:m7"} {"signature": "def proxy_settings_module(depth=):", "body": "proxies = []modules = sys.modulesmodule_name = get_frame_locals(depth)['']module_real = modules[module_name]for name, locals_dict in traverse_local_prefs(depth):value = locals_dict[name]if isinstance(value, PrefProxy):proxies.append(name)new_module = type(module_name, (ModuleType, ModuleProxy), {})(module_name) new_module.bind(module_real, proxies)modules[module_name] = new_module", "docstring": "Replaces a settings module with a Module proxy to intercept\n access to settings.\n\n :param int depth: Frame count to go backward.", "id": "f1670:m9"} {"signature": "def score(self):", "body": "return sum([self.scores[len(w)] for w in self.words()])", "docstring": "The total score for the words found, according to the rules.", "id": "f1675:c8:m4"} {"signature": "def expand(self, problem):", "body": "return [self.child_node(problem, action)for action in problem.actions(self.state)]", "docstring": "List the nodes reachable in one step from this node.", "id": "f1675:c1:m2"}
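`proxy_settings_module` above swaps the real settings module in `sys.modules` for a dynamically built `ModuleType` subclass, so attribute access on the module can be intercepted. A self-contained sketch of that swap; the names are illustrative, and the library's `ModuleProxy.bind` details are not shown in the records:

    import sys
    from types import ModuleType

    def proxy_module(module_name, dynamic_attrs):
        # Replace sys.modules[module_name] with a proxy: names listed in
        # dynamic_attrs are resolved on every access, everything else is
        # delegated to the real module object.
        real = sys.modules[module_name]

        class Proxied(ModuleType):
            def __getattr__(self, name):
                if name in dynamic_attrs:
                    return dynamic_attrs[name]()
                return getattr(real, name)

        sys.modules[module_name] = Proxied(module_name)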
{"signature": "def recursive_best_first_search(problem, h=None):", "body": "h = memoize(h or problem.h, '')def RBFS(problem, node, flimit):if problem.goal_test(node.state):return node, successors = node.expand(problem)if len(successors) == :return None, infinityfor s in successors:s.f = max(s.path_cost + h(s), node.f)while True:successors.sort(key=lambda x: x.f) best = successors[]if best.f > flimit:return None, best.fif len(successors) > :alternative = successors[].felse:alternative = infinityresult, best.f = RBFS(problem, best, min(flimit, alternative))if result is not None:return result, best.fnode = Node(problem.initial)node.f = h(node)result, bestf = RBFS(problem, node, infinity)return result", "docstring": "[Fig. 3.26]", "id": "f1675:m11"} {"signature": "def boggle_hill_climbing(board=None, ntimes=, verbose=True):", "body": "finder = BoggleFinder()if board is None:board = random_boggle()best = len(finder.set_board(board))for _ in range(ntimes):i, oldc = mutate_boggle(board)new = len(finder.set_board(board))if new > best:best = newif verbose: print(best, _, board)else:board[i] = oldc if verbose:print_boggle(board)return board, best", "docstring": "Solve inverse Boggle by hill-climbing: find a high-scoring board by\n starting with a random one and changing it.", "id": "f1675:m26"} {"signature": "def mate(self, other):", "body": "c = random.randrange(len(self.genes))return self.__class__(self.genes[:c] + other.genes[c:])", "docstring": "Return a new individual crossing self and other.", "id": "f1675:c3:m1"} {"signature": "def nodes(self):", "body": "return list(self.dict.keys())", "docstring": "Return a list of nodes in the graph.", "id": "f1675:c4:m5"} {"signature": "def online_dfs_agent(s1):", "body": "unimplemented()", "docstring": "[Fig. 4.21]", "id": "f1675:m16"} {"signature": "def RandomGraph(nodes=list(range()), min_links=, width=, height=,curvature=lambda: random.uniform(, )):", "body": "g = UndirectedGraph()g.locations = {}for node in nodes:g.locations[node] = (random.randrange(width), random.randrange(height))for i in range(min_links):for node in nodes:if len(g.get(node)) < min_links:here = g.locations[node]def distance_to_node(n):if n is node or g.get(node,n): return infinityreturn distance(g.locations[n], here)neighbor = argmin(nodes, distance_to_node)d = distance(g.locations[neighbor], here) * curvature()g.connect(node, neighbor, int(d))return g", "docstring": "Construct a random graph, with the specified nodes, and random links.\n The nodes are laid out randomly on a (width x height) rectangle.\n Then each node is connected to the min_links nearest neighbors.\n Because inverse links are added, some nodes will have more connections.\n The distance between nodes is the hypotenuse times curvature(),\n where curvature() defaults to a random number between 1.1 and 1.5.", "id": "f1675:m21"} {"signature": "def compare_graph_searchers():", "body": "compare_searchers(problems=[GraphProblem('', '', romania),GraphProblem('', '', romania),GraphProblem('', '', australia)],header=['', '', '', ''])", "docstring": "Prints a table of results like this:\n >>> compare_graph_searchers()\n Searcher Romania(A, B) Romania(O, N) Australia \n breadth_first_tree_search < 21/ 22/ 59/B> <1158/1159/3288/N> < 7/ 8/ 22/WA>\n breadth_first_search < 7/ 11/ 18/B> < 19/ 20/ 45/N> < 2/ 6/ 8/WA>\n depth_first_graph_search < 8/ 9/ 20/B> < 16/ 17/ 38/N> < 4/ 5/ 11/WA>\n iterative_deepening_search < 11/ 33/ 31/B> < 656/1815/1812/N> < 3/ 11/ 11/WA>\n depth_limited_search < 54/ 65/ 185/B> < 387/1012/1125/N> < 50/ 54/ 200/WA>\n recursive_best_first_search < 5/ 6/ 15/B> <5887/5888/16532/N> < 11/ 12/ 43/WA>", "id": "f1675:m30"} {"signature": "def boggle_neighbors(n2, cache={}):", "body": "if cache.get(n2):return cache.get(n2)n = exact_sqrt(n2)neighbors = [None] * n2for i in range(n2):neighbors[i] = []on_top = i < non_bottom = i >= n2 - non_left = i % n == on_right = (i+) % n == if not on_top:neighbors[i].append(i - n)if not on_left: neighbors[i].append(i - n - )if not on_right: neighbors[i].append(i - n + )if not on_bottom:neighbors[i].append(i + n)if not on_left: neighbors[i].append(i + n - )if not on_right: neighbors[i].append(i + n + )if not on_left: neighbors[i].append(i - )if not on_right: neighbors[i].append(i + )cache[n2] =
neighborsreturn neighbors", "docstring": "Return a list of lists, where the i-th element is the list of indexes\n for the neighbors of square i.", "id": "f1675:m24"} {"signature": "def depth_first_tree_search(problem):", "body": "return tree_search(problem, Stack())", "docstring": "Search the deepest nodes in the search tree first.", "id": "f1675:m3"} {"signature": "def value(self, state):", "body": "abstract", "docstring": "For optimization problems, each state has a value. Hill-climbing\n and related algorithms try to maximize this value.", "id": "f1675:c0:m5"} {"signature": "def hill_climbing(problem):", "body": "current = Node(problem.initial)while True:neighbors = current.expand(problem)if not neighbors:breakneighbor = argmax_random_tie(neighbors,lambda node: problem.value(node.state))if problem.value(neighbor.state) <= problem.value(current.state):breakcurrent = neighborreturn current.state", "docstring": "From the initial node, keep choosing the neighbor with highest value,\n stopping when no neighbor is better. [Fig. 4.2]", "id": "f1675:m12"} {"signature": "def lrta_star_agent(s1):", "body": "unimplemented()", "docstring": "[Fig. 4.24]", "id": "f1675:m17"} {"signature": "def __init__(self, initial, goal=None):", "body": "self.initial = initial; self.goal = goal", "docstring": "The constructor specifies the initial state, and possibly a goal\n state, if there is a unique goal. Your subclass's constructor can add\n other arguments.", "id": "f1675:c0:m0"} {"signature": "def __len__(self):", "body": "return len(self.found)", "docstring": "The number of words found.", "id": "f1675:c8:m5"} {"signature": "def breadth_first_tree_search(problem):", "body": "return tree_search(problem, FIFOQueue())", "docstring": "Search the shallowest nodes in the search tree first.", "id": "f1675:m2"} {"signature": "def connect(self, A, B, distance=):", "body": "self.connect1(A, B, distance)if not self.directed: self.connect1(B, A, distance)", "docstring": "Add a link from A and B of given distance, and also add the inverse\n link if the graph is undirected.", "id": "f1675:c4:m2"} {"signature": "def result(self, state, row):", "body": "col = state.index(None)new = state[:]new[col] = rowreturn new", "docstring": "Place the next queen at the given row.", "id": "f1675:c6:m2"} {"signature": "def UndirectedGraph(dict=None):", "body": "return Graph(dict=dict, directed=False)", "docstring": "Build a Graph where every edge (including future ones) goes both ways.", "id": "f1675:m20"} {"signature": "def graph_search(problem, frontier):", "body": "frontier.append(Node(problem.initial))explored = set()while frontier:node = frontier.pop()if problem.goal_test(node.state):return nodeexplored.add(node.state)frontier.extend(child for child in node.expand(problem)if child.state not in exploredand child not in frontier)return None", "docstring": "Search through the successors of a problem to find a goal.\n The argument frontier should be an empty queue.\n If two paths reach a state, only use the first one. [Fig. 
3.7]", "id": "f1675:m1"} {"signature": "def lookup(self, prefix, lo=, hi=None):", "body": "words = self.wordsif hi is None: hi = len(words)i = bisect.bisect_left(words, prefix, lo, hi)if i < len(words) and words[i].startswith(prefix):return i, (words[i] == prefix)else:return None, False", "docstring": "See if prefix is in dictionary, as a full word or as a prefix.\n Return two values: the first is the lowest i such that\n words[i].startswith(prefix), or is None; the second is\n True iff prefix itself is in the Wordlist.", "id": "f1675:c7:m1"} {"signature": "def mutate(self):", "body": "abstract", "docstring": "Change a few of my genes.", "id": "f1675:c3:m2"} {"signature": "def genetic_search(problem, fitness_fn, ngen=, pmut=, n=):", "body": "s = problem.initial_statestates = [problem.result(s, a) for a in problem.actions(s)]random.shuffle(states)return genetic_algorithm(states[:n], problem.value, ngen, pmut)", "docstring": "Call genetic_algorithm on the appropriate parts of a problem.\n This requires the problem to have states that can mate and mutate,\n plus a value method that scores states.", "id": "f1675:m18"} {"signature": "def child_node(self, problem, action):", "body": "next = problem.result(self.state, action)return Node(next, self, action,problem.path_cost(self.path_cost, self.state, action, next))", "docstring": "Fig. 3.10", "id": "f1675:c1:m3"} {"signature": "def depth_limited_search(problem, limit=):", "body": "def recursive_dls(node, problem, limit):if problem.goal_test(node.state):return nodeelif node.depth == limit:return ''else:cutoff_occurred = Falsefor child in node.expand(problem):result = recursive_dls(child, problem, limit)if result == '':cutoff_occurred = Trueelif result is not None:return resultreturn if_(cutoff_occurred, '', None)return recursive_dls(Node(problem.initial), problem, limit)", "docstring": "[Fig. 3.17]", "id": "f1675:m8"} {"signature": "def path_cost(self, c, state1, action, state2):", "body": "return c + ", "docstring": "Return the cost of a solution path that arrives at state2 from\n state1 via action, assuming cost c to get up to state1. If the problem\n is such that the path doesn't matter, this function will only look at\n state2. If the path does matter, it will consider c and maybe state1\n and action. The default method costs 1 for every step in the path.", "id": "f1675:c0:m4"} {"signature": "def simulated_annealing(problem, schedule=exp_schedule()):", "body": "current = Node(problem.initial)for t in range(sys.maxsize):T = schedule(t)if T == :return currentneighbors = current.expand(problem)if not neighbors:return currentnext = random.choice(neighbors)delta_e = problem.value(next.state) - problem.value(current.state)if delta_e > or probability(math.exp(delta_e/T)):current = next", "docstring": "[Fig. 4.5]", "id": "f1675:m14"}
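The local-search routines above (hill_climbing and simulated_annealing) only need actions, result, and value from a problem. A toy maximization problem as a sketch; the class name and the peak at x = 4 are invented for illustration:

    class SquareProblem(Problem):
        # Integer states; moves of +/-1; value peaks at state == 4.
        def actions(self, state):
            return [-1, +1]
        def result(self, state, action):
            return state + action
        def value(self, state):
            return -(state - 4) ** 2

    print(hill_climbing(SquareProblem(0)))  # climbs from 0 up to 4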
{"signature": "def ModelBasedReflexAgentProgram(rules, update_state):", "body": "def program(percept):program.state = update_state(program.state, program.action, percept)rule = rule_match(program.state, rules)action = rule.actionreturn actionprogram.state = program.action = Nonereturn program", "docstring": "This agent takes action based on the percept and state. [Fig. 2.12]", "id": "f1676:m4"} {"signature": "def percept(self, agent):", "body": "return (agent.location, self.status[agent.location])", "docstring": "Returns the agent's location, and the location status (Dirty/Clean).", "id": "f1676:c8:m2"} {"signature": "def TableDrivenVacuumAgent():", "body": "table = {((loc_A, ''),): '',((loc_A, ''),): '',((loc_B, ''),): '',((loc_B, ''),): '',((loc_A, ''), (loc_A, '')): '',((loc_A, ''), (loc_A, '')): '',((loc_A, ''), (loc_A, ''), (loc_A, '')): '',((loc_A, ''), (loc_A, ''), (loc_A, '')): '',}return Agent(TableDrivenAgentProgram(table))", "docstring": "[Fig. 2.3]", "id": "f1676:m7"} {"signature": "def delete_thing(self, thing):", "body": "try:self.things.remove(thing)except ValueError as e:print(e)print(\"\")print(\"\" % (thing, thing.location))print(\"\" % [(thing, thing.location)for thing in self.things])if thing in self.agents:self.agents.remove(thing)", "docstring": "Remove a thing from the environment.", "id": "f1676:c2:m12"} {"signature": "def RandomAgentProgram(actions):", "body": "return lambda percept: random.choice(actions)", "docstring": "An agent that chooses an action at random, ignoring all percepts.", "id": "f1676:m2"} {"signature": "def list_things_at(self, location, tclass=Thing):", "body": "return [thing for thing in self.thingsif thing.location == location and isinstance(thing, tclass)]", "docstring": "Return all things exactly at a given location.", "id": "f1676:c2:m9"} {"signature": "def run(self, steps=):", "body": "for step in range(steps):if self.is_done(): returnself.step()", "docstring": "Run the Environment for a given number of time steps.", "id": "f1676:c2:m8"} {"signature": "def execute_action(self, agent, action):", "body": "if action == '':agent.location = loc_Bagent.performance -= elif action == '':agent.location = loc_Aagent.performance -= elif action == '':if self.status[agent.location] == '':agent.performance += self.status[agent.location] = ''", "docstring": "Change agent's location and/or location's status; track performance.\n Score 10 for each dirt cleaned; -1 for each move.", "id": "f1676:c8:m3"} {"signature": "def turn_heading(self, heading, inc):", "body": "return turn_heading(heading, inc)", "docstring": "Return the heading to the left (inc=+1) or right (inc=-1) of heading.", "id": "f1676:c3:m11"} {"signature": "def rule_match(state, rules):", "body": "for rule in rules:if rule.matches(state):return rule", "docstring": "Find the first rule that matches state.", "id": "f1676:m5"} {"signature": "def default_location(self, thing):", "body": "return None", "docstring": "Default location to place a new thing with unspecified location.", "id": "f1676:c2:m4"} {"signature": "def add_walls(self):", "body": "for x in range(self.width):self.add_thing(Wall(), (x, ))self.add_thing(Wall(), (x, self.height-))for y in range(self.height):self.add_thing(Wall(), (, y))self.add_thing(Wall(), (self.width-, y))", "docstring": "Put walls around the entire perimeter of the grid.", "id": "f1676:c3:m9"} {"signature": "def SimpleReflexAgentProgram(rules, interpret_input):", "body": "def program(percept):state = interpret_input(percept)rule = rule_match(state, rules)action = rule.actionreturn actionreturn program", "docstring": "This agent takes action based solely on the percept. [Fig.
2.10]", "id": "f1676:m3"} {"signature": "def RandomVacuumAgent():", "body": "return Agent(RandomAgentProgram(['', '', '', '']))", "docstring": "Randomly choose one of the actions from the vacuum environment.", "id": "f1676:m6"} {"signature": "def move_to(self, thing, destination):", "body": "thing.bump = self.some_things_at(destination, Obstacle)if not thing.bump:thing.location = destinationfor o in self.observers:o.thing_moved(thing)", "docstring": "Move a thing to a new location.", "id": "f1676:c3:m6"} {"signature": "def can_grab(self, thing):", "body": "return False", "docstring": "Returns True if this agent can grab this thing.\n Override for appropriate subclasses of Agent and Thing.", "id": "f1676:c1:m1"} {"signature": "def add_thing(self, thing, location=None):", "body": "if not isinstance(thing, Thing):thing = Agent(thing)assert thing not in self.things, \"\"thing.location = location or self.default_location(thing)self.things.append(thing)if isinstance(thing, Agent):thing.performance = self.agents.append(thing)", "docstring": "Add a thing to the environment, setting its location. For\n convenience, if thing is an agent program we make a new agent\n for it. (Shouldn't need to override this.", "id": "f1676:c2:m11"} {"signature": "def present(self, results):", "body": "for (score, d) in results:doc = self.documents[d]print (\"\"% ( * score, doc.url, doc.title[:].expandtabs()))", "docstring": "Present the results as a list.", "id": "f1677:c2:m5"} {"signature": "def all_shifts(text):", "body": "return [shift_encode(text, n) for n in range(len(alphabet))]", "docstring": "Return a list of all 26 possible encodings of text by a shift cipher.", "id": "f1677:m7"} {"signature": "def score(self, word, docid):", "body": "return (math.log( + self.index[word][docid])/ math.log( + self.documents[docid].nwords))", "docstring": "Compute a score for this word on this docid.", "id": "f1677:c2:m4"} {"signature": "def score(self, plaintext):", "body": "s = for bi in bigrams(plaintext):s = s * self.P2[bi]return s", "docstring": "Return a score for text based on how common letters pairs are.", "id": "f1677:c5:m1"} {"signature": "def index_collection(self, filenames):", "body": "for filename in filenames:self.index_document(open(filename).read(), filename)", "docstring": "Index a whole collection of files.", "id": "f1677:c2:m1"} {"signature": "def words(text, reg=re.compile('')):", "body": "return reg.findall(text.lower())", "docstring": "Return a list of the words in text, ignoring punctuation and\n converting everything to lowercase (to canonicalize).\n >>> words(\"``EGAD!'' Edgar cried.\")\n ['egad', 'edgar', 'cried']", "id": "f1677:m1"} {"signature": "def add(self, ngram):", "body": "CountingProbDist.add(self, ngram)self.cond_prob[ngram[:-]].add(ngram[-])", "docstring": "Count 1 for P[(w1, ..., wn)] and for P(wn | (w1, ..., wn-1)", "id": "f1677:c1:m1"} {"signature": "def add_sequence(self, words):", "body": "n = self.nwords = ['',] * (n-) + wordsfor i in range(len(words)-n):self.add(tuple(words[i:i+n]))", "docstring": "Add each of the tuple words[i:i+n], using a sliding window.\n Prefix some copies of the empty word, '', to make the start work.", "id": "f1677:c1:m2"} {"signature": "def __init__(self, stopwords=''):", "body": "update(self, index=DefaultDict(DefaultDict()),stopwords=set(words(stopwords)), documents=[])", "docstring": "Create an IR System. 
Optionally specify stopwords.", "id": "f1677:c2:m0"} {"signature": "def viterbi_segment(text, P):", "body": "n = len(text)words = [''] + list(text)best = [] + [] * nfor i in range(n+):for j in range(, i):w = text[j:i]if P[w] * best[i - len(w)] >= best[i]:best[i] = P[w] * best[i - len(w)]words[i] = wsequence = []; i = len(words)-while i > :sequence[:] = [words[i]]i = i - len(words[i])return sequence, best[-]", "docstring": "Find the best segmentation of the string of characters, given the\n UnigramTextModel P.", "id": "f1677:m0"} {"signature": "def best_policy(mdp, U):", "body": "pi = {}for s in mdp.states:pi[s] = argmax(mdp.actions(s), lambda a:expected_utility(a, s, U, mdp))return pi", "docstring": "Given an MDP and a utility function U, determine the best policy,\n as a mapping from state to action. (Equation 17.4)", "id": "f1678:m1"} {"signature": "def T(self, state, action):", "body": "abstract", "docstring": "Transition model. From a state and an action, return a list\n of (probability, result-state) pairs.", "id": "f1678:c0:m2"} {"signature": "def value_iteration(mdp, epsilon=):", "body": "U1 = dict([(s, ) for s in mdp.states])R, T, gamma = mdp.R, mdp.T, mdp.gammawhile True:U = U1.copy()delta = for s in mdp.states:U1[s] = R(s) + gamma * max([sum([p * U[s1] for (p, s1) in T(s, a)])for a in mdp.actions(s)])delta = max(delta, abs(U1[s] - U[s]))if delta < epsilon * ( - gamma) / gamma:return U", "docstring": "Solving an MDP by value iteration. [Fig. 17.4]", "id": "f1678:m0"} {"signature": "def policy_evaluation(pi, U, mdp, k=):", "body": "R, T, gamma = mdp.R, mdp.T, mdp.gammafor i in range(k):for s in mdp.states:U[s] = R(s) + gamma * sum([p * U[s1] for (p, s1) in T(s, pi[s])])return U", "docstring": "Return an updated utility mapping U from each state in the MDP to its\n utility, using an approximation (modified policy iteration).", "id": "f1678:m4"} {"signature": "def WeightedMajority(predictors, weights):", "body": "def predict(example):return weighted_mode((predictor(example) for predictor in predictors),weights)return predict", "docstring": "Return a predictor that takes a weighted vote.", "id": "f1680:m16"}
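`value_iteration` above repeats the Bellman backup U1[s] = R(s) + gamma * max_a sum_s' P(s' | s, a) * U[s'] until the largest update falls below epsilon * (1 - gamma) / gamma. The same loop spelled out for a hand-rolled two-state chain (self-contained; does not use the MDP class):

    states = ['a', 'b']
    R = {'a': 0.0, 'b': 1.0}
    # T[s][action] -> list of (probability, next_state) pairs
    T = {'a': {'stay': [(1.0, 'a')], 'move': [(1.0, 'b')]},
         'b': {'stay': [(1.0, 'b')], 'move': [(1.0, 'a')]}}
    gamma, epsilon = 0.9, 0.001

    U1 = {s: 0.0 for s in states}
    while True:
        U = U1.copy()
        delta = 0.0
        for s in states:
            U1[s] = R[s] + gamma * max(
                sum(p * U[s2] for p, s2 in outcomes)
                for outcomes in T[s].values())
            delta = max(delta, abs(U1[s] - U[s]))
        if delta < epsilon * (1 - gamma) / gamma:
            break
    print(U1)  # U1['b'] exceeds U1['a'], reflecting b's one-step reward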
{"signature": "def setproblem(self, target, inputs=None, exclude=()):", "body": "self.target = self.attrnum(target)exclude = list(map(self.attrnum, exclude))if inputs:self.inputs = removeall(self.target, inputs)else:self.inputs = [a for a in self.attrsif a != self.target and a not in exclude]if not self.values:self.values = list(map(unique, list(zip(*self.examples))))self.check_me()", "docstring": "Set (or change) the target and/or inputs.\n This way, one DataSet can be used multiple ways. inputs, if specified,\n is a list of attributes, or specify exclude as a list of attributes\n to not use in inputs. Attributes can be -n .. n, or an attrname.\n Also computes the list of possible values, if that wasn't done yet.", "id": "f1680:c0:m1"} {"signature": "def replicated_dataset(dataset, weights, n=None):", "body": "n = n or len(dataset.examples)result = copy.copy(dataset)result.examples = weighted_replicate(dataset.examples, weights, n)return result", "docstring": "Copy dataset, replicating each example in proportion to its weight.", "id": "f1680:m19"} {"signature": "def DecisionListLearner(dataset):", "body": "def decision_list_learning(examples):if not examples:return [(True, False)]t, o, examples_t = find_examples(examples)if not t:raise Failurereturn [(t, o)] + decision_list_learning(examples - examples_t)def find_examples(examples):\"\"\"\"\"\"unimplemented()def passes(example, test):\"\"unimplemented()def predict(example):\"\"for test, outcome in predict.decision_list:if passes(example, test):return outcomepredict.decision_list = decision_list_learning(set(dataset.examples))return predict", "docstring": "[Fig. 18.11]", "id": "f1680:m10"} {"signature": "def EnsembleLearner(learners):", "body": "def train(dataset):predictors = [learner(dataset) for learner in learners]def predict(example):return mode(predictor(example) for predictor in predictors)return predictreturn train", "docstring": "Given a list of learning algorithms, have them vote.", "id": "f1680:m14"} {"signature": "def weighted_mode(values, weights):", "body": "totals = defaultdict(int)for v, w in zip(values, weights):totals[v] += wreturn max(list(totals.keys()), key=totals.get)", "docstring": "Return the value with the greatest total weight.\n >>> weighted_mode('abbaa', [1,2,3,1,2])\n 'b'", "id": "f1680:m17"} {"signature": "def WeightedLearner(unweighted_learner):", "body": "def train(dataset, weights):return unweighted_learner(replicated_dataset(dataset, weights))return train", "docstring": "Given a learner that takes just an unweighted dataset, return\n one that takes also a weight for each example. [p. 749 footnote 14]", "id": "f1680:m18"} {"signature": "def NaiveBayesLearner(dataset):", "body": "targetvals = dataset.values[dataset.target]target_dist = CountingProbDist(targetvals)attr_dists = dict(((gv, attr), CountingProbDist(dataset.values[attr]))for gv in targetvalsfor attr in dataset.inputs)for example in dataset.examples:targetval = example[dataset.target]target_dist.add(targetval)for attr in dataset.inputs:attr_dists[targetval, attr].add(example[attr])def predict(example):\"\"\"\"\"\"def class_probability(targetval):return (target_dist[targetval]* product(attr_dists[targetval, attr][example[attr]]for attr in dataset.inputs))return argmax(targetvals, class_probability)return predict", "docstring": "Just count how many times each value of each input attribute\n occurs, conditional on the target value.
Count the different\n target values too.", "id": "f1680:m6"} {"signature": "def add(self, o):", "body": "self.smooth_for(o)self.dictionary[o] += self.n_obs += self.sampler = None", "docstring": "Add an observation o to the distribution.", "id": "f1680:c1:m1"} {"signature": "def AdaBoost(L, K):", "body": "def train(dataset):examples, target = dataset.examples, dataset.targetN = len(examples)epsilon = /(*N)w = [/N] * Nh, z = [], []for k in range(K):h_k = L(dataset, w)h.append(h_k)error = sum(weight for example, weight in zip(examples, w)if example[target] != h_k(example))error = clip(error, epsilon, -epsilon)for j, example in enumerate(examples):if example[target] == h_k(example):w[j] *= error / ( - error)w = normalize(w)z.append(math.log(( - error) / error))return WeightedMajority(h, z)return train", "docstring": "[Fig. 18.34]", "id": "f1680:m15"} {"signature": "def attrnum(self, attr):", "body": "if attr < :return len(self.attrs) + attrelif isinstance(attr, str):return self.attrnames.index(attr)else:return attr", "docstring": "Returns the number used for attr, which can be a name, or -n .. n-1.", "id": "f1680:c0:m5"} {"signature": "def add_example(self, example):", "body": "self.check_example(example)self.examples.append(example)", "docstring": "Add an example to the list of examples, checking it first.", "id": "f1680:c0:m3"} {"signature": "def parse_csv(input, delim=''):", "body": "lines = [line for line in input.splitlines() if line.strip()]return [list(map(num_or_str, line.split(delim))) for line in lines]", "docstring": "r\"\"\"Input is a string consisting of lines, each line has comma-delimited\n fields. Convert this into a list of lists. Blank lines are skipped.\n Fields that look like numbers are converted to numbers.\n The delim defaults to ',' but '\\t' and None are also reasonable values.\n >>> parse_csv('1, 2, 3 \\n 0, 2, na')\n [[1, 2, 3], [0, 2, 'na']]", "id": "f1680:m4"} {"signature": "def Xor(n):", "body": "return Parity(, n, name=\"\")", "docstring": "Return a DataSet with n examples of 2-input xor.", "id": "f1680:m32"} {"signature": "def NeuralNetLearner(dataset, sizes):", "body": "activations = [[ for i in range(n)] for n in sizes]weights = []def predict(example):unimplemented()return predict", "docstring": "Layered feed-forward network.", "id": "f1680:m11"} {"signature": "def weighted_replicate(seq, weights, n):", "body": "assert len(seq) == len(weights)weights = normalize(weights)wholes = [int(w*n) for w in weights]fractions = [(w*n) % for w in weights]return (flatten([x] * nx for x, nx in zip(seq, wholes))+ weighted_sample_with_replacement(seq, fractions, n - sum(wholes)))", "docstring": "Return n selections from seq, with the count of each element of\n seq proportional to the corresponding weight (filling in fractions\n randomly).\n >>> weighted_replicate('ABC', [1,2,1], 4)\n ['A', 'B', 'B', 'C']", "id": "f1680:m20"} {"signature": "def __init__(self, examples=None, attrs=None, attrnames=None, target=-,inputs=None, values=None, distance=mean_boolean_error,name='', source='', exclude=()):", "body": "update(self, name=name, source=source, values=values, distance=distance)if isinstance(examples, str):self.examples = parse_csv(examples)elif examples is None:self.examples = parse_csv(DataFile(name+'').read())else:self.examples = examplesif not attrs and self.examples:attrs = list(range(len(self.examples[])))self.attrs = attrsif isinstance(attrnames, str):self.attrnames = attrnames.split()else:self.attrnames = attrnames or attrsself.setproblem(target, inputs=inputs, 
exclude=exclude)", "docstring": "Accepts any of DataSet's fields. Examples can also be a\n string or file from which to parse examples using parse_csv.\n Optional parameter: exclude, as documented in .setproblem().\n >>> DataSet(examples='1, 2, 3')\n ", "id": "f1680:c0:m0"} {"signature": "def add(self, val, subtree):", "body": "self.branches[val] = subtree", "docstring": "Add a branch. If self.attr = val, go to the given subtree.", "id": "f1680:c2:m2"} {"signature": "def leave1out(learner, dataset):", "body": "return cross_validation(learner, dataset, k=len(dataset.examples))", "docstring": "Leave one out cross-validation over the dataset.", "id": "f1680:m25"} {"signature": "def min_conflicts(csp, max_steps=):", "body": "csp.current = current = {}for var in csp.vars:val = min_conflicts_value(csp, var, current)csp.assign(var, val, current)for i in range(max_steps):conflicted = csp.conflicted_vars(current)if not conflicted:return currentvar = random.choice(conflicted)val = min_conflicts_value(csp, var, current)csp.assign(var, val, current)return None", "docstring": "Solve a CSP by stochastic hillclimbing on the number of conflicts.", "id": "f1681:m11"} {"signature": "def conflicted_vars(self, current):", "body": "return [var for var in self.varsif self.nconflicts(var, current[var], current) > ]", "docstring": "Return a list of variables in current assignment that are in conflict", "id": "f1681:c0:m14"} {"signature": "def nconflicts(self, var, val, assignment):", "body": "n = len(self.vars)c = self.rows[val] + self.downs[var+val] + self.ups[var-val+n-]if assignment.get(var, None) == val:c -= return c", "docstring": "The number of conflicts, as recorded with each assignment.\n Count conflicts in row and in up, down diagonals. If there\n is a queen there, it can't conflict with itself, so subtract 3.", "id": "f1681:c2:m1"} {"signature": "def choices(self, var):", "body": "return (self.curr_domains or self.domains)[var]", "docstring": "Return all values for var that aren't currently ruled out.", "id": "f1681:c0:m11"} {"signature": "def MapColoringCSP(colors, neighbors):", "body": "if isinstance(neighbors, str):neighbors = parse_neighbors(neighbors)return CSP(list(neighbors.keys()), UniversalDict(colors), neighbors,different_values_constraint)", "docstring": "Make a CSP for the problem of coloring a map with different colors\n for any two adjacent regions. Arguments are a list of colors, and a\n dict of {region: [neighbor,...]} entries. This dict may also be\n specified as a string of the form defined by parse_neighbors.", "id": "f1681:m17"} {"signature": "def revise(csp, Xi, Xj, removals):", "body": "revised = Falsefor x in csp.curr_domains[Xi][:]:if every(lambda y: not csp.constraints(Xi, x, Xj, y),csp.curr_domains[Xj]):csp.prune(Xi, x, removals)revised = Truereturn revised", "docstring": "Return true if we remove a value.", "id": "f1681:m1"} {"signature": "def min_conflicts_value(csp, var, current):", "body": "return argmin_random_tie(csp.domains[var],lambda val: csp.nconflicts(var, val, current))", "docstring": "Return the value that will give var the least number of conflicts.\n If there is a tie, choose at random.", "id": "f1681:m12"} {"signature": "def tree_csp_solver(csp):", "body": "n = len(csp.vars)assignment = {}root = csp.vars[]X, parent = topological_sort(csp.vars, root)for Xj in reversed(X):if not make_arc_consistent(parent[Xj], Xj, csp):return Nonefor Xi in X:if not csp.curr_domains[Xi]:return Noneassignment[Xi] = csp.curr_domains[Xi][]return assignment", "docstring": "[Fig. 
6.11]", "id": "f1681:m13"} {"signature": "def unordered_domain_values(var, assignment, csp):", "body": "return csp.choices(var)", "docstring": "The default value order.", "id": "f1681:m5"} {"signature": "def support_pruning(self):", "body": "if self.curr_domains is None:self.curr_domains = dict((v, list(self.domains[v]))for v in self.vars)", "docstring": "Make sure we can prune values from domains. (We want to pay\n for this only if we use it.)", "id": "f1681:c0:m8"} {"signature": "def mrv(assignment, csp):", "body": "return argmin_random_tie([v for v in csp.vars if v not in assignment],lambda var: num_legal_values(csp, var, assignment))", "docstring": "Minimum-remaining-values heuristic.", "id": "f1681:m3"} {"signature": "def result(self, state, action):", "body": "(var, val) = actionreturn state + ((var, val),)", "docstring": "Perform an action and return the new state.", "id": "f1681:c0:m6"} {"signature": "def forward_checking(csp, var, value, assignment, removals):", "body": "for B in csp.neighbors[var]:if B not in assignment:for b in csp.curr_domains[B][:]:if not csp.constraints(var, value, B, b):csp.prune(B, b, removals)if not csp.curr_domains[B]:return Falsereturn True", "docstring": "Prune neighbor values inconsistent with var=value.", "id": "f1681:m8"} {"signature": "def display(self, assignment):", "body": "print('', self, '', assignment)", "docstring": "Show a human-readable representation of the CSP.", "id": "f1681:c0:m4"} {"signature": "def Zebra():", "body": "Colors = ''.split()Pets = ''.split()Drinks = ''.split()Countries = ''.split()Smokes = ''.split()vars = Colors + Pets + Drinks + Countries + Smokesdomains = {}for var in vars:domains[var] = list(range(, ))domains[''] = []domains[''] = []neighbors = parse_neighbors(\"\"\"\"\"\", vars)for type in [Colors, Pets, Drinks, Countries, Smokes]:for A in type:for B in type:if A != B:if B not in neighbors[A]: neighbors[A].append(B)if A not in neighbors[B]: neighbors[B].append(A)def zebra_constraint(A, a, B, b, recurse=):same = (a == b)next_to = abs(a - b) == if A == '' and B == '': return sameif A == '' and B == '': return sameif A == '' and B == '': return next_toif A == '' and B == '': return next_toif A == '' and B == '': return sameif A == '' and B == '': return sameif A == '' and B == '': return sameif A == '' and B == '': return sameif A == '' and B == '': return sameif A == '' and B == '': return next_toif A == '' and B == '': return sameif A == '' and B == '': return (a - ) == bif recurse == : return zebra_constraint(B, b, A, a, )if ((A in Colors and B in Colors) or(A in Pets and B in Pets) or(A in Drinks and B in Drinks) or(A in Countries and B in Countries) or(A in Smokes and B in Smokes)): return not sameraise ''return CSP(vars, domains, neighbors, zebra_constraint)", "docstring": "Return an instance of the Zebra Puzzle.", "id": "f1681:m21"} {"signature": "def lcv(var, assignment, csp):", "body": "return sorted(csp.choices(var),key=lambda val: csp.nconflicts(var, val, assignment))", "docstring": "Least-constraining-values heuristic.", "id": "f1681:m6"} {"signature": "def nconflicts(self, var, val, assignment):", "body": "def conflict(var2):return (var2 in assignmentand not self.constraints(var, val, var2, assignment[var2]))return count_if(conflict, self.neighbors[var])", "docstring": "Return the number of conflicts var=val has with other variables.", "id": "f1681:c0:m3"} {"signature": "def AC3(csp, queue=None, removals=None):", "body": "if queue is None:queue = [(Xi, Xk) for Xi in csp.vars for Xk in csp.neighbors[Xi]]csp.support_pruning()while queue:(Xi, Xj) = queue.pop()if revise(csp, Xi, Xj, removals):if not csp.curr_domains[Xi]:return Falsefor Xk in csp.neighbors[Xi]:if Xk != Xi:queue.append((Xk, Xi))return True", "docstring": "[Fig. 6.3]", "id": "f1681:m0"}
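Putting the CSP machinery above together: MapColoringCSP builds the constraint graph from a parse_neighbors-style string, and min_conflicts searches it by local repair. A usage sketch mirroring the Australia example from the source library (the three-color palette is the conventional choice there):

    australia = MapColoringCSP(list('RGB'),
                               'SA: WA NT Q NSW V; NT: WA Q; NSW: Q V; T: ')
    assignment = min_conflicts(australia)
    print(assignment)  # e.g. {'WA': 'G', 'NT': 'B', 'SA': 'R', 'Q': 'G', ...}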
{"signature": "def parse_neighbors(neighbors, vars=[]):", "body": "dict = DefaultDict([])for var in vars:dict[var] = []specs = [spec.split('') for spec in neighbors.split('')]for (A, Aneighbors) in specs:A = A.strip()dict.setdefault(A, [])for B in Aneighbors.split():dict[A].append(B)dict[B].append(A)return dict", "docstring": "Convert a string of the form 'X: Y Z; Y: Z' into a dict mapping\n regions to neighbors. The syntax is a region name followed by a ':'\n followed by zero or more region names, followed by ';', repeated for\n each region name. If you say 'X: Y' you don't need 'Y: X'.\n >>> parse_neighbors('X: Y Z; Y: Z')\n {'Y': ['X', 'Z'], 'X': ['Y', 'Z'], 'Z': ['X', 'Y']}", "id": "f1681:m18"} {"signature": "def actions(self, state):", "body": "if len(state) == len(self.vars):return []else:assignment = dict(state)var = find_if(lambda v: v not in assignment, self.vars)return [(var, val) for val in self.domains[var]if self.nconflicts(var, val, assignment) == ]", "docstring": "Return a list of applicable actions: nonconflicting\n assignments to an unassigned variable.", "id": "f1681:c0:m5"} {"signature": "def first_unassigned_variable(assignment, csp):", "body": "return find_if(lambda var: var not in assignment, csp.vars)", "docstring": "The default variable order.", "id": "f1681:m2"} {"signature": "def assign(self, var, val, assignment):", "body": "oldval = assignment.get(var, None)if val != oldval:if oldval is not None: self.record_conflict(assignment, var, oldval, -)self.record_conflict(assignment, var, val, +)CSP.assign(self, var, val, assignment)", "docstring": "Assign var, and keep track of conflicts.", "id": "f1681:c2:m2"} {"signature": "def display(self, assignment):", "body": "n = len(self.vars)for val in range(n):for var in range(n):if assignment.get(var,'') == val: ch = ''elif (var+val) % == : ch = ''else: ch = ''print(ch, end='')print('', end='')for var in range(n):if assignment.get(var,'') == val: ch = ''else: ch = ''print(str(self.nconflicts(var, val, assignment))+ch, end='')print()", "docstring": "Print the queens and the nconflicts values (for debugging).", "id": "f1681:c2:m5"} {"signature": "def __init__(self, vars, domains, neighbors, constraints):", "body": "vars = vars or list(domains.keys())update(self, vars=vars, domains=domains,neighbors=neighbors, constraints=constraints,initial=(), curr_domains=None, nassigns=)", "docstring": "Construct a CSP problem. If vars is empty, it becomes domains.keys().", "id": "f1681:c0:m0"} {"signature": "def pl_fc_entails(KB, q):", "body": "count = dict([(c, len(conjuncts(c.args[]))) for c in KB.clausesif c.op == ''])inferred = DefaultDict(False)agenda = [s for s in KB.clauses if is_prop_symbol(s.op)]while agenda:p = agenda.pop()if p == q: return Trueif not inferred[p]:inferred[p] = Truefor c in KB.clauses_with_premise(p):count[c] -= if count[c] == :agenda.append(c.args[])return False", "docstring": "Use forward chaining to see if a PropDefiniteKB entails symbol q.\n [Fig.
7.15]\n >>> pl_fc_entails(Fig[7,15], expr('Q'))\n True", "id": "f1683:m23"} {"signature": "def expr(s):", "body": "if isinstance(s, Expr): return sif isnumber(s): return Expr(s)s = s.replace('', '').replace('', '')s = s.replace('', '').replace('', '')s = re.sub(r'', r'', s)return eval(s, {'':Expr})", "docstring": "Create an Expr representing a logic expression by parsing the input\n string. Symbols and numbers are automatically converted to Exprs.\n In addition you can use alternative spellings of these operators:\n 'x ==> y' parses as (x >> y) # Implication\n 'x <== y' parses as (x << y) # Reverse implication\n 'x <=> y' parses as (x % y) # Logical equivalence\n 'x =/= y' parses as (x ^ y) # Logical disequality (xor)\n But BE CAREFUL; precedence of implication is wrong. expr('P & Q ==> R & S')\n is ((P & (Q >> R)) & S); so you must use expr('(P & Q) ==> (R & S)').\n >>> expr('P <=> Q(1)')\n (P <=> Q(1))\n >>> expr('P & Q | ~R(x, F(x))')\n ((P & Q) | ~R(x, F(x)))", "id": "f1683:m1"} {"signature": "def ask_generator(self, query):", "body": "if pl_fc_entails(self.clauses, query):yield {}", "docstring": "Yield the empty substitution if KB implies query; else nothing.", "id": "f1683:c3:m1"} {"signature": "def variables(s):", "body": "result = set([])def walk(s):if is_variable(s):result.add(s)else:for arg in s.args:walk(arg)walk(s)return result", "docstring": "Return a set of the variables in expression s.\n >>> ppset(variables(F(x, A, y)))\n set([x, y])\n >>> ppset(variables(F(G(x), z)))\n set([x, z])\n >>> ppset(variables(expr('F(x, x) & G(x, y) & H(y, z) & R(A, z, z)')))\n set([x, y, z])", "id": "f1683:m5"} {"signature": "def move_not_inwards(s):", "body": "if s.op == '':NOT = lambda b: move_not_inwards(~b)a = s.args[]if a.op == '': return move_not_inwards(a.args[]) if a.op =='': return associate('', list(map(NOT, a.args)))if a.op =='': return associate('', list(map(NOT, a.args)))return selif is_symbol(s.op) or not s.args:return selse:return Expr(s.op, *list(map(move_not_inwards, s.args)))", "docstring": "Rewrite sentence s by moving negation sign inward.\n >>> move_not_inwards(~(A | B))\n (~A & ~B)\n >>> move_not_inwards(~(A & B))\n (~A | ~B)\n >>> move_not_inwards(~(~(A | ~B) | ~~C))\n ((A | ~B) & ~C)", "id": "f1683:m15"} {"signature": "def subst(s, x):", "body": "if isinstance(x, list):return [subst(s, xi) for xi in x]elif isinstance(x, tuple):return tuple([subst(s, xi) for xi in x])elif not isinstance(x, Expr):return xelif is_var_symbol(x.op):return s.get(x, x)else:return Expr(x.op, *[subst(s, arg) for arg in x.args])", "docstring": "Substitute the substitution s into the expression x.\n >>> subst({x: 42, y:0}, F(x) + y)\n (F(42) + 0)", "id": "f1683:m40"} {"signature": "def is_definite_clause(s):", "body": "if is_symbol(s.op):return Trueelif s.op == '':antecedent, consequent = s.argsreturn (is_symbol(consequent.op)and every(lambda arg: is_symbol(arg.op), conjuncts(antecedent)))else:return False", "docstring": "returns True for exprs s of the form A & B & ... & C ==> D,\n where all literals are positive. In clause form, this is\n ~A | ~B | ... 
| ~C | D, where exactly one clause is positive.\n >>> is_definite_clause(expr('Farmer(Mac)'))\n True\n >>> is_definite_clause(expr('~Farmer(Mac)'))\n False\n >>> is_definite_clause(expr('(Farmer(f) & Rabbit(r)) ==> Hates(f, r)'))\n True\n >>> is_definite_clause(expr('(Farmer(f) & ~Rabbit(r)) ==> Hates(f, r)'))\n False\n >>> is_definite_clause(expr('(Farmer(f) | Rabbit(r)) ==> Hates(f, r)'))\n False", "id": "f1683:m6"} {"signature": "def ask(self, query):", "body": "for result in self.ask_generator(query):return resultreturn False", "docstring": "Return a substitution that makes the query true, or,\n failing that, return False.", "id": "f1683:c0:m2"} {"signature": "def tell(self, sentence):", "body": "abstract", "docstring": "Add the sentence to the KB.", "id": "f1683:c0:m1"} {"signature": "def standardize_variables(sentence, dic=None):", "body": "if dic is None: dic = {}if not isinstance(sentence, Expr):return sentenceelif is_var_symbol(sentence.op):if sentence in dic:return dic[sentence]else:v = Expr('' % next(standardize_variables.counter))dic[sentence] = vreturn velse:return Expr(sentence.op,*[standardize_variables(a, dic) for a in sentence.args])", "docstring": "Replace all the variables in sentence with new variables.\n >>> e = expr('F(a, b, c) & G(c, A, 23)')\n >>> len(variables(standardize_variables(e)))\n 3\n >>> variables(e).intersection(variables(standardize_variables(e)))\n set([])\n >>> is_variable(standardize_variables(expr('x')))\n True", "id": "f1683:m42"} {"signature": "def diff(y, x):", "body": "if y == x: return ONEelif not y.args: return ZEROelse:u, op, v = y.args[], y.op, y.args[-]if op == '': return diff(u, x) + diff(v, x)elif op == '' and len(args) == : return -diff(u, x)elif op == '': return diff(u, x) - diff(v, x)elif op == '': return u * diff(v, x) + v * diff(u, x)elif op == '': return (v*diff(u, x) - u*diff(v, x)) / (v * v)elif op == '' and isnumber(x.op):return (v * u ** (v - ) * diff(u, x))elif op == '': return (v * u ** (v - ) * diff(u, x)+ u ** v * Expr('')(u) * diff(v, x))elif op == '': return diff(u, x) / uelse: raise ValueError(\"\" % (op, y, x))", "docstring": "Return the symbolic derivative, dy/dx, as an Expr.\n However, you probably want to simplify the results with simp.\n >>> diff(x * x, x)\n ((x * 1) + (x * 1))\n >>> simp(diff(x * x, x))\n (2 * x)", "id": "f1683:m47"} {"signature": "def is_var_symbol(s):", "body": "return is_symbol(s) and s[].islower()", "docstring": "A logic variable symbol is an initial-lowercase string.", "id": "f1683:m3"} {"signature": "def to_cnf(s):", "body": "if isinstance(s, str): s = expr(s)s = eliminate_implications(s) s = move_not_inwards(s) return distribute_and_over_or(s)", "docstring": "Convert a propositional logical sentence s to conjunctive normal form.\n That is, to the form ((A | ~B | ...) & (B | C | ...) & ...) [p. 
253]\n >>> to_cnf(\"~(B|C)\")\n (~B & ~C)\n >>> to_cnf(\"B <=> (P1|P2)\")\n ((~P1 | B) & (~P2 | B) & (P1 | P2 | ~B))\n >>> to_cnf(\"a | (b & c) | d\")\n ((b | a | d) & (c | a | d))\n >>> to_cnf(\"A & (B | (D & E))\")\n (A & (D | B) & (E | B))\n >>> to_cnf(\"A | (B | (C | (D & E)))\")\n ((D | A | B | C) & (E | A | B | C))", "id": "f1683:m13"} {"signature": "def dpll(clauses, symbols, model):", "body": "unknown_clauses = [] for c in clauses:val = pl_true(c, model)if val == False:return Falseif val != True:unknown_clauses.append(c)if not unknown_clauses:return modelP, value = find_pure_symbol(symbols, unknown_clauses)if P:return dpll(clauses, removeall(P, symbols), extend(model, P, value))P, value = find_unit_clause(clauses, model)if P:return dpll(clauses, removeall(P, symbols), extend(model, P, value))P, symbols = symbols[], symbols[:]return (dpll(clauses, symbols, extend(model, P, True)) ordpll(clauses, symbols, extend(model, P, False)))", "docstring": "See if the clauses are true in a partial model.", "id": "f1683:m25"} {"signature": "def __hash__(self):", "body": "return hash(self.op) ^ hash(tuple(self.args))", "docstring": "Need a hash method so Exprs can live in dicts.", "id": "f1683:c2:m5"} {"signature": "def tell(self, sentence):", "body": "assert is_definite_clause(sentence), \"\"self.clauses.append(sentence)", "docstring": "Add a definite clause to this KB.", "id": "f1683:c3:m0"} {"signature": "def eliminate_implications(s):", "body": "if not s.args or is_symbol(s.op): return s args = list(map(eliminate_implications, s.args))a, b = args[], args[-]if s.op == '':return (b | ~a)elif s.op == '':return (a | ~b)elif s.op == '':return (a | ~b) & (b | ~a)elif s.op == '':assert len(args) == return (a & ~b) | (~a & b)else:assert s.op in ('', '', '')return Expr(s.op, *args)", "docstring": "Change >>, <<, and <=> into &, |, and ~. That is, return an Expr\n that is equivalent to s, but has only &, |, and ~ as logical operators.\n >>> eliminate_implications(A >> (~B << C))\n ((~B | ~C) | ~A)\n >>> eliminate_implications(A ^ B)\n ((A & ~B) | (~A & B))", "id": "f1683:m14"} {"signature": "def pl_true(exp, model={}):", "body": "op, args = exp.op, exp.argsif exp == TRUE:return Trueelif exp == FALSE:return Falseelif is_prop_symbol(op):return model.get(exp)elif op == '':p = pl_true(args[], model)if p is None: return Noneelse: return not pelif op == '':result = Falsefor arg in args:p = pl_true(arg, model)if p is True: return Trueif p is None: result = Nonereturn resultelif op == '':result = Truefor arg in args:p = pl_true(arg, model)if p is False: return Falseif p is None: result = Nonereturn resultp, q = argsif op == '':return pl_true(~p | q, model)elif op == '':return pl_true(p | ~q, model)pt = pl_true(p, model)if pt is None: return Noneqt = pl_true(q, model)if qt is None: return Noneif op == '':return pt == qtelif op == '':return pt != qtelse:raise ValueError(\"\" + str(exp))", "docstring": "Return True if the propositional logic expression is true in the model,\n and False if it is false. 
If the model does not specify the value for\n every proposition, this may return None to indicate 'not obvious';\n this may happen even when the expression is tautological.", "id": "f1683:m12"} {"signature": "def tt_check_all(kb, alpha, symbols, model):", "body": "if not symbols:if pl_true(kb, model):result = pl_true(alpha, model)assert result in (True, False)return resultelse:return Trueelse:P, rest = symbols[], symbols[:]return (tt_check_all(kb, alpha, rest, extend(model, P, True)) andtt_check_all(kb, alpha, rest, extend(model, P, False)))", "docstring": "Auxiliary routine to implement tt_entails.", "id": "f1683:m9"} {"signature": "def KB_AgentProgram(KB):", "body": "steps = itertools.count()def program(percept):t = next(steps)KB.tell(make_percept_sentence(percept, t))action = KB.ask(make_action_query(t))KB.tell(make_action_sentence(action, t))return actiondef make_percept_sentence(self, percept, t):return Expr(\"\")(percept, t)def make_action_query(self, t):return expr(\"\" % t)def make_action_sentence(self, action, t):return Expr(\"\")(action[expr('')], t)return program", "docstring": "A generic logical knowledge-based agent program. [Fig. 7.1]", "id": "f1683:m0"} {"signature": "def ask_generator(self, query):", "body": "abstract", "docstring": "Yield all the substitutions that make query true.", "id": "f1683:c0:m3"} {"signature": "def parse_definite_clause(s):", "body": "assert is_definite_clause(s)if is_symbol(s.op):return [], selse:antecedent, consequent = s.argsreturn conjuncts(antecedent), consequent", "docstring": "Return the antecedents and the consequent of a definite clause.", "id": "f1683:m7"} {"signature": "def disjuncts(s):", "body": "return dissociate('', [s])", "docstring": "Return a list of the disjuncts in the sentence s.\n >>> disjuncts(A | B)\n [A, B]\n >>> disjuncts(A & B)\n [(A & B)]", "id": "f1683:m20"} {"signature": "def ppsubst(s):", "body": "ppdict(s)", "docstring": "Pretty-print substitution s", "id": "f1683:m54"} {"signature": "def unit_clause_assign(clause, model):", "body": "P, value = None, Nonefor literal in disjuncts(clause):sym, positive = inspect_literal(literal)if sym in model:if model[sym] == positive:return None, None elif P:return None, None else:P, value = sym, positivereturn P, value", "docstring": "Return a single variable/value pair that makes clause true in\n the model, if possible.\n >>> unit_clause_assign(A|B|C, {A:True})\n (None, None)\n >>> unit_clause_assign(B|~C, {A:True})\n (None, None)\n >>> unit_clause_assign(~A|~B, {A:True})\n (B, False)", "id": "f1683:m28"} {"signature": "def unify(x, y, s):", "body": "if s is None:return Noneelif x == y:return selif is_variable(x):return unify_var(x, y, s)elif is_variable(y):return unify_var(y, x, s)elif isinstance(x, Expr) and isinstance(y, Expr):return unify(x.args, y.args, unify(x.op, y.op, s))elif isinstance(x, str) or isinstance(y, str):return Noneelif issequence(x) and issequence(y) and len(x) == len(y):if not x: return sreturn unify(x[:], y[:], unify(x[], y[], s))else:return None", "docstring": "Unify expressions x,y with substitution s; return a substitution that\n would make x,y equal, or None if x,y can not unify. x and y can be\n variables (e.g. Expr('x')), constants, lists, or Exprs. [Fig. 
9.1]\n >>> ppsubst(unify(x + y, y + C, {}))\n {x: y, y: C}", "id": "f1683:m35"} {"signature": "def __init__(self, name, rules, lexicon):", "body": "update(self, name=name, rules=rules, lexicon=lexicon)self.categories = DefaultDict([])for lhs in lexicon:for word in lexicon[lhs]:self.categories[word].append(lhs)", "docstring": "A grammar has a set of rules and a lexicon.", "id": "f1684:c0:m0"} {"signature": "def extender(self, edge):", "body": "(j, k, B, _, _) = edgefor (i, j, A, alpha, B1b) in self.chart[j]:if B1b and B == B1b[]:self.add_edge([i, k, A, alpha + [edge], B1b[:]])", "docstring": "See what edges can be extended by this edge.", "id": "f1684:c1:m6"} {"signature": "def isa(self, word, cat):", "body": "return cat in self.categories[word]", "docstring": "Return True iff word is of category cat", "id": "f1684:c0:m2"} {"signature": "def generate_random(grammar=E_, s=''):", "body": "import randomdef rewrite(tokens, into):for token in tokens:if token in grammar.rules:rewrite(random.choice(grammar.rules[token]), into)elif token in grammar.lexicon:into.append(random.choice(grammar.lexicon[token]))else:into.append(token)return intoreturn ''.join(rewrite(s.split(), []))", "docstring": "Replace each token in s by a random entry in grammar (recursively).\n This is useful for testing a grammar, e.g. generate_random(E_)", "id": "f1684:m2"} {"signature": "def rewrites_for(self, cat):", "body": "return self.rules.get(cat, ())", "docstring": "Return a sequence of possible rhs's that cat can be rewritten as.", "id": "f1684:c0:m1"} {"signature": "def Lexicon(**rules):", "body": "for (lhs, rhs) in list(rules.items()):rules[lhs] = [word.strip() for word in rhs.split('')]return rules", "docstring": "Create a dictionary mapping symbols to alternative words.\n >>> Lexicon(Art = \"the | a | an\")\n {'Art': ['the', 'a', 'an']}", "id": "f1684:m1"} {"signature": "def __init__(self, grammar, trace=False):", "body": "update(self, grammar=grammar, trace=trace)", "docstring": "A datastructure for parsing a string; and methods to do the parse.\n self.chart[i] holds the edges that end just before the i'th word.\n Edges are 5-element lists of [start, end, lhs, [found], [expects]].", "id": "f1684:c1:m0"} {"signature": "def Rules(**rules):", "body": "for (lhs, rhs) in list(rules.items()):rules[lhs] = [alt.strip().split() for alt in rhs.split('')]return rules", "docstring": "Create a dictionary mapping symbols to alternative sequences.\n >>> Rules(A = \"B C | D E\")\n {'A': [['B', 'C'], ['D', 'E']]}", "id": "f1684:m0"} {"signature": "def actions(self, state):", "body": "abstract", "docstring": "Return a list of the allowable moves at this point.", "id": "f1685:c0:m0"} {"signature": "def alphabeta_full_search(state, game):", "body": "player = game.to_move(state)def max_value(state, alpha, beta):if game.terminal_test(state):return game.utility(state, player)v = -infinityfor a in game.actions(state):v = max(v, min_value(game.result(state, a), alpha, beta))if v >= beta:return valpha = max(alpha, v)return vdef min_value(state, alpha, beta):if game.terminal_test(state):return game.utility(state, player)v = infinityfor a in game.actions(state):v = min(v, max_value(game.result(state, a), alpha, beta))if v <= alpha:return vbeta = min(beta, v)return vreturn argmax(game.actions(state),lambda a: min_value(game.result(state, a),-infinity, infinity))", "docstring": "Search game to determine best action; use alpha-beta pruning.\n As in [Fig. 
5.7], this version searches all the way to the leaves.", "id": "f1685:m1"} {"signature": "def result(self, state, move):", "body": "abstract", "docstring": "Return the state that results from making a move from a state.", "id": "f1685:c0:m1"} {"signature": "def random_player(game, state):", "body": "return random.choice(game.actions(state))", "docstring": "A player that chooses a legal move at random.", "id": "f1685:m4"} {"signature": "def minimax_decision(state, game):", "body": "player = game.to_move(state)def max_value(state):if game.terminal_test(state):return game.utility(state, player)v = -infinityfor a in game.actions(state):v = max(v, min_value(game.result(state, a)))return vdef min_value(state):if game.terminal_test(state):return game.utility(state, player)v = infinityfor a in game.actions(state):v = min(v, max_value(game.result(state, a)))return vreturn argmax(game.actions(state),lambda a: min_value(game.result(state, a)))", "docstring": "Given a state in a game, calculate the best move by searching\n forward all the way to the terminal states. [Fig. 5.3]", "id": "f1685:m0"} {"signature": "def k_in_row(self, board, move, player, xxx_todo_changeme):", "body": "(delta_x, delta_y) = xxx_todo_changemex, y = moven = while board.get((x, y)) == player:n += x, y = x + delta_x, y + delta_yx, y = movewhile board.get((x, y)) == player:n += x, y = x - delta_x, y - delta_yn -= return n >= self.k", "docstring": "Return true if there is a line through move on board for player.", "id": "f1685:c2:m7"} {"signature": "def to_move(self, state):", "body": "return state.to_move", "docstring": "Return the player whose move it is in this state.", "id": "f1685:c0:m4"} {"signature": "def play_game(game, *players):", "body": "state = game.initialwhile True:for player in players:move = player(game, state)state = game.result(state, move)if game.terminal_test(state):return game.utility(state, game.to_move(game.initial))", "docstring": "Play an n-person, move-alternating game.\n >>> play_game(Fig52Game(), alphabeta_player, alphabeta_player)\n 3", "id": "f1685:m6"} {"signature": "def markov_blanket_sample(X, e, bn):", "body": "Xnode = bn.variable_node(X)Q = ProbDist(X)for xi in bn.variable_values(X):ei = extend(e, X, xi)Q[xi] = Xnode.p(xi, e) * product(Yj.p(ei[Yj.variable], ei)for Yj in Xnode.children)return probability(Q.normalize()[True])", "docstring": "Return a sample from P(X | mb) where mb denotes that the\n variables in the Markov blanket of X take their values from event\n e (which must assign a value to each). 
The Markov blanket of X is\n X's parents, children, and children's parents.", "id": "f1686:m18"} {"signature": "def pointwise_product(self, other, bn):", "body": "vars = list(set(self.vars) | set(other.vars))cpt = dict((event_values(e, vars), self.p(e) * other.p(e))for e in all_events(vars, bn, {}))return Factor(vars, cpt)", "docstring": "Multiply two factors, combining their variables.", "id": "f1686:c4:m1"} {"signature": "def all_events(vars, bn, e):", "body": "if not vars:yield eelse:X, rest = vars[], vars[:]for e1 in all_events(rest, bn, e):for x in bn.variable_values(X):yield extend(e1, X, x)", "docstring": "Yield every way of extending e with values for all vars.", "id": "f1686:m11"} {"signature": "def __init__(self, node_specs=[]):", "body": "update(self, nodes=[], vars=[])for node_spec in node_specs:self.add(node_spec)", "docstring": "nodes must be ordered with parents before children.", "id": "f1686:c2:m0"} {"signature": "def add(self, node_spec):", "body": "node = BayesNode(*node_spec)assert node.variable not in self.varsassert every(lambda parent: parent in self.vars, node.parents)self.nodes.append(node)self.vars.append(node.variable)for parent in node.parents:self.variable_node(parent).children.append(node)", "docstring": "Add a node to the net. Its parents must already be in the\n net, and its variable must not.", "id": "f1686:c2:m1"} {"signature": "def __setitem__(self, val, p):", "body": "if val not in self.values:self.values.append(val)self.prob[val] = p", "docstring": "Set P(val) = p.", "id": "f1686:c0:m2"} {"signature": "def prior_sample(bn):", "body": "event = {}for node in bn.nodes:event[node.variable] = node.sample(event)return event", "docstring": "Randomly sample from bn's full joint distribution. The result\n is a {variable: value} dict. [Fig. 14.13]", "id": "f1686:m12"} {"signature": "def elimination_ask(X, e, bn):", "body": "assert X not in e, \"\"factors = []for var in reversed(bn.vars):factors.append(make_factor(var, e, bn))if is_hidden(var, X, e):factors = sum_out(var, factors, bn)return pointwise_product(factors, bn).normalize()", "docstring": "Compute bn's P(X|e) by variable elimination. [Fig. 14.11]\n >>> elimination_ask('Burglary', dict(JohnCalls=T, MaryCalls=T), burglary\n ... ).show_approx()\n 'False: 0.716, True: 0.284", "id": "f1686:m6"} {"signature": "def p(self, value, event):", "body": "assert isinstance(value, bool)ptrue = self.cpt[event_values(event, self.parents)]return if_(value, ptrue, - ptrue)", "docstring": "Return the conditional probability\n P(X=value | parents=parent_values), where parent_values\n are the values of parents in event. 
(event must assign each\n parent a value.)\n >>> bn = BayesNode('X', 'Burglary', {T: 0.2, F: 0.625})\n >>> bn.p(False, {'Burglary': False, 'Earthquake': True})\n 0.375", "id": "f1686:c3:m1"} {"signature": "def normalize(self):", "body": "total = float(sum(self.prob.values()))if not (-epsilon < total < +epsilon):for val in self.prob:self.prob[val] /= totalreturn self", "docstring": "Make sure the probabilities of all values sum to 1.\n Returns the normalized distribution.\n Raises a ZeroDivisionError if the sum of the values is 0.\n >>> P = ProbDist('Flip'); P['H'], P['T'] = 35, 65\n >>> P = P.normalize()\n >>> print '%5.3f %5.3f' % (P.prob['H'], P.prob['T'])\n 0.350 0.650", "id": "f1686:c0:m3"} {"signature": "def enumerate_joint_ask(X, e, P):", "body": "assert X not in e, \"\"Q = ProbDist(X) Y = [v for v in P.variables if v != X and v not in e] for xi in P.values(X):Q[xi] = enumerate_joint(Y, extend(e, X, xi), P)return Q.normalize()", "docstring": "Return a probability distribution over the values of the variable X,\n given the {var:val} observations e, in the JointProbDist P. [Section 13.3]\n >>> P = JointProbDist(['X', 'Y'])\n >>> P[0,0] = 0.25; P[0,1] = 0.5; P[1,1] = P[2,1] = 0.125\n >>> enumerate_joint_ask('X', dict(Y=1), P).show_approx()\n '0: 0.667, 1: 0.167, 2: 0.167'", "id": "f1686:m2"} {"signature": "def fixed_lag_smoothing(e_t, hmm, d):", "body": "unimplemented()", "docstring": "[Fig. 15.6]", "id": "f1686:m20"} {"signature": "def values(self, var):", "body": "return self.vals[var]", "docstring": "Return the set of possible values for a variable.", "id": "f1686:c1:m3"} {"signature": "def enumerate_joint(vars, e, P):", "body": "if not vars:return P[e]Y, rest = vars[], vars[:]return sum([enumerate_joint(rest, extend(e, Y, y), P)for y in P.values(Y)])", "docstring": "Return the sum of those entries in P consistent with e,\n provided vars is P's remaining variables (the ones not in e).", "id": "f1686:m3"} {"signature": "def likelihood_weighting(X, e, bn, N):", "body": "W = dict((x, ) for x in bn.variable_values(X))for j in range(N):sample, weight = weighted_sample(bn, e) W[sample[X]] += weightreturn ProbDist(X, W)", "docstring": "Estimate the probability distribution of variable X given\n evidence e in BayesNet bn. [Fig. 14.15]\n >>> seed(1017)\n >>> likelihood_weighting('Burglary', dict(JohnCalls=T, MaryCalls=T),\n ... burglary, 10000).show_approx()\n 'False: 0.702, True: 0.298'", "id": "f1686:m15"} {"signature": "def assert_amnesty(self, input_code, errors, expected):", "body": "input_code = textwrap.dedent(input_code)expected = textwrap.dedent(expected)errors_by_line = defaultdict(list)for error in errors:errors_by_line[error.linenum].append(error)output_lines = itertools.chain.from_iterable(fix_pylint(line, errors_by_line[lineno])for lineno, linein enumerate(StringIO(input_code), start=))self.assertEqual(expected.split(u''), \"\".join(output_lines).split(u''))", "docstring": "Assert that fix_pylint produces ``expected`` when fed ``input_code`` and the\nlist of errors ``errors``.\n\nArguments:\n input_code: A string of python code. Will be textwrap.dedented.\n errors: A list of PylintErrors\n expected: A string of python code. 
Will be textwrap.dedented.", "id": "f1688:c0:m0"} {"signature": "def find_line_markers(source):", "body": "markers = {}for lineno, line in enumerate(source.splitlines(), start=):m = re.search(r\"\", line)if m:markers[lineno] = m.group()return markers", "docstring": "Find line markers in program source.\n\n Returns a dict mapping line numbers to the marker on that line.", "id": "f1700:m0"} {"signature": "def assert_not_file(self, filename):", "body": "self.assertFalse(os.path.isfile(filename))", "docstring": "Assert that a file doesn't exist.", "id": "f1702:c0:m3"} {"signature": "def merge_configs(main, tweaks):", "body": "for section in tweaks.sections():for option in tweaks.options(section):value = tweaks.get(section, option)if option.endswith(\"\"):option = option[:-]value = main.get(section, option) + valuemain.set(section, option, value)", "docstring": "Merge tweaks into a main config file.", "id": "f1703:m0"} {"signature": "def list_main(argv_unused): ", "body": "print(\"\")for filename in pkg_resources.resource_listdir(\"\", \"\"):print(filename)return ", "docstring": "list\n List the FILENAMEs that edx_lint can provide.", "id": "f1704:m0"} {"signature": "@click.command()@click.option('', default=sys.stdin, type=click.File(),help=\"\")@click_log.simple_verbosity_option(default=u'')def pylint_amnesty(pylint_output):", "body": "errors = defaultdict(lambda: defaultdict(set))for pylint_error in parse_pylint_output(pylint_output):errors[pylint_error.filename][pylint_error.linenum].add(pylint_error)for file_with_errors in sorted(errors):try:opened_file = open(file_with_errors)except IOError:LOG.warning(u\"\", file_with_errors, exc_info=True)else:with opened_file as input_file:output_lines = []for line_num, line in enumerate(input_file, start=):output_lines.extend(fix_pylint(line,errors[file_with_errors][line_num]))with open(file_with_errors, '') as output_file:output_file.writelines(output_lines)", "docstring": "Add ``# pylint: disable`` clauses to add exceptions to all existing pylint errors in a codebase.", "id": "f1705:m3"} {"signature": "def format_pylint_disables(error_names, tag=True):", "body": "tag_str = \"\" if tag else \"\"if error_names:return u\"\".format(disabled=\"\".join(sorted(error_names)),tag=tag_str,)else:return \"\"", "docstring": "Format a list of error_names into a 'pylint: disable=' line.", "id": "f1705:m1"} {"signature": "def visit_module(self, node):", "body": "with open(FILENAME, \"\") as f:f.write(node.file)f.write(\"\")", "docstring": "Called for each module being examined.", "id": "f1710:c0:m0"} {"signature": "def register_checkers(linter):", "body": "if FILENAME:linter.register_checker(ModuleTracingChecker(linter))", "docstring": "Register checkers.", "id": "f1710:m0"} {"signature": "def register_checkers(linter):", "body": "linter.register_checker(RangeChecker(linter))", "docstring": "Register checkers.", "id": "f1714:m0"} {"signature": "@utils.check_messages(MESSAGE_ID)def visit_call(self, node):", "body": "if not isinstance(node.func, astroid.Name):returnif node.func.name not in self.RANGE_FUNCTIONS:returnfirst = node.args[]if not isinstance(first, astroid.Const):returnif not isinstance(first.value, int):returnthree1 = Falseif len(node.args) == :third = node.args[]if isinstance(third, astroid.Const):if isinstance(third.value, int) and third.value == :three1 = Trueif first.value == :if len(node.args) == :self.add_message(self.MESSAGE_ID, args=(node.func.name, \"\"), node=node)elif three1:self.add_message(self.MESSAGE_ID, args=(node.func.name, \"\"), 
node=node)elif three1:self.add_message(self.MESSAGE_ID, args=(node.func.name, \"\"), node=node)", "docstring": "Called for every function call in the source code.", "id": "f1714:c0:m0"} {"signature": "def check_visitors(cls):", "body": "for name in dir(cls):if name.startswith(\"\"):if name[:] not in CLASS_NAMES:raise Exception(u\"\".format(name))return cls", "docstring": "Check that a checker's visitors are correctly named.\n\n A checker has methods named visit_NODETYPE, but it's easy to mis-name\n a visit method, and it will never be called. This decorator checks\n the class to see that all of its visitors are named after an existing\n node class.", "id": "f1716:m0"} {"signature": "def register_checkers(linter):", "body": "linter.register_checker(UnicodeFormatStringChecker(linter))", "docstring": "Register checkers.", "id": "f1718:m0"} {"signature": "def process_module(self, node):", "body": "self._unicode_literals = \"\" in node.future_imports", "docstring": "Called for each module being examined.", "id": "f1718:c0:m1"} {"signature": "def register(linter):", "body": "for mod in MODS:mod.register_checkers(linter)", "docstring": "Registering additional checkers.\n However, we will also use it to amend existing checker config.", "id": "f1719:m0"} {"signature": "def validate(self):", "body": "with open(self.filename, \"\") as f:text = f.read()start_last_line = text.rfind(b\"\", , -)if start_last_line == -:return Falseoriginal_text = text[:start_last_line+]last_line = text[start_last_line+:]expected_hash = hashlib.sha1(original_text).hexdigest().encode('')match = re.search(b\"\", last_line)if not match:return Falseactual_hash = match.group()return actual_hash == expected_hash", "docstring": "Check if the file still has its original contents.\n\nReturns True if the file is unchanged, False if it has been tampered\nwith.", "id": "f1722:c0:m2"} {"signature": "def guess_array_memory_usage( bam_readers, dtype, use_strand=False ):", "body": "ARRAY_COUNT = if not isinstance( bam_readers, list ):bam_readers = [ bam_readers ]if isinstance( dtype, basestring ):dtype = NUMPY_DTYPES.get( dtype, None )use_strand = use_strand + dtypes = guess_numpy_dtypes_from_idxstats( bam_readers, default=None, force_dtype=False )if not [ dt for dt in dtypes if dt is not None ]:dtypes = guess_numpy_dtypes_from_idxstats( bam_readers, default=dtype or numpy.uint64, force_dtype=True )elif dtype:dtypes = [ dtype if dt else None for dt in dtypes ]read_groups = []no_read_group = Falsefor bam in bam_readers:rgs = bam.get_read_groups()if rgs:for rg in rgs:if rg not in read_groups:read_groups.append( rg )else:no_read_group = Trueread_groups = len( read_groups ) + no_read_groupmax_ref_size = array_byte_overhead = sys.getsizeof( numpy.zeros( ( ), dtype=numpy.uint64 ) )array_count = ARRAY_COUNT * use_strand * read_groupsfor bam in bam_readers:for i, ( name, length ) in enumerate( bam.get_references() ):if dtypes[i] is not None:max_ref_size = max( max_ref_size, ( length + length * dtypes[i]().nbytes * array_count + ( array_byte_overhead * ( array_count + ) ) ) )return max_ref_size", "docstring": "Returns an estimate for the maximum amount of memory to be consumed by numpy arrays.", "id": "f1726:m4"} {"signature": "def _extractall(self, path=\"\", members=None):", "body": "import copyimport operatorfrom tarfile import ExtractErrordirectories = []if members is None:members = selffor tarinfo in members:if tarinfo.isdir():directories.append(tarinfo)tarinfo = copy.copy(tarinfo)tarinfo.mode = self.extract(tarinfo, path)if sys.version_info 
< (, ):def sorter(dir1, dir2):return cmp(dir1.name, dir2.name)directories.sort(sorter)directories.reverse()else:directories.sort(key=operator.attrgetter(''), reverse=True)for tarinfo in directories:dirpath = os.path.join(path, tarinfo.name)try:self.chown(tarinfo, dirpath)self.utime(tarinfo, dirpath)self.chmod(tarinfo, dirpath)except ExtractError:e = sys.exc_info()[]if self.errorlevel > :raiseelse:self._dbg(, \"\" % e)", "docstring": "Extract all members from the archive to the current working\n directory and set owner, modification time and permissions on\n directories afterwards. `path' specifies a different directory\n to extract to. `members' is optional and must be a subset of the\n list returned by getmembers().", "id": "f1732:m17"} {"signature": "def main(version=DEFAULT_VERSION):", "body": "options = _parse_args()tarball = download_setuptools(download_base=options.download_base)return _install(tarball, _build_install_args(options))", "docstring": "Install or upgrade setuptools and EasyInstall", "id": "f1732:m20"} {"signature": "def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,to_dir=os.curdir, delay=):", "body": "to_dir = os.path.abspath(to_dir)try:from urllib.request import urlopenexcept ImportError:from urllib2 import urlopentgz_name = \"\" % versionurl = download_base + tgz_namesaveto = os.path.join(to_dir, tgz_name)src = dst = Noneif not os.path.exists(saveto): try:log.warn(\"\", url)src = urlopen(url)data = src.read()dst = open(saveto, \"\")dst.write(data)finally:if src:src.close()if dst:dst.close()return os.path.realpath(saveto)", "docstring": "Download distribute from a specified location and return its filename\n\n `version` should be a valid distribute version number that is available\n as an egg for download under the `download_base` URL (which should end\n with a '/'). 
`to_dir` is the directory where the egg will be downloaded.\n `delay` is the number of seconds to pause before an actual download\n attempt.", "id": "f1732:m4"} {"signature": "def show():", "body": "sys.stdout.write(colorful.bold('') + '')sys.stdout.write(colorful.dimmed('') + '')sys.stdout.write(colorful.italic('') + '')sys.stdout.write(colorful.underlined('') + '')sys.stdout.write(colorful.inversed('') + '')sys.stdout.write(colorful.concealed('') + '')sys.stdout.write(colorful.struckthrough('') + '')sys.stdout.write(colorful.red('') + '')sys.stdout.write(colorful.green('') + '')sys.stdout.write(colorful.yellow('') + '')sys.stdout.write(colorful.blue('') + '')sys.stdout.write(colorful.magenta('') + '')sys.stdout.write(colorful.cyan('') + '')sys.stdout.write(colorful.white('') + '')sys.stdout.write(colorful.on_red('') + '')sys.stdout.write(colorful.on_green('') + '')sys.stdout.write(colorful.on_yellow('') + '')sys.stdout.write(colorful.on_blue('') + '')sys.stdout.write(colorful.on_magenta('') + '')sys.stdout.write(colorful.on_cyan('') + '')sys.stdout.write(colorful.on_white('') + '')", "docstring": "Show the modifiers and colors", "id": "f1741:m0"} {"signature": "def translate_style(style, colormode, colorpalette):", "body": "style_parts = iter(style.split(''))ansi_start_sequence = []ansi_end_sequence = []try:part = Nonefor mod_part in style_parts:part = mod_partif part not in ansi.MODIFIERS:break mod_start_code, mod_end_code = resolve_modifier_to_ansi_code(part, colormode)ansi_start_sequence.append(mod_start_code)ansi_end_sequence.append(mod_end_code)else: raise StopIteration()if part != '':ansi_start_code, ansi_end_code = translate_colorname_to_ansi_code(part, ansi.FOREGROUND_COLOR_OFFSET, colormode, colorpalette)ansi_start_sequence.append(ansi_start_code)ansi_end_sequence.append(ansi_end_code)next(style_parts)part = next(style_parts)ansi_start_code, ansi_end_code = translate_colorname_to_ansi_code(part, ansi.BACKGROUND_COLOR_OFFSET, colormode, colorpalette)ansi_start_sequence.append(ansi_start_code)ansi_end_sequence.append(ansi_end_code)except StopIteration: passreturn ''.join(ansi_start_sequence), ''.join(ansi_end_sequence)", "docstring": "Translate the given style to an ANSI escape code\nsequence.\n\n``style`` examples are:\n\n* green\n* bold\n* red_on_black\n* bold_green\n* italic_yellow_on_cyan\n\n:param str style: the style to translate\n:param int colormode: the color mode to use. 
See ``translate_rgb_to_ansi_code``\n:param dict colorpalette: the color palette to use for the color name mapping", "id": "f1747:m3"} {"signature": "def update_palette(self, colorpalette):", "body": "self.colorpalette.update(colors.sanitize_color_palette(colorpalette))", "docstring": "Update the currently active color palette\nwith the given color palette", "id": "f1747:c2:m10"} {"signature": "@colorpalette.setterdef colorpalette(self, colorpalette):", "body": "if isinstance(colorpalette, str): colorpalette = colors.parse_colors(colorpalette)self._colorpalette = colors.sanitize_color_palette(colorpalette)", "docstring": "Set the colorpalette which should be used", "id": "f1747:c2:m2"} {"signature": "def str(self, string):", "body": "return ColorfulString(string, string)", "docstring": "Create a new ColorfulString instance of the given\nunstyled string.\n\nThis method should be used to create a ColorfulString\nwhich is actually not styled yet but can safely be concatenated\nwith other ColorfulStrings like:\n\n>>> s = colorful.str('Hello ')\n>>> s += colorful.black('World')\n>>> str(s)\n'Hello \\033[30mWorld\\033[39m'\n\n:param str string: the string to use for the ColorfulString", "id": "f1747:c2:m13"} {"signature": "def setup(self, colormode=None, colorpalette=None, extend_colors=False):", "body": "if colormode:self.colormode = colormodeif colorpalette:if extend_colors:self.update_palette(colorpalette)else:self.colorpalette = colorpalette", "docstring": "Setup this colorful object by setting a ``colormode`` and\nthe ``colorpalette``. The ``extend_colors`` flag is used\nto extend the currently active color palette instead of\nreplacing it.\n\n:param int colormode: the color mode to use. See ``translate_rgb_to_ansi_code``\n:param dict colorpalette: the colorpalette to use. 
This ``dict`` should map\n color names to their corresponding RGB values\n:param bool extend_colors: extend the active color palette instead of replacing it", "id": "f1747:c2:m3"} {"signature": "def use_256_ansi_colors(self):", "body": "self.colormode = terminal.ANSI_256_COLORS", "docstring": "Use 256 ANSI colors for this colorful object", "id": "f1747:c2:m7"} {"signature": "def translate_rgb_to_ansi_code(red, green, blue, offset, colormode):", "body": "if colormode == terminal.NO_COLORS: return '', ''if colormode == terminal.ANSI_8_COLORS or colormode == terminal.ANSI_16_COLORS:color_code = ansi.rgb_to_ansi16(red, green, blue)start_code = ansi.ANSI_ESCAPE_CODE.format(code=color_code + offset - ansi.FOREGROUND_COLOR_OFFSET)end_code = ansi.ANSI_ESCAPE_CODE.format(code=offset + ansi.COLOR_CLOSE_OFFSET)return start_code, end_codeif colormode == terminal.ANSI_256_COLORS:color_code = ansi.rgb_to_ansi256(red, green, blue)start_code = ansi.ANSI_ESCAPE_CODE.format(code=''.format(base= + offset, code=color_code))end_code = ansi.ANSI_ESCAPE_CODE.format(code=offset + ansi.COLOR_CLOSE_OFFSET)return start_code, end_codeif colormode == terminal.TRUE_COLORS:start_code = ansi.ANSI_ESCAPE_CODE.format(code=''.format(base= + offset, red=red, green=green, blue=blue))end_code = ansi.ANSI_ESCAPE_CODE.format(code=offset + ansi.COLOR_CLOSE_OFFSET)return start_code, end_coderaise ColorfulError(''.format(colormode))", "docstring": "Translate the given RGB color into the appropriate ANSI escape code\nfor the given color mode.\nThe offset is used for the base color.\n\nThe ``colormode`` has to be one of:\n * 0: no colors / disabled\n * 8: use ANSI 8 colors\n * 16: use ANSI 16 colors (same as 8 but with brightness)\n * 256: use ANSI 256 colors\n * 0xFFFFFF / 16777215: use 16 Million true colors\n\n:param int red: the red channel value\n:param int green: the green channel value\n:param int blue: the blue channel value\n:param int offset: the offset to use for the base color\n:param int colormode: the color mode to use. 
See ``translate_rgb_to_ansi_code``\n\n:returns str: the ANSI escape code for the modifier\n\n:raises ColorfulError: if the given modifier name is invalid", "id": "f1747:m2"} {"signature": "def use_true_colors(self):", "body": "self.colormode = terminal.TRUE_COLORS", "docstring": "Use true colors for this colorful object", "id": "f1747:c2:m8"} {"signature": "def style_string(string, ansi_style, colormode, nested=False):", "body": "ansi_start_code, ansi_end_code = ansi_styleif PY2:if isinstance(string, str):string = string.decode(DEFAULT_ENCODING)string = UNICODE(string).replace(ansi.NEST_PLACEHOLDER, ansi_start_code)return ''.format(start_code=ansi_start_code,string=string,end_code=ansi_end_code,nest_ph=ansi.NEST_PLACEHOLDER if nested else '')", "docstring": "Style the given string according to the given\nANSI style string.\n\n:param str string: the string to style\n:param tuple ansi_style: the styling string returned by ``translate_style``\n:param int colormode: the color mode to use. See ``translate_rgb_to_ansi_code``\n\n:returns: a string containing proper ANSI sequence", "id": "f1747:m4"} {"signature": "@propertydef colorpalette(self):", "body": "return self._colorpalette", "docstring": "Get the current used color palette", "id": "f1747:c2:m1"} {"signature": "def parse_colors(path):", "body": "if path.endswith(\"\"):return parse_rgb_txt_file(path)elif path.endswith(\"\"):return parse_json_color_file(path)raise TypeError(\"\")", "docstring": "Parse the given color files.\n\n Supported are:\n * .txt for X11 colors\n * .json for colornames", "id": "f1749:m0"} {"signature": "def split_phonemes(letter, onset=True, nucleus=True, coda=True):", "body": "if len(letter) != or not is_hangul(letter):raise ValueError('' % letter)offset = ord(letter) - FIRST_HANGUL_OFFSETphonemes = [None] * if onset:phonemes[] = ONSETS[offset // (NUM_NUCLEUSES * NUM_CODAS)]if nucleus:phonemes[] = NUCLEUSES[(offset // NUM_CODAS) % NUM_NUCLEUSES]if coda:phonemes[] = CODAS[offset % NUM_CODAS]return tuple(phonemes)", "docstring": "Splits Korean phonemes, also known as \"\uc790\uc18c\", from a Hangul letter.\n\n :returns: (onset, nucleus, coda)\n :raises ValueError: `letter` is not a Hangul single letter.", "id": "f1751:m3"} {"signature": "def pick(word, morph, **kwargs):", "body": "return registry.pick(word, morph, **kwargs)", "docstring": "Shortcut for :class:`ParticleRegistry.pick` of the default registry.", "id": "f1753:m2"} {"signature": "def postfix(word, morph, **kwargs):", "body": "return registry.postfix(word, morph, **kwargs)", "docstring": "Shortcut for :class:`ParticleRegistry.postfix` of the default registry.", "id": "f1753:m3"} {"signature": "def guess_coda(word):", "body": "word = filter_only_significant(word)return guess_coda_from_significant_word(word)", "docstring": "Guesses the coda of the given word as correctly as possible. If it fails\n to guess the coda, returns ``None``.", "id": "f1755:m0"} {"signature": "def pick_coda_from_letter(letter):", "body": "try:__, __, coda =split_phonemes(letter, onset=False, nucleus=False, coda=True)except ValueError:return Noneelse:return coda", "docstring": "Picks only a coda from a Hangul letter. 
It returns ``None`` if the\n given letter is not Hangul.", "id": "f1755:m3"} {"signature": "@cached_propertydef morphs(self):", "body": "seen = set()saw = seen.addmorphs = chain([self.morph1, self.morph2], self.tolerances)unique_morphs = (x for x in morphs if x and not (x in seen or saw(x)))return tuple(sorted(unique_morphs, key=len, reverse=True))", "docstring": "The tuple containing the given morphs and all the possible tolerant\n morphs. Longer is first.", "id": "f1757:c0:m7"} {"signature": "def __getitem__(self, key):", "body": "if isinstance(key, slice):word = key.startmorph = key.stop or self.morph1tolerance_style = key.step or DEFAULT_TOLERANCE_STYLEelse:word, morph = key, self.morph1tolerance_style = DEFAULT_TOLERANCE_STYLEreturn self.allomorph(word, morph, tolerance_style)", "docstring": "The syntax sugar to determine one of allomorphic morphs based on a\n word::\n\n eun = Particle(u'\uc740', u'\ub294')\n assert eun[u'\ub098\uc624'] == u'\ub294'\n assert eun[u'\ubaa8\ub9ac\uc548'] == u'\uc740'", "id": "f1757:c0:m5"} {"signature": "def singleton_particle(*bases):", "body": "return with_metaclass(SingletonParticleMeta, SingletonParticle, *bases)", "docstring": "Defines a singleton instance immediately when defining the class. The\n name of the class will refer the instance instead.", "id": "f1757:m0"} {"signature": "def rule(self, coda):", "body": "if coda:return self.morph1else:return self.morph2", "docstring": "Determines one of allomorphic morphs based on a coda.", "id": "f1757:c0:m3"} {"signature": "def _add_magic(self, data):", "body": "if self.magic:return self.magic + datareturn data", "docstring": "Add magic", "id": "f1760:c0:m27"} {"signature": "@staticmethoddef _aes_encrypt(data, algorithm, key):", "body": "if algorithm[''] == '':mode = AES.MODE_CBCelse:raise Exception(''% algorithm[''])iv_size = algorithm['']block_size = iv_sizeinclude_iv = Trueif ''in algorithm and algorithm['']:if len(algorithm['']) != algorithm['']:raise Exception('')iv_value = algorithm['']include_iv = Falseelse:iv_value = get_random_bytes(iv_size)numpad = block_size - (len(data) % block_size)data = data + numpad * chr(numpad)enc = AES.new(key, mode, iv_value).encrypt(data)if include_iv:enc = iv_value + encreturn enc", "docstring": "AES encrypt", "id": "f1760:c0:m38"} {"signature": "def unseal(self, data, return_options=False):", "body": "data = self._remove_magic(data)data = urlsafe_nopadding_b64decode(data)options = self._read_header(data)data = self._add_magic(data)data = self._unsign_data(data, options)data = self._remove_magic(data)data = self._remove_header(data, options)data = self._decrypt_data(data, options)data = self._decompress_data(data, options)data = self._unserialize_data(data, options)if return_options:return data, optionselse:return data", "docstring": "Unseal data", "id": "f1760:c0:m13"} {"signature": "def get_options(self):", "body": "return self.options", "docstring": "Get options used for sealing", "id": "f1760:c0:m9"} {"signature": "def _read_version(self, data):", "body": "version = ord(data[])if version not in self.VERSIONS:raise Exception('' % version)return version", "docstring": "Read header version from data", "id": "f1760:c0:m31"} {"signature": "@staticmethoddef _generate_key(pass_id, passphrases, salt, algorithm):", "body": "if pass_id not in passphrases:raise Exception('' % pass_id)passphrase = passphrases[pass_id]if len(passphrase) < :raise Exception('')digestmod = EncryptedPickle._get_hashlib(algorithm[''])encoder = PBKDF2(passphrase, 
salt,iterations=algorithm[''],digestmodule=digestmod)return encoder.read(algorithm[''])", "docstring": "Generate and return PBKDF2 key", "id": "f1760:c0:m34"} {"signature": "def _unsign_data(self, data, options):", "body": "if options[''] not in self.signature_algorithms:raise Exception(''% options[''])signature_algorithm =self.signature_algorithms[options['']]algorithm = self._get_algorithm_info(signature_algorithm)key_salt = ''if algorithm['']:key_salt = data[-algorithm['']:]data = data[:-algorithm['']]key = self._generate_key(options[''],self.signature_passphrases, key_salt, algorithm)data = self._decode(data, algorithm, key)return data", "docstring": "Verify and remove signature", "id": "f1760:c0:m19"} {"signature": "def verify_signature(self, data):", "body": "data = self._remove_magic(data)data = urlsafe_nopadding_b64decode(data)options = self._read_header(data)data = self._add_magic(data)self._unsign_data(data, options)", "docstring": "Verify sealed data signature", "id": "f1760:c0:m14"} {"signature": "def set_magic(self, magic):", "body": "if magic is None or isinstance(magic, str):self.magic = magicelse:raise TypeError('')", "docstring": "Set magic (prefix)", "id": "f1760:c0:m10"} {"signature": "def seal(self, data, options=None):", "body": "options = self._set_options(options)data = self._serialize_data(data, options)data = self._compress_data(data, options)data = self._encrypt_data(data, options)data = self._add_header(data, options)data = self._add_magic(data)data = self._sign_data(data, options)data = self._remove_magic(data)data = urlsafe_nopadding_b64encode(data)data = self._add_magic(data)return data", "docstring": "Seal data", "id": "f1760:c0:m12"} {"signature": "def _decode(self, data, algorithm, key=None):", "body": "if algorithm[''] == '':verify_signature = data[-algorithm['']:]data = data[:-algorithm['']]signature = self._hmac_generate(data, algorithm, key)if not const_equal(verify_signature, signature):raise Exception('')return dataelif algorithm[''] == '':return self._aes_decrypt(data, algorithm, key)elif algorithm[''] == '':return dataelif algorithm[''] == '':return json.loads(data)elif algorithm[''] == '':return dataelif algorithm[''] == '':return self._zlib_decompress(data, algorithm)else:raise Exception('' % algorithm[''])", "docstring": "Decode data with specific algorithm", "id": "f1760:c0:m17"} {"signature": "def _decrypt_data(self, data, options):", "body": "if options[''] not in self.encryption_algorithms:raise Exception(''% options[''])encryption_algorithm =self.encryption_algorithms[options['']]algorithm = self._get_algorithm_info(encryption_algorithm)key_salt = ''if algorithm['']:key_salt = data[-algorithm['']:]data = data[:-algorithm['']]key = self._generate_key(options[''],self.encryption_passphrases, key_salt, algorithm)data = self._decode(data, algorithm, key)return data", "docstring": "Decrypt data", "id": "f1760:c0:m21"} {"signature": "def _set_options(self, options):", "body": "if not options:return self.options.copy()options = options.copy()if '' in options:self.set_magic(options[''])del(options[''])if '' in options:flags = options['']del(options[''])for key, value in flags.iteritems():if not isinstance(value, bool):raise TypeError('' % key)else:flags = self.options['']if '' in options:del(options[''])for key, value in options.iteritems():if not isinstance(value, int):raise TypeError('' % key)if value < or value > :raise ValueError('' % key)new_options = self.options.copy()new_options.update(options)new_options[''].update(flags)return 
new_options", "docstring": "Private function for setting options used for sealing", "id": "f1760:c0:m8"} {"signature": "@staticmethoddef _zlib_compress(data, algorithm):", "body": "if algorithm[''] == '':encoder = zlib.compressobj(algorithm[''], zlib.DEFLATED, -)compressed = encoder.compress(data)compressed += encoder.flush()return compressedelse:raise Exception(''% algorithm[''])", "docstring": "GZIP compress", "id": "f1760:c0:m40"} {"signature": "def set_algorithms(self, signature=None, encryption=None,serialization=None, compression=None):", "body": "self.signature_algorithms =self._update_dict(signature, self.DEFAULT_SIGNATURE)self.encryption_algorithms =self._update_dict(encryption, self.DEFAULT_ENCRYPTION)self.serialization_algorithms =self._update_dict(serialization, self.DEFAULT_SERIALIZATION)self.compression_algorithms =self._update_dict(compression, self.DEFAULT_COMPRESSION)", "docstring": "Set algorithms used for sealing. Defaults can not be overridden.", "id": "f1760:c0:m5"} {"signature": "def threadFunc(root):", "body": "th = threading.currentThread()auto.Logger.WriteLine(''.format(th.ident, th.name), auto.ConsoleColor.Cyan)time.sleep()auto.InitializeUIAutomationInCurrentThread()auto.GetConsoleWindow().CaptureToImage('')newRoot = auto.GetRootControl() auto.EnumAndLogControl(newRoot, )auto.UninitializeUIAutomationInCurrentThread()auto.Logger.WriteLine(''.format(th.ident, th.name), auto.ConsoleColor.Cyan)", "docstring": "If you want to use functionalities related to Controls and Patterns in a new thread.\nYou must call InitializeUIAutomationInCurrentThread first in the thread\n and call UninitializeUIAutomationInCurrentThread when the thread exits.\nBut you can't use use a Control or a Pattern created in a different thread.\nSo you can't create a Control or a Pattern in main thread and then pass it to a new thread and use it.", "id": "f1777:m0"} {"signature": "@propertydef RowSpan(self) -> int:", "body": "return self.pattern.CurrentRowSpan", "docstring": "Property RowSpan.\nCall IUIAutomationGridItemPattern::get_CurrentRowSpan.\nReturn int, the number of rows spanned by the grid item.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationgriditempattern-get_currentrowspan", "id": "f1782:c49:m5"} {"signature": "def __init__(self, pattern=None):", "body": "self.pattern = pattern", "docstring": "Refer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nn-uiautomationclient-iuiautomationvaluepattern", "id": "f1782:c75:m0"} {"signature": "def RightClick(x: int, y: int, waitTime: float = OPERATION_WAIT_TIME) -> None:", "body": "SetCursorPos(x, y)screenWidth, screenHeight = GetScreenSize()mouse_event(MouseEventFlag.RightDown | MouseEventFlag.Absolute, x * // screenWidth, y * // screenHeight, , )time.sleep()mouse_event(MouseEventFlag.RightUp | MouseEventFlag.Absolute, x * // screenWidth, y * // screenHeight, , )time.sleep(waitTime)", "docstring": "Simulate mouse right click at point x, y.\nx: int.\ny: int.\nwaitTime: float.", "id": "f1782:m14"} {"signature": "@propertydef CurrentView(self) -> int:", "body": "return self.pattern.CurrentCurrentView", "docstring": "Property CurrentView.\nCall IUIAutomationMultipleViewPattern::get_CurrentCurrentView.\nReturn int, the control-specific identifier of the current view of the control.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationmultipleviewpattern-get_currentcurrentview", "id": "f1782:c54:m1"} 
{"signature": "def SetValue(self, value: str, waitTime: float = OPERATION_WAIT_TIME) -> bool:", "body": "ret = self.pattern.SetValue(value) == S_OKtime.sleep(waitTime)return ret", "docstring": "Call IUIAutomationTransformPattern2::IUIAutomationValuePattern::SetValue.\nSet the value of the element.\nvalue: str.\nwaitTime: float.\nReturn bool, True if succeed otherwise False.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationvaluepattern-setvalue", "id": "f1782:c75:m3"} {"signature": "@propertydef Orientation(self) -> int:", "body": "return self.Element.CurrentOrientation", "docstring": "Property Orientation.\nReturn int, a value in class `OrientationType`.\nCall IUIAutomationElement::get_CurrentOrientation.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentorientation", "id": "f1782:c78:m35"} {"signature": "def SetCursorPos(x: int, y: int) -> bool:", "body": "return bool(ctypes.windll.user32.SetCursorPos(x, y))", "docstring": "SetCursorPos from Win32.\nSet mouse cursor to point x, y.\nx: int.\ny: int.\nReturn bool, True if succeed otherwise False.", "id": "f1782:m6"} {"signature": "def RightClick(self, x: int = None, y: int = None, ratioX: float = , ratioY: float = , simulateMove: bool = True, waitTime: float = OPERATION_WAIT_TIME) -> None:", "body": "point = self.MoveCursorToInnerPos(x, y, ratioX, ratioY, simulateMove)if point:RightClick(point[], point[], waitTime)", "docstring": "x: int, if < 0, right click self.BoundingRectangle.right + x, if not None, ignore ratioX.\ny: int, if < 0, right click self.BoundingRectangle.bottom + y, if not None, ignore ratioY.\nratioX: float.\nratioY: float.\nsimulateMove: bool, if True, first move cursor to control smoothly.\nwaitTime: float.\n\nRightClick(), RightClick(ratioX=0.5, ratioY=0.5): right click center.\nRightClick(10, 10): right click left+10, top+10.\nRightClick(-10, -10): right click right-10, bottom-10.", "id": "f1782:c78:m65"} {"signature": "def FromControl(self, control: '', x: int = , y: int = , width: int = , height: int = ) -> bool:", "body": "rect = control.BoundingRectanglewhile rect.width() == or rect.height() == :control = control.GetParentControl()if not control:return Falserect = control.BoundingRectangleif width <= :width = rect.width() + widthif height <= :height = rect.height() + heighthandle = control.NativeWindowHandleif handle:left = xtop = yright = left + widthbottom = top + heightelse:while True:control = control.GetParentControl()handle = control.NativeWindowHandleif handle:pRect = control.BoundingRectangleleft = rect.left - pRect.left + xtop = rect.top - pRect.top + yright = left + widthbottom = top + heightbreakreturn self.FromHandle(handle, left, top, right, bottom)", "docstring": "Capture a control to Bitmap.\ncontrol: `Control` or its subclass.\nx: int.\ny: int.\nwidth: int.\nheight: int.\nx, y: the point in control's internal position(from 0,0)\nwidth, height: image's width and height from x, y, use 0 for entire area,\nIf width(or height) < 0, image size will be control's width(or height) - width(or height).\nReturn bool, True if succeed otherwise False.", "id": "f1782:c42:m7"} {"signature": "def __init__(self, pattern=None):", "body": "self.pattern = pattern", "docstring": "Refer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nn-uiautomationclient-iuiautomationtableitempattern", "id": "f1782:c65:m0"} {"signature": "def GetGridItemPattern(self) -> 
{"signature": "def GetGridItemPattern(self) -> GridItemPattern:", "body": "return self.GetPattern(PatternId.GridItemPattern)", "docstring": "Return `GridItemPattern` if it supports the pattern else None(Conditional support according to MSDN).", "id": "f1782:c95:m3"} {"signature": "def GetPixelColor(x: int, y: int, handle: int = ) -> int:", "body": "hdc = ctypes.windll.user32.GetWindowDC(ctypes.c_void_p(handle))bgr = ctypes.windll.gdi32.GetPixel(hdc, x, y)ctypes.windll.user32.ReleaseDC(ctypes.c_void_p(handle), hdc)return bgr", "docstring": "Get pixel color of a native window.\nx: int.\ny: int.\nhandle: int, the handle of a native window.\nReturn int, the bgr value of point (x,y).\nr = bgr & 0x0000FF\ng = (bgr & 0x00FF00) >> 8\nb = (bgr & 0xFF0000) >> 16\nIf handle is 0, get pixel from Desktop window(root control).\nNote:\nNot all devices support GetPixel.\nAn application should call GetDeviceCaps to determine whether a specified device supports this function.\nFor example, console window doesn't support.", "id": "f1782:m25"} {"signature": "def EnumAndLogControl(control: Control, maxDepth: int = , showAllName: bool = True, startDepth: int = ) -> None:", "body": "for c, d in WalkControl(control, True, maxDepth):LogControl(c, d + startDepth, showAllName)", "docstring": "Print and log control and its descendants' properties.\ncontrol: `Control` or its subclass.\nmaxDepth: int, enum depth.\nshowAllName: bool, if False, print the first 30 characters of control.Name.\nstartDepth: int, control's current depth.", "id": "f1782:m83"} {"signature": "def GetExpandCollapsePattern(self) -> ExpandCollapsePattern:", "body": "return self.GetPattern(PatternId.ExpandCollapsePattern)", "docstring": "Return `ExpandCollapsePattern` if it supports the pattern else None(Must support according to MSDN).", "id": "f1782:c83:m1"} {"signature": "def SetTopmost(self, isTopmost: bool = True, waitTime: float = OPERATION_WAIT_TIME) -> bool:", "body": "if self.IsTopLevel():ret = SetWindowTopmost(self.NativeWindowHandle, isTopmost)time.sleep(waitTime)return retreturn False", "docstring": "Set top level window topmost.\nisTopmost: bool.\nwaitTime: float.", "id": "f1782:c99:m0"} {"signature": "def Toggle(self, waitTime: float = OPERATION_WAIT_TIME) -> bool:", "body": "ret = self.pattern.Toggle() == S_OKtime.sleep(waitTime)return ret", "docstring": "Call IUIAutomationTogglePattern::Toggle.\nCycle through the toggle states of the control.\nwaitTime: float.\nReturn bool, True if succeed otherwise False.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtogglepattern-toggle", "id": "f1782:c72:m2"} {"signature": "@propertydef Role(self) -> int:", "body": "return self.pattern.CurrentRole", "docstring": "Property Role.\nCall IUIAutomationLegacyIAccessiblePattern::get_CurrentRole.\nReturn int, a value in class `AccessibleRole`, the Microsoft Active Accessibility role identifier.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationlegacyiaccessiblepattern-get_currentrole", "id": "f1782:c53:m7"} {"signature": "def GetParentControl(self) -> '':", "body": "ele = _AutomationClient.instance().ViewWalker.GetParentElement(self.Element)return Control.CreateControlFromElement(ele)", "docstring": "Return `Control` subclass or None.", "id": "f1782:c78:m50"}
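A short sketch combining the GetRootControl and EnumAndLogControl records in this dump to print two levels of the UI tree; maxDepth is passed explicitly because its default is elided here:

import uiautomation as auto

root = auto.GetRootControl()              # the Desktop window (PaneControl)
auto.EnumAndLogControl(root, maxDepth=2)  # print and log two levels of descendants

{"signature": "def Navigate(self, direction: int) -> '':", "body": "ele = self.pattern.Navigate(direction)return Control.CreateControlFromElement(ele)", "docstring": "Call 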
IUIAutomationCustomNavigationPattern::Navigate.\nGet the next control in the specified direction within the logical UI tree.\ndirection: int, a value in class `NavigateDirection`.\nReturn `Control` subclass or None.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationcustomnavigationpattern-navigate", "id": "f1782:c44:m1"} {"signature": "def RemoveFromSelection(self, waitTime: float = OPERATION_WAIT_TIME) -> bool:", "body": "ret = self.textRange.RemoveFromSelection() == S_OKtime.sleep(waitTime)return ret", "docstring": "Call IUIAutomationTextRange::RemoveFromSelection.\nRemove the text range from an existing collection of selected text in a text container that supports multiple, disjoint selections.\nwaitTime: float.\nReturn bool, True if succeed otherwise False.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextrange-removefromselection", "id": "f1782:c67:m16"} {"signature": "@propertydef Help(self) -> str:", "body": "return self.pattern.CurrentHelp", "docstring": "Property Help.\nCall IUIAutomationLegacyIAccessiblePattern::get_CurrentHelp.\nReturn str, the Microsoft Active Accessibility help string for the element.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationlegacyiaccessiblepattern-get_currenthelp", "id": "f1782:c53:m4"} {"signature": "def GetSelectionPattern(self) -> SelectionPattern:", "body": "return self.GetPattern(PatternId.SelectionPattern)", "docstring": "Return `SelectionPattern` if it supports the pattern else None(Conditional support according to MSDN).", "id": "f1782:c118:m2"} {"signature": "def __init__(self, pattern=None):", "body": "self.pattern = pattern", "docstring": "Refer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nn-uiautomationclient-iuiautomationdockpattern", "id": "f1782:c45:m0"} {"signature": "def __init__(self, pattern=None):", "body": "self.pattern = pattern", "docstring": "Refer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nn-uiautomationclient-iuiautomationtextchildpattern", "id": "f1782:c68:m0"} {"signature": "@propertydef IsReadOnly(self) -> bool:", "body": "return self.pattern.CurrentIsReadOnly", "docstring": "Property IsReadOnly.\nCall IUIAutomationRangeValuePattern::get_CurrentIsReadOnly.\nReturn bool, indicates whether the value of the element can be changed.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationrangevaluepattern-get_currentisreadonly", "id": "f1782:c56:m1"} {"signature": "@propertydef ZoomMinimum(self) -> float:", "body": "return self.pattern.CurrentZoomMinimum", "docstring": "Property ZoomMinimum.\nCall IUIAutomationTransformPattern2::get_CurrentZoomMinimum.\nReturn float, the minimum zoom level of the control's viewport.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtransformpattern2-get_currentzoomminimum", "id": "f1782:c74:m4"} {"signature": "def GetTablePattern(self) -> TablePattern:", "body": "return self.GetPattern(PatternId.TablePattern)", "docstring": "Return `TablePattern` if it supports the pattern else None(Conditional support according to MSDN).", "id": "f1782:c85:m4"} {"signature": "def FromFile(self, filePath: str) -> bool:", "body": "self.Release()self._bitmap = 
_DllClient.instance().dll.BitmapFromFile(ctypes.c_wchar_p(filePath))self._getsize()return self._bitmap > ", "docstring": "Load image from a file.\nfilePath: str.\nReturn bool, True if succeed otherwise False.", "id": "f1782:c42:m8"} {"signature": "def ToFile(self, savePath: str) -> bool:", "body": "name, ext = os.path.splitext(savePath)extMap = {'': '', '': '', '': '', '': '', '': '', '': '', '': ''}gdiplusImageFormat = extMap.get(ext.lower(), '')return bool(_DllClient.instance().dll.BitmapToFile(self._bitmap, ctypes.c_wchar_p(savePath), ctypes.c_wchar_p(gdiplusImageFormat)))", "docstring": "Save to a file.\nsavePath: str, should end with .bmp, .jpg, .jpeg, .png, .gif, .tif, .tiff.\nReturn bool, True if succeed otherwise False.", "id": "f1782:c42:m9"} {"signature": "def GetSubBitmap(self, x: int, y: int, width: int, height: int) -> '':", "body": "colors = self.GetPixelColorsOfRect(x, y, width, height)bitmap = Bitmap(width, height)bitmap.SetPixelColorsOfRect(, , width, height, colors)return bitmap", "docstring": "x: int.\ny: int.\nwidth: int.\nheight: int.\nReturn `Bitmap`, a sub bitmap of the input rect.", "id": "f1782:c42:m22"} {"signature": "def SetFocus(self) -> bool:", "body": "return self.Element.SetFocus() == S_OK", "docstring": "Call IUIAutomationElement::SetFocus.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-setfocus", "id": "f1782:c78:m44"} {"signature": "@propertydef WindowInteractionState(self) -> int:", "body": "return self.pattern.CurrentWindowInteractionState", "docstring": "Property WindowInteractionState.\nCall IUIAutomationWindowPattern::get_CurrentWindowInteractionState.\nReturn int, a value in class `WindowInteractionState`,\n the current state of the window for the purposes of user interaction.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationwindowpattern-get_currentwindowinteractionstate", "id": "f1782:c77:m6"} {"signature": "def GetInvokePattern(self) -> InvokePattern:", "body": "return self.GetPattern(PatternId.InvokePattern)", "docstring": "Return `InvokePattern` if it supports the pattern else None(Must support according to MSDN).", "id": "f1782:c108:m2"} {"signature": "def SetGlobalSearchTimeout(seconds: float) -> None:", "body": "global TIME_OUT_SECONDTIME_OUT_SECOND = seconds", "docstring": "seconds: float.\nTo make this available, you need to explicitly import uiautomation:\n from uiautomation import uiautomation as auto\n auto.SetGlobalSearchTimeout(10)", "id": "f1782:m67"} {"signature": "@propertydef ChildId(self) -> int:", "body": "return self.pattern.CurrentChildId", "docstring": "Property ChildId.\nCall IUIAutomationLegacyIAccessiblePattern::get_CurrentChildId.\nReturn int, the Microsoft Active Accessibility child identifier for the element.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationlegacyiaccessiblepattern-get_currentchildid", "id": "f1782:c53:m1"}
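A sketch of the import style shown in the SetGlobalSearchTimeout docstring above, paired with Exists-based polling; the SubName value is an illustrative assumption:

from uiautomation import uiautomation as auto

auto.SetGlobalSearchTimeout(10)    # seconds, as the docstring above shows
wnd = auto.WindowControl(searchDepth=1, SubName='Notepad')  # illustrative search property
if wnd.Exists(3, 1):               # poll for up to 3 s, every 1 s
    wnd.SetFocus()

{"signature": "def SetWindowVisualState(self, state: int, waitTime: float = OPERATION_WAIT_TIME) -> bool:", "body": "ret = self.pattern.SetWindowVisualState(state) == S_OKtime.sleep(waitTime)return ret", "docstring": "Call IUIAutomationWindowPattern::SetWindowVisualState.\nMinimize, maximize, or restore the window.\nstate: int, a value in class `WindowVisualState`.\nwaitTime: float.\nReturn bool, True if succeed otherwise False.\nRefer 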
https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationwindowpattern-setwindowvisualstate", "id": "f1782:c77:m8"} {"signature": "def SetValue(self, value: str, waitTime: float = OPERATION_WAIT_TIME) -> bool:", "body": "ret = self.pattern.SetValue(value) == S_OKtime.sleep(waitTime)return ret", "docstring": "Call IUIAutomationLegacyIAccessiblePattern::SetValue.\nSet the Microsoft Active Accessibility value property for the element.\nvalue: str.\nwaitTime: float.\nReturn bool, True if succeed otherwise False.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationlegacyiaccessiblepattern-setvalue", "id": "f1782:c53:m14"} {"signature": "def GetInvokePattern(self) -> InvokePattern:", "body": "return self.GetPattern(PatternId.InvokePattern)", "docstring": "Return `InvokePattern` if it supports the pattern else None(Conditional support according to MSDN).", "id": "f1782:c95:m4"} {"signature": "@propertydef ExtendedProperties(self) -> str:", "body": "return self.pattern.CurrentExtendedProperties", "docstring": "Property ExtendedProperties.\nCall IUIAutomationStylesPattern::get_CurrentExtendedProperties.\nReturn str, a localized string that contains the list of extended properties for an element in a document.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationstylespattern-get_currentextendedproperties", "id": "f1782:c63:m1"} {"signature": "@propertydef DocumentRange(self) -> TextRange:", "body": "return TextRange(self.pattern.DocumentRange)", "docstring": "Property DocumentRange.\nCall IUIAutomationTextPattern::get_DocumentRange.\nReturn `TextRange`, a text range that encloses the main text of a document.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextpattern-get_documentrange", "id": "f1782:c70:m1"} {"signature": "def WalkControl(control: Control, includeTop: bool = False, maxDepth: int = ):", "body": "if includeTop:yield control, if maxDepth <= :returndepth = child = control.GetFirstChildControl()controlList = [child]while depth >= :lastControl = controlList[-]if lastControl:yield lastControl, depth + child = lastControl.GetNextSiblingControl()controlList[depth] = childif depth + < maxDepth:child = lastControl.GetFirstChildControl()if child:depth += controlList.append(child)else:del controlList[depth]depth -= ", "docstring": "control: `Control` or its subclass.\nincludeTop: bool, if True, yield (control, 0) first.\nmaxDepth: int, enum depth.\nYield 2 items tuple(control: Control, depth: int).", "id": "f1782:m81"} {"signature": "def GetPixelColorsOfRects(self, rects: list) -> list:", "body": "rects2 = [(x, y, x + width, y + height) for x, y, width, height in rects]left, top, right, bottom = zip(*rects2)left, top, right, bottom = min(left), min(top), max(right), max(bottom)width, height = right - left, bottom - topallColors = self.GetPixelColorsOfRect(left, top, width, height)colorsOfRects = []for x, y, w, h in rects:x -= lefty -= topcolors = []for row in range(h):colors.extend(allColors[(y + row) * width + x:(y + row) * width + x + w])colorsOfRects.append(colors)return colorsOfRects", "docstring": "rects: a list of rects, such as [(0,0,10,10), (10,10,20,20),(x,y,width,height)].\nReturn list, a list whose elements are ctypes.Array which is an iterable array of int values in argb.", "id": "f1782:c42:m20"} {"signature": "def GetScrollItemPattern(self) -> 
ScrollItemPattern:", "body": "return self.GetPattern(PatternId.ScrollItemPattern)", "docstring": "Return `ScrollItemPattern` if it supports the pattern else None(Conditional support according to MSDN).", "id": "f1782:c119:m3"} {"signature": "@propertydef AutomationId(self) -> str:", "body": "return self.Element.CurrentAutomationId", "docstring": "Property AutomationId.\nCall IUIAutomationElement::get_CurrentAutomationId.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentautomationid", "id": "f1782:c78:m14"} {"signature": "def GetTogglePattern(self) -> TogglePattern:", "body": "return self.GetPattern(PatternId.TogglePattern)", "docstring": "Return `TogglePattern` if it supports the pattern else None(Conditional support according to MSDN).", "id": "f1782:c95:m6"} {"signature": "def GetInvokePattern(self) -> InvokePattern:", "body": "return self.GetPattern(PatternId.InvokePattern)", "docstring": "Return `InvokePattern` if it supports the pattern else None(Conditional support according to MSDN).", "id": "f1782:c80:m2"} {"signature": "def GetGridPattern(self) -> GridPattern:", "body": "return self.GetPattern(PatternId.GridPattern)", "docstring": "Return `GridPattern` if it supports the pattern else None(Conditional support according to MSDN).", "id": "f1782:c109:m1"} {"signature": "def GetPixelColorsHorizontally(self, x: int, y: int, count: int) -> ctypes.Array:", "body": "arrayType = ctypes.c_uint32 * countvalues = arrayType()_DllClient.instance().dll.BitmapGetPixelsHorizontally(ctypes.c_size_t(self._bitmap), x, y, values, count)return values", "docstring": "x: int.\ny: int.\ncount: int.\nReturn `ctypes.Array`, an iterable array of int values in argb form point x,y horizontally.", "id": "f1782:c42:m12"} {"signature": "def GetSelectionItemPattern(self) -> SelectionItemPattern:", "body": "return self.GetPattern(PatternId.SelectionItemPattern)", "docstring": "Return `SelectionItemPattern` if it supports the pattern else None(Must support according to MSDN).", "id": "f1782:c102:m1"} {"signature": "@propertydef SupportedTextSelection(self) -> bool:", "body": "return bool(self.pattern.SupportedTextSelection)", "docstring": "Property SupportedTextSelection.\nCall IUIAutomationTextPattern::get_SupportedTextSelection.\nReturn bool, specifies the type of text selection that is supported by the control.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextpattern-get_supportedtextselection", "id": "f1782:c70:m2"} {"signature": "def Hide(self, waitTime: float = OPERATION_WAIT_TIME) -> bool:", "body": "return self.ShowWindow(SW.Hide, waitTime)", "docstring": "Call native `ShowWindow(SW.Hide)`.\nwaitTime: float\nReturn bool, True if succeed otherwise False.", "id": "f1782:c78:m71"} {"signature": "def GetDockPattern(self) -> DockPattern:", "body": "return self.GetPattern(PatternId.DockPattern)", "docstring": "Return `DockPattern` if it supports the pattern else None(Conditional support according to MSDN).", "id": "f1782:c100:m1"} {"signature": "def GetExpandCollapsePattern(self) -> ExpandCollapsePattern:", "body": "return self.GetPattern(PatternId.ExpandCollapsePattern)", "docstring": "Return `ExpandCollapsePattern` if it supports the pattern else None(Conditional support according to MSDN).", "id": "f1782:c97:m2"} {"signature": "def __init__(self, textRange=None):", "body": "self.textRange = textRange", "docstring": "Refer 
https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nn-uiautomationclient-iuiautomationtextrange", "id": "f1782:c67:m0"} {"signature": "def __init__(self, pattern=None):", "body": "self.pattern = pattern", "docstring": "Refer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nn-uiautomationclient-iuiautomationtablepattern", "id": "f1782:c66:m0"} {"signature": "def FindText(self, text: str, backward: bool, ignoreCase: bool) -> '':", "body": "textRange = self.textRange.FindText(text, int(backward), int(ignoreCase))if textRange:return TextRange(textRange=textRange)", "docstring": "Call IUIAutomationTextRange::FindText.\ntext: str.\nbackward: bool, True if the last occurring text range should be returned instead of the first; otherwise False.\nignoreCase: bool, True if case should be ignored; otherwise False.\nreturn `TextRange` or None, a text range subset that contains the specified text.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextrange-findtext", "id": "f1782:c67:m7"} {"signature": "def GetWindowPattern(self) -> WindowPattern:", "body": "return self.GetPattern(PatternId.WindowPattern)", "docstring": "Return `WindowPattern` if it supports the pattern else None(Conditional support according to MSDN).", "id": "f1782:c117:m2"} {"signature": "def ToBitmap(self, x: int = , y: int = , width: int = , height: int = ) -> Bitmap:", "body": "bitmap = Bitmap()bitmap.FromControl(self, x, y, width, height)return bitmap", "docstring": "Capture control to a Bitmap object.\nx, y: int, the point in control's internal position(from 0,0).\nwidth, height: int, image's width and height from x, y, use 0 for entire area.\n If width(or height) < 0, image size will be control's width(or height) - width(or height).", "id": "f1782:c78:m78"} {"signature": "def __init__(self, width: int = , height: int = ):", "body": "self._width = widthself._height = heightself._bitmap = if width > and height > :self._bitmap = _DllClient.instance().dll.BitmapCreate(width, height)", "docstring": "Create a black bitmap of size(width, height).", "id": "f1782:c42:m0"} {"signature": "def keybd_event(bVk: int, bScan: int, dwFlags: int, dwExtraInfo: int) -> None:", "body": "ctypes.windll.user32.keybd_event(bVk, bScan, dwFlags, dwExtraInfo)", "docstring": "keybd_event from Win32.", "id": "f1782:m9"} {"signature": "def Select(self, waitTime: float = OPERATION_WAIT_TIME) -> bool:", "body": "ret = self.pattern.Select() == S_OKtime.sleep(waitTime)return ret", "docstring": "Call IUIAutomationSelectionItemPattern::Select.\nClear any selected items and then select the current element.\nwaitTime: float.\nReturn bool, True if succeed otherwise False.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationselectionitempattern-select", "id": "f1782:c59:m5"} {"signature": "def GetFirstChildControl(self) -> '':", "body": "ele = _AutomationClient.instance().ViewWalker.GetFirstChildElement(self.Element)return Control.CreateControlFromElement(ele)", "docstring": "Return `Control` subclass or None.", "id": "f1782:c78:m51"}
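A sketch tying together the ToBitmap record above and Bitmap.ToFile from this dump to capture the focused control to an image file; the file name is illustrative, and calling ToBitmap with no arguments assumes the elided defaults are 0 (entire area), as the docstring suggests:

import uiautomation as auto

ctrl = auto.GetFocusedControl()
bmp = ctrl.ToBitmap()                    # wraps Bitmap.FromControl; full control area
ok = bmp.ToFile('focused_control.png')   # encoder is picked from the file extension
print('saved:', ok)

{"signature": "def Click(x: int, y: int, waitTime: float = OPERATION_WAIT_TIME) -> None:", "body": "SetCursorPos(x, y)screenWidth, screenHeight = GetScreenSize()mouse_event(MouseEventFlag.LeftDown | MouseEventFlag.Absolute, x * // screenWidth, y * // screenHeight, , )time.sleep()mouse_event(MouseEventFlag.LeftUp | MouseEventFlag.Absolute, x * // 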
screenHeight, , )time.sleep(waitTime)", "docstring": "Simulate mouse click at point x, y.\nx: int.\ny: int.\nwaitTime: float.", "id": "f1782:m12"} {"signature": "def FindAttribute(self, textAttributeId: int, val, backward: bool) -> '':", "body": "textRange = self.textRange.FindAttribute(textAttributeId, val, int(backward))if textRange:return TextRange(textRange=textRange)", "docstring": "Call IUIAutomationTextRange::FindAttribute.\ntextAttributeID: int, a value in class `TextAttributeId`.\nval: COM VARIANT according to textAttributeId? todo.\nbackward: bool, True if the last occurring text range should be returned instead of the first; otherwise False.\nreturn `TextRange` or None, a text range subset that has the specified text attribute value.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextrange-findattribute", "id": "f1782:c67:m6"} {"signature": "def MoveToCenter(self) -> bool:", "body": "if self.IsTopLevel():rect = self.BoundingRectanglescreenWidth, screenHeight = GetScreenSize()x, y = (screenWidth - rect.width()) // , (screenHeight - rect.height()) // if x < : x = if y < : y = return SetWindowPos(self.NativeWindowHandle, SWP.HWND_Top, x, y, , , SWP.SWP_NoSize)return False", "docstring": "Move window to screen center.", "id": "f1782:c99:m8"} {"signature": "def Disappears(self, maxSearchSeconds: float = , searchIntervalSeconds: float = SEARCH_INTERVAL, printIfNotDisappear: bool = False) -> bool:", "body": "global DEBUG_EXIST_DISAPPEARstart = ProcessTime()while True:temp = DEBUG_EXIST_DISAPPEARDEBUG_EXIST_DISAPPEAR = False if not self.Exists(, , False):DEBUG_EXIST_DISAPPEAR = tempreturn TrueDEBUG_EXIST_DISAPPEAR = tempremain = start + maxSearchSeconds - ProcessTime()if remain > :time.sleep(min(remain, searchIntervalSeconds))else:if printIfNotDisappear or DEBUG_EXIST_DISAPPEAR:Logger.ColorfullyWriteLine(self.GetColorfulSearchPropertiesStr() + '')return False", "docstring": "maxSearchSeconds: float\nsearchIntervalSeconds: float\nCheck if control disappears every searchIntervalSeconds seconds in maxSearchSeconds seconds.\nReturn bool, True if control disappears.", "id": "f1782:c78:m59"} {"signature": "def __init__(self, pattern=None):", "body": "self.pattern = pattern", "docstring": "Refer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nn-uiautomationclient-iuiautomationitemcontainerpattern", "id": "f1782:c52:m0"} {"signature": "def WaitForInputIdle(self, milliseconds: int) -> bool:", "body": "return self.pattern.WaitForInputIdle(milliseconds) == S_OK", "docstring": "Call IUIAutomationWindowPattern::WaitForInputIdle.\nCause the calling code to block for the specified time or\n until the associated process enters an idle state, whichever completes first.\nmilliseconds: int.\nReturn bool, True if succeed otherwise False.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationwindowpattern-waitforinputidle", "id": "f1782:c77:m9"} {"signature": "@propertydef Author(self) -> str:", "body": "return self.pattern.CurrentAuthor", "docstring": "Property Author.\nCall IUIAutomationAnnotationPattern::get_CurrentAuthor.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationannotationpattern-get_currentauthor", "id": "f1782:c43:m3"} {"signature": "@propertydef LocalizedControlType(self) -> str:", "body": "return self.Element.CurrentLocalizedControlType", "docstring": "Property 
LocalizedControlType.\nCall IUIAutomationElement::get_CurrentLocalizedControlType.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentlocalizedcontroltype", "id": "f1782:c78:m32"} {"signature": "def GetRootControl() -> PaneControl:", "body": "return Control.CreateControlFromElement(_AutomationClient.instance().IUIAutomation.GetRootElement())", "docstring": "Get root control, the Desktop window.\nReturn `PaneControl`.", "id": "f1782:m71"} {"signature": "def WindowFromPoint(x: int, y: int) -> int:", "body": "return ctypes.windll.user32.WindowFromPoint(ctypes.wintypes.POINT(x, y))", "docstring": "WindowFromPoint from Win32.\nReturn int, a native window handle.", "id": "f1782:m4"} {"signature": "@propertydef HelpText(self) -> str:", "body": "return self.Element.CurrentHelpText", "docstring": "Property HelpText.\nCall IUIAutomationElement::get_CurrentHelpText.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currenthelptext", "id": "f1782:c78:m21"} {"signature": "@propertydef ProcessId(self) -> int:", "body": "return self.Element.CurrentProcessId", "docstring": "Property ProcessId.\nCall IUIAutomationElement::get_CurrentProcessId.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentprocessid", "id": "f1782:c78:m36"} {"signature": "def GetConsoleWindow() -> WindowControl:", "body": "return ControlFromHandle(ctypes.windll.kernel32.GetConsoleWindow())", "docstring": "Return `WindowControl`, a console window that runs python.", "id": "f1782:m74"} {"signature": "@staticmethoddef ColorfullyWrite(log: str, consoleColor: int = -, writeToFile: bool = True, printToStdout: bool = True, logFile: str = None) -> None:", "body": "text = []start = while True:index1 = log.find('', start)if index1 >= :if index1 > start:text.append((log[start:index1], consoleColor))index2 = log.find('>', index1)colorName = log[index1+:index2]index3 = log.find('', index2 + )text.append((log[index2 + :index3], Logger.ColorNames[colorName]))start = index3 + else:if start < len(log):text.append((log[start:], consoleColor))breakfor t, c in text:Logger.Write(t, c, writeToFile, printToStdout, logFile)", "docstring": "log: str.\nconsoleColor: int, a value in class `ConsoleColor`, such as `ConsoleColor.DarkGreen`.\nwriteToFile: bool.\nprintToStdout: bool.\nlogFile: str, log file path.\nColorfullyWrite('Hello Green !!!'), color name must be in Logger.ColorNames.", "id": "f1782:c41:m3"} {"signature": "def GetAnnotationObjects(self) -> list:", "body": "eleArray = self.pattern.GetCurrentAnnotationObjects()if eleArray:controls = []for i in range(eleArray.Length):ele = eleArray.GetElement(i)con = Control.CreateControlFromElement(element=ele)if con:controls.append(con)return controlsreturn []", "docstring": "Call IUIAutomationSpreadsheetItemPattern::GetCurrentAnnotationObjects.\nReturn list, a list of `Control` subclasses representing the annotations associated with this spreadsheet cell.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationspreadsheetitempattern-getcurrentannotationobjects", "id": "f1782:c61:m2"}
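A sketch that resolves the control under the mouse using the ControlFromPoint record below together with GetCursorPos, which this library also exposes; the printed properties are the ones documented in this dump:

import uiautomation as auto

x, y = auto.GetCursorPos()          # current cursor position
ctrl = auto.ControlFromPoint(x, y)  # may return None (see its docstring)
if ctrl:
    print(ctrl.ControlTypeName, ctrl.ClassName, ctrl.ProcessId)

{"signature": "def GetActiveComposition(self) -> TextRange:", "body": "textRange = self.pattern.GetActiveComposition()if textRange:return TextRange(textRange=textRange)", "docstring": "Call 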
IUIAutomationTextEditPattern::GetActiveComposition.\nReturn `TextRange` or None, the active composition.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtexteditpattern-getactivecomposition", "id": "f1782:c69:m1"} {"signature": "def GetTableItemPattern(self) -> TableItemPattern:", "body": "return self.GetPattern(PatternId.TableItemPattern)", "docstring": "Return `TableItemPattern` if it supports the pattern else None(Conditional support according to MSDN).", "id": "f1782:c93:m2"} {"signature": "def ControlFromPoint(x: int, y: int) -> Control:", "body": "element = _AutomationClient.instance().IUIAutomation.ElementFromPoint(ctypes.wintypes.POINT(x, y))return Control.CreateControlFromElement(element)", "docstring": "Call IUIAutomation ElementFromPoint x,y. May return None if mouse is over cmd's title bar icon.\nReturn `Control` subclass or None.", "id": "f1782:m75"} {"signature": "def SetWindowLong(handle: int, index: int, value: int) -> int:", "body": "return ctypes.windll.user32.SetWindowLongW(ctypes.c_void_p(handle), index, value)", "docstring": "SetWindowLong from Win32.\nhandle: int, the handle of a native window.\nindex: int.\nvalue: int.\nReturn int, the previous value before set.", "id": "f1782:m33"} {"signature": "def ShowDesktop(waitTime: float = ) -> None:", "body": "SendKeys('')time.sleep(waitTime)", "docstring": "Show Desktop by pressing win + d", "id": "f1782:m86"} {"signature": "def Move(self, unit: int, count: int, waitTime: float = OPERATION_WAIT_TIME) -> int:", "body": "ret = self.textRange.Move(unit, count)time.sleep(waitTime)return ret", "docstring": "Call IUIAutomationTextRange::Move.\nMove the text range forward or backward by the specified number of text units.\nunit: int, a value in class `TextUnit`.\ncount: int, the number of text units to move.\n A positive value moves the text range forward.\n A negative value moves the text range backward. 
Zero has no effect.\nwaitTime: float.\nReturn: int, the number of text units actually moved.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextrange-move", "id": "f1782:c67:m13"} {"signature": "def Move(self, x: int, y: int, waitTime: float = OPERATION_WAIT_TIME) -> bool:", "body": "ret = self.pattern.Move(x, y) == S_OKtime.sleep(waitTime)return ret", "docstring": "Call IUIAutomationTransformPattern::Move.\nMove the UI Automation element.\nx: int.\ny: int.\nwaitTime: float.\nReturn bool, True if succeed otherwise False.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtransformpattern-move", "id": "f1782:c73:m4"} {"signature": "def GetGrabbedItems(self) -> list:", "body": "eleArray = self.pattern.GetCurrentGrabbedItems()if eleArray:controls = []for i in range(eleArray.Length):ele = eleArray.GetElement(i)con = Control.CreateControlFromElement(element=ele)if con:controls.append(con)return controlsreturn []", "docstring": "Call IUIAutomationDragPattern::GetCurrentGrabbedItems.\nReturn list, a list of `Control` subclasses that represent the full set of items\n that the user is dragging as part of a drag operation.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationdragpattern-getcurrentgrabbeditems", "id": "f1782:c46:m4"} {"signature": "def __init__(self, pattern=None):", "body": "self.pattern = pattern", "docstring": "Refer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nn-uiautomationclient-iuiautomationscrollitempattern", "id": "f1782:c57:m0"} {"signature": "@propertydef CanMaximize(self) -> bool:", "body": "return bool(self.pattern.CurrentCanMaximize)", "docstring": "Property CanMaximize.\nCall IUIAutomationWindowPattern::get_CurrentCanMaximize.\nReturn bool, indicates whether the window can be maximized.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationwindowpattern-get_currentcanmaximize", "id": "f1782:c77:m2"} {"signature": "def WalkTree(top, getChildren: Callable = None, getFirstChild: Callable = None, getNextSibling: Callable = None, yieldCondition: Callable = None, includeTop: bool = False, maxDepth: int = ):", "body": "if maxDepth <= :returndepth = if getChildren:if includeTop:if not yieldCondition or yieldCondition(top, ):yield top, , children = getChildren(top)childList = [children]while depth >= : lastItems = childList[-]if lastItems:if not yieldCondition or yieldCondition(lastItems[], depth + ):yield lastItems[], depth + , len(lastItems) - if depth + < maxDepth:children = getChildren(lastItems[])if children:depth += childList.append(children)del lastItems[]else:del childList[depth]depth -= elif getFirstChild and getNextSibling:if includeTop:if not yieldCondition or yieldCondition(top, ):yield top, child = getFirstChild(top)childList = [child]while depth >= : lastItem = childList[-]if lastItem:if not yieldCondition or yieldCondition(lastItem, depth + ):yield lastItem, depth + child = getNextSibling(lastItem)childList[depth] = childif depth + < maxDepth:child = getFirstChild(lastItem)if child:depth += childList.append(child)else:del childList[depth]depth -= ", "docstring": "Walk a tree not using recursive algorithm.\ntop: a tree node.\ngetChildren: function(treeNode) -> list.\ngetFirstChild: function(treeNode) -> treeNode.\ngetNextSibling: function(treeNode) -> treeNode.\nyieldCondition: 
function(treeNode, depth) -> bool.\nincludeTop: bool, if True yield top first.\nmaxDepth: int, enum depth.\n\nIf getChildren is valid, ignore getFirstChild and getNextSibling,\n yield 3 items tuple: (treeNode, depth, remain children count in current depth).\nIf getChildren is not valid, using getFirstChild and getNextSibling,\n yield 2 items tuple: (treeNode, depth).\nIf yieldCondition is not None, only yield tree nodes that yieldCondition(treeNode, depth)->bool returns True.\n\nFor example:\ndef GetDirChildren(dir_):\n if os.path.isdir(dir_):\n return [os.path.join(dir_, it) for it in os.listdir(dir_)]\nfor it, depth, leftCount in WalkTree('D:\\\\', getChildren= GetDirChildren):\n print(it, depth, leftCount)", "id": "f1782:m70"} {"signature": "@propertydef ExpandCollapseState(self) -> int:", "body": "return self.pattern.CurrentExpandCollapseState", "docstring": "Property ExpandCollapseState.\nCall IUIAutomationExpandCollapsePattern::get_CurrentExpandCollapseState.\nReturn int, a value in class ExpandCollapseState.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationexpandcollapsepattern-get_currentexpandcollapsestate", "id": "f1782:c48:m1"} {"signature": "def GetValuePattern(self) -> ValuePattern:", "body": "return self.GetPattern(PatternId.ValuePattern)", "docstring": "Return `ValuePattern` if it supports the pattern else None(Conditional support according to MSDN).", "id": "f1782:c107:m3"} {"signature": "def GetLastChildControl(self) -> '':", "body": "ele = _AutomationClient.instance().ViewWalker.GetLastChildElement(self.Element)return Control.CreateControlFromElement(ele)", "docstring": "Return `Control` subclass or None.", "id": "f1782:c78:m52"} {"signature": "def GetSupportedViews(self) -> list:", "body": "return self.pattern.GetCurrentSupportedViews()", "docstring": "Call IUIAutomationMultipleViewPattern::GetCurrentSupportedViews, todo.\nReturn list, a list of int, control-specific view identifiers.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationmultipleviewpattern-getcurrentsupportedviews", "id": "f1782:c54:m2"} {"signature": "@propertydef ContainingGrid(self) -> '':", "body": "return Control.CreateControlFromElement(self.pattern.CurrentContainingGrid)", "docstring": "Property ContainingGrid.\nCall IUIAutomationGridItemPattern::get_CurrentContainingGrid.\nReturn `Control` subclass, the element that contains the grid item.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationgriditempattern-get_currentcontaininggrid", "id": "f1782:c49:m3"} {"signature": "def RangeFromChild(self, child) -> TextRange:", "body": "textRange = self.pattern.RangeFromChild(child.Element)if textRange:return TextRange(textRange=textRange)", "docstring": "Call IUIAutomationTextPattern::RangeFromChild.\nchild: `Control` or its subclass.\nReturn `TextRange` or None, a text range enclosing a child element such as an image,\n hyperlink, Microsoft Excel spreadsheet, or other embedded object.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextpattern-rangefromchild", "id": "f1782:c70:m5"}
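A sketch of WalkTree in its getFirstChild/getNextSibling form, mirroring the docstring's own example; GetNextSiblingControl is assumed to exist alongside the GetFirstChildControl and GetLastChildControl records shown here:

import uiautomation as auto

root = auto.GetRootControl()
for ctrl, depth in auto.WalkTree(root,
                                 getFirstChild=lambda c: c.GetFirstChildControl(),
                                 getNextSibling=lambda c: c.GetNextSiblingControl(),
                                 includeTop=True, maxDepth=2):
    print(' ' * 4 * depth + ctrl.ControlTypeName)  # 2-tuple form: (node, depth)

{"signature": "@propertydef Minimum(self) -> float:", "body": "return self.pattern.CurrentMinimum", "docstring": "Property Minimum.\nCall IUIAutomationRangeValuePattern::get_CurrentMinimum.\nReturn float, the minimum value of the control.\nRefer 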
https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationrangevaluepattern-get_currentminimum", "id": "f1782:c56:m4"} {"signature": "def __init__(self, pattern=None):", "body": "self.pattern = pattern", "docstring": "Refer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nn-uiautomationclient-iuiautomationspreadsheetpattern", "id": "f1782:c62:m0"} {"signature": "def GetSelectionPattern(self) -> SelectionPattern:", "body": "return self.GetPattern(PatternId.SelectionPattern)", "docstring": "Return `SelectionPattern` if it supports the pattern else None(Conditional support according to MSDN).", "id": "f1782:c94:m4"} {"signature": "@propertydef AriaProperties(self) -> str:", "body": "return self.Element.CurrentAriaProperties", "docstring": "Property AriaProperties.\nCall IUIAutomationElement::get_CurrentAriaProperties.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentariaproperties", "id": "f1782:c78:m12"} {"signature": "def Resize(self, width: int, height: int, waitTime: float = OPERATION_WAIT_TIME) -> bool:", "body": "ret = self.pattern.Resize(width, height) == S_OKtime.sleep(waitTime)return ret", "docstring": "Call IUIAutomationTransformPattern::Resize.\nResize the UI Automation element.\nwidth: int.\nheight: int.\nwaitTime: float.\nReturn bool, True if succeed otherwise False.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtransformpattern-resize", "id": "f1782:c73:m5"} {"signature": "@staticmethoddef Write(log: Any, consoleColor: int = ConsoleColor.Default, writeToFile: bool = True, printToStdout: bool = True, logFile: str = None, printTruncateLen: int = ) -> None:", "body": "if not isinstance(log, str):log = str(log)if printToStdout and sys.stdout:isValidColor = (consoleColor >= ConsoleColor.Black and consoleColor <= ConsoleColor.White)if isValidColor:SetConsoleColor(consoleColor)try:if printTruncateLen > and len(log) > printTruncateLen:sys.stdout.write(log[:printTruncateLen] + '')else:sys.stdout.write(log)except Exception as ex:SetConsoleColor(ConsoleColor.Red)isValidColor = Truesys.stdout.write(ex.__class__.__name__ + '')if log.endswith(''):sys.stdout.write('')if isValidColor:ResetConsoleColor()sys.stdout.flush()if not writeToFile:returnfileName = logFile if logFile else Logger.FileNametry:fout = open(fileName, '', encoding='')fout.write(log)except Exception as ex:if sys.stdout:sys.stdout.write(ex.__class__.__name__ + '')finally:if fout:fout.close()", "docstring": "log: any type.\nconsoleColor: int, a value in class `ConsoleColor`, such as `ConsoleColor.DarkGreen`.\nwriteToFile: bool.\nprintToStdout: bool.\nlogFile: str, log file path.\nprintTruncateLen: int, if <= 0, log is not truncated when printed.", "id": "f1782:c41:m1"} {"signature": "def GetRangeValuePattern(self) -> RangeValuePattern:", "body": "return self.GetPattern(PatternId.RangeValuePattern)", "docstring": "Return `RangeValuePattern` if it supports the pattern else None(Conditional support according to MSDN).", "id": "f1782:c103:m1"}
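A sketch of the Logger.Write record above; the messages are illustrative:

import uiautomation as auto

auto.Logger.Write('scenario started\n', consoleColor=auto.ConsoleColor.DarkGreen)
auto.Logger.Write('file-only detail\n', printToStdout=False)  # still written to the log file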
{"signature": "def Select(self, flagsSelect: int, waitTime: float = OPERATION_WAIT_TIME) -> bool:", "body": "ret = self.pattern.Select(flagsSelect) == S_OKtime.sleep(waitTime)return ret", "docstring": "Call IUIAutomationLegacyIAccessiblePattern::Select.\nPerform a Microsoft Active Accessibility selection.\nflagsSelect: int, a value in `AccessibleSelection`.\nwaitTime: float.\nReturn bool, True if succeed otherwise False.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationlegacyiaccessiblepattern-select", "id": "f1782:c53:m13"} {"signature": "def GetTransformPattern(self) -> TransformPattern:", "body": "return self.GetPattern(PatternId.TransformPattern)", "docstring": "Return `TransformPattern` if it supports the pattern else None(Conditional support according to MSDN).", "id": "f1782:c90:m1"} {"signature": "def MetroClose(self, waitTime: float = OPERATION_WAIT_TIME) -> None:", "body": "if self.ClassName == METRO_WINDOW_CLASS_NAME:screenWidth, screenHeight = GetScreenSize()MoveTo(screenWidth // , , waitTime=)DragDrop(screenWidth // , , screenWidth // , screenHeight, waitTime=waitTime)else:Logger.WriteLine('', ConsoleColor.Yellow)", "docstring": "Only works on Windows 8/8.1, if the current window is Metro UI.\nwaitTime: float.", "id": "f1782:c120:m4"} {"signature": "def GetGridItemPattern(self) -> GridItemPattern:", "body": "return self.GetPattern(PatternId.GridItemPattern)", "docstring": "Return `GridItemPattern` if it supports the pattern else None(Conditional support according to MSDN).", "id": "f1782:c93:m1"} {"signature": "def ControlsAreSame(control1: Control, control2: Control) -> bool:", "body": "return bool(_AutomationClient.instance().IUIAutomation.CompareElements(control1.Element, control2.Element))", "docstring": "control1: `Control` or its subclass.\ncontrol2: `Control` or its subclass.\nReturn bool, True if control1 and control2 represent the same control otherwise False.", "id": "f1782:m80"} {"signature": "@propertydef Value(self) -> float:", "body": "return self.pattern.CurrentValue", "docstring": "Property Value.\nCall IUIAutomationRangeValuePattern::get_CurrentValue.\nReturn float, the value of the control.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationrangevaluepattern-get_currentvalue", "id": "f1782:c56:m6"} {"signature": "@propertydef IsGrabbed(self) -> bool:", "body": "return bool(self.pattern.CurrentIsGrabbed)", "docstring": "Property IsGrabbed.\nCall IUIAutomationDragPattern::get_CurrentIsGrabbed.\nReturn bool, indicates whether the user has grabbed this element as part of a drag-and-drop operation.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationdragpattern-get_currentisgrabbed", "id": "f1782:c46:m3"} {"signature": "@propertydef HorizontalScrollPercent(self) -> float:", "body": "return self.pattern.CurrentHorizontalScrollPercent", "docstring": "Property HorizontalScrollPercent.\nCall IUIAutomationScrollPattern::get_CurrentHorizontalScrollPercent.\nReturn float, the horizontal scroll position.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationscrollpattern-get_currenthorizontalscrollpercent", "id": "f1782:c58:m2"} {"signature": "@classmethoddef instance(cls) -> '':", "body": "if cls._instance is None:cls._instance = cls()return cls._instance", "docstring": "Singleton instance (this prevents com creation on import).", "id": "f1782:c0:m0"} {"signature": "def Maximize(self, waitTime: float = OPERATION_WAIT_TIME) -> bool:", "body": "if self.IsTopLevel():return self.ShowWindow(SW.ShowMaximized, waitTime)return False", "docstring": "Maximize the top level window.", "id": "f1782:c99:m3"}
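A sketch of the top-level window operations collected in this dump (SetTopmost, Maximize, Restore, MoveToCenter); the SubName value is an illustrative assumption:

import uiautomation as auto

wnd = auto.WindowControl(searchDepth=1, SubName='Editor')  # illustrative window search
if wnd.Exists(3, 1):
    wnd.SetTopmost(True)   # keep above other windows
    wnd.Maximize()
    wnd.Restore()
    wnd.MoveToCenter()     # only works for top level windows

{"signature": "@propertydef DefaultAction(self) -> str:", "body": "return 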
self.pattern.CurrentDefaultAction", "docstring": "Property DefaultAction.\nCall IUIAutomationLegacyIAccessiblePattern::get_CurrentDefaultAction.\nReturn str, the Microsoft Active Accessibility current default action for the element.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationlegacyiaccessiblepattern-get_currentdefaultaction", "id": "f1782:c53:m2"} {"signature": "def __init__(self, pattern=None):", "body": "self.pattern = pattern", "docstring": "Refer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nn-uiautomationclient-iuiautomationdragpattern", "id": "f1782:c46:m0"} {"signature": "def ExpandToEnclosingUnit(self, waitTime: float = OPERATION_WAIT_TIME) -> bool:", "body": "ret = self.textRange.ExpandToEnclosingUnit() == S_OKtime.sleep(waitTime)return ret", "docstring": "Call IUIAutomationTextRange::ExpandToEnclosingUnit.\nNormalize the text range by the specified text unit.\n The range is expanded if it is smaller than the specified unit,\n or shortened if it is longer than the specified unit.\nwaitTime: float.\nReturn bool, True if succeed otherwise False.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextrange-expandtoenclosingunit", "id": "f1782:c67:m5"} {"signature": "@propertydef Description(self) -> str:", "body": "return self.pattern.CurrentDescription", "docstring": "Property Description.\nCall IUIAutomationLegacyIAccessiblePattern::get_CurrentDescription.\nReturn str, the Microsoft Active Accessibility description of the element.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationlegacyiaccessiblepattern-get_currentdescription", "id": "f1782:c53:m3"} {"signature": "@propertydef HorizontalViewSize(self) -> float:", "body": "return self.pattern.CurrentHorizontalViewSize", "docstring": "Property HorizontalViewSize.\nCall IUIAutomationScrollPattern::get_CurrentHorizontalViewSize.\nReturn float, the horizontal size of the viewable region of a scrollable element.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationscrollpattern-get_currenthorizontalviewsize", "id": "f1782:c58:m3"} {"signature": "def GetExpandCollapsePattern(self) -> ExpandCollapsePattern:", "body": "return self.GetPattern(PatternId.ExpandCollapsePattern)", "docstring": "Return `ExpandCollapsePattern` if it supports the pattern else None(Conditional support according to MSDN).", "id": "f1782:c86:m2"} {"signature": "def GetTransformPattern(self) -> TransformPattern:", "body": "return self.GetPattern(PatternId.TransformPattern)", "docstring": "Return `TransformPattern` if it supports the pattern else None(Must support according to MSDN).", "id": "f1782:c120:m1"} {"signature": "@propertydef IsPassword(self) -> bool:", "body": "return self.Element.CurrentIsPassword", "docstring": "Property IsPassword.\nCall IUIAutomationElement::get_CurrentIsPassword.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentispassword", "id": "f1782:c78:m28"} {"signature": "def GetConsoleOriginalTitle() -> str:", "body": "if IsNT6orHigher:arrayType = ctypes.c_wchar * MAX_PATHvalues = arrayType()ctypes.windll.kernel32.GetConsoleOriginalTitleW(values, MAX_PATH)return values.valueelse:raise RuntimeError('')", "docstring": "GetConsoleOriginalTitle from Win32.\nReturn str.\nOnly available on 
Windows Vista or higher.", "id": "f1782:m44"} {"signature": "@propertydef ClassName(self) -> str:", "body": "return self.Element.CurrentClassName", "docstring": "Property ClassName.\nCall IUIAutomationElement::get_CurrentClassName.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentclassname", "id": "f1782:c78:m16"} {"signature": "def SendInput(*inputs) -> int:", "body": "nInputs = len(inputs)LPINPUT = INPUT * nInputspInputs = LPINPUT(*inputs)cbSize = ctypes.c_int(ctypes.sizeof(INPUT))return ctypes.windll.user32.SendInput(nInputs, pInputs, cbSize)", "docstring": "SendInput from Win32.\ninputs: `INPUT`.\nReturn int, the number of events that it successfully inserted into the keyboard or mouse input stream.\n If the function returns zero, the input was already blocked by another thread.", "id": "f1782:m61"} {"signature": "def GetItemByName(self, name: str) -> '':", "body": "ele = self.pattern.GetItemByName(name)return Control.CreateControlFromElement(element=ele)", "docstring": "Call IUIAutomationSpreadsheetPattern::GetItemByName.\nname: str.\nReturn `Control` subclass or None, represents the spreadsheet cell that has the specified name.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationspreadsheetpattern-getitembyname", "id": "f1782:c62:m1"} {"signature": "def PressMouse(x: int, y: int, waitTime: float = OPERATION_WAIT_TIME) -> None:", "body": "SetCursorPos(x, y)screenWidth, screenHeight = GetScreenSize()mouse_event(MouseEventFlag.LeftDown | MouseEventFlag.Absolute, x * // screenWidth, y * // screenHeight, , )time.sleep(waitTime)", "docstring": "Press left mouse.\nx: int.\ny: int.\nwaitTime: float.", "id": "f1782:m15"} {"signature": "@propertydef DropEffects(self) -> list:", "body": "return self.pattern.CurrentDropEffects", "docstring": "Property DropEffects.\nCall IUIAutomationDragPattern::get_CurrentDropEffects, todo SAFEARRAY.\nReturn list, a list of localized strings that enumerate the full set of effects\n that can happen when this element as part of a drag-and-drop operation.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationdragpattern-get_currentdropeffects", "id": "f1782:c46:m2"} {"signature": "def GetTablePattern(self) -> TablePattern:", "body": "return self.GetPattern(PatternId.TablePattern)", "docstring": "Return `TablePattern` if it supports the pattern else None(Must support according to MSDN).", "id": "f1782:c81:m2"} {"signature": "@propertydef AccessKey(self) -> str:", "body": "return self.Element.CurrentAccessKey", "docstring": "Property AccessKey.\nCall IUIAutomationElement::get_CurrentAccessKey.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentaccesskey", "id": "f1782:c78:m11"} {"signature": "def GetTransformPattern(self) -> TransformPattern:", "body": "return self.GetPattern(PatternId.TransformPattern)", "docstring": "Return `TransformPattern` if it supports the pattern else None(Must support according to MSDN).", "id": "f1782:c114:m1"} {"signature": "def SetConsoleTitle(text: str) -> bool:", "body": "return bool(ctypes.windll.kernel32.SetConsoleTitleW(ctypes.c_wchar_p(text)))", "docstring": "SetConsoleTitle from Win32.\ntext: str.\nReturn bool, True if succeed otherwise False.", "id": "f1782:m46"}
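A sketch of the low-level input helpers nearby (PressMouse, ReleaseMouse, MiddleClick) plus SetConsoleTitle; the coordinates and title are illustrative:

import uiautomation as auto

auto.SetConsoleTitle('automation demo')
auto.PressMouse(200, 200)   # left button down at (200, 200)
auto.ReleaseMouse()         # release at the current cursor position
auto.MiddleClick(300, 300)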
{"signature": "@propertydef Row(self) -> int:", "body": "return self.pattern.CurrentRow", "docstring": "Property Row.\nCall IUIAutomationGridItemPattern::get_CurrentRow.\nReturn int, the zero-based index of the row that contains the grid item.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationgriditempattern-get_currentrow", "id": "f1782:c49:m4"} {"signature": "def GetFocusedControl() -> Control:", "body": "return Control.CreateControlFromElement(_AutomationClient.instance().IUIAutomation.GetFocusedElement())", "docstring": "Return `Control` subclass.", "id": "f1782:m72"} {"signature": "def ControlFromHandle(handle: int) -> Control:", "body": "return Control.CreateControlFromElement(_AutomationClient.instance().IUIAutomation.ElementFromHandle(handle))", "docstring": "Call IUIAutomation.ElementFromHandle with a native handle.\nhandle: int, a native window handle.\nReturn `Control` subclass.", "id": "f1782:m79"} {"signature": "def Restore(self, waitTime: float = OPERATION_WAIT_TIME) -> bool:", "body": "if self.IsTopLevel():return self.ShowWindow(SW.Restore, waitTime)return False", "docstring": "Restore window to normal state.\nSimilar to SwitchToThisWindow.", "id": "f1782:c99:m7"} {"signature": "def GetSelectionPattern(self) -> SelectionPattern:", "body": "return self.GetPattern(PatternId.SelectionPattern)", "docstring": "Return `SelectionPattern` if it supports the pattern else None(Must support according to MSDN).", "id": "f1782:c110:m1"} {"signature": "@propertydef VerticallyScrollable(self) -> bool:", "body": "return bool(self.pattern.CurrentVerticallyScrollable)", "docstring": "Property VerticallyScrollable.\nCall IUIAutomationScrollPattern::get_CurrentVerticallyScrollable.\nReturn bool, indicates whether the element can scroll vertically.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationscrollpattern-get_currentverticallyscrollable", "id": "f1782:c58:m4"} {"signature": "def SetPixelColorsVertically(self, x: int, y: int, colors: Iterable) -> bool:", "body": "count = len(colors)arrayType = ctypes.c_uint32 * countvalues = arrayType(*colors)return _DllClient.instance().dll.BitmapSetPixelsVertically(ctypes.c_size_t(self._bitmap), x, y, values, count)", "docstring": "Set pixel colors from x,y vertically.\nx: int.\ny: int.\ncolors: Iterable, an iterable list of int color values in argb.\nReturn bool, True if succeed otherwise False.", "id": "f1782:c42:m15"} {"signature": "def __init__(self, pattern=None):", "body": "self.pattern = pattern", "docstring": "Refer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nn-uiautomationclient-iuiautomationtextpattern2", "id": "f1782:c71:m0"} {"signature": "def WheelUp(self, wheelTimes: int = , interval: float = , waitTime: float = OPERATION_WAIT_TIME) -> None:", "body": "x, y = GetCursorPos()self.SetFocus()self.MoveCursorToMyCenter(False)WheelUp(wheelTimes, interval, waitTime)SetCursorPos(x, y)", "docstring": "Make control have focus first, move cursor to center and mouse wheel up.\nwheelTimes: int.\ninterval: float.\nwaitTime: float.", "id": "f1782:c78:m68"} {"signature": "def WaitHotKeyReleased(hotkey: tuple) -> None:", "body": "mod = {ModifierKey.Alt: Keys.VK_MENU,ModifierKey.Control: Keys.VK_CONTROL,ModifierKey.Shift: Keys.VK_SHIFT,ModifierKey.Win: Keys.VK_LWIN}while True:time.sleep()if IsKeyPressed(hotkey[]):continuefor k, v in mod.items():if k & hotkey[]:if IsKeyPressed(v):breakelse:break", "docstring": "hotkey: tuple, two ints tuple(modifierKey, key)", "id": 
"f1782:m89"} {"signature": "def AddToSelection(self, waitTime: float = OPERATION_WAIT_TIME) -> bool:", "body": "ret = self.textRange.AddToSelection() == S_OKtime.sleep(waitTime)return ret", "docstring": "Call IUIAutomationTextRange::AddToSelection.\nAdd the text range to the collection of selected text ranges in a control that supports multiple, disjoint spans of selected text.\nwaitTime: float.\nReturn bool, True if succeed otherwise False.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextrange-addtoselection", "id": "f1782:c67:m1"} {"signature": "def MiddleClick(x: int, y: int, waitTime: float = OPERATION_WAIT_TIME) -> None:", "body": "SetCursorPos(x, y)screenWidth, screenHeight = GetScreenSize()mouse_event(MouseEventFlag.MiddleDown | MouseEventFlag.Absolute, x * // screenWidth, y * // screenHeight, , )time.sleep()mouse_event(MouseEventFlag.MiddleUp | MouseEventFlag.Absolute, x * // screenWidth, y * // screenHeight, , )time.sleep(waitTime)", "docstring": "Simulate mouse middle click at point x, y.\nx: int.\ny: int.\nwaitTime: float.", "id": "f1782:m13"} {"signature": "@propertydef DockPosition(self) -> int:", "body": "return self.pattern.CurrentDockPosition", "docstring": "Property DockPosition.\nCall IUIAutomationDockPattern::get_CurrentDockPosition.\nReturn int, a value in class `DockPosition`.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationdockpattern-get_currentdockposition", "id": "f1782:c45:m1"} {"signature": "@propertydef IsTopmost(self) -> bool:", "body": "return bool(self.pattern.CurrentIsTopmost)", "docstring": "Property IsTopmost.\nCall IUIAutomationWindowPattern::get_CurrentIsTopmost.\nReturn bool, indicates whether the window is the topmost element in the z-order.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationwindowpattern-get_currentistopmost", "id": "f1782:c77:m5"} {"signature": "@propertydef Shape(self) -> str:", "body": "return self.pattern.CurrentShape", "docstring": "Property Shape.\nCall IUIAutomationStylesPattern::get_CurrentShape.\nReturn str, the shape of an element in a document.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationstylespattern-get_currentshape", "id": "f1782:c63:m4"} {"signature": "def GetPixelColor(self, x: int, y: int) -> int:", "body": "handle = self.NativeWindowHandleif handle:return GetPixelColor(x, y, handle)", "docstring": "Call native `GetPixelColor` if control has a valid native handle.\nUse `self.ToBitmap` if control doesn't have a valid native handle or you get many pixels.\nx: int, internal x position.\ny: int, internal y position.\nReturn int, a color value in bgr.\nr = bgr & 0x0000FF\ng = (bgr & 0x00FF00) >> 8\nb = (bgr & 0xFF0000) >> 16", "id": "f1782:c78:m77"} {"signature": "def ReleaseMouse(waitTime: float = OPERATION_WAIT_TIME) -> None:", "body": "x, y = GetCursorPos()screenWidth, screenHeight = GetScreenSize()mouse_event(MouseEventFlag.LeftUp | MouseEventFlag.Absolute, x * // screenWidth, y * // screenHeight, , )time.sleep(waitTime)", "docstring": "Release left mouse.\nwaitTime: float.", "id": "f1782:m16"} {"signature": "@staticmethoddef CreateControlFromControl(control: '') -> '':", "body": "newControl = Control.CreateControlFromElement(control.Element)return newControl", "docstring": "Create a concreate `Control` from a control instance, copy it.\ncontrol: 
{"signature": "def __init__(self, pattern=None):", "body": "self.pattern = pattern", "docstring": "Refer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nn-uiautomationclient-iuiautomationtransformpattern2", "id": "f1782:c74:m0"}
{"signature": "@propertydef ControlTypeName(self) -> str:", "body": "return ControlTypeNames[self.ControlType]", "docstring": "Property ControlTypeName.", "id": "f1782:c78:m46"}
{"signature": "def GetTransformPattern(self) -> TransformPattern:", "body": "return self.GetPattern(PatternId.TransformPattern)", "docstring": "Return `TransformPattern` if it supports the pattern else None(Conditional support according to MSDN).", "id": "f1782:c91:m2"}
{"signature": "def GetTogglePattern(self) -> TogglePattern:", "body": "return self.GetPattern(PatternId.TogglePattern)", "docstring": "Return `TogglePattern` if it supports the pattern else None(Conditional support according to MSDN).", "id": "f1782:c80:m3"}
{"signature": "def GetRowHeaderItems(self) -> list:", "body": "eleArray = self.pattern.GetCurrentRowHeaderItems()if eleArray:controls = []for i in range(eleArray.Length):ele = eleArray.GetElement(i)con = Control.CreateControlFromElement(element=ele)if con:controls.append(con)return controlsreturn []", "docstring": "Call IUIAutomationTableItemPattern::GetCurrentRowHeaderItems.\nReturn list, a list of `Control` subclasses, the row headers associated with a table item or cell.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtableitempattern-getcurrentrowheaderitems", "id": "f1782:c65:m2"}
{"signature": "def MoveWindow(self, x: int, y: int, width: int, height: int, repaint: bool = True) -> bool:", "body": "handle = self.NativeWindowHandleif handle:return MoveWindow(handle, x, y, width, height, int(repaint))return False", "docstring": "Call native MoveWindow if control has a valid native handle.\nx: int.\ny: int.\nwidth: int.\nheight: int.\nrepaint: bool.\nReturn bool, True if succeed otherwise False.", "id": "f1782:c78:m72"}
{"signature": "def GetPropertyValue(self, propertyId: int) -> Any:", "body": "return self.Element.GetCurrentPropertyValue(propertyId)", "docstring": "Call IUIAutomationElement::GetCurrentPropertyValue.\npropertyId: int, a value in class `PropertyId`.\nReturn Any, corresponding type according to propertyId.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-getcurrentpropertyvalue", "id": "f1782:c78:m41"}
{"signature": "def GetExpandCollapsePattern(self) -> ExpandCollapsePattern:", "body": "return self.GetPattern(PatternId.ExpandCollapsePattern)", "docstring": "Return `ExpandCollapsePattern` if it supports the pattern else None(Must support according to MSDN).", "id": "f1782:c119:m1"}
{"signature": "@classmethoddef instance(cls) -> '':", "body": "if cls._instance is None:cls._instance = cls()return cls._instance", "docstring": "Singleton instance (this prevents com creation on import).", "id": "f1782:c1:m0"}
{"signature": "def Cancel(self) -> bool:", "body": "return self.pattern.Cancel() == S_OK", "docstring": "Call IUIAutomationSynchronizedInputPattern::Cancel.\nCause the Microsoft UI Automation provider to stop listening for mouse or keyboard input.\nReturn bool, True if succeed otherwise False.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationsynchronizedinputpattern-cancel", "id": "f1782:c64:m1"}
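The instance() classmethod above (f1782:c1:m0) is a lazy singleton: the COM client is only constructed on first use rather than at import time. A generic sketch of the same pattern, with a placeholder class standing in for the COM wrapper:

class _Client:
    _instance = None  # shared across all callers

    def __init__(self):
        pass  # placeholder for expensive COM initialization

    @classmethod
    def instance(cls) -> '_Client':
        if cls._instance is None:  # create lazily, on first request
            cls._instance = cls()
        return cls._instance

assert _Client.instance() is _Client.instance()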
{"signature": "def GetSelectionItemPattern(self) -> SelectionItemPattern:", "body": "return self.GetPattern(PatternId.SelectionItemPattern)", "docstring": "Return `SelectionItemPattern` if it supports the pattern else None(Must support according to MSDN).", "id": "f1782:c86:m1"}
{"signature": "def IsKeyPressed(key: int) -> bool:", "body": "state = ctypes.windll.user32.GetAsyncKeyState(key)return bool(state & 0x8000)", "docstring": "key: int, a value in class `Keys`.\nReturn bool.", "id": "f1782:m56"}
{"signature": "def ShowWindow(self, cmdShow: int, waitTime: float = OPERATION_WAIT_TIME) -> bool:", "body": "handle = self.NativeWindowHandleif not handle:control = selfwhile not handle:control = control.GetParentControl()handle = control.NativeWindowHandleif handle:ret = ShowWindow(handle, cmdShow)time.sleep(waitTime)return ret", "docstring": "Get a native handle from self or ancestors until valid and call native `ShowWindow` with cmdShow.\ncmdShow: int, a value in class `SW`.\nwaitTime: float.\nReturn bool, True if succeed otherwise False.", "id": "f1782:c78:m69"}
{"signature": "@propertydef CanZoom(self) -> bool:", "body": "return bool(self.pattern.CurrentCanZoom)", "docstring": "Property CanZoom.\nCall IUIAutomationTransformPattern2::get_CurrentCanZoom.\nReturn bool, indicates whether the control supports zooming of its viewport.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtransformpattern2-get_CurrentCanZoom", "id": "f1782:c74:m1"}
{"signature": "@propertydef BoundingRectangle(self) -> Rect:", "body": "rect = self.Element.CurrentBoundingRectanglereturn Rect(rect.left, rect.top, rect.right, rect.bottom)", "docstring": "Property BoundingRectangle.\nCall IUIAutomationElement::get_CurrentBoundingRectangle.\nReturn `Rect`.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentboundingrectangle\n\nrect = control.BoundingRectangle\nprint(rect.left, rect.top, rect.right, rect.bottom, rect.width(), rect.height(), rect.xcenter(), rect.ycenter())", "id": "f1782:c78:m15"}
{"signature": "def MiddleClick(self, x: int = None, y: int = None, ratioX: float = , ratioY: float = , simulateMove: bool = True, waitTime: float = OPERATION_WAIT_TIME) -> None:", "body": "point = self.MoveCursorToInnerPos(x, y, ratioX, ratioY, simulateMove)if point:MiddleClick(point[0], point[1], waitTime)", "docstring": "x: int, if < 0, middle click self.BoundingRectangle.right + x, if not None, ignore ratioX.\ny: int, if < 0, middle click self.BoundingRectangle.bottom + y, if not None, ignore ratioY.\nratioX: float.\nratioY: float.\nsimulateMove: bool, if True, first move cursor to control smoothly.\nwaitTime: float.\n\nMiddleClick(), MiddleClick(ratioX=0.5, ratioY=0.5): middle click center.\nMiddleClick(10, 10): middle click left+10, top+10.\nMiddleClick(-10, -10): middle click right-10, bottom-10.", "id": "f1782:c78:m64"}
{"signature": "def GetIAccessible(self):", "body": "return self.pattern.GetIAccessible()", "docstring": "Call IUIAutomationLegacyIAccessiblePattern::GetIAccessible, todo.\nReturn an IAccessible object that corresponds to the Microsoft UI Automation element.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationlegacyiaccessiblepattern-getiaccessible\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/oleacc/nn-oleacc-iaccessible", "id": "f1782:c53:m12"}
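IsKeyPressed above tests the most significant bit of GetAsyncKeyState's return value, which Win32 sets while the key is held down. A hedged, Windows-only sketch of that check (VK_CONTROL is the standard virtual-key code 0x11; the 0x8000 mask is the documented "key is down" bit):

import ctypes  # Windows-only: relies on user32

VK_CONTROL = 0x11  # standard Win32 virtual-key code
state = ctypes.windll.user32.GetAsyncKeyState(VK_CONTROL)
is_pressed = bool(state & 0x8000)  # MSB set => key currently held down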
{"signature": "def __init__(self, pattern=None):", "body": "self.pattern = pattern", "docstring": "Refer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nn-uiautomationclient-iuiautomationtexteditpattern", "id": "f1782:c69:m0"}
{"signature": "def GetTableItemPattern(self) -> TableItemPattern:", "body": "return self.GetPattern(PatternId.TableItemPattern)", "docstring": "Return `TableItemPattern` if it supports the pattern else None(Conditional support according to MSDN).", "id": "f1782:c86:m5"}
{"signature": "@propertydef Maximum(self) -> float:", "body": "return self.pattern.CurrentMaximum", "docstring": "Property Maximum.\nCall IUIAutomationRangeValuePattern::get_CurrentMaximum.\nReturn float, the maximum value of the control.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationrangevaluepattern-get_currentmaximum", "id": "f1782:c56:m3"}
{"signature": "def ShowWindow(handle: int, cmdShow: int) -> bool:", "body": "return ctypes.windll.user32.ShowWindow(ctypes.c_void_p(handle), cmdShow)", "docstring": "ShowWindow from Win32.\nhandle: int, the handle of a native window.\ncmdShow: int, a value in class `SW`.\nReturn bool, True if succeed otherwise False.", "id": "f1782:m37"}
{"signature": "def GetTopLevelControl(self) -> '':", "body": "handle = self.NativeWindowHandleif handle:topHandle = GetAncestor(handle, GAFlag.Root)if topHandle:if topHandle == handle:return selfelse:return ControlFromHandle(topHandle)else:passelse:control = selfwhile True:control = control.GetParentControl()handle = control.NativeWindowHandleif handle:topHandle = GetAncestor(handle, GAFlag.Root)return ControlFromHandle(topHandle)", "docstring": "Get the top level control in which the current control lies.\nIf current control is top level, return self.\nIf current control is root control, return None.\nReturn `PaneControl` or `WindowControl` or None.", "id": "f1782:c78:m81"}
{"signature": "@staticmethoddef WriteLine(log: Any, consoleColor: int = -1, writeToFile: bool = True, printToStdout: bool = True, logFile: str = None) -> None:", "body": "Logger.Write(''.format(log), consoleColor, writeToFile, printToStdout, logFile)", "docstring": "log: any type.\nconsoleColor: int, a value in class `ConsoleColor`, such as `ConsoleColor.DarkGreen`.\nwriteToFile: bool.\nprintToStdout: bool.\nlogFile: str, log file path.", "id": "f1782:c41:m2"}
{"signature": "def SetConsoleColor(color: int) -> bool:", "body": "global _ConsoleOutputHandleglobal _DefaultConsoleColorif not _DefaultConsoleColor:if not _ConsoleOutputHandle:_ConsoleOutputHandle = ctypes.windll.kernel32.GetStdHandle(_StdOutputHandle)bufferInfo = ConsoleScreenBufferInfo()ctypes.windll.kernel32.GetConsoleScreenBufferInfo(_ConsoleOutputHandle, ctypes.byref(bufferInfo))_DefaultConsoleColor = int(bufferInfo.wAttributes & 0xFF)if sys.stdout:sys.stdout.flush()return bool(ctypes.windll.kernel32.SetConsoleTextAttribute(_ConsoleOutputHandle, color))", "docstring": "Change the text color on console window.\ncolor: int, a value in class `ConsoleColor`.\nReturn bool, True if succeed otherwise False.", "id": "f1782:m2"}
{"signature": "def PlayWaveFile(filePath: str = r'', isAsync: bool = False, isLoop: bool = False) -> bool:", "body": "if filePath:SND_ASYNC = 0x0001SND_NODEFAULT = 0x0002SND_LOOP = 0x0008SND_FILENAME = 0x20000flags = SND_NODEFAULT | SND_FILENAMEif isAsync:flags |= SND_ASYNCif isLoop:flags |= SND_LOOPflags |= SND_ASYNCreturn bool(ctypes.windll.winmm.PlaySoundW(ctypes.c_wchar_p(filePath), ctypes.c_void_p(), flags))else:return bool(ctypes.windll.winmm.PlaySoundW(ctypes.c_wchar_p(), ctypes.c_void_p(), ))", "docstring": "Call PlaySound from Win32.\nfilePath: str, if empty, stop playing the current sound.\nisAsync: bool, if True, the sound is played asynchronously and returns immediately.\nisLoop: bool, if True, the sound plays repeatedly until PlayWaveFile(None) is called again, must also set isAsync to True.\nReturn bool, True if succeed otherwise False.", "id": "f1782:m49"}
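GetTopLevelControl above resolves a control's top-level window by calling GetAncestor with the root flag on a native handle. A minimal ctypes sketch of that underlying Win32 call (GA_ROOT is the documented flag value 2; this assumes a Windows desktop session with a foreground window):

import ctypes  # Windows-only

GA_ROOT = 2  # Win32 GetAncestor flag: walk up to the root window
hwnd = ctypes.windll.user32.GetForegroundWindow()
top = ctypes.windll.user32.GetAncestor(ctypes.c_void_p(hwnd), GA_ROOT)
# top equals hwnd when the foreground window is already top level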
{"signature": "def GetAnnotationTypes(self) -> list:", "body": "return self.pattern.GetCurrentAnnotationTypes()", "docstring": "Call IUIAutomationSpreadsheetItemPattern::GetCurrentAnnotationTypes.\nReturn list, a list of int values in class `AnnotationType`,\n indicating the types of annotations that are associated with this spreadsheet cell.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationspreadsheetitempattern-getcurrentannotationtypes", "id": "f1782:c61:m3"}
{"signature": "@propertydef Column(self) -> int:", "body": "return self.pattern.CurrentColumn", "docstring": "Property Column.\nCall IUIAutomationGridItemPattern::get_CurrentColumn.\nReturn int, the zero-based index of the column that contains the item.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationgriditempattern-get_currentcolumn", "id": "f1782:c49:m1"}
{"signature": "def GetPattern(self, patternId: int):", "body": "try:pattern = self.Element.GetCurrentPattern(patternId)if pattern:subPattern = CreatePattern(patternId, pattern)self._supportedPatterns[patternId] = subPatternreturn subPatternexcept comtypes.COMError as ex:pass", "docstring": "Call IUIAutomationElement::GetCurrentPattern.\nGet a new pattern by pattern id if it supports the pattern.\npatternId: int, a value in class `PatternId`.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-getcurrentpattern", "id": "f1782:c78:m39"}
{"signature": "def SetWindowPos(handle: int, hWndInsertAfter: int, x: int, y: int, width: int, height: int, flags: int) -> bool:", "body": "return ctypes.windll.user32.SetWindowPos(ctypes.c_void_p(handle), ctypes.c_void_p(hWndInsertAfter), x, y, width, height, flags)", "docstring": "SetWindowPos from Win32.\nhandle: int, the handle of a native window.\nhWndInsertAfter: int, a value whose name starts with 'HWND' in class SWP.\nx: int.\ny: int.\nwidth: int.\nheight: int.\nflags: int, values whose name starts with 'SWP' in class `SWP`.\nReturn bool, True if succeed otherwise False.", "id": "f1782:m39"}
{"signature": "def GetTransformPattern(self) -> TransformPattern:", "body": "return self.GetPattern(PatternId.TransformPattern)", "docstring": "Return `TransformPattern` if it supports the pattern else None(Conditional support according to MSDN).", "id": "f1782:c100:m3"}
{"signature": "def GetColumnHeaderItems(self) -> list:", "body": "eleArray = self.pattern.GetCurrentColumnHeaderItems()if eleArray:controls = []for i in range(eleArray.Length):ele = eleArray.GetElement(i)con = Control.CreateControlFromElement(element=ele)if con:controls.append(con)return controlsreturn []", "docstring": "Call IUIAutomationTableItemPattern::GetCurrentColumnHeaderItems.\nReturn list, a list of `Control` subclasses, the column headers associated with a table item or cell.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtableitempattern-getcurrentcolumnheaderitems", "id": "f1782:c65:m1"}
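SetWindowPos above is a thin ctypes wrapper over the Win32 call; the named constants it expects live in this library's SWP class. A hedged usage sketch using the raw values from the Win32 headers (HWND_TOPMOST = -1, SWP_NOSIZE = 0x0001, SWP_NOMOVE = 0x0002), pinning the current foreground window topmost without moving or resizing it:

import ctypes  # Windows-only

HWND_TOPMOST = -1      # place window above all non-topmost windows
SWP_NOSIZE = 0x0001    # keep current size (ignore width/height)
SWP_NOMOVE = 0x0002    # keep current position (ignore x/y)

hwnd = ctypes.windll.user32.GetForegroundWindow()
ctypes.windll.user32.SetWindowPos(ctypes.c_void_p(hwnd), ctypes.c_void_p(HWND_TOPMOST),
                                  0, 0, 0, 0, SWP_NOSIZE | SWP_NOMOVE)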
{"signature": "@propertydef RowCount(self) -> int:", "body": "return self.pattern.CurrentRowCount", "docstring": "Property RowCount.\nCall IUIAutomationGridPattern::get_CurrentRowCount.\nReturn int, the number of rows in the grid.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationgridpattern-get_currentrowcount", "id": "f1782:c50:m2"}
{"signature": "@propertydef CanMinimize(self) -> bool:", "body": "return bool(self.pattern.CurrentCanMinimize)", "docstring": "Property CanMinimize.\nCall IUIAutomationWindowPattern::get_CurrentCanMinimize.\nReturn bool, indicates whether the window can be minimized.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationwindowpattern-get_currentcanminimize", "id": "f1782:c77:m3"}
{"signature": "def RightDragDrop(x1: int, y1: int, x2: int, y2: int, moveSpeed: float = , waitTime: float = OPERATION_WAIT_TIME) -> None:", "body": "RightPressMouse(x1, y1, 0)MoveTo(x2, y2, moveSpeed, 0)RightReleaseMouse(waitTime)", "docstring": "Simulate mouse right button drag from point x1, y1 drop to point x2, y2.\nx1: int.\ny1: int.\nx2: int.\ny2: int.\nmoveSpeed: float, 1 normal speed, < 1 move slower, > 1 move faster.\nwaitTime: float.", "id": "f1782:m21"}
{"signature": "def EnumAndLogControlAncestors(control: Control, showAllName: bool = True) -> None:", "body": "lists = []while control:lists.insert(0, control)control = control.GetParentControl()for i, control in enumerate(lists):LogControl(control, i, showAllName)", "docstring": "Print and log control and its ancestors' properties.\ncontrol: `Control` or its subclass.\nshowAllName: bool, if False, print the first 30 characters of control.Name.", "id": "f1782:m84"}
{"signature": "def FindItemByProperty(control: '', propertyId: int, propertyValue) -> '':", "body": "ele = self.pattern.FindItemByProperty(control.Element, propertyId, propertyValue)return Control.CreateControlFromElement(ele)", "docstring": "Call IUIAutomationItemContainerPattern::FindItemByProperty.\ncontrol: `Control` or its subclass.\npropertyValue: COM VARIANT according to propertyId? todo.\npropertyId: int, a value in class `PropertyId`.\nReturn `Control` subclass, a control within a containing element, based on a specified property value.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationitemcontainerpattern-finditembyproperty", "id": "f1782:c52:m1"}
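EnumAndLogControlAncestors above collects the ancestor chain by repeatedly asking for the parent and prepending, so the root ends up first. The same walk in a self-contained form, using a trivial stand-in node type instead of a live UI Automation control:

class Node:
    def __init__(self, parent=None):
        self.parent = parent

root = Node(); mid = Node(root); leaf = Node(mid)

chain = []
node = leaf
while node:
    chain.insert(0, node)  # prepend: the root ends up at index 0
    node = node.parent
assert chain == [root, mid, leaf]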
{"signature": "def GetTogglePattern(self) -> TogglePattern:", "body": "return self.GetPattern(PatternId.TogglePattern)", "docstring": "Return `TogglePattern` if it supports the pattern else None(Conditional support according to MSDN).", "id": "f1782:c86:m6"}
{"signature": "def __init__(self, pattern=None):", "body": "self.pattern = pattern", "docstring": "Refer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nn-uiautomationclient-iuiautomationgridpattern", "id": "f1782:c50:m0"}
{"signature": "def RangeFromPoint(self, x: int, y: int) -> TextRange:", "body": "textRange = self.pattern.RangeFromPoint(ctypes.wintypes.POINT(x, y))if textRange:return TextRange(textRange=textRange)", "docstring": "Call IUIAutomationTextPattern::RangeFromPoint.\nx: int.\ny: int.\nReturn `TextRange` or None, the degenerate (empty) text range nearest to the specified screen coordinates.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextpattern-rangefrompoint", "id": "f1782:c70:m6"}
{"signature": "def SetClipboardText(text: str) -> bool:", "body": "if ctypes.windll.user32.OpenClipboard():ctypes.windll.user32.EmptyClipboard()textByteLen = (len(text) + 1) * 2hClipboardData = ctypes.windll.kernel32.GlobalAlloc(0x0002, textByteLen) hDestText = ctypes.windll.kernel32.GlobalLock(hClipboardData)ctypes.cdll.msvcrt.wcsncpy(ctypes.c_wchar_p(hDestText), ctypes.c_wchar_p(text), textByteLen // 2)ctypes.windll.kernel32.GlobalUnlock(hClipboardData)ctypes.windll.user32.SetClipboardData(13, hClipboardData) ctypes.windll.user32.CloseClipboard()return Truereturn False", "docstring": "Return bool, True if succeed otherwise False.", "id": "f1782:m1"}
{"signature": "def GetAncestorControl(self, condition: Callable) -> '':", "body": "ancestor = selfdepth = 0while True:ancestor = ancestor.GetParentControl()depth -= 1if ancestor:if condition(ancestor, depth):return ancestorelse:break", "docstring": "Get an ancestor control that matches the condition.\ncondition: Callable, function (control: Control, depth: int)->bool,\n depth starts with -1 and decreases when search goes up.\nReturn `Control` subclass or None.", "id": "f1782:c78:m49"}
{"signature": "def RightReleaseMouse(waitTime: float = OPERATION_WAIT_TIME) -> None:", "body": "x, y = GetCursorPos()screenWidth, screenHeight = GetScreenSize()mouse_event(MouseEventFlag.RightUp | MouseEventFlag.Absolute, x * 65535 // screenWidth, y * 65535 // screenHeight, 0, 0)time.sleep(waitTime)", "docstring": "Release right mouse.\nwaitTime: float.", "id": "f1782:m18"}
{"signature": "@propertydef CanMove(self) -> bool:", "body": "return bool(self.pattern.CurrentCanMove)", "docstring": "Property CanMove.\nCall IUIAutomationTransformPattern::get_CurrentCanMove.\nReturn bool, indicates whether the element can be moved.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtransformpattern-get_currentcanmove", "id": "f1782:c73:m1"}
{"signature": "@propertydef IsSelected(self) -> bool:", "body": "return bool(self.pattern.CurrentIsSelected)", "docstring": "Property IsSelected.\nCall IUIAutomationSelectionItemPattern::get_CurrentIsSelected.\nReturn bool, indicates whether this item is selected.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationselectionitempattern-get_currentisselected", "id": "f1782:c59:m2"}
{"signature": "@propertydef IsKeyboardFocusable(self) -> bool:", "body": "return self.Element.CurrentIsKeyboardFocusable", "docstring": "Property IsKeyboardFocusable.\nCall IUIAutomationElement::get_CurrentIsKeyboardFocusable.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentiskeyboardfocusable", "id": "f1782:c78:m26"}
{"signature": "@propertydef IsOffscreen(self) -> bool:", "body": "return self.Element.CurrentIsOffscreen", "docstring": "Property IsOffscreen.\nCall IUIAutomationElement::get_CurrentIsOffscreen.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentisoffscreen", "id": "f1782:c78:m27"}
{"signature": "@propertydef Target(self) -> '':", "body": "ele = self.pattern.CurrentTargetreturn Control.CreateControlFromElement(ele)", "docstring": "Property Target.\nCall IUIAutomationAnnotationPattern::get_CurrentTarget.\nReturn `Control` subclass, the element that is being annotated.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationannotationpattern-get_currenttarget", "id": "f1782:c43:m5"}
{"signature": "def AddSearchProperties(self, **searchProperties) -> None:", "body": "self.searchProperties.update(searchProperties)if '' in searchProperties:self.searchDepth = searchProperties['']if '' in searchProperties:regName = searchProperties['']self.regexName = re.compile(regName) if regName else None", "docstring": "Add search properties using `dict.update`.\nsearchProperties: dict, same as searchProperties in `Control.__init__`.", "id": "f1782:c78:m6"}
{"signature": "def SwitchToThisWindow(handle: int) -> None:", "body": "ctypes.windll.user32.SwitchToThisWindow(ctypes.c_void_p(handle), )", "docstring": "SwitchToThisWindow from Win32.\nhandle: int, the handle of a native window.", "id": "f1782:m29"}
{"signature": "def IsUserAnAdmin() -> bool:", "body": "return bool(ctypes.windll.shell32.IsUserAnAdmin())", "docstring": "IsUserAnAdmin from Win32.\nReturn bool.\nMinimum supported OS: Windows XP, Windows Server 2003", "id": "f1782:m51"}
{"signature": "def Realize(self, waitTime: float = OPERATION_WAIT_TIME) -> bool:", "body": "ret = self.pattern.Realize() == S_OKtime.sleep(waitTime)return ret", "docstring": "Call IUIAutomationVirtualizedItemPattern::Realize.\nCreate a full UI Automation element for a virtualized item.\nwaitTime: float.\nReturn bool, True if succeed otherwise False.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationvirtualizeditempattern-realize", "id": "f1782:c76:m1"}
{"signature": "def SendKey(self, key: int, waitTime: float = OPERATION_WAIT_TIME) -> None:", "body": "self.SetFocus()SendKey(key, waitTime)", "docstring": "Make control have focus first and type a key.\n`self.SetFocus` may not work for some controls, you may need to click it to make it have focus.\nkey: int, a key code value in class Keys.\nwaitTime: float.", "id": "f1782:c78:m75"}
{"signature": "def GetInvokePattern(self) -> InvokePattern:", "body": "return self.GetPattern(PatternId.InvokePattern)", "docstring": "Return `InvokePattern` if it supports the pattern else None(Must support according to MSDN).", "id": 
"f1782:c92:m1"} {"signature": "def SetActive(self, waitTime: float = OPERATION_WAIT_TIME) -> bool:", "body": "if self.IsTopLevel():handle = self.NativeWindowHandleif IsIconic(handle):ret = ShowWindow(handle, SW.Restore)elif not IsWindowVisible(handle):ret = ShowWindow(handle, SW.Show)ret = SetForegroundWindow(handle) time.sleep(waitTime)return retreturn False", "docstring": "Set top level window active.", "id": "f1782:c99:m9"} {"signature": "def SetPixelColor(self, x: int, y: int, argb: int) -> bool:", "body": "return _DllClient.instance().dll.BitmapSetPixel(self._bitmap, x, y, argb)", "docstring": "Set color value of a pixel.\nx: int.\ny: int.\nargb: int, color value.\nReturn bool, True if succeed otherwise False.", "id": "f1782:c42:m11"} {"signature": "def GetTableItemPattern(self) -> TableItemPattern:", "body": "return self.GetPattern(PatternId.TableItemPattern)", "docstring": "Return `TableItemPattern` if it supports the pattern else None(Conditional support according to MSDN).", "id": "f1782:c113:m2"} {"signature": "def __init__(self, pattern=None):", "body": "self.pattern = pattern", "docstring": "Refer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nn-uiautomationclient-iuiautomationrangevaluepattern", "id": "f1782:c56:m0"} {"signature": "def Click(self, x: int = None, y: int = None, ratioX: float = , ratioY: float = , simulateMove: bool = True, waitTime: float = OPERATION_WAIT_TIME) -> None:", "body": "point = self.MoveCursorToInnerPos(x, y, ratioX, ratioY, simulateMove)if point:Click(point[], point[], waitTime)", "docstring": "x: int, if < 0, click self.BoundingRectangle.right + x, if not None, ignore ratioX.\ny: int, if < 0, click self.BoundingRectangle.bottom + y, if not None, ignore ratioY.\nratioX: float.\nratioY: float.\nsimulateMove: bool, if True, first move cursor to control smoothly.\nwaitTime: float.\n\nClick(), Click(ratioX=0.5, ratioY=0.5): click center.\nClick(10, 10): click left+10, top+10.\nClick(-10, -10): click right-10, bottom-10.", "id": "f1782:c78:m63"} {"signature": "@propertydef CanSelectMultiple(self) -> bool:", "body": "return bool(self.pattern.CurrentCanSelectMultiple)", "docstring": "Property CanSelectMultiple.\nCall IUIAutomationSelectionPattern::get_CurrentCanSelectMultiple.\nReturn bool, indicates whether more than one item in the container can be selected at one time.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationselectionpattern-get_currentcanselectmultiple", "id": "f1782:c60:m1"} {"signature": "def __init__(self, pattern=None):", "body": "self.pattern = pattern", "docstring": "Refer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nn-uiautomationclient-iuiautomationselectionpattern", "id": "f1782:c60:m0"} {"signature": "@propertydef Element(self):", "body": "if not self._element:self.Refind(maxSearchSeconds=TIME_OUT_SECOND, searchIntervalSeconds=self.searchWaitTime)return self._element", "docstring": "Property Element.\nReturn `ctypes.POINTER(IUIAutomationElement)`.", "id": "f1782:c78:m45"} {"signature": "@staticmethoddef ColorfullyLog(log: str = '', consoleColor: int = -, writeToFile: bool = True, printToStdout: bool = True, logFile: str = None) -> None:", "body": "t = datetime.datetime.now()frame = sys._getframe()log = ''.format(t.year, t.month, t.day,t.hour, t.minute, t.second, t.microsecond // , frame.f_code.co_name, frame.f_lineno, log)Logger.ColorfullyWrite(log, consoleColor, writeToFile, printToStdout, logFile)", "docstring": 
"log: any type.\nconsoleColor: int, a value in class ConsoleColor, such as ConsoleColor.DarkGreen.\nwriteToFile: bool.\nprintToStdout: bool.\nlogFile: str, log file path.\n\nColorfullyLog('Hello Green !!!'), color name must be in Logger.ColorNames", "id": "f1782:c41:m6"} {"signature": "def BringWindowToTop(handle: int) -> bool:", "body": "return bool(ctypes.windll.user32.BringWindowToTop(ctypes.c_void_p(handle)))", "docstring": "BringWindowToTop from Win32.\nhandle: int, the handle of a native window.\nReturn bool, True if succeed otherwise False.", "id": "f1782:m28"} {"signature": "@propertydef ControlType(self) -> int:", "body": "return self.Element.CurrentControlType", "docstring": "Property ControlType.\nReturn int, a value in class `ControlType`.\nCall IUIAutomationElement::get_CurrentControlType.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentcontroltype", "id": "f1782:c78:m17"} {"signature": "def Clone(self) -> '':", "body": "return TextRange(textRange=self.textRange.Clone())", "docstring": "Call IUIAutomationTextRange::Clone.\nreturn `TextRange`, identical to the original and inheriting all properties of the original.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextrange-clone", "id": "f1782:c67:m2"} {"signature": "def GetScrollPattern(self) -> ScrollPattern:", "body": "return self.GetPattern(PatternId.ScrollPattern)", "docstring": "Return `ScrollPattern` if it supports the pattern else None(Conditional support according to MSDN).", "id": "f1782:c94:m3"} {"signature": "def GetTextPattern(self) -> TextPattern:", "body": "return self.GetPattern(PatternId.TextPattern)", "docstring": "Return `TextPattern` if it supports the pattern else None(Conditional support according to MSDN).", "id": "f1782:c113:m3"} {"signature": "def GetTogglePattern(self) -> TogglePattern:", "body": "return self.GetPattern(PatternId.TogglePattern)", "docstring": "Return `TogglePattern` if it supports the pattern else None(Conditional support according to MSDN).", "id": "f1782:c98:m4"} {"signature": "@propertydef FrameworkId(self) -> str:", "body": "return self.Element.CurrentFrameworkId", "docstring": "Property FrameworkId.\nCall IUIAutomationElement::get_CurrentFrameworkId.\nReturn str, such as Win32, WPF...\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentframeworkid", "id": "f1782:c78:m19"} {"signature": "@propertydef Name(self) -> str:", "body": "return self.Element.CurrentName or ''", "docstring": "Property Name.\nCall IUIAutomationElement::get_CurrentName.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentname", "id": "f1782:c78:m33"} {"signature": "def GetDoubleClickTime() -> int:", "body": "return ctypes.windll.user32.GetDoubleClickTime()", "docstring": "GetDoubleClickTime from Win32.\nReturn int, in milliseconds.", "id": "f1782:m7"} {"signature": "def SetDockPosition(self, dockPosition: int, waitTime: float = OPERATION_WAIT_TIME) -> int:", "body": "ret = self.pattern.SetDockPosition(dockPosition)time.sleep(waitTime)return ret", "docstring": "Call IUIAutomationDockPattern::SetDockPosition.\ndockPosition: int, a value in class `DockPosition`.\nwaitTime: float.\nRefer 
https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationdockpattern-setdockposition", "id": "f1782:c45:m2"} {"signature": "def __init__(self, pattern=None):", "body": "self.pattern = pattern", "docstring": "Refer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nn-uiautomationclient-iuiautomationscrollpattern", "id": "f1782:c58:m0"} {"signature": "@propertydef Culture(self) -> int:", "body": "return self.Element.CurrentCulture", "docstring": "Property Culture.\nCall IUIAutomationElement::get_CurrentCulture.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationelement-get_currentculture", "id": "f1782:c78:m18"} {"signature": "def Collapse(self, waitTime: float = OPERATION_WAIT_TIME) -> bool:", "body": "ret = self.pattern.Collapse() == S_OKtime.sleep(waitTime)return ret", "docstring": "Call IUIAutomationExpandCollapsePattern::Collapse.\nwaitTime: float.\nReturn bool, True if succeed otherwise False.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationexpandcollapsepattern-collapse", "id": "f1782:c48:m2"} {"signature": "@propertydef TextContainer(self) -> '':", "body": "return Control.CreateControlFromElement(self.pattern.TextContainer)", "docstring": "Property TextContainer.\nCall IUIAutomationSelectionContainer::get_TextContainer.\nReturn `Control` subclass, the nearest ancestor element that supports the Text control pattern.\nRefer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextchildpattern-get_textcontainer", "id": "f1782:c68:m1"} {"signature": "def format(self, record):", "body": "data = record._raw.copy()data[''] = data[''].isoformat()if data.get(''):data[''] = self.formatException(data[''])return json.dumps(data)", "docstring": "JSON-encode a record for serializing through redis.\n\nConvert date to iso format, and stringify any exceptions.", "id": "f1786:c0:m0"} {"signature": "def emit(self, record):", "body": "try:if self.max_messages:p = self.redis_client.pipeline()p.rpush(self.key, self.format(record))p.ltrim(self.key, -self.max_messages, -)p.execute()else:self.redis_client.rpush(self.key, self.format(record))except redis.RedisError:pass", "docstring": "Publish record to redis logging list", "id": "f1786:c2:m2"} {"signature": "def _getCallingContext():", "body": "frames = inspect.stack()if len(frames) > :context = frames[]else:context = frames[]modname = context[]lineno = context[]if context[]:funcname = context[]else:funcname = \"\"del contextdel framesreturn modname, funcname, lineno", "docstring": "Utility function for the RedisLogRecord.\n\nReturns the module, function, and lineno of the function \nthat called the logger. \n\nWe look way up in the stack. 
The stack at this point is:\n[0] logger.py _getCallingContext (hey, that's me!)\n[1] logger.py __init__\n[2] logger.py makeRecord\n[3] _log\n[4] \n[5] caller of logging method", "id": "f1787:m1"} {"signature": "def _unicode(string):", "body": "for encoding in ['', '']:try:result = unicode(string, encoding)return resultexcept UnicodeDecodeError:passresult = unicode(string, '', '')return result", "docstring": "Try to convert a string to unicode using different encodings", "id": "f1791:m2"} {"signature": "def request(key, features, query, timeout=):", "body": "data = {}data[''] = keydata[''] = ''.join([f for f in features if f in FEATURES])data[''] = quote(query)data[''] = ''r = requests.get(API_URL.format(**data), timeout=timeout)results = json.loads(_unicode(r.content))return results", "docstring": "Make an API request\n\n :param string key: API key to use\n :param list features: features to request. It must be a subset of :data:`FEATURES`\n :param string query: query to send\n :param integer timeout: timeout of the request\n :returns: result of the API request\n :rtype: dict", "id": "f1791:m1"} {"signature": "def satisfied_by_checked(self, req):", "body": "req_man = RequirementsManager([req])return any(req_man.check(*checked) for checked in self.checked)", "docstring": "Check if requirement is already satisfied by what was previously checked\n\n:param Requirement req: Requirement to check", "id": "f1794:c2:m8"} {"signature": "def update_requirements(self):", "body": "raise NotImplementedError", "docstring": "Update/persist requirements from `self.bumps`", "id": "f1794:c4:m7"} {"signature": "@classmethoddef parse(cls, s, required=False):", "body": "req = pkg_resources.Requirement.parse(s)return cls(req, required=required)", "docstring": "Parse string to create an instance\n\n:param str s: String with requirement to parse\n:param bool required: Is this requirement required to be fulfilled? 
If not, then it is a filter.", "id": "f1794:c1:m1"} {"signature": "@classmethoddef bump_message(self, bumps, include_changes=False):", "body": "raise NotImplementedError", "docstring": "Compose a bump message for the given bumps\n\n:param list bumps: List of :class:`Bump` instances\n:param bool include_changes: Indicate if the message should include detailed changes.", "id": "f1794:c4:m5"} {"signature": "def reverse(self):", "body": "if self._original_target_content:with open(self.target, '') as fp:fp.write(self._original_target_content)", "docstring": "Restore content in target file to be before any changes", "id": "f1794:c4:m16"} {"signature": "def requirements(self):", "body": "raise NotImplementedError", "docstring": "Return a list of existing requirements (as :class:`pkg_resources.Requirement`)", "id": "f1794:c4:m6"} {"signature": "def bump(self, filter_requirements, required=False, show_summary=True, show_detail=False, **kwargs):", "body": "found_targets = [target for target in self.targets if os.path.exists(target)]if not found_targets:raise BumpAccident('' % ''.join(self.targets))bump_reqs = RequirementsManager()if filter_requirements:requirements = parse_requirements(filter_requirements)bump_reqs.add(requirements, required=required)try:for target in found_targets:log.debug('', target)target_bumpers = []target_bump_reqs = RequirementsManager(bump_reqs)loops = while True:loops += if loops > :log.debug('')breakif not target_bumpers:target_bumpers = [model(target, detail=self.detail, test_drive=self.test_drive)for model in self.bumper_models if model.likes(target)]if not target_bumpers:log.debug('', target, self.default_model)target_bumpers = [self.default_model(target, detail=self.detail,test_drive=self.test_drive)]self.bumpers.extend(target_bumpers)new_target_bump_reqs = RequirementsManager()for bumper in target_bumpers:target_bumps = bumper.bump(target_bump_reqs)self.bumps.update(dict((b.name, b) for b in target_bumps))for bump in target_bumps:for new_req in bump.requirements:if not (bump_reqs.satisfied_by_checked(new_req) ortarget_bump_reqs.satisfied_by_checked(new_req)):new_target_bump_reqs.add(new_req)bump_reqs.matched_name |= target_bump_reqs.matched_namebump_reqs.checked.extend(target_bump_reqs.checked)if new_target_bump_reqs:bump_reqs.add(new_target_bump_reqs)target_bump_reqs = RequirementsManager(list(r for r in new_target_bump_reqs if r.project_name not in self.bumps))if not target_bump_reqs:breakif not self.bumpers:raise BumpAccident('' % ''.join(found_targets))if bump_reqs and not bump_reqs.matched_name:raise BumpAccident('' % ''.join(found_targets))if self.bumps:for bump in self.bumps.values():bump_reqs.check(bump)for reqs in bump_reqs.required_requirements().values():for req in reqs:if not self.full_throttle:use_force = '' if req.required_by else ''raise BumpAccident('''' % (req, use_force))if self.test_drive:log.info(\"\")messages = {}for bumper in self.bumpers:if bumper.bumps:if not self.test_drive:bumper.update_requirements()if self.test_drive or show_summary:msg = bumper.bump_message(self.test_drive or show_detail)if self.test_drive:print(msg)else:rewords = [('', ''), ('', ''),('', '')]for word, new_word in rewords:if msg.startswith(word):msg = msg.replace(word, new_word, )breaklog.info(msg)messages[bumper.target] = bumper.bump_message(True)return messages, self.bumpselse:log.info('')return {}, []except Exception:if not self.test_drive and self.bumps:map(lambda b: b.reverse(), self.bumpers)raise", "docstring": "Bump dependency requirements using filter.\n\n:param 
list filter_requirements: List of dependency filter requirements.\n:param bool required: Require the filter_requirements to be met (by adding if possible).\n:param bool show_summary: Show summary for each bump made.\n:param bool show_detail: Show detail for each bump made if available.\n:return: Tuple with two elements: Dict of target file to bump message, List of :class:`Bump`\n:raise BumpAccident: for any bump errors", "id": "f1795:c0:m1"} {"signature": "def reverse(self):", "body": "if not self.test_drive and self.bumps:map(lambda b: b.reverse(), self.bumpers)", "docstring": "Reverse all bumpers", "id": "f1795:c0:m2"} {"signature": "def bump():", "body": "parser = argparse.ArgumentParser(description=bump.__doc__)parser.add_argument('', nargs='', help=\"\"\"\"\"\")parser.add_argument('', '', action='',help='')parser.add_argument('', help='')parser.add_argument('', action='',help='')parser.add_argument('', '', '', action='',help='''')parser.add_argument('', '', action='', help='')parser.add_argument('', action='', help='')args = parser.parse_args()targets = [args.file] if args.file else ['', '']level = logging.DEBUG if args.debug else logging.INFOlogging.basicConfig(level=level, format='')try:bumper = BumperDriver(targets, full_throttle=args.force, detail=args.detail, test_drive=args.dry_run)bumper.bump(args.names, required=args.add, show_detail=args.detail)except Exception as e:if args.debug:raiseelse:log.error(e)sys.exit()", "docstring": "CLI entry point to bump requirements in requirements.txt or pinned.txt", "id": "f1795:m0"} {"signature": "def freeze(self):", "body": "data = super(IndexBuilder, self).freeze()try:base_file_names = data['']except KeyError:base_file_names = data['']store = {}c = itertools.count()for prefix, items in iteritems(data['']):for name, (index, typeindex, _, shortanchor) in iteritems(items):objtype = data[''][typeindex]if objtype.startswith(''):split = name.rsplit('', )if len(split) != :warnings.warn(\"\" % str((prefix, name, objtype)))continueprefix, name = splitlast_prefix = prefix.split('')[-]else:last_prefix = prefix.split('')[-]store[next(c)] = {'': base_file_names[index],'': objtype,'': prefix,'': last_prefix,'': name,'': shortanchor,}data.update({'': store})return data", "docstring": "Create a usable data structure for serializing.", "id": "f1803:c0:m0"} {"signature": "def _word_ngrams(self, tokens):", "body": "if self.stop_words is not None:tokens = [w for w in tokens if w not in self.stop_words]min_n, max_n = self.ngram_rangeif max_n != :original_tokens = tokensif min_n == :tokens = list(original_tokens)min_n += else:tokens = []n_original_tokens = len(original_tokens)tokens_append = tokens.appendspace_join = \"\".joinfor n in range(min_n,min(max_n + , n_original_tokens + )):for i in range(n_original_tokens - n + ):tokens_append(space_join(original_tokens[i: i + n]))return tokens", "docstring": "Turn tokens into a tokens of n-grams\n\nref: https://github.com/scikit-learn/scikit-learn/blob/ef5cb84a/sklearn/feature_extraction/text.py#L124-L153", "id": "f1806:c0:m1"} {"signature": "def _document_frequency(X):", "body": "if sp.isspmatrix_csr(X):return np.bincount(X.indices, minlength=X.shape[])return np.diff(sp.csc_matrix(X, copy=False).indptr)", "docstring": "Count the number of non-zero values for each feature in sparse X.", "id": "f1806:m2"} {"signature": "def _check_stop_list(stop):", "body": "if stop == \"\":return THAI_STOP_WORDSelif isinstance(stop, six.string_types):raise ValueError(\"\" % stop)elif stop is None:return Nonereturn frozenset(stop)", 
"docstring": "Check stop words list\nref: https://github.com/scikit-learn/scikit-learn/blob/master/sklearn/feature_extraction/text.py#L87-L95", "id": "f1806:m3"} {"signature": "def evaluate(best_processed_path, model):", "body": "x_test_char, x_test_type, y_test = prepare_feature(best_processed_path, option='')y_predict = model.predict([x_test_char, x_test_type])y_predict = (y_predict.ravel() > ).astype(int)f1score = f1_score(y_test, y_predict)precision = precision_score(y_test, y_predict)recall = recall_score(y_test, y_predict)return f1score, precision, recall", "docstring": "Evaluate model on splitted 10 percent testing set", "id": "f1807:m5"} {"signature": "def generate_best_dataset(best_path, output_path='', create_val=False):", "body": "if not os.path.isdir(output_path):os.mkdir(output_path)if not os.path.isdir(os.path.join(output_path, '')):os.makedirs(os.path.join(output_path, ''))if not os.path.isdir(os.path.join(output_path, '')):os.makedirs(os.path.join(output_path, ''))if not os.path.isdir(os.path.join(output_path, '')) and create_val:os.makedirs(os.path.join(output_path, ''))for article_type in article_types:files = glob(os.path.join(best_path, article_type, ''))files_train, files_test = train_test_split(files, random_state=, test_size=)if create_val:files_train, files_val = train_test_split(files_train, random_state=, test_size=)val_words = generate_words(files_val)val_df = create_char_dataframe(val_words)val_df.to_csv(os.path.join(output_path, '', ''.format(article_type)), index=False)train_words = generate_words(files_train)test_words = generate_words(files_test)train_df = create_char_dataframe(train_words)test_df = create_char_dataframe(test_words)train_df.to_csv(os.path.join(output_path, '', ''.format(article_type)), index=False)test_df.to_csv(os.path.join(output_path, '', ''.format(article_type)), index=False)print(\"\".format(article_type))", "docstring": "Generate CSV file for training and testing data\n\nInput\n=====\nbest_path: str, path to BEST folder which contains unzipped subfolder\n 'article', 'encyclopedia', 'news', 'novel'\n\ncleaned_data: str, path to output folder, the cleaned data will be saved\n in the given folder name where training set will be stored in `train` folder\n and testing set will be stored on `test` folder\n\ncreate_val: boolean, True or False, if True, divide training set into training set and\n validation set in `val` folder", "id": "f1807:m2"} {"signature": "def prepare_feature(best_processed_path, option=''):", "body": "n_pad = n_pad_2 = int((n_pad - )/)pad = [{'': '', '': '', '': True}]df_pad = pd.DataFrame(pad * n_pad_2)df = []for article_type in article_types:df.append(pd.read_csv(os.path.join(best_processed_path, option, ''.format(article_type, option))))df = pd.concat(df)df = pd.concat((df_pad, df, df_pad)) df[''] = df[''].map(lambda x: CHARS_MAP.get(x, ))df[''] = df[''].map(lambda x: CHAR_TYPES_MAP.get(x, ))df_pad = create_n_gram_df(df, n_pad=n_pad)char_row = ['' + str(i + ) for i in range(n_pad_2)] +['' + str(i + ) for i in range(n_pad_2)] + ['']type_row = ['' + str(i + ) for i in range(n_pad_2)] +['' + str(i + ) for i in range(n_pad_2)] + ['']x_char = df_pad[char_row].as_matrix()x_type = df_pad[type_row].as_matrix()y = df_pad[''].astype(int).as_matrix()return x_char, x_type, y", "docstring": "Transform processed path into feature matrix and output array\n\nInput\n=====\nbest_processed_path: str, path to processed BEST dataset\n\noption: str, 'train' or 'test'", "id": "f1807:m3"} {"signature": "def create_n_gram_df(df, n_pad):", 
"body": "n_pad_2 = int((n_pad - )/)for i in range(n_pad_2):df[''.format(i+)] = df[''].shift(i + )df[''.format(i+)] = df[''].shift(i + )df[''.format(i+)] = df[''].shift(-i - )df[''.format(i+)] = df[''].shift(-i - )return df[n_pad_2: -n_pad_2]", "docstring": "Given input dataframe, create feature dataframe of shifted characters", "id": "f1809:m1"} {"signature": "@blog.command()def update():", "body": "run('')run('')", "docstring": "Gets other people's changes from GitHub", "id": "f1826:m0"} {"signature": "@blog.command()@click.pass_contextdef lint(context):", "body": "config = context.objtry:run(''.format(dir=config[''],exclude=''.join(EXCLUDE),))except SubprocessError:context.exit()", "docstring": "Looks for errors in source code of your blog", "id": "f1827:m0"} {"signature": "@blog.command()@click.pass_contextdef preview(context):", "body": "config = context.objpelican(config, '', '')server_proc = Noneos.chdir(config[''])try:try:command = '' + str(PORT)server_proc = run(command, bg=True)time.sleep()click.launch('')time.sleep()pelican(config, '')except Exception:if server_proc is not None:server_proc.kill()raiseexcept KeyboardInterrupt:abort(context)", "docstring": "Opens local preview of your blog website", "id": "f1830:m0"} {"signature": "def backwards(self, orm):", "body": "", "docstring": "Write your backwards methods here.", "id": "f1851:c0:m1"} {"signature": "def forwards(self, orm):", "body": "print(\"\")ja_akt_stan=orm.JednostkaAdministracyjna.objects.all().aggregate(Max(''))['']orm.JednostkaAdministracyjna.objects.filter(stan_na__exact=ja_akt_stan).update(aktywny=True)orm.JednostkaAdministracyjna.objects.exclude(stan_na__exact=ja_akt_stan).update(aktywny=False)print(\"\")m_akt_stan=orm.Miejscowosc.objects.all().aggregate(Max(''))['']orm.Miejscowosc.objects.filter(stan_na__exact=m_akt_stan).update(aktywny=True)orm.Miejscowosc.objects.exclude(stan_na__exact=m_akt_stan).update(aktywny=False)print(\"\")rm_akt_stan=orm.RodzajMiejsowosci.objects.all().aggregate(Max(''))['']orm.RodzajMiejsowosci.objects.filter(stan_na__exact=rm_akt_stan).update(aktywny=True)orm.RodzajMiejsowosci.objects.exclude(stan_na__exact=rm_akt_stan).update(aktywny=False)print(\"\")u_akt_stan=orm.Ulica.objects.all().aggregate(Max(''))['']orm.Ulica.objects.filter(stan_na__exact=u_akt_stan).update(aktywny=True)orm.Ulica.objects.exclude(stan_na__exact=u_akt_stan).update(aktywny=False)", "docstring": "Write your forwards methods here.", "id": "f1851:c0:m0"} {"signature": "def forwards(self, orm):", "body": "LEN_TYPE = {: '',: '',: '',}for ja in orm.JednostkaAdministracyjna.objects.all():ja.typ = LEN_TYPE[len(ja.id)]ja.save()", "docstring": "Write your forwards methods here.", "id": "f1854:c0:m0"} {"signature": "def do_filter(qs, keywords, exclude=False):", "body": "and_q = Q()for keyword, value in iteritems(keywords):try:values = value.split(\"\")if len(values) > :or_q = Q()for value in values:or_q |= Q(**{keyword: value})and_q &= or_qexcept AttributeError:and_q &= Q(**{keyword: value})if exclude:qs = qs.exclude(and_q)else:qs = qs.filter(and_q)return qs", "docstring": "Filter queryset based on keywords.\nSupport for multiple-selected parent values.", "id": "f1868:m1"} {"signature": "@propertydef media(self):", "body": "media = super(JqueryMediaMixin, self).mediajs = []if JQUERY_URL:js.append(JQUERY_URL)elif JQUERY_URL is not False:vendor = '' if django.VERSION < (, , ) else ''extra = '' if settings.DEBUG else ''jquery_paths = [''.format(vendor, extra),'',]if USE_DJANGO_JQUERY:jquery_paths = [''.format(path) for path in 
jquery_paths]js.extend(jquery_paths)media += Media(js=js)return media", "docstring": "Media defined as a dynamic property instead of an inner class.", "id": "f1870:c0:m0"} {"signature": "def _get_available_choices(self, queryset, value):", "body": "item = queryset.filter(pk=value).first()if item:try:pk = getattr(item, self.chained_model_field + \"\")filter = {self.chained_model_field: pk}except AttributeError:try: pks = getattr(item, self.chained_model_field).all().values_list('', flat=True)filter = {self.chained_model_field + \"\": pks}except AttributeError:try: pks = getattr(item, self.chained_model_field + \"\").all().values_list('', flat=True)filter = {self.chained_model_field + \"\": pks}except AttributeError: filter = {}filtered = list(get_model(self.to_app_name, self.to_model_name).objects.filter(**filter).distinct())if self.sort:sort_results(filtered)else:filtered = []return filtered", "docstring": "get possible choices for selection", "id": "f1870:c1:m3"} {"signature": "def command_callback(result=None):", "body": "print(\"\".format(opendnp3.TaskCompletionToString(result.summary)))result.ForeachItem(collection_callback)", "docstring": ":type result: opendnp3.ICommandTaskResult", "id": "f1888:m2"} {"signature": "def OnReceiveIIN(self, iin):", "body": "self.iin_field = dict(LSB=iin.LSB,MSB=iin.MSB)", "docstring": "Called when a response or unsolicited response is receive from the outstation.", "id": "f1888:c0:m1"} {"signature": "def OnTaskComplete(self, info):", "body": "self.task_info = dict(type=info.type,result=info.result)", "docstring": "Task completion notification.", "id": "f1888:c0:m3"} {"signature": "def OnStateChange(self, state):", "body": "self.state = state", "docstring": "State change notification.", "id": "f1888:c1:m1"} {"signature": "def run_master(hang=False):", "body": "logger = asiodnp3.ConsoleLogger().Create()manager = asiodnp3.DNP3Manager(, asiodnp3.ConsoleLogger().Create())channel = manager.AddTCPClient(\"\",FILTERS,asiopal.ChannelRetry(),HOST,LOCAL,PORT,asiodnp3.PrintingChannelListener().Create())stack_config = asiodnp3.MasterStackConfig()stack_config.master.responseTimeout = openpal.TimeDuration().Seconds()stack_config.link.RemoteAddr = soe_handler = asiodnp3.PrintingSOEHandler().Create()default_master_app = asiodnp3.DefaultMasterApplication().Create()master = channel.AddMaster(\"\",soe_handler,default_master_app,stack_config)master.Enable()time.sleep()if not hang:del channeldel masterprint(\"\".format(hang))manager.Shutdown()", "docstring": "Demonstrate hanging when channel and master are not deleted prior to manager.Shutdown()", "id": "f1889:m0"} {"signature": "def do_quit(self, line):", "body": "self.application.shutdown()exit()", "docstring": "Quit the command-line interface. Command syntax is: quit", "id": "f1897:c0:m19"} {"signature": "def do_chan_log_normal(self, line):", "body": "self.application.channel.SetLogFilters(openpal.LogFilters(opendnp3.levels.NORMAL))print(''.format(opendnp3.levels.NORMAL))", "docstring": "Set the channel log level to NORMAL. Command syntax is: chan_log_normal", "id": "f1897:c0:m4"} {"signature": "def do_disable_unsol(self, line):", "body": "headers = [opendnp3.Header().AllObjects(, ),opendnp3.Header().AllObjects(, ),opendnp3.Header().AllObjects(, )]self.application.master.PerformFunction(\"\",opendnp3.FunctionCode.DISABLE_UNSOLICITED,headers,opendnp3.TaskConfig().Default())", "docstring": "Perform the function DISABLE_UNSOLICITED. 
Command syntax is: disable_unsol", "id": "f1897:c0:m5"}
{"signature": "def do_o1(self, line):", "body": "self.application.send_direct_operate_command(opendnp3.ControlRelayOutputBlock(opendnp3.ControlCode.LATCH_ON),5,command_callback)", "docstring": "Send a DirectOperate BinaryOutput (group 12) index 5 LATCH_ON to the Outstation. Command syntax is: o1", "id": "f1897:c0:m8"}
{"signature": "def do_scan_fast(self, line):", "body": "self.application.fast_scan.Demand()", "docstring": "Demand an immediate fast scan. Command syntax is: scan_fast", "id": "f1897:c0:m15"}
{"signature": "def do_chan_log_all(self, line):", "body": "self.application.channel.SetLogFilters(openpal.LogFilters(opendnp3.levels.ALL_COMMS))print(''.format(opendnp3.levels.ALL_COMMS))", "docstring": "Set the channel log level to ALL_COMMS. Command syntax is: chan_log_all", "id": "f1897:c0:m3"}
{"signature": "def do_write_time(self, line):", "body": "millis_since_epoch = int((datetime.now() - datetime.utcfromtimestamp(0)).total_seconds() * 1000)self.application.master.Write(opendnp3.TimeAndInterval(opendnp3.DNPTime(millis_since_epoch),,opendnp3.IntervalUnits.Seconds),, opendnp3.TaskConfig().Default())", "docstring": "Write a TimeAndInterval to the Outstation. Command syntax is: write_time", "id": "f1897:c0:m18"}
{"signature": "def do_mast_log_normal(self, line):", "body": "self.application.master.SetLogFilters(openpal.LogFilters(opendnp3.levels.NORMAL))_log.debug(''.format(opendnp3.levels.NORMAL))", "docstring": "Set the master log level to NORMAL. Command syntax is: mast_log_normal", "id": "f1897:c0:m7"}
{"signature": "def startup(self):", "body": "print('')self.do_menu('')self.cmdloop('')exit()", "docstring": "Display the command-line interface's menu and issue a prompt.", "id": "f1897:c0:m1"}
{"signature": "def send_select_and_operate_command(self, command, index, callback=asiodnp3.PrintingCommandCallback.Get(),config=opendnp3.TaskConfig().Default()):", "body": "self.master.SelectAndOperate(command, index, callback, config)", "docstring": "Select and operate a single command\n\n:param command: command to operate\n:param index: index of the command\n:param callback: callback that will be invoked upon completion or failure\n:param config: optional configuration that controls normal callbacks and allows the user to be specified for SA", "id": "f1899:c0:m3"}
{"signature": "def send_select_and_operate_command_set(self, command_set, callback=asiodnp3.PrintingCommandCallback.Get(),config=opendnp3.TaskConfig().Default()):", "body": "self.master.SelectAndOperate(command_set, callback, config)", "docstring": "Select and operate a set of commands\n\n:param command_set: set of command headers\n:param callback: callback that will be invoked upon completion or failure\n:param config: optional configuration that controls normal callbacks and allows the user to be specified for SA", "id": "f1899:c0:m4"}
{"signature": "def send_direct_operate_command_set(self, command_set, callback=asiodnp3.PrintingCommandCallback.Get(),config=opendnp3.TaskConfig().Default()):", "body": "self.master.DirectOperate(command_set, callback, config)", "docstring": "Direct operate a set of commands\n\n:param command_set: set of command headers\n:param callback: callback that will be invoked upon completion or failure\n:param config: optional configuration that controls normal callbacks and allows the user to be specified for SA", "id": "f1899:c0:m2"}
{"signature": "def command_callback(result=None):", "body": "print(\"\".format(opendnp3.TaskCompletionToString(result.summary)))result.ForeachItem(collection_callback)", "docstring": ":type result: opendnp3.ICommandTaskResult", "id": "f1899:m1"}
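do_o1 above issues a DirectOperate of a LATCH_ON control relay output block at index 5 (the index comes from its own docstring). A hedged sketch of the same call against the pydnp3 bindings; it assumes the pydnp3 package is installed and that `app` wraps a configured, running master as built elsewhere in this file, so the operate line is left commented:

from pydnp3 import opendnp3  # assumes pydnp3 is installed

crob = opendnp3.ControlRelayOutputBlock(opendnp3.ControlCode.LATCH_ON)
# With `app` being a running MyMaster instance from this module:
# app.send_direct_operate_command(crob, 5, command_callback)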
"print(\"\".format(opendnp3.TaskCompletionToString(result.summary)))result.ForeachItem(collection_callback)", "docstring": ":type result: opendnp3.ICommandTaskResult", "id": "f1899:m1"} {"signature": "def main():", "body": "app = MyMaster(log_handler=MyLogger(),listener=AppChannelListener(),soe_handler=SOEHandler(),master_application=MasterApplication())_log.debug('')app.shutdown()_log.debug('')exit()", "docstring": "The Master has been started from the command line. Execute ad-hoc tests if desired.", "id": "f1899:m3"} {"signature": "def Process(self, info, values):", "body": "visitor_class_types = {opendnp3.ICollectionIndexedBinary: VisitorIndexedBinary,opendnp3.ICollectionIndexedDoubleBitBinary: VisitorIndexedDoubleBitBinary,opendnp3.ICollectionIndexedCounter: VisitorIndexedCounter,opendnp3.ICollectionIndexedFrozenCounter: VisitorIndexedFrozenCounter,opendnp3.ICollectionIndexedAnalog: VisitorIndexedAnalog,opendnp3.ICollectionIndexedBinaryOutputStatus: VisitorIndexedBinaryOutputStatus,opendnp3.ICollectionIndexedAnalogOutputStatus: VisitorIndexedAnalogOutputStatus,opendnp3.ICollectionIndexedTimeAndInterval: VisitorIndexedTimeAndInterval}visitor_class = visitor_class_types[type(values)]visitor = visitor_class()values.Foreach(visitor)for index, value in visitor.index_and_value:log_string = ''_log.debug(log_string.format(info.gv, info.headerIndex, type(values).__name__, index, value))", "docstring": "Process measurement data.\n\n:param info: HeaderInfo\n:param values: A collection of values received from the Outstation (various data types are possible).", "id": "f1899:c3:m1"} {"signature": "def apply_update(self, value, index):", "body": "_log.debug(''.format(type(value).__name__, index, value.value))builder = asiodnp3.UpdateBuilder()builder.Update(value, index)update = builder.Build()OutstationApplication.get_outstation().Apply(update)", "docstring": "Record an opendnp3 data value (Analog, Binary, etc.) in the outstation's database.\n\n The data value gets sent to the Master as a side-effect.\n\n:param value: An instance of Analog, Binary, or another opendnp3 data value.\n:param index: (integer) Index of the data definition in the opendnp3 database.", "id": "f1900:c0:m13"} {"signature": "@classmethoddef process_point_value(cls, command_type, command, index, op_type):", "body": "_log.debug(''.format(index, command))", "docstring": "A PointValue was received from the Master. Process its payload.\n\n:param command_type: (string) Either 'Select' or 'Operate'.\n:param command: A ControlRelayOutputBlock or else a wrapped data value (AnalogOutputInt16, etc.).\n:param index: (integer) DNP3 index of the payload's data definition.\n:param op_type: An OperateType, or None if command_type == 'Select'.", "id": "f1900:c0:m12"} {"signature": "def GetApplicationIIN(self):", "body": "application_iin = opendnp3.ApplicationIIN()application_iin.configCorrupt = Falseapplication_iin.deviceTrouble = Falseapplication_iin.localControl = Falseapplication_iin.needTime = Falseiin_field = application_iin.ToIIN()_log.debug(''.format(iin_field.LSB,iin_field.MSB))return application_iin", "docstring": "Return the application-controlled IIN field.", "id": "f1900:c0:m7"} {"signature": "def do_b(self, line):", "body": "index, value_string = self.index_and_value_from_line(line)if index and value_string:if value_string.lower() == '' or value_string.lower() == '':self.application.apply_update(opendnp3.Binary(value_string == ''), index)else:print('')", "docstring": "Send the Master a BinaryInput (group 2) value. 
Command syntax is: 'b index true' or 'b index false", "id": "f1901:c0:m4"} {"signature": "def do_a(self, line):", "body": "index, value_string = self.index_and_value_from_line(line)if index and value_string:try:self.application.apply_update(opendnp3.Analog(float(value_string)), index)except ValueError:print('')", "docstring": "Send the Master an AnalogInput (group 32) value. Command syntax is: a index value", "id": "f1901:c0:m2"} {"signature": "def startup(self):", "body": "print('')self.do_menu('')self.cmdloop('')exit()", "docstring": "Display the command-line interface's menu and issue a prompt.", "id": "f1901:c0:m1"} {"signature": "@staticmethoddef index_and_value_from_line(line):", "body": "try:index = int(line.split('')[])except (ValueError, IndexError):print('')index = Nonetry:value_string = line.split('')[]except (ValueError, IndexError):print('')value_string = Nonereturn index, value_string", "docstring": "Parse an index (integer) and value (string) from command line args and return them.", "id": "f1901:c0:m10"} {"signature": "def do_a2(self, line):", "body": "self.application.apply_update(opendnp3.Analog(), index=)", "docstring": "Send the Master an AnalogInput (group 32) value of 2 at index 4. Command syntax is: a2", "id": "f1901:c0:m3"} {"signature": "def do_d(self, line):", "body": "index = self.index_from_line(line)if index:self.application.apply_update(opendnp3.DoubleBitBinary(opendnp3.DoubleBit.DETERMINED_ON), index)", "docstring": "Send the Master a DoubleBitBinaryInput (group 4) value of DETERMINED_ON. Command syntax is: d index", "id": "f1901:c0:m7"} {"signature": "def __init__(self, routing, default, python_path):", "body": "def make(x):if isinstance(x, (list, str)):return ActionList(x)assert isinstance(x, dict)if '' in x or '' in x:x = dict(default, **x)return construct.construct_type(x, python_path)return {k: make(v) for k, v in x.items()}routing = flatten.unflatten(routing)self.routing = make(routing)", "docstring": ":param dict routing: `routing` is a dict that maps addresses\n to lists of actions.\n\n The values in the input dictionary `routing` are recursively visited\n to build the routing table:\n\n * values that are strings or lists are used to construct ActionLists\n * dictionaries that contain \"typename\" or \"datatype\" keys are\n used to construct a class of that type.\n * otherwise, dictionaries are visited recursively\n * all other types are forbidden", "id": "f1907:c0:m0"} {"signature": "def multi(method):", "body": "@functools.wraps(method)def multi(self, address=''):values = flask.request.valuesaddress = urllib.parse.unquote_plus(address)if address and values and not address.endswith(''):address += ''result = {}for a in values or '':try:if not self.project:raise ValueError('')ed = editor.Editor(address + a, self.project)result[address + a] = {'': method(self, ed, a)}except:if self.project:traceback.print_exc()result[address + a] = {'': '' % a}return flask.jsonify(result)return multi", "docstring": "Decorator for RestServer methods that take multiple addresses", "id": "f1908:m1"} {"signature": "def extract(self, msg):", "body": "def normal(key):v = msg.get(key)if v is None:return vnormalizer = self.normalizers.get(key, lambda x: x)return normalizer(v)def odict(keys):return collections.OrderedDict((k, normal(k)) for k in keys)def match(m):return (msg.get(k) in v for k, v in m.items()) if m else ()accept = all(match(self.accept))reject = any(match(self.reject))if reject or not accept:keys = ()elif self.keys_by_type is None:keys = [k for k in msg.keys() if 
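The `multi` decorator record above fans a single-address method out over several addresses, collecting either a value or an error marker per address. A simplified, Flask-free sketch of that fan-out (the key names 'value' and 'error' are illustrative, since the record's string literals are elided):

```python
import functools
import traceback

def multi(method):
    """Apply a single-address method to each address, collecting results."""
    @functools.wraps(method)
    def wrapper(self, addresses):
        result = {}
        for a in addresses:
            try:
                result[a] = {'value': method(self, a)}
            except Exception:
                traceback.print_exc()
                result[a] = {'error': 'could not process %s' % a}
        return result
    return wrapper

class Demo:
    @multi
    def get(self, address):
        if address == 'bad':
            raise ValueError(address)
        return address.upper()

print(Demo().get(['x', 'bad', 'y']))
# {'x': {'value': 'X'}, 'bad': {'error': 'could not process bad'}, 'y': {'value': 'Y'}}
```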
k not in self.omit]else:keys = self.keys_by_type.get(msg.get(''))return odict(keys)", "docstring": "Yield an ordered dictionary if msg['type'] is in keys_by_type.", "id": "f1918:c0:m1"} {"signature": "def stop(self=None):", "body": "if not self:instance = getattr(Runner.instance(), '', None)self = instance and instance()if not self:returnself._runner.stop()if self.project:self.project.stop()self.project = None", "docstring": "Stop the builder if it's running.", "id": "f1925:c0:m2"} {"signature": "def update(desc, other=None, **kwds):", "body": "other = other and _as_dict(other) or {}for i in other, kwds:for k, v in i.items():if isinstance(v, dict):old_v = desc[k]for k2, v2 in v.items():if v2 is None:old_v.pop(k2, None)else:old_v[k2] = v2else:set_one(desc, k, v)", "docstring": "Update sections in a Project description", "id": "f1926:m1"} {"signature": "def clear(self):", "body": "self._desc = {}for key, value in merge.DEFAULT_PROJECT.items():if key not in self._HIDDEN:self._desc[key] = type(value)()", "docstring": "Clear description to default values", "id": "f1928:c0:m1"} {"signature": "def as_dict(self):", "body": "return {k: v for k, v in self.items() if v}", "docstring": "Returns a dictionary of non-empty description", "id": "f1928:c0:m4"} {"signature": "def __getitem__(self, index):", "body": "index = self._check_index(index)return self.layout.get(*index)", "docstring": "Returns the r, g, b pixel at a location in the layout. May only be\ncalled if self.is_running is true.", "id": "f1929:c0:m1"} {"signature": "def clear(self):", "body": "self.desc.clear()", "docstring": "Clear description to default values", "id": "f1930:c0:m3"} {"signature": "@classmethoddef instance(cls):", "body": "return cls._INSTANCE and cls._INSTANCE()", "docstring": "Return the unique instance of Runner, if any, or None", "id": "f1931:c0:m3"} {"signature": "def stop(self):", "body": "if self.is_running:log.info('')self.is_running = Falseself.__class__._INSTANCE = Nonetry:self.thread and self.thread.stop()except:log.error('')traceback.print_exc()self.thread = Nonereturn True", "docstring": "Stop the Runner if it's running.\nCalled as a classmethod, stop the running instance if any.", "id": "f1931:c0:m2"} {"signature": "def __init__(self, *args, limit=None, **kwds):", "body": "super().__init__(*args, **kwds)self.limit = Limit(**(limit or {}))self._math = color_list.Math(self.color_list)", "docstring": ":param dict limit: A construction dictionary for a Limit.", "id": "f1947:c0:m0"} {"signature": "@propertydef _led(self):", "body": "return self.layout", "docstring": "Many BiblioPixelAnimations use the \"protected\" variable _led.", "id": "f1952:c0:m3"} {"signature": "@classmethoddef construct(cls, project, *, run=None, name=None, data=None, **desc):", "body": "from . 
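The `update` record above merges sections into a project description, where a value of None inside a sub-dictionary deletes that key. A simplified sketch of that merge rule (the real version also accepts keyword arguments and routes non-dict values through a set_one helper):

```python
def update(desc, other):
    """Merge sub-dicts key by key; a value of None deletes the key."""
    for k, v in other.items():
        if isinstance(v, dict):
            old = desc.setdefault(k, {})
            for k2, v2 in v.items():
                if v2 is None:
                    old.pop(k2, None)
                else:
                    old[k2] = v2
        else:
            desc[k] = v

d = {'run': {'fps': 30, 'seed': 1}}
update(d, {'run': {'fps': 60, 'seed': None}})
print(d)  # {'run': {'fps': 60}}
```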
failed import Failedexception = desc.pop('', None)if exception:a = Failed(project.layout, desc, exception)else:try:a = cls(project.layout, **desc)a._set_runner(run or {})except Exception as e:if cls.FAIL_ON_EXCEPTION:raisea = Failed(project.layout, desc, e)a.name = namea.data = datareturn a", "docstring": "Construct an animation, set the runner, and add in the two\n\"reserved fields\" `name` and `data`.", "id": "f1952:c0:m0"} {"signature": "def __init__(self, *args, overlay=False, detach=True, **kwds):", "body": "super().__init__(*args, **kwds)if detach:self.detach(overlay)", "docstring": "If overlay is True, then preclear is set to False for everything\nother than the first animation.", "id": "f1954:c0:m0"} {"signature": "def raw_opener(ip_address, port, delay=):", "body": "def target():time.sleep(delay)url = '' % (ip_address, port)webbrowser.open(url, new=, autoraise=True)threading.Thread(target=target, daemon=True).start()", "docstring": "Wait a little and then open a web browser page for the control panel.", "id": "f1966:m1"} {"signature": "def _clean_animation(desc, parent):", "body": "desc = load.load_if_filename(desc) or descif isinstance(desc, str):animation = {'': desc}elif not isinstance(desc, dict):raise TypeError('' % type(desc))elif '' in desc or '' not in desc:animation = descelse:animation = desc.pop('', {})if isinstance(animation, str):animation = {'': animation}animation[''] = desc.pop('', {})if desc:raise ValueError('' + ''.join(desc))animation.setdefault('', DEFAULT_ANIMATION)animation = construct.to_type_constructor(animation, ANIMATION_PATH)datatype = animation.setdefault('', failed.Failed)animation.setdefault('', datatype.__name__)run = animation.setdefault('', {})run_parent = parent.setdefault('', {})if not ('' in run or '' in run):if '' in run_parent:run.update(fps=run_parent[''])elif '' in run_parent:run.update(sleep_time=run_parent[''])return animation", "docstring": "Cleans up all sorts of special cases that humans want when entering\nan animation from a yaml file.\n\n1. Loading it from a file\n2. Using just a typename instead of a dict\n3. A single dict representing an animation, with a run: section.\n4. (Legacy) Having a dict with parallel elements run: and animation:\n5. (Legacy) A tuple or list: (animation, run )", "id": "f1968:m0"} {"signature": "def detach(self, overlay):", "body": "for i, a in enumerate(self.animations):a.layout = a.layout.clone()if overlay and i:a.preclear = False", "docstring": "Give each animation a unique, mutable layout so they can run\nindependently.", "id": "f1968:c0:m6"} {"signature": "def __init__(self, *args, size=, **kwds):", "body": "super().__init__(*args, detach=False, **kwds)if not size:raise ValueError('')self.size = size if isinstance(size, list) else [size]self.is_numpy = hasattr(self.color_list, '')for animation, begin, end in self._foreach():animation.layout = Strip([], color_list=self.color_list[begin:end])", "docstring": "Arguments --\n size: a number or a list of numbers representing the size of each\n segment from the original layout. 
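The _clean_animation record above normalizes the "special cases humans want" when writing an animation description. A tiny sketch of its first two cases; the key name 'typename' is inferred from the routing docstring earlier in this section, since the record's own string literals are elided:

```python
def normalize(desc):
    """Sketch of cases 2-3 of _clean_animation's normalization."""
    # A bare string is shorthand for a dict with just a type name.
    if isinstance(desc, str):
        return {'typename': desc}  # 'typename' is an inferred key name
    if not isinstance(desc, dict):
        raise TypeError('Unexpected animation type %s' % type(desc))
    return desc

print(normalize('my.package.MyAnimation'))  # {'typename': 'my.package.MyAnimation'}
print(normalize({'run': {'fps': 30}}))      # returned unchanged
```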
If there aren't enough sizes\n for each segment, the list of sizes is reused repeatedly.", "id": "f1971:c0:m0"} {"signature": "@propertydef index(self):", "body": "return self._index", "docstring": ":returns int: index of the current animation within the Collection.", "id": "f1972:c0:m0"} {"signature": "def step(self, amt=):", "body": "if not self._stop_event.isSet():self._hold_for_data.wait()self._hold_for_data.clear()", "docstring": "This may seem silly, but on a Receiver step() need not do anything.\nInstead, receive the data on the receive thread and set it on the buffer\nthen call self._hold_for_data.set()", "id": "f1973:c1:m6"} {"signature": "def adapt_animation_layout(animation):", "body": "layout = animation.layoutrequired = getattr(animation, '', None)if not required or isinstance(layout, required):returnmsg = LAYOUT_WARNING % (type(animation).__name__, required.__name__, type(layout).__name__)setter = layout.setadaptor = Noneif required is strip.Strip:if isinstance(layout, matrix.Matrix):width = layout.widthdef adaptor(pixel, color=None):y, x = divmod(pixel, width)setter(x, y, color or BLACK)elif isinstance(layout, cube.Cube):lx, ly = layout.x, layout.ydef adaptor(pixel, color=None):yz, x = divmod(pixel, lx)z, y = divmod(yz, ly)setter(x, y, z, color or BLACK)elif isinstance(layout, circle.Circle):def adaptor(pixel, color=None):layout._set_base(pixel, color or BLACK)elif required is matrix.Matrix:if isinstance(layout, strip.Strip):width = animation.widthdef adaptor(x, y, color=None):setter(x + y * width, color or BLACK)if not adaptor:raise ValueError(msg)log.warning(msg)animation.layout.set = adaptor", "docstring": "Adapt the setter in an animation's layout so that Strip animations can run\non a Matrix, Cube, or Circle layout, and Matrix or Cube animations can run\non a Strip layout.", "id": "f1979:m0"} {"signature": "def read_from(self, data, pad=):", "body": "for i in range(self.BEGIN, self.END + ):index = self.index(i, len(data))yield pad if index is None else data[index]", "docstring": "Returns a generator with the elements of \"data\" taken by offset, restricted\nby self.begin and self.end, and padded on either end by `pad` to get\nback to the original length of `data`", "id": "f1982:c0:m3"} {"signature": "def __init__(self, offset=, begin=None, end=None):", "body": "self.begin = self.BEGIN if begin is None else beginself.end = self.END if end is None else endif not (self.BEGIN <= self.begin <= self.end <= self.END):raise ValueError('' %(self.BEGIN, self.begin, self.end, self.END))self.offset = offset", "docstring": "Unlike a `range`, an OffsetRange includes both its begin *and* its end,\nso it's closer to how regular people think of a range - for example,\nDMX channels are in the range 1-512.", "id": "f1982:c0:m0"} {"signature": "def pointOnCircle(cx, cy, radius, angle):", "body": "angle = math.radians(angle) - (math.pi / )x = cx + radius * math.cos(angle)if x < cx:x = math.ceil(x)else:x = math.floor(x)y = cy + radius * math.sin(angle)if y < cy:y = math.ceil(y)else:y = math.floor(y)return (int(x), int(y))", "docstring": "Calculates the coordinates of a point on a circle given the center point,\nradius, and angle.", "id": "f1983:m6"}
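The pointOnCircle record above converts a polar angle to integer pixel coordinates. A runnable sketch with the elided literals filled in by assumption (the angle shift of -pi/2, which makes 0 degrees point straight up, and the ceil/floor snapping toward the center):

```python
import math

def point_on_circle(cx, cy, radius, angle):
    """Sketch of pointOnCircle; the -pi/2 shift and snapping are assumptions."""
    a = math.radians(angle) - math.pi / 2  # assumed: 0 degrees points up
    x = cx + radius * math.cos(a)
    x = math.ceil(x) if x < cx else math.floor(x)  # snap toward the center
    y = cy + radius * math.sin(a)
    y = math.ceil(y) if y < cy else math.floor(y)
    return int(x), int(y)

print(point_on_circle(10, 10, 5, 0))   # (10, 5)  - straight up
print(point_on_circle(10, 10, 5, 90))  # (15, 10) - to the right
```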
 {"signature": "def parse(s):", "body": "parts = s.replace('', '').split()if not parts:raise ValueError('')pieces = []for part in parts:m = PART_MATCH(part)pieces.extend(m.groups() if m else [part])if len(pieces) == :pieces.append('')if len(pieces) % :raise ValueError('' % (s, parts, pieces))result = for number, units in zip(*[iter(pieces)] * ):number = float(number)if number < :raise ValueError('')result += number * _get_units(units)return result", "docstring": "Parse a string representing a time interval or duration into seconds,\nor raise an exception\n\n:param str s: a string representation of a time interval\n:raises ValueError: if ``s`` can't be interpreted as a duration", "id": "f1987:m1"} {"signature": "def __init__(self, filename):", "body": "self.__filename = filenamedata = data_file.load(filename) if os.path.exists(filename) else {}super().__init__(data)", "docstring": ":param filename: the filename to store the DATA_FILE in", "id": "f1988:c0:m0"} {"signature": "def __init__(self, constructor, **kwds):", "body": "self.servers = {}self.constructor = constructorself.kwds = kwds", "docstring": ":param constructor: a function which takes a key and some keywords,\n and returns a new server\n:param kwds: keywords to the ``constructor`` function", "id": "f1989:c1:m0"} {"signature": "def __init__(self, ratio=, knee=, gain=, enable=True):", "body": "self.ratio = ratioself.knee = kneeself.gain = gainself.enable = enable", "docstring": ":param float ratio: the compression ratio (1 means no compression).\n ratio should usually be between 0 and 1.\n\n:param float knee: the ratio where the compression starts to kick in.\n knee should usually be 0 <= knee <= ratio\n\n:param float gain: post limiter output gain. gain should usually be >= 0", "id": "f1990:c0:m0"} {"signature": "@propertydef running(self):", "body": "return self.run_event.is_set() and not self.stop_event.is_set()", "docstring": "Is this Runnable expected to make any progress from here?\n\nThe Runnable might still execute a little code after it has stopped\nrunning.", "id": "f1993:c0:m1"} {"signature": "def cleanup(self):", "body": "", "docstring": "Cleans up resources after the Runnable.\n\nself.cleanup() may not throw an exception.", "id": "f1993:c0:m6"} {"signature": "@contextlib.contextmanagerdef run_until_stop(self):", "body": "self.start()try:yield selffinally:self.stop()self.wait()", "docstring": "A context manager that starts this Runnable, yields,\nand then waits for it to finish.", "id": "f1993:c0:m9"} {"signature": "def run(function, *args, use_subprocess=False, daemon=True, **kwds):", "body": "if use_subprocess:Creator, Queue = multiprocessing.Process, multiprocessing.Queueelse:Creator, Queue = threading.Thread, queue.Queueinput, output = Queue(), Queue()args = input, output, function, argssub = Creator(target=_run_locally, args=args, kwargs=kwds, daemon=daemon)sub.start()return sub, input, output", "docstring": "Create input, output queues, call `function` in a subprocess or a thread.\n\n``function`` is called like this: ``function(input, output, *args, **kwds)``\n\n:param use_subprocess: if true, create a new process;\n if false, create a new thread\n:param function: the function to call\n:param daemon: is the thread or subprocess run as a daemon or not?\n\n:param args: positional arguments to the function\n:param kwds: keyword arguments to the function\n:returns: a tuple with three elements: the subprocess or thread, an input\n queue, and an output queue.", "id": "f1995:m1"} {"signature": "def __init__(self, function, errors):", "body": "assert isinstance(errors, int) or errors in ('', '', '')self.function = functionself.errors = errorsself.error_count = ", "docstring": ":param function: the function to wrap\n:param errors: either a number, indicating how many errors to report\n before ignoring them, or one of these strings:\n 'raise', meaning to raise an
exception\n 'ignore', meaning to ignore all errors\n 'report', meaning to report all errors", "id": "f1997:c0:m0"} {"signature": "def set_log_level(level):", "body": "if isinstance(level, str):level = LOG_NAMES[level.lower()]logger.setLevel(level)", "docstring": ":param level: the level to set - either a string level name from\n 'frame', 'debug', 'info', 'warning', 'error'\n or an integer log level from:\n log.FRAME, log.DEBUG, log.INFO, log.WARNING, log.ERROR", "id": "f1998:m4"} {"signature": "def dumps(data, use_yaml=None, safe=True, **kwds):", "body": "if use_yaml is None:use_yaml = ALWAYS_DUMP_YAMLif use_yaml:dumps = yaml.safe_dump if safe else yaml.dumpelse:dumps = json.dumpskwds.update(indent=, sort_keys=True)if not safe:kwds.update(default=repr)return dumps(data, **kwds)", "docstring": "Dumps data into a nicely formatted JSON string.\n\n:param dict data: a dictionary to dump\n:param kwds: keywords to pass to json.dumps\n:returns: a string with formatted data\n:rtype: str", "id": "f1999:m0"} {"signature": "def load(file, use_yaml=None):", "body": "if isinstance(file, str):fp = open(file)filename = fileelse:fp = filefilename = getattr(fp, '', '')try:return loads(fp.read(), use_yaml, filename)except Exception as e:e.args = ('', filename) + e.argsraise", "docstring": "Loads not only JSON files but also YAML files ending in .yml.\n\n:param file: a filename or file handle to read from\n:returns: the data loaded from the JSON or YAML file\n:rtype: dict", "id": "f1999:m3"} {"signature": "def dump(data, file=sys.stdout, use_yaml=None, **kwds):", "body": "if use_yaml is None:use_yaml = ALWAYS_DUMP_YAMLdef dump(fp):if use_yaml:yaml.safe_dump(data, stream=fp, **kwds)else:json.dump(data, fp, indent=, sort_keys=True, **kwds)if not isinstance(file, str):return dump(file)if os.path.isabs(file):parent = os.path.dirname(file)if not os.path.exists(parent):os.makedirs(parent, exist_ok=True)with open(file, '') as fp:return dump(fp)", "docstring": "Dumps data as nicely formatted JSON string to a file or file handle\n\n:param dict data: a dictionary to dump\n:param file: a filename or file handle to write to\n:param kwds: keywords to pass to json.dump", "id": "f1999:m1"} {"signature": "def advance_permutation(a, increasing=True, forward=True):", "body": "if not forward:a.reverse()cmp = operator.lt if increasing else operator.gttry:i = next(i for i in reversed(range(len(a) - )) if cmp(a[i], a[i + ]))j = next(j for j in reversed(range(i + , len(a))) if cmp(a[i], a[j]))except StopIteration:if forward:a.reverse()return Falsea[i], a[j] = a[j], a[i]a[i + :] = reversed(a[i + :])if not forward:a.reverse()return True", "docstring": "Advance a list of unique, ordered elements in-place, lexicographically\nincreasing or backward, by rightmost or leftmost digit.\n\nReturns False if the permutation wrapped around - i.e. went from\nlexicographically greatest to least, and True in all other cases.\n\nIf the length of the list is N, then this function will repeat values after\nN! 
steps, and will return False exactly once.\n\nSee also https://stackoverflow.com/a/34325140/43839", "id": "f2003:m0"} {"signature": "def get_pid(pid_filename=None):", "body": "return int(open(pid_filename or DEFAULT_PID_FILENAME).read())", "docstring": "Return the integer PID for the current bp process, or raise an exception if\nthere is no such process or it hasn't registered a PID.", "id": "f2005:m2"} {"signature": "def report(function, *args, **kwds):", "body": "try:function(*args, **kwds)except Exception:traceback.print_exc()", "docstring": "Run a function, catch, report and discard exceptions", "id": "f2007:m1"} {"signature": "@contextlib.contextmanagerdef add(*args):", "body": "try:yieldexcept Exception as e:e.args = args + e.argsraise", "docstring": "A context manager that appends arguments to any exception thrown\n\n:param args: Arguments to be appended to the ``.args`` attribute of any\n exception that is thrown while the context manager is active", "id": "f2007:m0"} {"signature": "def resize(image, x, y, stretch=False, top=None, left=None, mode='',resample=None):", "body": "if x <= :raise ValueError('')if y <= :raise ValueError('')from PIL import Imageresample = Image.ANTIALIAS if resample is None else resampleif not isinstance(resample, numbers.Number):try:resample = getattr(Image, resample.upper())except:raise ValueError(\"\" % resample)if not isinstance(resample, numbers.Number):raise ValueError(\"\" % resample)size = x, yif stretch:return image.resize(size, resample=resample)result = Image.new(mode, size)ratios = [d1 / d2 for d1, d2 in zip(size, image.size)]if ratios[] < ratios[]:new_size = (size[], int(image.size[] * ratios[]))else:new_size = (int(image.size[] * ratios[]), size[])image = image.resize(new_size, resample=resample)if left is None:box_x = int((x - new_size[]) / )elif left:box_x = else:box_x = x - new_size[]if top is None:box_y = int((y - new_size[]) / )elif top:box_y = else:box_y = y - new_size[]result.paste(image, box=(box_x, box_y))return result", "docstring": "Return an image resized.", "id": "f2011:m1"} {"signature": "def show_image(setter, width, height,image_path='', image_obj=None, offset=(, ),bgcolor=COLORS.Off, brightness=):", "body": "bgcolor = color_scale(bgcolor, brightness)img = image_objif image_path and not img:from PIL import Imageimg = Image.open(image_path)elif not img:raise ValueError('')w = min(width - offset[], img.size[])h = min(height - offset[], img.size[])ox = offset[]oy = offset[]for x in range(ox, w + ox):for y in range(oy, h + oy):r, g, b, a = (, , , )rgba = img.getpixel((x - ox, y - oy))if isinstance(rgba, int):raise ValueError('')if len(rgba) == :r, g, b = rgbaelif len(rgba) == :r, g, b, a = rgbaelse:raise ValueError('')if a == :r, g, b = bgcolorelse:r, g, b = color_scale((r, g, b), a)if brightness != :r, g, b = color_scale((r, g, b), brightness)setter(x, y, (r, g, b))", "docstring": "Display an image on a matrix.", "id": "f2015:m0"} {"signature": "def loadImage(layout, imagePath=\"\", imageObj=None, offset=(, ),bgcolor=COLORS.Off, brightness=):", "body": "if not isinstance(layout, Matrix):raise RuntimeError(\"\")texture = [[COLORS.Off for x in range(layout.width)]for y in range(layout.height)]def setter(x, y, pixel):if y >= and x >= :texture[y][x] = pixelshow_image(setter, layout.width, layout.height, imagePath, imageObj,offset, bgcolor, brightness)return texture", "docstring": "Display an image on the matrix", "id": "f2015:m2"} {"signature": "def showImage(layout, imagePath=\"\", imageObj=None, offset=(, ),bgcolor=COLORS.Off, 
brightness=):", "body": "if not isinstance(layout, Matrix):raise RuntimeError(\"\")layout.all_off()return show_image(layout.set, layout.width, layout.height, imagePath,imageObj, offset, bgcolor, brightness)", "docstring": "Display an image on the matrix", "id": "f2015:m1"} {"signature": "def __init__(self, address):", "body": "super().__init__()self.address = address", "docstring": ":param str address: a pair (ip_address, port) to pass to socket.connect", "id": "f2021:c0:m0"} {"signature": "def __init__(self, address, **kwds):", "body": "super().__init__(**kwds)self.sender = Sender(address)", "docstring": ":param str address: a pair (ip_address, port) to pass to socket.connect", "id": "f2021:c1:m0"} {"signature": "def unflatten(master):", "body": "result = {}for k, v in master.items():*first, last = k.split('')r = resultfor i in first:r = r.setdefault(i, {})r[last] = vreturn result", "docstring": ":param dict master: a multilevel dictionary\n:return: a unflattened dictionary\n:rtype: dict\n\nUnflattens a single-level dictionary a multilevel into one so that::\n\n {'foo.bar.a': 1,\n 'foo.bar.b': True,\n 'foo.bar.a': 1,\n }\n\nwould become::\n\n {'foo':\n {'bar':\n {\n 'a': 1,\n 'b': True,\n 'c': 'hello',\n },\n },\n }", "id": "f2023:m1"} {"signature": "def pop_legacy_palette(kwds, *color_defaults):", "body": "palette = kwds.pop('', None)if palette:legacy = [k for k, _ in color_defaults if k in kwds]if legacy:raise ValueError('' + ''.join(legacy))return palettevalues = [kwds.pop(k, v) for k, v in color_defaults]if values and color_defaults[][] in ('', ''):values = values[]return make.colors(values or None)", "docstring": "Older animations in BPA and other areas use all sorts of different names for\nwhat we are now representing with palettes.\n\nThis function mutates a kwds dictionary to remove these legacy fields and\nextract a palette from it, which it returns.", "id": "f2026:m0"} {"signature": "def wheel_color(position):", "body": "return _WHEEL[round(position) % len(_WHEEL)]", "docstring": "Get color from wheel value (0 - 384).\n Provided for those used to using it from Adafruit libraries", "id": "f2027:m1"} {"signature": "def hsv2rgb_rainbow(hsv):", "body": "def nscale8x3_video(r, g, b, scale):nonzeroscale = if scale != :nonzeroscale = if r != :r = ((r * scale) >> ) + nonzeroscaleif g != :g = ((g * scale) >> ) + nonzeroscaleif b != :b = ((b * scale) >> ) + nonzeroscalereturn (r, g, b)def scale8_video_LEAVING_R1_DIRTY(i, scale):nonzeroscale = if scale != :nonzeroscale = if i != :i = ((i * scale) >> ) + nonzeroscalereturn ih, s, v = hsvoffset = h & offset8 = offset * third = (offset8 * ( // )) >> r, g, b = (, , )if not (h & ):if not (h & ):if not (h & ):r = - thirdg = thirdb = else:r = g = + thirdb = else:if not (h & ):twothirds = (third << )r = - twothirdsg = + thirdb = else:r = g = - thirdb = thirdelse:if not (h & ):if not (h & ):r = twothirds = (third << )g = - twothirdsb = + twothirdselse:r = thirdg = b = - thirdelse:if not (h & ):r = + thirdg = b = - thirdelse:r = + thirdg = b = - thirdif s != :r, g, b = nscale8x3_video(r, g, b, s)desat = - sdesat = (desat * desat) >> brightness_floor = desatr = r + brightness_floorg = g + brightness_floorb = b + brightness_floorif v != :v = scale8_video_LEAVING_R1_DIRTY(v, v)r, g, b = nscale8x3_video(r, g, b, v)return (r, g, b)", "docstring": "Generates RGB values from HSV that have an even visual\n distribution. 
 {"signature": "def color_cmp(a, b):", "body": "if a == b:return a, b = rgb_to_hsv(a), rgb_to_hsv(b)return - if a < b else ", "docstring": "Order colors by hue, saturation and value, in that order.\n\n Returns -1 if a < b, 0 if a == b and 1 if a > b.", "id": "f2030:m12"} {"signature": "def hsv2rgb_raw(hsv):", "body": "HSV_SECTION_3 = h, s, v = hsvinvsat = - sbrightness_floor = (v * invsat) // color_amplitude = v - brightness_floorsection = h // HSV_SECTION_3 offset = h % HSV_SECTION_3 rampup = offsetrampdown = (HSV_SECTION_3 - ) - offsetrampup_amp_adj = (rampup * color_amplitude) // ( // )rampdown_amp_adj = (rampdown * color_amplitude) // ( // )rampup_adj_with_floor = rampup_amp_adj + brightness_floorrampdown_adj_with_floor = rampdown_amp_adj + brightness_floorr, g, b = (, , )if section:if section == :r = brightness_floorg = rampdown_adj_with_floorb = rampup_adj_with_floorelse:r = rampup_adj_with_floorg = brightness_floorb = rampdown_adj_with_floorelse:r = rampdown_adj_with_floorg = rampup_adj_with_floorb = brightness_floorreturn (r, g, b)", "docstring": "Converts an HSV tuple to RGB. Intended for internal use.\nYou should use hsv2rgb_spectrum or hsv2rgb_rainbow instead.", "id": "f2030:m0"} {"signature": "def hsv2rgb_360(hsv):", "body": "h, s, v = hsvr, g, b = colorsys.hsv_to_rgb(h / , s, v)return (int(r * ), int(g * ), int(b * ))", "docstring": "Python default hsv to rgb conversion for when hue values in the\n range 0-359 are preferred. Due to requiring float math, this method\n is slower than hsv2rgb_rainbow and hsv2rgb_spectrum.", "id": "f2030:m3"} {"signature": "def euclidean(c1, c2):", "body": "diffs = ((i - j) for i, j in zip(c1, c2))return sum(x * x for x in diffs)", "docstring": "Square of the Euclidean distance", "id": "f2032:m1"} {"signature": "def __init__(self, colors=(), continuous=False, serpentine=False, scale=,offset=, autoscale=False, length=None):", "body": "super().__init__(colors)if not self:self.append(Black)self.continuous = continuousself.serpentine = serpentineself.scale = scaleself.offset = offsetself.autoscale = autoscaleself.length = length", "docstring": "Arguments:\n colors: an iterable of colors\n\n continuous: if True, interpolate linearly between colors; if False,\n use the nearest color from the original list\n\n serpentine: if True, palette colors are used in reverse order every\n other iteration, giving a back-and-forth effect. If False,\n palette colors always restart on each iteration\n\n scale: Scales the incoming index ``i``. As ``i`` moves from 0\n to ``len(colors) - 1``, the whole palette repeats itself\n ``self.scale`` times\n\n offset: offset to the incoming index ``i``, applied after scaling\n\n autoscale: If True, automatically rescale the Palette size to\n match the length of the output. ``autoscale`` happens before\n ``scale``, so the two work well together to give banding or\n striping effects across your display\n\n ``length``:\n The length of the output color_list. If None, use the length of\n the palette itself. If autoscale=True, ``length`` is used to scale\n the palette to match the output.", "id": "f2033:c0:m0"}
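The hsv2rgb_360 record above defers to the standard library's colorsys. A runnable sketch; the elided scaling constants here are my assumptions (hue in 0-359, s and v in 0-255, 8-bit RGB out):

```python
import colorsys

def hsv2rgb_360(hsv):
    """Float-based HSV-to-RGB via colorsys; scaling factors are assumed."""
    h, s, v = hsv
    r, g, b = colorsys.hsv_to_rgb(h / 360, s / 255, v / 255)
    return int(r * 255), int(g * 255), int(b * 255)

print(hsv2rgb_360((0, 255, 255)))    # pure red   -> (255, 0, 0)
print(hsv2rgb_360((120, 255, 255)))  # pure green -> (0, 255, 0)
```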
 {"signature": "def toggle(s):", "body": "is_numeric = '' in s or s.startswith('') or s.startswith('')c = name_to_color(s)return color_to_name(c) if is_numeric else str(c)", "docstring": "Toggle back and forth between a name and a tuple representation.\n\n:param str s: a string which is either a text name, or a tuple-string:\n a string with three numbers separated by commas\n\n:returns: if the string was a text name, return a tuple. If it's a\n tuple-string and it corresponds to a text name, return the text\n name, else return the original tuple-string.", "id": "f2035:m2"} {"signature": "def color_blend(a, b):", "body": "return ( - ((( - a[]) * ( - b[])) >> ), - ((( - a[]) * ( - b[])) >> ), - ((( - a[]) * ( - b[])) >> ))", "docstring": "Performs a Screen blend on RGB color tuples, a and b", "id": "f2036:m0"} {"signature": "def get(self, i):", "body": "return self.table[max(, min(, int(i)))]", "docstring": ":returns: The gamma table entry\n:param int i: the index into the table", "id": "f2037:c0:m1"} {"signature": "def get(name=None):", "body": "if name is None or name == '':return _DEFAULT_PALETTEif isinstance(name, str):return PROJECT_PALETTES.get(name) or BUILT_IN_PALETTES.get(name)", "docstring": "Return a named Palette, or None if no such name exists.\n\nIf ``name`` is omitted, the default value is used.", "id": "f2041:m0"} {"signature": "def push_to_driver(self):", "body": "self.wait_for_update()self.update_colors()", "docstring": "Push the current pixel state to the driver", "id": "f2042:c2:m3"} {"signature": "def bresenham_line(setter, x0, y0, x1, y1, color=None, colorFunc=None):", "body": "steep = abs(y1 - y0) > abs(x1 - x0)if steep:x0, y0 = y0, x0x1, y1 = y1, x1if x0 > x1:x0, x1 = x1, x0y0, y1 = y1, y0dx = x1 - x0dy = abs(y1 - y0)err = dx / if y0 < y1:ystep = else:ystep = -count = for x in range(x0, x1 + ):if colorFunc:color = colorFunc(count)count += if steep:setter(y0, x, color)else:setter(x, y0, color)err -= dyif err < :y0 += ysteperr += dx", "docstring": "Draw a line from point x0,y0 to x1,y1.
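The bresenham_line record above has its numeric literals elided; a runnable sketch with them filled in from the standard Bresenham algorithm (the error term starts at dx/2, the y step is plus or minus 1):

```python
def bresenham_line(setter, x0, y0, x1, y1, color=None):
    """Integer Bresenham sketch; elided literals reconstructed by assumption."""
    steep = abs(y1 - y0) > abs(x1 - x0)
    if steep:  # walk along y instead of x for steep lines
        x0, y0 = y0, x0
        x1, y1 = y1, x1
    if x0 > x1:
        x0, x1 = x1, x0
        y0, y1 = y1, y0
    dx, dy = x1 - x0, abs(y1 - y0)
    err = dx / 2
    ystep = 1 if y0 < y1 else -1
    for x in range(x0, x1 + 1):
        if steep:
            setter(y0, x, color)
        else:
            setter(x, y0, color)
        err -= dy
        if err < 0:
            y0 += ystep
            err += dx

pixels = []
bresenham_line(lambda x, y, c: pixels.append((x, y)), 0, 0, 4, 2)
print(pixels)  # [(0, 0), (1, 0), (2, 1), (3, 1), (4, 2)]
```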
Will draw beyond matrix bounds.", "id": "f2044:m5"} {"signature": "def fillScreen(self, color=None):", "body": "md.fill_rect(self.set, , , self.width, self.height, color)", "docstring": "Fill the matrix with the given RGB color", "id": "f2045:c0:m20"} {"signature": "@propertydef shape(self):", "body": "return self.width, self.height", "docstring": "Returns ``width, height``", "id": "f2045:c0:m1"} {"signature": "def bresenham_line(self, x0, y0, x1, y1, color=None, colorFunc=None):", "body": "md.bresenham_line(self.set, x0, y0, x1, y1, color, colorFunc)", "docstring": "Draw line from point x0, y0 to x1, y1 using Bresenham's algorithm.\n\nWill draw beyond matrix bounds.", "id": "f2045:c0:m16"} {"signature": "def drawRect(self, x, y, w, h, color=None, aa=False):", "body": "md.draw_rect(self.set, x, y, w, h, color, aa)", "docstring": "Draw rectangle with top-left corner at x,y, width w and height h\n\n:param aa: if True, use Bresenham's algorithm for line drawing;\n otherwise use Xiaolin Wu's algorithm", "id": "f2045:c0:m18"} {"signature": "def set(self, x, y, color):", "body": "raise NotImplementedError", "docstring": "Set the pixel color at position x, y.", "id": "f2045:c0:m3"} {"signature": "def drawText(self, text, x=, y=, color=None,bg=colors.COLORS.Off, aa=False, font=font.default_font,font_scale=):", "body": "md.draw_text(self.fonts, self.set, text, self.width, self.height,x, y, color, bg, aa, font, font_scale)", "docstring": "Draw a line of text starting at (x, y) in an RGB color.\n\n:param colorFunc: a function that takes an integer from x0 to x1 and\n returns a color corresponding to that point\n:param aa: if True, use Bresenham's algorithm for line drawing;\n otherwise use Xiaolin Wu's algorithm", "id": "f2045:c0:m26"} {"signature": "def fillTriangle(self, x0, y0, x1, y1, x2, y2, color=None, aa=False):", "body": "md.fill_triangle(self.set, x0, y0, x1, y1, x2, y2, color, aa)", "docstring": "Draw filled triangle with points x0,y0 - x1,y1 - x2,y2\n\n:param aa: if True, use Bresenham's algorithm for line drawing;\n otherwise use Xiaolin Wu's algorithm", "id": "f2045:c0:m24"} {"signature": "def drawCircle(self, x0, y0, r, color=None):", "body": "md.draw_circle(self.set, x0, y0, r, color)", "docstring": "Draw a circle in an RGB color, with center x0, y0 and radius r.", "id": "f2045:c0:m13"} {"signature": "def __init__(self, drivers, width=, height=,rotation=, vert_flip=False, y_flip=False,serpentine=True,threadedUpdate=False, brightness=,pixelSize=(, ), **kwargs):", "body": "self.gen_multi = make_matrix_coord_map_multisuper().__init__(drivers, threadedUpdate, brightness, **kwargs)rot_mod = rotation % self.rotation = * round(rot_mod / )if self.rotation != rot_mod:log.warning(ROTATION_WARNING, rotation, self.rotation)self.width = width or getattr(self.drivers[], '') or self.height = height or getattr(self.drivers[], '') or self.vert_flip = vert_flipself.y_flip = y_flipself.serpentine = serpentineself.pixelSize = pixelSizepw, ph = self.pixelSizeif not (self.width or self.height):square = int(math.sqrt(self.numLEDs))if (square * square) == self.numLEDs:self.width = self.height = squareelse:raise TypeError('''')if self.width * self.height > self.numLEDs:raise ValueError(''% (self.width, self.height, self.numLEDs))if not self.coord_map:if len(self.drivers) == :log.debug('''')y_flip = y_flip or vert_flipself.coord_map = make_matrix_coord_map(self.width, self.height,serpentine=serpentine,rotation=rotation,y_flip=vert_flip)elif self.drivers:raise 
TypeError('')self.set_pixel_positions(make_matrix_coord_map_positions(self.coord_map))if rotation in (, ):w = self.widthh = self.heightself.width = hself.height = wself.texture = Noneself.set = self._setColorif pw < or pw > self.width or ph < or ph > self.height:raise ValueError('''')if self.width % pw != or self.height % ph != :raise ValueError('')if pw == and ph == :self._set = self.__setNormalelse:self._set = self.__setScaledself.width = self.width / pwself.height = self.height / phself.numLEDs = self.width * self.heightself.fonts = font.fonts", "docstring": "Main class for matrices.\n\n driver -- instance that inherits from DriverBase\n width -- X axis size of matrix\n height -- Y axis size of matrix\n coord_map -- a 2D matrix defining the X,Y to strip index mapping.\n Not needed in most cases\n rotation -- how to rotate when generating the map.\n Not used if coord_map specified\n vert_flip - flips the generated map along the Y axis.\n This along with rotation can achieve any orientation", "id": "f2045:c0:m0"} {"signature": "def drawLine(self, x0, y0, x1, y1, color=None, colorFunc=None, aa=False):", "body": "md.draw_line(self.set, x0, y0, x1, y1, color, colorFunc, aa)", "docstring": "Draw a line between x0, y0 and x1, y1 in an RGB color.\n\n:param colorFunc: a function that takes an integer from x0 to x1 and\n returns a color corresponding to that point\n:param aa: if True, use Bresenham's algorithm for line drawing;\n otherwise use Xiaolin Wu's algorithm", "id": "f2045:c0:m15"} {"signature": "def fillRoundRect(self, x, y, w, h, r, color=None, aa=False):", "body": "md.fill_round_rect(self.set, x, y, w, h, r, color, aa)", "docstring": "Draw a rounded rectangle with top-left corner at (x, y), width w,\nheight h, and corner radius r\n\n:param aa: if True, use Bresenham's algorithm for line drawing;\n otherwise use Xiaolin Wu's algorithm", "id": "f2045:c0:m22"} {"signature": "def get(self, x, y):", "body": "try:pixel = self.coord_map[y][x]return self._get_base(pixel)except IndexError:return colors.COLORS.Black", "docstring": "Return the pixel color at position (x, y), or Colors.black if that\nposition is out-of-bounds.", "id": "f2045:c0:m2"} {"signature": "def fillRect(self, x, y, w, h, color=None, aa=False):", "body": "md.fill_rect(self.set, x, y, w, h, color, aa)", "docstring": "Draw a solid rectangle with top-left corner at (x, y), width w and\nheight h.\n\n:param aa: if True, use Bresenham's algorithm for line drawing;\n otherwise use Xiaolin Wu's algorithm", "id": "f2045:c0:m19"} {"signature": "def drawTriangle(self, x0, y0, x1, y1, x2, y2, color=None, aa=False):", "body": "md.draw_triangle(self.set, x0, y0, x1, y1, x2, y2, color, aa)", "docstring": "Draw a triangle with vertices (x0, y0), (x1, y1) and (x2, y2)\n\n:param aa: if True, use Bresenham's algorithm for line drawing;\n otherwise use Xiaolin Wu's algorithm", "id": "f2045:c0:m23"} {"signature": "@propertydef shape(self):", "body": "return self.ringCount, self.ringSteps", "docstring": "Returns ``ringCount, ringSteps``.", "id": "f2046:c0:m2"} {"signature": "def set(self, ring, angle, color):", "body": "pixel = self.angleToPixel(angle, ring)self._set_base(pixel, color)", "docstring": "Set pixel to RGB color tuple", "id": "f2046:c0:m5"} {"signature": "def apply(self, function):", "body": "for cut in self.cuts:value = self.read(cut)function(value)self.write(cut, value)", "docstring": "For each row or column in cuts, read a list of its colors,\napply the function to that list of colors, then write it back\nto the layout.", "id":
"f2050:c0:m1"} {"signature": "def setRGB(self, pixel, r, g, b):", "body": "self.set(pixel, (r, g, b))", "docstring": "Set single pixel using individual RGB values instead of tuple", "id": "f2051:c0:m7"} {"signature": "def set(self, pixel, color):", "body": "raise NotImplementedError", "docstring": "Set the pixel color at position x in the strip.", "id": "f2051:c0:m3"} {"signature": "def get(self, pixel):", "body": "return self._get_base(pixel * self.pixelWidth)", "docstring": "Get RGB color tuple of color at index pixel", "id": "f2051:c0:m2"} {"signature": "def setOff(self, pixel):", "body": "self.set(pixel, (, , ))", "docstring": "Set single pixel off", "id": "f2051:c0:m9"} {"signature": "def fillRGB(self, r, g, b, start=, end=-):", "body": "self.fill((r, g, b), start, end)", "docstring": "Fill entire strip by giving individual RGB values instead of tuple", "id": "f2052:c0:m24"} {"signature": "@classmethoddef construct(cls, project, **desc):", "body": "return cls(project.drivers, maker=project.maker, **desc)", "docstring": "Construct a layout.\n SHOULD BE PRIVATE", "id": "f2052:c0:m0"} {"signature": "def set_colors(self, buf):", "body": "deprecated.deprecated('')if len(self._colors) != len(buf):raise IOError(\"\"\"\".format(len(self._colors), len(buf)))self._colors[:] = buf", "docstring": "DEPRECATED: use self.color_list\n\nUse with extreme caution!\nDirectly sets the internal buffer and bypasses all brightness and\nrotation control buf must also be in the exact format required by the\ndisplay type.", "id": "f2052:c1:m2"} {"signature": "def set_pixel_positions(self, pixel_positions):", "body": "for d in self.drivers:d.set_pixel_positions(pixel_positions)", "docstring": "SHOULD BE PRIVATE", "id": "f2052:c0:m2"} {"signature": "def reflect_y(x, y, matrix):", "body": "return x, matrix.rows - - y", "docstring": "Reflect the index horizontally.", "id": "f2053:m1"} {"signature": "def transpose(x, y, _):", "body": "return y, x", "docstring": "Transpose rows and columns.", "id": "f2053:m4"} {"signature": "@abc.abstractmethoddef __setitem__(self, index, value):", "body": "pass", "docstring": "`index` must be an integer, not a slice.", "id": "f2060:c0:m1"} {"signature": "def recurse(desc, pre='', post=None, python_path=None):", "body": "def call(f, desc):if isinstance(f, str):f = getattr(datatype, f, None)return f and f(desc)desc = load.load_if_filename(desc) or descdesc = construct.to_type_constructor(desc, python_path)datatype = desc.get('')desc = call(pre, desc) or descfor child_name in getattr(datatype, '', []):child = desc.get(child_name)if child:is_plural = child_name.endswith('')remove_s = is_plural and child_name != ''cname = child_name[:-] if remove_s else child_namenew_path = python_path or ('' + cname)if is_plural:if isinstance(child, (dict, str)):child = [child]for i, c in enumerate(child):child[i] = recurse(c, pre, post, new_path)desc[child_name] = childelse:desc[child_name] = recurse(child, pre, post, new_path)d = call(post, desc)return desc if d is None else d", "docstring": "Depth first recursion through a dictionary containing type constructors\n\nThe arguments pre, post and children are independently either:\n\n* None, which means to do nothing\n* a string, which means to use the static class method of that name on the\n class being constructed, or\n* a callable, to be called at each recursion\n\nArguments:\n\ndictionary -- a project dictionary or one of its subdictionaries\npre -- called before children are visited node in the recursion\npost -- called after children are visited 
in the recursion\npython_path -- relative path to start resolving typenames", "id": "f2061:m0"} {"signature": "def put_edit(self, f, *args, **kwds):", "body": "self.put_nowait(functools.partial(f, *args, **kwds))", "docstring": "Defer an edit to run on the EditQueue.\n\n:param callable f: The function to be called\n:param tuple args: Positional arguments to the function\n:param tuple kwds: Keyword arguments to the function\n:throws queue.Full: if the queue is full", "id": "f2076:c0:m0"} {"signature": "def get_and_run_edits(self):", "body": "if self.empty():returnedits = []while True:try:edits.append(self.get_nowait())except queue.Empty:breakfor e in edits:try:e()except:log.error('', e)traceback.print_exc()", "docstring": "Get all the edits in the queue, then execute them.\n\nThe algorithm gets all edits, and then executes all of them. It does\n*not* pull off one edit, execute, repeat until the queue is empty, and\nthat means that the queue might not be empty at the end of\n``run_edits``, because new edits might have entered the queue\nwhile the previous edits are being executed.\n\nThis has the advantage that if edits enter the queue faster than they\ncan be processed, ``get_and_run_edits`` won't go into an infinite loop,\nbut rather the queue will grow unboundedly, which that can be\ndetected, and mitigated and reported on - or if Queue.maxsize is\nset, ``bp`` will report a fairly clear error and just dump the edits\non the ground.", "id": "f2076:c0:m1"} {"signature": "def __init__(self, *,drivers, layout, maker, path, animation, controls,edit_queue_maxsize=EDIT_QUEUE_MAXSIZE, **kwds):", "body": "self.needs_cleanup = Falsedef create(root, name):def post(desc):exception = desc.get('')if exception:raise exceptionreturn self.construct_child(name, **desc)with exception.add('' + name):return recurse.recurse(root,pre=None,post=post,python_path='' + name)attributes.check(kwds, '')self.path = pathlayout = layout or fill.fill_layout(animation)self.maker = self.construct_child('', **maker)self.drivers = [create(d, '') for d in drivers]with exception.add(''):self.layout = self.construct_child('', **layout)self.animation = create(animation, '')self.running = Falseself.clock = clock.Clock()eq = edit_queue.EditQueue(maxsize=edit_queue_maxsize)self.layout.edit_queue = self.animation.edit_queue = eqself.animation.add_preframe_callback(eq.get_and_run_edits)self.controls = [create(c, '') for c in controls]for d in self.drivers:d.set_project(self)self.animation.set_project(self)", "docstring": ":param int edit_queue_maxsize: maxsize parameter to queue.Queue.\n 0 means an unbounded queue.", "id": "f2080:c0:m2"} {"signature": "def project(*descs, root_file=None):", "body": "load.ROOT_FILE = root_filedesc = merge.merge(merge.DEFAULT_PROJECT, *descs)path = desc.get('', '')if root_file:project_path = os.path.dirname(root_file)if path:path += '' + project_pathelse:path = project_pathwith load.extender(path):desc = recurse.recurse(desc)project = construct.construct(**desc)project.desc = descreturn project", "docstring": "Make a new project, using recursion and alias resolution.\n\nUse this function in preference to calling Project() directly.", "id": "f2080:m0"} {"signature": "def __init__(self, *args, filename='', render=None,divide=, frames=, time=, speed=, options=None,gif_dir=None, **kwds):", "body": "super().__init__(*args, **kwds)self.cur_step = self.movie_writer = _movie_writer.MovieWriter(filename, render, divide, frames, time, speed, options, gif_dir)", "docstring": ":param str filename: Base filename to 
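The put_edit and get_and_run_edits records above implement the deferred-edit pattern: edits enter the queue as partials and are later drained in a batch on the consuming thread. A runnable sketch close to the records (error strings are illustrative since the originals are elided):

```python
import functools
import queue
import traceback

class EditQueue(queue.Queue):
    def put_edit(self, f, *args, **kwds):
        # Defer an edit: store it as a partial to run later.
        self.put_nowait(functools.partial(f, *args, **kwds))

    def get_and_run_edits(self):
        # Drain the queue completely first, then execute the batch, so
        # edits arriving during execution wait for the next drain.
        edits = []
        while True:
            try:
                edits.append(self.get_nowait())
            except queue.Empty:
                break
        for e in edits:
            try:
                e()
            except Exception:
                traceback.print_exc()

eq = EditQueue()
state = {}
eq.put_edit(state.__setitem__, 'brightness', 128)
eq.get_and_run_edits()
print(state)  # {'brightness': 128}
```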
write the animated GIF file\n\n:param dict render: Parameters to the renderer function -\n see ``bibliopixel.util.image.render.renderer``\n\n:param int divide: If greater than 1, only rendered one in ``divide``\n frames\n\n:param int frames: Number of frames to write\n\n:param float time: Total time to write. If non-zero, takes precedence\n over `frames`\n\n:param float speed: the speed of the GIF is scaled up by this factor,\n so if speed=2 then a 2 second animation will become a 1 second GIF.\n\n:param dict options: Options to\n ``bibliopixel.util.image.gif.write_animation``\n\n:param str gif_dir: If set, write individual GIF frame files to this\n directory, and do not delete them when done. For testing purposes.", "id": "f2089:c0:m0"} {"signature": "def set_device_id(self, dev, id):", "body": "if id < or id > :raise ValueError(\"\")com, code, ok = io.send_packet(CMDTYPE.SETID, , dev, self.baudrate, , id)if not ok:raise_error(code)", "docstring": "Set device ID to new value.\n\n :param str dev: Serial device address/path\n :param id: Device ID to set", "id": "f2090:c0:m4"} {"signature": "def error(self, fail=True, action=''):", "body": "e = ''if action:e = '' % (action, e)log.error(e)if fail:raise IOError(e)", "docstring": "SHOULD BE PRIVATE METHOD", "id": "f2090:c0:m3"} {"signature": "def get_device(self, id=None):", "body": "if id is None:if not self.devices:raise ValueError('' % self.hardware_id)id, (device, version) = sorted(self.devices.items())[]elif id in self.devices:device, version = self.devices[id]else:error = '' % idlog.error(error)raise ValueError(error)log.info(\"\",device, id, version)return id, device, version", "docstring": "Returns details of either the first or specified device\n\n :param int id: Identifier of desired device. If not given, first device\n found will be returned\n\n :returns tuple: Device ID, Device Address, Firmware Version", "id": "f2090:c0:m2"} {"signature": "def get_device_id(self, dev):", "body": "com, code, ok = io.send_packet(CMDTYPE.GETID, , dev, self.baudrate, )if code is None:self.error(action='')return code", "docstring": "Get device ID at given address/path.\n\n :param str dev: Serial device address/path\n :param baudrate: Baudrate to use when connecting (optional)", "id": "f2090:c0:m5"} {"signature": "def __init__(self, num=, delay=, **kwds):", "body": "super().__init__(num)self._kwds = kwdsself._delay = delay", "docstring": "Args\n delay: time to wait in seconds to simulate actual hardware\n interface time", "id": "f2103:c0:m0"} {"signature": "def send_packet(self, data):", "body": "pass", "docstring": "do nothing", "id": "f2108:c4:m0"} {"signature": "def send_packet(self, data):", "body": "raise NotImplementedError", "docstring": "SHOULD BE PRIVATE", "id": "f2108:c0:m1"} {"signature": "def send_packet(self, data):", "body": "package_size = for i in range(int(math.ceil(len(data) / package_size))):start = i * package_sizeend = (i + ) * package_sizeself._spi.transfer(data[start:end])", "docstring": "SHOULD BE PRIVATE", "id": "f2108:c2:m1"} {"signature": "def error(self, text):", "body": "msg = ''.format(self._dev, self._spi_speed, text)log.error(msg)raise IOError(msg)", "docstring": "SHOULD BE PRIVATE", "id": "f2108:c0:m3"} {"signature": "def send_packet(self, data):", "body": "package_size = for i in range(int(math.ceil(len(data) / package_size))):start = i * package_sizeend = (i + ) * package_sizeself._spi.write(data[start:end])self._spi.flush()", "docstring": "SHOULD BE PRIVATE", "id": "f2108:c1:m1"} {"signature": "def __init__(self, num=, 
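The two SPI send_packet records above slice the outgoing buffer into fixed-size packages. A runnable sketch of that loop; the package size literal is elided in the records, so the value here is only an assumption:

```python
import math

def send_in_chunks(write, data, package_size=4096):
    """Send `data` through `write` in package_size-byte slices."""
    for i in range(int(math.ceil(len(data) / package_size))):
        write(data[i * package_size:(i + 1) * package_size])

chunks = []
send_in_chunks(chunks.append, bytes(10000), package_size=4096)
print([len(c) for c in chunks])  # [4096, 4096, 1808]
```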
port=, **kwds):", "body": "super().__init__(num, address=port, **kwds)", "docstring": "Args:\n num: number of LEDs being visualizer.\n port: the port on which the SimPixel server is running.\n pixel_positions: the positions of the LEDs in 3-d space.\n **kwds: keywords passed to DriverBase.", "id": "f2117:c1:m1"} {"signature": "def sendFragment(self, data):", "body": "self._sendMessage(True, STREAM, data)", "docstring": "see sendFragmentStart()\n\nIf data is a unicode object then the frame is sent as Text.\nIf the data is a bytearray object then the frame is sent as Binary.", "id": "f2119:c1:m9"} {"signature": "def handleMessage(self):", "body": "pass", "docstring": "Called when websocket frame is received.\nTo access the frame data call self.data.\n\nIf the frame is Text then self.data is a unicode object.\nIf the frame is Binary then self.data is a bytearray object.", "id": "f2119:c1:m1"} {"signature": "def sendMessage(self, data):", "body": "opcode = BINARYif _check_unicode(data):opcode = TEXTself._sendMessage(False, opcode, data)", "docstring": "Send websocket data frame to the client.\n\nIf data is a unicode object then the frame is sent as Text.\nIf the data is a bytearray object then the frame is sent as Binary.", "id": "f2119:c1:m11"} {"signature": "def set_device_brightness(self, brightness):", "body": "packet = util.generate_header(CMDTYPE.BRIGHTNESS, )packet.append(self._brightness)s = self._connect()s.sendall(packet)resp = ord(s.recv())return resp == RETURN_CODES.SUCCESS", "docstring": "Hardware specific method to set the global brightness for\n this driver's output. This method is required to be implemented,\n however, users should call\n :py:meth:`.driver_base.DriverBase.set_brightness`\n instead of calling this method directly.\n\n :param int brightness: 0-255 value representing the desired\n brightness level", "id": "f2122:c1:m4"} {"signature": "def __init__(self, *args, ip_address='', port=artnet_message.UDP_PORT,filter_dupes=True, offset=, **kwds):", "body": "super().__init__(*args, address=(ip_address, port), **kwds)self.filter_dupes = filter_dupesself.offset = offset_range.DMXChannel(offset)self.msg = artnet_message.dmx_message()self.last_message = None", "docstring": ":param dict channel_map: maps DMX channels to positions in\n the color_list\n:param int offset: a DMX channel offset, positive, negative or zero", "id": "f2124:c0:m0"} {"signature": "def _render(self):", "body": "if self.set_device_brightness:level = else:level = self._brightness / gam, (r, g, b) = self.gamma.get, self.c_orderfor i in range(min(self.numLEDs, len(self._buf) / )):c = [int(level * x) for x in self._colors[i + self._pos]]self._buf[i * :(i + ) * ] = gam(c[r]), gam(c[g]), gam(c[b])", "docstring": "Typically called from :py:func:`_compute_packet` this applies\n brightness and gamma correction to the pixels controlled by this\n driver.", "id": "f2125:c0:m15"} {"signature": "@classmethoddef construct(cls, project, **desc):", "body": "return cls(maker=project.maker, **desc)", "docstring": "Construct a driver from a project and a description.", "id": "f2125:c0:m0"} {"signature": "def join(self, timeout=None):", "body": "", "docstring": "Called to join threads.", "id": "f2125:c0:m8"} {"signature": "def _send_packet(self):", "body": "", "docstring": "Send the packet to the driver.\n\n Eventually, this will run on an I/O thread.", "id": "f2125:c0:m12"} {"signature": "def cleanup(self):", "body": "", "docstring": "Called to shut this driver down, and stop all threads and processes.", "id": "f2125:c0:m7"} 
{"signature": "def bufByteCount(self):", "body": "return * self.numLEDs", "docstring": "Total number of bytes that the pixel buffer represents.\nMainly used for drivers such as :py:mod:`bibliopixel.drivers.serial`\nand :py:mod:`.network`", "id": "f2125:c0:m9"} {"signature": "def start(self):", "body": "", "docstring": "Called right before this driver will run. This is the place\nto do things like start threads, not in the constructor.", "id": "f2125:c0:m5"} {"signature": "def sync(self):", "body": "", "docstring": "The sync() method is called after the entire frame has been\nsent to the device to indicate that it may now be displayed.\n\nThis is particularly useful when there are multiple drivers comprising\none display which all need to display the next frame at exactly the same\ntime.", "id": "f2125:c0:m10"} {"signature": "def set_pixel_positions(self, pixel_positions):", "body": "pass", "docstring": "Internal Use Only\n\nPlaceholder callback for sending physical pixel layout data to the\n``SimPixel`` driver.", "id": "f2125:c0:m2"} {"signature": "def stop(self):", "body": "", "docstring": "Called to request any threads or resources to shut down.", "id": "f2125:c0:m6"} {"signature": "def get_all_items(obj):", "body": "if hasattr(obj, ''):items = []for key in obj:for value in obj.getlist(key):items.append((key, value))return itemselse:return obj.items()", "docstring": "dict.items() but with a separate row for each value in a MultiValueDict", "id": "f2250:m6"} {"signature": "def parse_json_path(path):", "body": "original_path = pathsteps = []failed = [JsonStep(type=\"\",key=original_path,last=True,failed=True,)]digit_re = re.compile(r'')key_re = re.compile(r'')parts = path.split(\"\")first_key = parts[]if parts[:]:path = \"\" + \"\".join(parts[:])else:path = \"\"steps.append(JsonStep(type=\"\",key=first_key,))if not path:steps[-].last = Truereturn stepswhile path:if path[:] == \"\":path = path[:]steps.append(JsonStep(type=\"\",key=,))continuedigit_match = digit_re.match(path)if digit_match:path = digit_re.sub(\"\", path)steps.append(JsonStep(type=\"\",key=int(digit_match.group()),))continuekey_match = key_re.match(path)if key_match:path = key_re.sub(\"\", path)steps.append(JsonStep(type=\"\",key=key_match.group(),))continuereturn failednext_step = Nonefor step in reversed(steps):if next_step:step.next_type = next_step.typeelse:step.last = Truenext_step = stepreturn steps", "docstring": "Parse a string as a JSON path\nAn implementation of \"steps to parse a JSON encoding path\"\nhttp://www.w3.org/TR/html-json-forms/#dfn-steps-to-parse-a-json-encoding-path", "id": "f2250:m1"} {"signature": "def clean_empty_string(obj):", "body": "if obj == '':return Noneif isinstance(obj, list):return [None if item == '' else itemfor item in obj]if isinstance(obj, dict):for key in obj:obj[key] = clean_empty_string(obj[key])return obj", "docstring": "Replace empty form values with None, since the is_html_input() check in\nField won't work after we convert to JSON.\n(FIXME: What about allow_blank=True?)", "id": "f2250:m5"} {"signature": "def clone(self):", "body": "new_object = copy.copy(self)if new_object.next:new_object.next = new_object.next.clone()return new_object", "docstring": "Self-cloning. 
All its next Pipe objects are cloned too.\n\n :returns: cloned object", "id": "f2258:c1:m4"} {"signature": "@staticmethoddef func(generator):", "body": "return Pipe(generator)", "docstring": "Wrap a generator function into a Pipe object.\n\n :param generator: The generator function to be wrapped.\n :type generator: generator\n :returns: Pipe object", "id": "f2258:c2:m0"} {"signature": "def __init__(self, func, *args, **kw):", "body": "self.__name__ = func.__name__self.__doc__ = func.__doc__self.func = funcself.next = Noneself.chained = Falseself.args = argsself.kw = kw", "docstring": "Constructor of Pipe. It takes its first argument as a generator function.\n args and kw are default arguments to be used if the Pipe object is\n cascaded directly. The default arguments are replaced by the arguments of\n the __call__ operator.\n\n :param self: self reference.\n :param func: The generator function to be wrapped.\n :param args: The default arguments to be used for the generator function.\n :param kw: The default keyword arguments to be used for the generator function.", "id": "f2258:c1:m0"} {"signature": "def unregister_all_types():", "body": "Pipe.pipe_item_types.clear()", "docstring": "Unregister all data types from the Pipe class.", "id": "f2258:m2"} {"signature": "def append(self, next):", "body": "next.chained = Trueif self.next:self.next.append(next)else:self.next = next", "docstring": "Append the next object to the pipe tail.\n\n :param next: The Pipe object to be appended to the tail.\n :type next: Pipe object.", "id": "f2258:c1:m5"} {"signature": "@staticmethoddef map(func):", "body": "def wrapper(prev, *argv, **kw):if prev is None:raise TypeError('')for i in prev:yield func(i, *argv, **kw)return Pipe(wrapper)", "docstring": "Wrap a map function into a Pipe object. A map function is a function with\n at least one argument. It is used to convert data. The first argument\n is the data to be converted. The return data from the map function will\n be sent to the next generator.\n\n :param func: The map function to be wrapped.\n :type func: function object\n :param args: The default arguments to be used for the map function.\n :param kw: The default keyword arguments to be used for the map function.\n :returns: Pipe object", "id": "f2258:c2:m1"} {"signature": "def unregister_type(item_type):", "body": "if item_type not in Pipe.pipe_item_types:returndel Pipe.pipe_item_types[item_type]", "docstring": "Unregister a data type from the Pipe class. Check Pipe.__or__ and Pipe.__ror__ for\n detail.\n\n :param item_type: The type of data object which is used in pipe cascading.", "id": "f2258:m1"} {"signature": "def __or__(self, next):", "body": "if not isinstance(next, Pipe):item_creator = get_item_creator(type(next))if item_creator is None:raise UnregisteredPipeType(type(next))next = item_creator(next)clone = self.clone()if not next.chained:clone.append(next)else:clone.append(next(*next.args, **next.kw))return clone", "docstring": "Set the operand on the right-hand side to be the next Pipe object. Type conversion\n will be applied automatically if next is not a Pipe object and its type\n is registered in Pipe.pipe_item_types.
Otherwise, UnregisteredPipeType\n will be raised.\n\n :param next: The next Pipe object to be cascaded.\n :type next: Pipe object or any object whose type is registered.\n\n :returns: The clone of self.", "id": "f2258:c1:m1"} {"signature": "@pipe.funcdef resplit(prev, pattern, *args, **kw):", "body": "maxsplit = if '' not in kw else kw.pop('')pattern_obj = re.compile(pattern, *args, **kw)for s in prev:yield pattern_obj.split(s, maxsplit=maxsplit)", "docstring": "The resplit pipe splits the previous pipe input by a regular expression.\n\n Use the 'maxsplit' keyword argument to limit the number of splits.\n\n :param prev: The previous iterator of pipe.\n :type prev: Pipe\n :param pattern: The pattern which is used to split the string.\n :type pattern: str|unicode", "id": "f2259:m15"} {"signature": "@pipe.funcdef readline(prev, filename=None, mode='', trim=str.rstrip, start=, end=sys.maxsize):", "body": "if prev is None:if filename is None:raise Exception('')elif is_str_type(filename):file_list = [filename, ]else:file_list = filenameelse:file_list = prevfor fn in file_list:if isinstance(fn, file_type):fd = fnelse:fd = open(fn, mode)try:if start <= and end == sys.maxsize:for line in fd:yield trim(line)else:for line_no, line in enumerate(fd, ):if line_no < start:continueyield trim(line)if line_no >= end:breakfinally:if fd != fn:fd.close()", "docstring": "This pipe gets filenames or file objects from the previous pipe and reads the\n content of each file. Then, it sends the content of the file line by line to the next pipe.\n\n The start and end parameters are used to limit the range of reading from the file.\n\n :param prev: The previous iterator of pipe.\n :type prev: Pipe\n :param filename: The files to be read. If None, use previous pipe input as filenames.\n :type filename: None|str|unicode|list|tuple\n :param mode: The mode to open the file. 
default is 'r'\n :type mode: str\n :param trim: The function to trim the line before sending it to the next pipe.\n :type trim: function object.\n :param start: if start is specified, only lines with a line number larger than or equal to start will be sent.\n :type start: integer\n :param end: The last line number to read.\n :type end: integer\n :returns: generator", "id": "f2259:m21"} {"signature": "@pipe.funcdef subn(prev, pattern, repl, *args, **kw):", "body": "count = if '' not in kw else kw.pop('')pattern_obj = re.compile(pattern, *args, **kw)for s in prev:yield pattern_obj.subn(repl, s, count=count)", "docstring": "The subn pipe is a wrapper of the re.subn method.\n\n :param prev: The previous iterator of pipe.\n :type prev: Pipe\n :param pattern: The pattern string.\n :type pattern: str|unicode\n :param repl: Check the repl argument in the re.sub method.\n :type repl: str|unicode|callable", "id": "f2259:m17"} {"signature": "def register_default_types():", "body": "register_type(type, pipe.map)register_type(types.FunctionType, pipe.map)register_type(types.MethodType, pipe.map)register_type(tuple, seq)register_type(list, seq)register_type(types.GeneratorType, seq)register_type(string_type, sh)register_type(unicode_type, sh)register_type(file_type, fileobj)if is_py3:register_type(range, seq)register_type(map, seq)", "docstring": "Register all default type-to-pipe converters.", "id": "f2259:m29"} {"signature": "@pipe.funcdef safe_substitute(prev, *args, **kw):", "body": "template_obj = string.Template(*args, **kw)for data in prev:yield template_obj.safe_substitute(data)", "docstring": "alias of string.Template.safe_substitute", "id": "f2259:m27"} {"signature": "@pipe.funcdef format(prev, format_string):", "body": "for i in prev:yield (format_string % i)", "docstring": "The pipe formats the data passed from the previous generator according to the\n given format_string argument.\n\n :param prev: The previous iterator of pipe.\n :type prev: Pipe\n :param format_string: The format string which is used to format the data from the\n previous iterator.\n :type format_string: str\n :returns: generator", "id": "f2259:m12"} {"signature": "def run(cmd):", "body": "return cmd.run()", "docstring": "Run a pipe object and return its last result.\n\n :param cmd: The Pipe object to be executed.\n :type cmd: Pipe\n :returns: The last result.\n\n .. seealso::\n :py:meth:`cmdlet.Pipe.run`", "id": "f2259:m0"} {"signature": "@pipe.funcdef stdout(prev, endl='', thru=False):", "body": "for i in prev:sys.stdout.write(str(i) + endl)if thru:yield i", "docstring": "This pipe reads data from the previous iterator and writes it to stdout.\n\n :param prev: The previous iterator of pipe.\n :type prev: Pipe\n :param endl: The end-of-line symbol for each output.\n :type endl: str\n :param thru: If true, data will be passed to the next generator. 
If false, data\n will be dropped.\n :type thru: bool\n :returns: generator", "id": "f2259:m19"} {"signature": "@pipe.funcdef grep(prev, pattern, *args, **kw):", "body": "inv = False if '' not in kw else kw.pop('')pattern_obj = re.compile(pattern, *args, **kw)for data in prev:if bool(inv) ^ bool(pattern_obj.match(data)):yield data", "docstring": "The pipe greps the data passed from the previous generator according to the\n given regular expression.\n\n :param prev: The previous iterator of pipe.\n :type prev: Pipe\n :param pattern: The pattern which is used to filter the data.\n :type pattern: str|unicode|re pattern object\n :param inv: If true, invert the match condition.\n :type inv: boolean\n :param kw:\n :type kw: dict\n :returns: generator", "id": "f2259:m13"} {"signature": "@pipe.funcdef pack(prev, n, rest=False, **kw):", "body": "if '' in kw:use_padding = Truepadding = kw['']else:use_padding = Falsepadding = Noneitems = []for i, data in enumerate(prev, ):items.append(data)if (i % n) == :yield itemsitems = []if len(items) != and rest:if use_padding:items.extend([padding, ] * (n - (i % n)))yield items", "docstring": "The pack pipe takes n elements from the previous generator and yields one\n list to the next.\n\n :param prev: The previous iterator of pipe.\n :type prev: Pipe\n :param rest: Set True to allow output of the rest part of the last elements.\n :type rest: boolean\n :param padding: Specify the padding element for the rest part of the last elements.\n :type padding: object\n :returns: generator\n\n :Example:\n >>> result([1,2,3,4,5,6,7] | pack(3))\n [[1, 2, 3], [4, 5, 6]]\n\n >>> result([1,2,3,4,5,6,7] | pack(3, rest=True))\n [[1, 2, 3], [4, 5, 6], [7,]]\n\n >>> result([1,2,3,4,5,6,7] | pack(3, padding=None))\n [[1, 2, 3], [4, 5, 6], [7, None, None]]", "id": "f2259:m11"} {"signature": "@pipe.funcdef fileobj(prev, file_handle, endl='', thru=False):", "body": "if prev is not None:for i in prev:file_handle.write(str(i)+endl)if thru:yield ielse:for data in file_handle:yield data", "docstring": "This pipe reads/writes data from/to the file object which is specified by\n file_handle.\n\n :param prev: The previous iterator of pipe.\n :type prev: Pipe\n :param file_handle: The file object to read or write.\n :type file_handle: file object\n :param endl: The end-of-line symbol for each output.\n :type endl: str\n :param thru: If true, data will be passed to the next generator. If false, data\n will be dropped.\n :type thru: bool\n :returns: generator", "id": "f2259:m22"} {"signature": "@pipe.funcdef attr(prev, attr_name):", "body": "for obj in prev:if hasattr(obj, attr_name):yield getattr(obj, attr_name)", "docstring": "The attr pipe can extract the attribute value of an object.\n\n :param prev: The previous iterator of pipe.\n :type prev: Pipe\n :param attr_name: The name of the attribute\n :type attr_name: str\n :returns: generator", "id": "f2259:m4"} {"signature": "@pipe.funcdef attrs(prev, attr_names):", "body": "for obj in prev:attr_values = []for name in attr_names:if hasattr(obj, name):attr_values.append(getattr(obj, name))yield attr_values", "docstring": "The attrs pipe can extract attribute values of an object.\n\n If attr_names is a list and one of its items is not a valid attribute of\n the object from prev, 
it will be excluded from the yielded list.\n\n :param prev: The previous iterator of pipe.\n :type prev: Pipe\n :param attr_names: The list of attribute names\n :type attr_names: list of str\n :returns: generator", "id": "f2259:m5"} {"signature": "def _get_env(self, env_var):", "body": "value = os.environ.get(env_var)if not value:raise ValueError('' % env_var)return value", "docstring": "Helper to read an environment variable", "id": "f2265:c2:m1"} {"signature": "def output_password(self, password):", "body": "print(password, file=sys.stdout)", "docstring": "Output the password to the user.\n\n This mostly exists to ease the testing process.", "id": "f2277:c0:m5"} {"signature": "@classmethoddef pass_from_pipe(cls):", "body": "is_pipe = not sys.stdin.isatty()return is_pipe and cls.strip_last_newline(sys.stdin.read())", "docstring": "Return password from pipe if not on TTY, else False.", "id": "f2277:c0:m3"} {"signature": "def once(func):", "body": "def wrapper(*args, **kwargs):if not hasattr(func, ''):func.always_returns = func(*args, **kwargs)return func.always_returnsreturn functools.wraps(func)(wrapper)", "docstring": "Decorate func so it's only ever called the first time.\n\nThis decorator can ensure that an expensive or non-idempotent function\nwill not be expensive on subsequent calls and is idempotent.\n\n>>> func = once(lambda a: a+3)\n>>> func(3)\n6\n>>> func(9)\n6\n>>> func('12')\n6", "id": "f2280:m0"} {"signature": "def _check_old_config_root():", "body": "globals()[''] = lambda: Noneconfig_file_new = os.path.join(_config_root_Linux(), '')config_file_old = os.path.join(_data_root_Linux(), '')if os.path.isfile(config_file_old) and not os.path.isfile(config_file_new):msg = (\"\"\"\"\"\")raise RuntimeError(msg.format(**locals()))", "docstring": "Prior versions of keyring would search for the config\nin XDG_DATA_HOME, but should probably have been\nsearching for config in XDG_CONFIG_HOME. 
If the\nconfig exists in the former but not in the latter,\nraise a RuntimeError to force the change.", "id": "f2281:m4"} {"signature": "def load_env():", "body": "try:return load_keyring(os.environ[''])except KeyError:pass", "docstring": "Load a keyring configured in the environment variable.", "id": "f2284:m11"} {"signature": "def set_keyring(keyring):", "body": "global _keyring_backendif not isinstance(keyring, backend.KeyringBackend):raise TypeError(\"\")_keyring_backend = keyring", "docstring": "Set current keyring backend.", "id": "f2284:m0"} {"signature": "def init_backend(limit=None):", "body": "backend._limit = limitkeyrings = filter(limit, backend.get_all_keyring())set_keyring(load_env()or load_config()or max(keyrings, default=fail.Keyring(), key=backend.by_priority))", "docstring": "Load a keyring specified in the config file or infer the best available.\n\nLimit, if supplied, should be a callable taking a backend and returning\nTrue if that backend should be included for consideration.", "id": "f2284:m8"} {"signature": "def get_password(service_name, username):", "body": "return _keyring_backend.get_password(service_name, username)", "docstring": "Get password from the specified service.", "id": "f2284:m3"} {"signature": "def delete_password(self, service, username):", "body": "if not self.connected(service):raise PasswordDeleteError(\"\")if not self.iface.hasEntry(self.handle, service, username, self.appid):raise PasswordDeleteError(\"\")self.iface.removeEntry(self.handle, service, username, self.appid)", "docstring": "Delete the password for the username of the service.", "id": "f2285:c0:m6"} {"signature": "def get_password(self, service, username):", "body": "if not self.connected(service):raise KeyringLocked(\"\")if not self.iface.hasEntry(self.handle, service, username, self.appid):return Nonepassword = self.iface.readPassword(self.handle, service, username, self.appid)return str(password)", "docstring": "Get password of the username for the service", "id": "f2285:c0:m4"} {"signature": "@staticmethoddef unpack(word):", "body": "if not isinstance(word, str):return wordval, = struct.unpack('', word.encode(''))return val", "docstring": "r\"\"\"\n >>> PackedAttributes.unpack(0)\n 0\n >>> PackedAttributes.unpack('\\x00\\x00\\x00\\x01')\n 1\n >>> PackedAttributes.unpack('abcd')\n 1633837924", "id": "f2287:c5:m1"} {"signature": "def set_password(self, service, username, password):", "body": "collection = self.get_preferred_collection()attributes = {\"\": self.appid,\"\": service,\"\": username}label = \"\".format(username, service)collection.create_item(label, attributes, password, replace=True)", "docstring": "Set password for the username of the service", "id": "f2288:c0:m3"} {"signature": "def get_preferred_collection(self):", "body": "bus = secretstorage.dbus_init()try:if hasattr(self, ''):collection = secretstorage.Collection(bus, self.preferred_collection)else:collection = secretstorage.get_default_collection(bus)except exceptions.SecretStorageException as e:raise InitError(\"\" % e)if collection.is_locked():collection.unlock()if collection.is_locked(): raise KeyringLocked(\"\")return collection", "docstring": "If self.preferred_collection contains a D-Bus path,\n the collection at that address is returned. 
Otherwise,\n the default collection is returned.", "id": "f2288:c0:m1"} {"signature": "def get_password(self, service, username):", "body": "collection = self.get_preferred_collection()items = collection.search_items({\"\": username, \"\": service})for item in items:if hasattr(item, ''):item.unlock()if item.is_locked(): raise KeyringLocked('')return item.get_secret().decode('')", "docstring": "Get password of the username for the service", "id": "f2288:c0:m2"} {"signature": "def delete_password(self, service, username):", "body": "collection = self.get_preferred_collection()items = collection.search_items({\"\": username, \"\": service})for item in items:return item.delete()raise PasswordDeleteError(\"\")", "docstring": "Delete the stored password (only the first one)", "id": "f2288:c0:m4"} {"signature": "@properties.ClassProperty@classmethoddef priority(cls):", "body": "if missing_deps:raise RuntimeError(\"\")return ", "docstring": "If available, the preferred backend on Windows.", "id": "f2289:c0:m0"} {"signature": "@properties.ClassProperty@classmethoddef backends(cls):", "body": "allowed = (keyringfor keyring in filter(backend._limit, backend.get_all_keyring())if not isinstance(keyring, ChainerBackend)and keyring.priority > )return sorted(allowed, key=backend.by_priority, reverse=True)", "docstring": "Discover all keyrings for chaining.", "id": "f2291:c0:m1"} {"signature": "@properties.ClassProperty@classmethoddef priority(cls):", "body": "return * (len(cls.backends) > )", "docstring": "High-priority if there are backends to chain, otherwise 0.", "id": "f2291:c0:m0"} {"signature": "def get_credential(self, service, username):", "body": "if username is not None:password = self.get_password(service, username)if password is not None:return credentials.SimpleCredential(username,password,)return None", "docstring": "Gets the username and password for the service.\n Returns a Credential instance.\n\n The *username* argument is optional and may be omitted by\n the caller or ignored by the backend. 
Callers must use the\n returned username.", "id": "f2293:c1:m8"} {"signature": "@abc.abstractmethoddef encrypt(self, value):", "body": "pass", "docstring": "Encrypt the value.", "id": "f2293:c2:m0"} {"signature": "@abc.abstractmethoddef get_password(self, service, username):", "body": "return None", "docstring": "Get password of the username for the service", "id": "f2293:c1:m5"} {"signature": "@abc.abstractmethoddef decrypt(self, value):", "body": "pass", "docstring": "Decrypt the value.", "id": "f2293:c2:m1"} {"signature": "def get_queryset(self):", "body": "return Question.objects.filter(pub_date__lte=timezone.now())", "docstring": "Excludes any questions that aren't published yet.", "id": "f2297:c1:m0"} {"signature": "def get_queryset(self):", "body": "return Question.objects.filter(pub_date__lte=timezone.now())", "docstring": "Excludes any questions that aren't published yet.", "id": "f2297:c2:m0"} {"signature": "def create_question(question_text, days):", "body": "time = timezone.now() + datetime.timedelta(days=days)return Question.objects.create(question_text=question_text, pub_date=time)", "docstring": "Creates a question with the given `question_text` published the given\nnumber of `days` offset to now (negative for questions published\nin the past, positive for questions that have yet to be published).", "id": "f2300:m0"} {"signature": "def get_field_value(self, field, value_verbose=True):", "body": "if not value_verbose:\"\"\"\"\"\"value = field._get_val_from_obj(self)else:if isinstance(field, ForeignKey):value = getattr(self, field.name)else:try:value = self._get_FIELD_display(field)except :value = field._get_val_from_obj(self)if(value == True or value == False or isinstance(value, (int, float))):return valuereturn unicode(value)", "docstring": "\u8fd4\u56de\u663e\u793a\u7684\u503c\uff0c\u800c\u4e0d\u662f\u5355\u7eaf\u7684\u6570\u636e\u5e93\u4e2d\u7684\u503c\nfield \u662fmodel\u4e2d\u7684field type\nvalue_verbose \u4e3aTrue\uff0c\u8fd4\u56de\u6570\u636e\u7684\u663e\u793a\u6570\u636e\uff0c\u4f1a\u8f6c\u6362\u4e3achoice\u7684\u5185\u5bb9\uff0c\n\u5982\u679cvalue_verbose \u4e3aFalse\uff0c \u8fd4\u56de\u6570\u636e\u7684\u5b9e\u9645\u503c", "id": "f2309:c0:m1"} {"signature": "def get_querydict(self):", "body": "if self.method:querydict = getattr(self.request, self.method.upper())else:querydict = getattr(self.request, ''.upper())query_dict = dict(list(querydict.items()))return query_dict", "docstring": "\u8fd9\u4e2a\u51fd\u6570\u8ddf self.method\u6709\u5173\nself.method \u6682\u65f6\u6ca1\u7528, querydict\u90fd\u662fPOST\u7684", "id": "f2310:c1:m1"} {"signature": "def get_limit_queryset(self):", "body": "queryset = self.get_queryset()limit_queryset = queryset.all()[self.get_slice_start() :self.get_slice_end()] return limit_queryset", "docstring": "\u8fd4\u56de\u5206\u9875\u4e4b\u540e\u7684queryset", "id": "f2310:c1:m6"} {"signature": "def get_slice_start(self):", "body": "value = Noneif self.easyui_page:value = (self.easyui_page -) * self.easyui_rowsreturn value", "docstring": "\u8fd4\u56dequeryset\u5207\u7247\u7684\u5934", "id": "f2310:c1:m3"} {"signature": "def get_queryset(self):", "body": "filter_dict = self.get_filter_dict()queryset = super(EasyUIListMixin, self).get_queryset()queryset = queryset.filter(**filter_dict)if self.easyui_order:queryset = queryset.order_by(self.easyui_order)return queryset", "docstring": "queryset", "id": "f2310:c1:m5"} {"signature": "def get_template_names(self):", "body": "names = super(EasyUIDeleteView, 
self).get_template_names()names.append('')return names", "docstring": "The default template for the datagrid.", "id": "f2311:c3:m0"} {"signature": "def success(request):", "body": "return HttpResponse('')", "docstring": "Returns this page after a successful create, delete, or update operation.", "id": "f2313:m0"} {"signature": "def get_url(request):", "body": "menu_id = request.GET.get('')m_object = Menu.objects.get(pk=menu_id)namespace = m_object.namespaceviewname = m_object.viewnameurl_string = '' %(namespace, viewname)url = reverse(url_string)return HttpResponse(url)", "docstring": "Gets the corresponding URL from a menu_id.\neg. /easyui/MenuListView/", "id": "f2313:m1"} {"signature": "def register_views(app_name, view_filename, urlpatterns=None):", "body": "app_module = __import__(app_name)view_module = getattr(app_module, view_filename)views = dir(view_module)for view_name in views:if view_name.endswith(''):view = getattr(view_module, view_name)if isinstance(view, object):if urlpatterns:urlpatterns += patterns('',url(r'' % view_name, view.as_view(), name=view_name),)else:urlpatterns = patterns('',url(r'' % view_name, view.as_view(), name=view_name),)else:passreturn urlpatterns", "docstring": "app_name: the name of the app\nview_filename: the file where the views live\nurlpatterns: the urlpatterns that already exist in the URLconf\n\nreturn urlpatterns\n\nOnly class-based views whose names end with 'View' are imported.", "id": "f2316:m1"} {"signature": "def sentimentDF(symbol, type='', date=None, token='', version=''):", "body": "ret = sentiment(symbol, type, date, token, version)if type == '':ret = [ret]df = pd.DataFrame(ret)_toDatetime(df)return df", "docstring": "This endpoint provides social sentiment data from StockTwits. Data can be viewed as a daily value, or by minute for a given date.\n\n https://iexcloud.io/docs/api/#social-sentiment\n Continuous\n\n Args:\n symbol (string); Ticker to request\n type (string); 'daily' or 'minute'\n date (string); date in YYYYMMDD or datetime\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2329:m3"} {"signature": "def cryptoDF(token='', version=''):", "body": "df = pd.DataFrame(crypto(token, version))_toDatetime(df)_reindex(df, '')return df", "docstring": "This will return an array of quotes for all Cryptocurrencies supported by the IEX API. Each element is a standard quote object with four additional keys.\n\n https://iexcloud.io/docs/api/#crypto\n\n Args:\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2329:m1"} {"signature": "def crypto(token='', version=''):", "body": "return _getJson('', token, version)", "docstring": "This will return an array of quotes for all Cryptocurrencies supported by the IEX API. 
Each element is a standard quote object with four additional keys.\n\n https://iexcloud.io/docs/api/#crypto\n\n Args:\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2329:m0"} {"signature": "def largestTrades(symbol, token='', version=''):", "body": "_raiseIfNotStr(symbol)return _getJson('' + symbol + '', token, version)", "docstring": "This returns 15 minute delayed, last sale eligible trades.\n\n https://iexcloud.io/docs/api/#largest-trades\n 9:30-4pm ET M-F during regular market hours\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2330:m48"} {"signature": "def _statsToDF(s):", "body": "if s:df = pd.io.json.json_normalize(s)_toDatetime(df)_reindex(df, '')else:df = pd.DataFrame()return df", "docstring": "internal", "id": "f2330:m46"} {"signature": "def batchDF(symbols, fields=None, range_='', last=, token='', version=''):", "body": "x = batch(symbols, fields, range_, last, token, version)ret = {}if isinstance(symbols, str):for field in x.keys():ret[field] = _MAPPING[field](x[field])else:for symbol in x.keys():for field in x[symbol].keys():if field not in ret:ret[field] = pd.DataFrame()dat = x[symbol][field]dat = _MAPPING[field](dat)dat[''] = symbolret[field] = pd.concat([ret[field], dat], sort=True)return ret", "docstring": "Batch several data requests into one invocation\n\n https://iexcloud.io/docs/api/#batch-requests\n\n\n Args:\n symbols (list); List of tickers to request\n fields (list); List of fields to request\n range_ (string); Date range for chart\n last (int);\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: results in json", "id": "f2330:m3"} {"signature": "def shortInterestDF(symbol, date=None, token='', version=''):", "body": "df = pd.DataFrame(shortInterest(symbol, date, token, version))_toDatetime(df)return df", "docstring": "The consolidated market short interest positions in all IEX-listed securities are included in the IEX Short Interest Report.\n\n The report data will be published daily at 4:00pm ET.\n\n https://iexcloud.io/docs/api/#listed-short-interest-list-in-dev\n\n Args:\n symbol (string); Ticker to request\n date (datetime); Effective Datetime\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2330:m91"} {"signature": "def logoNotebook(symbol, token='', version=''):", "body": "_raiseIfNotStr(symbol)url = logo(symbol, token, version)['']return ImageI(url=url)", "docstring": "This is a helper function, but the google APIs url is standardized.\n\n https://iexcloud.io/docs/api/#logo\n 8am UTC daily\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n image: result", "id": "f2330:m54"} {"signature": "def marketVolumeDF(token='', version=''):", "body": "return pd.DataFrame(marketVolume())", "docstring": "This endpoint returns real time traded volume on U.S. markets.\n\n https://iexcloud.io/docs/api/#market-volume-u-s\n 7:45am-5:15pm ET Mon-Fri\n\n Args:\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2330:m56"} {"signature": "def sectorPerformance(token='', version=''):", "body": "return _getJson('', token, version)", "docstring": "This returns an array of each sector and performance for the current trading day. 
Performance is based on each sector ETF.\n\n https://iexcloud.io/docs/api/#sector-performance\n 8am-5pm ET Mon-Fri\n\n Args:\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2330:m81"} {"signature": "def priceTargetDF(symbol, token='', version=''):", "body": "df = pd.io.json.json_normalize(priceTarget(symbol, token, version))_toDatetime(df)return df", "docstring": "Provides the latest avg, high, and low analyst price target for a symbol.\n\n https://iexcloud.io/docs/api/#price-target\n Updates at 10am, 11am, 12pm UTC every day\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2330:m76"} {"signature": "def volumeByVenueDF(symbol, token='', version=''):", "body": "df = pd.DataFrame(volumeByVenue(symbol, token, version))_toDatetime(df)_reindex(df, '')return df", "docstring": "This returns 15 minute delayed and 30 day average consolidated volume percentage of a stock, by market.\n This call will always return 13 values, and will be sorted in ascending order by current day trading volume percentage.\n\n https://iexcloud.io/docs/api/#volume-by-venue\n Updated during regular market hours 9:30am-4pm ET\n\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2330:m87"} {"signature": "def _splitsToDF(s):", "body": "df = pd.DataFrame(s)_toDatetime(df)_reindex(df, '')return df", "docstring": "internal", "id": "f2330:m84"} {"signature": "def logoPNG(symbol, token='', version=''):", "body": "_raiseIfNotStr(symbol)response = requests.get(logo(symbol, token, version)[''])return ImageP.open(BytesIO(response.content))", "docstring": "This is a helper function, but the google APIs url is standardized.\n\n https://iexcloud.io/docs/api/#logo\n 8am UTC daily\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n image: result as png", "id": "f2330:m53"} {"signature": "def batch(symbols, fields=None, range_='', last=, token='', version=''):", "body": "fields = fields or _BATCH_TYPES[:] if not isinstance(symbols, [].__class__):if not isinstance(symbols, str):raise PyEXception('')if isinstance(fields, str):fields = [fields]if range_ not in _TIMEFRAME_CHART:raise PyEXception('' % str(_TIMEFRAME_CHART))if isinstance(symbols, str):route = ''.format(symbols, ''.join(fields), range_, last)return _getJson(route, token, version)if len(symbols) > :raise PyEXception('')route = ''.format(''.join(symbols), ''.join(fields), range_, last)return _getJson(route, token, version)", "docstring": "Batch several data requests into one invocation\n\n https://iexcloud.io/docs/api/#batch-requests\n\n\n Args:\n symbols (list); List of tickers to request\n fields (list); List of fields to request\n range_ (string); Date range for chart\n last (int);\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: results in json", "id": "f2330:m2"} {"signature": "def threshold(date=None, token='', version=''):", "body": "if date:date = _strOrDate(date)return _getJson('' + date, token, version)return _getJson('', token, version)", "docstring": "The following are IEX-listed securities that have an aggregate fail to deliver position for five consecutive settlement days at a registered clearing agency, totaling 10,000 shares or more and equal to at least 0.5% of the issuer\u2019s total shares outstanding 
(i.e., \u201cthreshold securities\u201d).\n The report data will be published to the IEX website daily at 8:30 p.m. ET with data for that trading day.\n\n https://iexcloud.io/docs/api/#listed-regulation-sho-threshold-securities-list-in-dev\n\n Args:\n date (datetime); Effective Datetime\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2330:m88"} {"signature": "def book(symbol, token='', version=''):", "body": "_raiseIfNotStr(symbol)return _getJson('' + symbol + '', token, version)", "docstring": "Book data\n\n https://iextrading.com/developer/docs/#book\n realtime during Investors Exchange market hours\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2330:m6"} {"signature": "def logo(symbol, token='', version=''):", "body": "_raiseIfNotStr(symbol)return _getJson('' + symbol + '', token, version)", "docstring": "This is a helper function, but the google APIs url is standardized.\n\n https://iexcloud.io/docs/api/#logo\n 8am UTC daily\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2330:m52"} {"signature": "def balanceSheet(symbol, token='', version=''):", "body": "_raiseIfNotStr(symbol)return _getJson('' + symbol + '', token, version)", "docstring": "Pulls balance sheet data. Available quarterly (4 quarters) and annually (4 years)\n\n https://iexcloud.io/docs/api/#balance-sheet\n Updates at 8am, 9am UTC daily\n\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2330:m0"} {"signature": "def _peersToDF(p):", "body": "df = pd.DataFrame(p, columns=[''])_toDatetime(df)_reindex(df, '')df[''] = df.indexreturn df", "docstring": "internal", "id": "f2330:m67"} {"signature": "def _bookToDF(b):", "body": "quote = b.get('', [])asks = b.get('', [])bids = b.get('', [])trades = b.get('', [])df1 = pd.io.json.json_normalize(quote)df1[''] = ''df2 = pd.io.json.json_normalize(asks)df2[''] = quote['']df2[''] = ''df3 = pd.io.json.json_normalize(bids)df3[''] = quote['']df3[''] = ''df4 = pd.io.json.json_normalize(trades)df4[''] = quote['']df3[''] = ''df = pd.concat([df1, df2, df3, df4], sort=True)_toDatetime(df)return df", "docstring": "internal", "id": "f2330:m7"} {"signature": "def marketNews(count=, token='', version=''):", "body": "return _getJson('' + str(count), token, version)", "docstring": "News about market\n\n https://iexcloud.io/docs/api/#news\n Continuous\n\n Args:\n count (int): limit number of results\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2330:m60"} {"signature": "def news(symbol, count=, token='', version=''):", "body": "_raiseIfNotStr(symbol)return _getJson('' + symbol + '' + str(count), token, version)", "docstring": "News about company\n\n https://iexcloud.io/docs/api/#news\n Continuous\n\n Args:\n symbol (string); Ticker to request\n count (int): limit number of results\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2330:m57"} {"signature": "def shortInterest(symbol, date=None, token='', version=''):", "body": "_raiseIfNotStr(symbol)if date:date = _strOrDate(date)return _getJson('' + symbol + '' + date, token, version)return _getJson('' + symbol + '', token, version)", "docstring": "The consolidated market short interest positions in all 
IEX-listed securities are included in the IEX Short Interest Report.\n\n The report data will be published daily at 4:00pm ET.\n\n https://iexcloud.io/docs/api/#listed-short-interest-list-in-dev\n\n Args:\n symbol (string); Ticker to request\n date (datetime); Effective Datetime\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2330:m90"} {"signature": "def chart(symbol, timeframe='', date=None, token='', version=''):", "body": "_raiseIfNotStr(symbol)if timeframe is not None and timeframe != '':if timeframe not in _TIMEFRAME_CHART:raise PyEXception('' % str(_TIMEFRAME_CHART))return _getJson('' + symbol + '' + '' + timeframe, token, version)if date:date = _strOrDate(date)return _getJson('' + symbol + '' + '' + date, token, version)return _getJson('' + symbol + '', token, version)", "docstring": "Historical price/volume data, daily and intraday\n\n https://iexcloud.io/docs/api/#historical-prices\n Data Schedule\n 1d: -9:30-4pm ET Mon-Fri on regular market trading days\n -9:30-1pm ET on early close trading days\n All others:\n -Prior trading day available after 4am ET Tue-Sat\n\n Args:\n symbol (string); Ticker to request\n timeframe (string); Timeframe to request e.g. 1m\n date (datetime): date, if requesting intraday\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2330:m11"} {"signature": "def thresholdDF(date=None, token='', version=''):", "body": "df = pd.DataFrame(threshold(date, token, version))_toDatetime(df)return df", "docstring": "The following are IEX-listed securities that have an aggregate fail to deliver position for five consecutive settlement days at a registered clearing agency, totaling 10,000 shares or more and equal to at least 0.5% of the issuer\u2019s total shares outstanding (i.e., \u201cthreshold securities\u201d).\n The report data will be published to the IEX website daily at 8:30 p.m. ET with data for that trading day.\n\n https://iexcloud.io/docs/api/#listed-regulation-sho-threshold-securities-list-in-dev\n\n Args:\n date (datetime); Effective Datetime\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2330:m89"} {"signature": "def chartDF(symbol, timeframe='', date=None, token='', version=''):", "body": "c = chart(symbol, timeframe, date, token, version)df = pd.DataFrame(c)_toDatetime(df)if timeframe is not None and timeframe != '':_reindex(df, '')else:if not df.empty:df.set_index(['', ''], inplace=True)else:return pd.DataFrame()return df", "docstring": "Historical price/volume data, daily and intraday\n\n https://iexcloud.io/docs/api/#historical-prices\n Data Schedule\n 1d: -9:30-4pm ET Mon-Fri on regular market trading days\n -9:30-1pm ET on early close trading days\n All others:\n -Prior trading day available after 4am ET Tue-Sat\n\n Args:\n symbol (string); Ticker to request\n timeframe (string); Timeframe to request e.g. 
1m\n date (datetime): date, if requesting intraday\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2330:m13"} {"signature": "def yesterdayDF(symbol, token='', version=''):", "body": "y = yesterday(symbol, token, version)if y:df = pd.io.json.json_normalize(y)_toDatetime(df)_reindex(df, '')else:df = pd.DataFrame()return df", "docstring": "This returns previous day adjusted price data for one or more stocks\n\n https://iexcloud.io/docs/api/#previous-day-prices\n Available after 4am ET Tue-Sat\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2330:m70"} {"signature": "def spreadDF(symbol, token='', version=''):", "body": "df = pd.DataFrame(spread(symbol, token, version))_toDatetime(df)_reindex(df, '')return df", "docstring": "This returns an array of effective spread, eligible volume, and price improvement of a stock, by market.\n Unlike volume-by-venue, this will only return a venue if effective spread is not \u2018N/A\u2019. Values are sorted in descending order by effectiveSpread.\n Lower effectiveSpread and higher priceImprovement values are generally considered optimal.\n\n Effective spread is designed to measure marketable orders executed in relation to the market center\u2019s\n quoted spread and takes into account hidden and midpoint liquidity available at each market center.\n Effective Spread is calculated by using eligible trade prices recorded to the consolidated tape and\n comparing those trade prices to the National Best Bid and Offer (\u201cNBBO\u201d) at the time of the execution.\n\n View the data disclaimer at the bottom of the stocks app for more information about how these values are calculated.\n\n\n https://iexcloud.io/docs/api/#earnings-today\n 8am ET M-F\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2330:m32"} {"signature": "def marketOhlcDF(token='', version=''):", "body": "x = marketOhlc(token, version)data = []for key in x:data.append(x[key])data[-][''] = keydf = pd.io.json.json_normalize(data)_toDatetime(df)_reindex(df, '')return df", "docstring": "Returns the official open and close for whole market.\n\n https://iexcloud.io/docs/api/#news\n 9:30am-5pm ET Mon-Fri\n\n Args:\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2330:m65"} {"signature": "def ipoUpcoming(token='', version=''):", "body": "return _getJson('', token, version)", "docstring": "This returns a list of upcoming or today IPOs scheduled for the current and next month. The response is split into two structures:\n rawData and viewData. rawData represents all available data for an IPO. 
viewData represents data structured for display to a user.\n\n https://iexcloud.io/docs/api/#ipo-calendar\n 10am, 10:30am UTC daily\n\n Args:\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2330:m43"} {"signature": "def _earningsToDF(e):", "body": "if e:df = pd.io.json.json_normalize(e, '', '')_toDatetime(df)_reindex(df, '')else:df = pd.DataFrame()return df", "docstring": "internal", "id": "f2330:m27"} {"signature": "def splits(symbol, timeframe='', token='', version=''):", "body": "_raiseIfNotStr(symbol)if timeframe not in _TIMEFRAME_DIVSPLIT:raise PyEXception('' % str(_TIMEFRAME_DIVSPLIT))return _getJson('' + symbol + '' + timeframe, token, version)", "docstring": "Stock split history\n\n https://iexcloud.io/docs/api/#splits\n Updated at 9am UTC every day\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2330:m83"} {"signature": "def financials(symbol, token='', version=''):", "body": "_raiseIfNotStr(symbol)return _getJson('' + symbol + '', token, version)", "docstring": "Pulls income statement, balance sheet, and cash flow data from the four most recent reported quarters.\n\n https://iexcloud.io/docs/api/#financials\n Updates at 8am, 9am UTC daily\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2330:m36"} {"signature": "def companyDF(symbol, token='', version=''):", "body": "c = company(symbol, token, version)df = _companyToDF(c)return df", "docstring": "Company reference data\n\n https://iexcloud.io/docs/api/#company\n Updates at 4am and 5am UTC every day\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2330:m20"} {"signature": "def marketNewsDF(count=, token='', version=''):", "body": "df = pd.DataFrame(marketNews(count, token, version))_toDatetime(df)_reindex(df, '')return df", "docstring": "News about market\n\n https://iexcloud.io/docs/api/#news\n Continuous\n\n Args:\n count (int): limit number of results\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2330:m61"} {"signature": "def earnings(symbol, token='', version=''):", "body": "_raiseIfNotStr(symbol)return _getJson('' + symbol + '', token, version)", "docstring": "Earnings data for a given company including the actual EPS, consensus, and fiscal period. Earnings are available quarterly (last 4 quarters) and annually (last 4 years).\n\n https://iexcloud.io/docs/api/#earnings\n Updates at 9am, 11am, 12pm UTC every day\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2330:m26"} {"signature": "def incomeStatement(symbol, token='', version=''):", "body": "_raiseIfNotStr(symbol)return _getJson('' + symbol + '', token, version)", "docstring": "Pulls income statement data. 
Available quarterly (4 quarters) or annually (4 years).\n\n https://iexcloud.io/docs/api/#income-statement\n Updates at 8am, 9am UTC daily\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2330:m39"} {"signature": "def ipoToday(token='', version=''):", "body": "return _getJson('', token, version)", "docstring": "This returns a list of upcoming or today IPOs scheduled for the current and next month. The response is split into two structures:\n rawData and viewData. rawData represents all available data for an IPO. viewData represents data structured for display to a user.\n\n https://iexcloud.io/docs/api/#ipo-calendar\n 10am, 10:30am UTC daily\n\n Args:\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2330:m41"} {"signature": "def priceTarget(symbol, token='', version=''):", "body": "_raiseIfNotStr(symbol)return _getJson('' + symbol + '', token, version)", "docstring": "Provides the latest avg, high, and low analyst price target for a symbol.\n\n https://iexcloud.io/docs/api/#price-target\n Updates at 10am, 11am, 12pm UTC every day\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2330:m75"} {"signature": "def marketYesterdayDF(token='', version=''):", "body": "x = marketYesterday(token, version)data = []for key in x:data.append(x[key])data[-][''] = keydf = pd.DataFrame(data)_toDatetime(df)_reindex(df, '')return df", "docstring": "This returns previous day adjusted price data for whole market\n\n https://iexcloud.io/docs/api/#previous-day-prices\n Available after 4am ET Tue-Sat\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2330:m72"} {"signature": "def delayedQuoteDF(symbol, token='', version=''):", "body": "df = pd.io.json.json_normalize(delayedQuote(symbol, token, version))_toDatetime(df)_reindex(df, '')return df", "docstring": "This returns the 15 minute delayed market quote.\n\n https://iexcloud.io/docs/api/#delayed-quote\n 15min delayed\n 4:30am - 8pm ET M-F when market is open\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2330:m22"} {"signature": "def _dividendsToDF(d):", "body": "df = pd.DataFrame(d)_toDatetime(df)_reindex(df, '')return df", "docstring": "internal", "id": "f2330:m24"} {"signature": "def volumeByVenue(symbol, token='', version=''):", "body": "_raiseIfNotStr(symbol)return _getJson('' + symbol + '', token, version)", "docstring": "This returns 15 minute delayed and 30 day average consolidated volume percentage of a stock, by market.\n This call will always return 13 values, and will be sorted in ascending order by current day trading volume percentage.\n\n https://iexcloud.io/docs/api/#volume-by-venue\n Updated during regular market hours 9:30am-4pm ET\n\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2330:m86"} {"signature": "def marketShortInterestDF(date=None, token='', version=''):", "body": "df = pd.DataFrame(marketShortInterest(date, token, version))_toDatetime(df)return df", "docstring": "The consolidated market short interest positions in all IEX-listed securities are included in the IEX Short Interest 
Report.\n\n The report data will be published daily at 4:00pm ET.\n\n https://iexcloud.io/docs/api/#listed-short-interest-list-in-dev\n\n Args:\n date (datetime); Effective Datetime\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2330:m93"} {"signature": "def marketOhlc(token='', version=''):", "body": "return _getJson('', token, version)", "docstring": "Returns the official open and close for whole market.\n\n https://iexcloud.io/docs/api/#news\n 9:30am-5pm ET Mon-Fri\n\n Args:\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2330:m64"} {"signature": "def delayedQuote(symbol, token='', version=''):", "body": "_raiseIfNotStr(symbol)return _getJson('' + symbol + '', token, version)", "docstring": "This returns the 15 minute delayed market quote.\n\n https://iexcloud.io/docs/api/#delayed-quote\n 15min delayed\n 4:30am - 8pm ET M-F when market is open\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2330:m21"} {"signature": "def peersDF(symbol, token='', version=''):", "body": "p = peers(symbol, token, version)df = _peersToDF(p)return df", "docstring": "Peers of ticker\n\n https://iexcloud.io/docs/api/#peers\n 8am UTC daily\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2330:m68"} {"signature": "def _estimatesToDF(f):", "body": "if f:df = pd.io.json.json_normalize(f, '', '')_toDatetime(df)_reindex(df, '')else:df = pd.DataFrame()return df", "docstring": "internal", "id": "f2330:m34"} {"signature": "def sectorPerformanceDF(token='', version=''):", "body": "df = pd.DataFrame(sectorPerformance(token, version))_toDatetime(df)_reindex(df, '')return df", "docstring": "This returns an array of each sector and performance for the current trading day. 
Performance is based on each sector ETF.\n\n https://iexcloud.io/docs/api/#sector-performance\n 8am-5pm ET Mon-Fri\n\n Args:\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2330:m82"} {"signature": "def quoteDF(symbol, token='', version=''):", "body": "q = quote(symbol, token, version)if q:df = pd.io.json.json_normalize(q)_toDatetime(df)_reindex(df, '')else:df = pd.DataFrame()return df", "docstring": "Get quote for ticker\n\n https://iexcloud.io/docs/api/#quote\n 4:30am-8pm ET Mon-Fri\n\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2330:m78"} {"signature": "def listDF(option='', token='', version=''):", "body": "df = pd.DataFrame(list(option, token, version))_toDatetime(df)_reindex(df, '')return df", "docstring": "Returns an array of quotes for the top 10 symbols in a specified list.\n\n\n https://iexcloud.io/docs/api/#list\n Updated intraday\n\n Args:\n option (string); Option to query\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2330:m51"} {"signature": "def internationalSymbolsDF(region='', exchange='', token='', version=''):", "body": "df = pd.DataFrame(internationalSymbols(region, exchange, token, version))_toDatetime(df)_reindex(df, '')return df", "docstring": "This call returns an array of international symbols that IEX Cloud supports for API calls.\n\n https://iexcloud.io/docs/api/#international-symbols\n 8am, 9am, 12pm, 1pm UTC daily\n\n Args:\n region (string); region, 2 letter case insensitive string of country codes using ISO 3166-1 alpha-2\n exchange (string): Case insensitive string of Exchange using IEX Supported Exchanges list\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2331:m15"} {"signature": "def internationalSymbolsList(region='', exchange='', token='', version=''):", "body": "return internationalSymbolsDF(region, exchange, token, version).index.tolist()", "docstring": "This call returns an array of international symbols that IEX Cloud supports for API calls.\n\n https://iexcloud.io/docs/api/#international-symbols\n 8am, 9am, 12pm, 1pm UTC daily\n\n Args:\n region (string); region, 2 letter case insensitive string of country codes using ISO 3166-1 alpha-2\n exchange (string): Case insensitive string of Exchange using IEX Supported Exchanges list\n token (string); Access token\n version (string); API version\n\n Returns:\n list: result", "id": "f2331:m20"} {"signature": "@deprecated(details='')def directory(date=None, token='', version=''):", "body": "if date:date = _strOrDate(date)return _getJson('' + date, token, version)return _getJson('', token, version)", "docstring": "Args:\n date (datetime): Effective date\n token (string); Access token\n version (string); API version\n\nReturns:\n dict: result", "id": "f2331:m27"} {"signature": "def calendar(type='', direction='', last=, startDate=None, token='', version=''):", "body": "if startDate:startDate = _strOrDate(startDate)return _getJson(''.format(type=type, direction=direction, last=last, date=startDate), token, version)return _getJson('' + type + '' + direction + '' + str(last), token, version)", "docstring": "This call allows you to fetch a number of trade dates or holidays from a given date. 
For example, if you want the next trading day, you would call /ref-data/us/dates/trade/next/1.\n\n https://iexcloud.io/docs/api/#u-s-exchanges\n 8am, 9am, 12pm, 1pm UTC daily\n\n Args:\n type (string); \"holiday\" or \"trade\"\n direction (string); \"next\" or \"last\"\n last (int); number to move in direction\n startDate (date); start date for next or last, YYYYMMDD\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2331:m2"} {"signature": "def mutualFundSymbolsDF(token='', version=''):", "body": "df = pd.DataFrame(mutualFundSymbols(token, version))_toDatetime(df)_reindex(df, '')return df", "docstring": "This call returns an array of mutual fund symbols that IEX Cloud supports for API calls.\n\n https://iexcloud.io/docs/api/#mutual-fund-symbols\n 8am, 9am, 12pm, 1pm UTC daily\n\n Args:\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2331:m13"} {"signature": "@deprecated(details='')def corporateActions(date=None, token='', version=''):", "body": "if date:date = _strOrDate(date)return _getJson('' + date, token, version)return _getJson('', token, version)", "docstring": "Args:\n date (datetime): Effective date\n token (string); Access token\n version (string); API version\n\nReturns:\n dict: result", "id": "f2331:m21"} {"signature": "@deprecated(details='')def nextDayExtDate(date=None, token='', version=''):", "body": "if date:date = _strOrDate(date)return _getJson('' + date, token, version)return _getJson('', token, version)", "docstring": "Args:\n date (datetime): Effective date\n token (string); Access token\n version (string); API version\n\nReturns:\n dict: result", "id": "f2331:m25"} {"signature": "def iexSymbolsList(token='', version=''):", "body": "return iexSymbolsDF(token, version).index.tolist()", "docstring": "This call returns an array of symbols the Investors Exchange supports for trading.\n This list is updated daily as of 7:45 a.m. ET. Symbols may be added or removed by the Investors Exchange after the list was produced.\n\n https://iexcloud.io/docs/api/#iex-symbols\n 8am, 9am, 12pm, 1pm UTC daily\n\n Args:\n token (string); Access token\n version (string); API version\n\n Returns:\n list: result", "id": "f2331:m17"} {"signature": "def calendarDF(type='', direction='', last=, startDate=None, token='', version=''):", "body": "dat = pd.DataFrame(calendar(type, direction, last, startDate, token, version))_toDatetime(dat)return dat", "docstring": "This call allows you to fetch a number of trade dates or holidays from a given date. 
For example, if you want the next trading day, you would call /ref-data/us/dates/trade/next/1.\n\n https://iexcloud.io/docs/api/#u-s-exchanges\n 8am, 9am, 12pm, 1pm UTC daily\n\n Args:\n type (string); \"holiday\" or \"trade\"\n direction (string); \"next\" or \"last\"\n last (int); number to move in direction\n startDate (date); start date for next or last, YYYYMMDD\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2331:m3"} {"signature": "@deprecated(details='')def directoryDF(date=None, token='', version=''):", "body": "df = pd.DataFrame(directory(date, token, version))_toDatetime(df)return df", "docstring": "Args:\n date (datetime): Effective date\n token (string); Access token\n version (string); API version\n\nReturns:\n DataFrame: result", "id": "f2331:m28"} {"signature": "@deprecated(details='')def dividendsDF(date=None, token='', version=''):", "body": "df = pd.DataFrame(dividends(date, token, version))_toDatetime(df)return df", "docstring": "Args:\n date (datetime): Effective date\n token (string); Access token\n version (string); API version\n\nReturns:\n DataFrame: result", "id": "f2331:m24"} {"signature": "@deprecated(details='')def dividends(date=None, token='', version=''):", "body": "if date:date = _strOrDate(date)return _getJson('' + date, token, version)return _getJson('', token, version)", "docstring": "Args:\n date (datetime): Effective date\n token (string); Access token\n version (string); API version\n\nReturns:\n dict: result", "id": "f2331:m23"} {"signature": "def exchangesDF(token='', version=''):", "body": "return pd.DataFrame(exchanges())", "docstring": "Returns an array of U.S. exchanges.\n\n https://iexcloud.io/docs/api/#u-s-exchanges\n 8am, 9am, 12pm, 1pm UTC daily\n\n Args:\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2331:m1"} {"signature": "def systemEventSSE(symbols=None, on_data=None, token='', version=''):", "body": "return _runSSE('', symbols, on_data, token, version)", "docstring": "The System event message is used to indicate events that apply to the market or the data feed.\n\n There will be a single message disseminated per channel for each System Event type within a given trading session.\n\n https://iexcloud.io/docs/api/#deep-system-event\n\n Args:\n symbols (string); Tickers to request\n on_data (function): Callback on data\n token (string); Access token\n version (string); API version", "id": "f2332:m11"} {"signature": "def securityEventSSE(symbols=None, on_data=None, token='', version=''):", "body": "return _runSSE('', symbols, on_data, token, version)", "docstring": "The Security event message is used to indicate events that apply to a security. 
A Security event message will be sent whenever such event occurs\n\n https://iexcloud.io/docs/api/#deep-security-event\n\n Args:\n symbols (string); Tickers to request\n on_data (function): Callback on data\n token (string); Access token\n version (string); API version", "id": "f2332:m9"} {"signature": "def officialPriceSSE(symbols=None, on_data=None, token='', version=''):", "body": "return _runSSE('', symbols, on_data, token, version)", "docstring": "The Official Price message is used to disseminate the IEX Official Opening and Closing Prices.\n\n These messages will be provided only for IEX Listed Securities.\n\n https://iexcloud.io/docs/api/#deep-official-price\n\n Args:\n symbols (string); Tickers to request\n on_data (function): Callback on data\n token (string); Access token\n version (string); API version", "id": "f2332:m8"} {"signature": "def auctionSSE(symbols=None, on_data=None, token='', version=''):", "body": "return _runSSE('', symbols, on_data, token, version)", "docstring": "DEEP broadcasts an Auction Information Message every one second between the Lock-in Time and the auction match for Opening and Closing Auctions,\n and during the Display Only Period for IPO, Halt, and Volatility Auctions. Only IEX listed securities are eligible for IEX Auctions.\n\n https://iexcloud.io/docs/api/#deep-auction\n\n Args:\n symbols (string); Tickers to request\n on_data (function): Callback on data\n token (string); Access token\n version (string); API version", "id": "f2332:m5"} {"signature": "def tradeBreaksSSE(symbols=None, on_data=None, token='', version=''):", "body": "return _runSSE('', symbols, on_data, token, version)", "docstring": "Trade report messages are sent when an order on the IEX Order Book is executed in whole or in part. DEEP sends a Trade report message for every individual fill.\n\n https://iexcloud.io/docs/api/#deep-trades\n\n Args:\n symbols (string); Tickers to request\n on_data (function): Callback on data\n token (string); Access token\n version (string); API version", "id": "f2332:m12"} {"signature": "def bookSSE(symbols=None, on_data=None, token='', version=''):", "body": "return _runSSE('', symbols, on_data, token, version)", "docstring": "Book shows IEX\u2019s bids and asks for given symbols.\n\n https://iexcloud.io/docs/api/#deep-book\n\n Args:\n symbols (string); Tickers to request\n on_data (function): Callback on data\n token (string); Access token\n version (string); API version", "id": "f2332:m6"} {"signature": "def opHaltStatusSSE(symbols=None, on_data=None, token='', version=''):", "body": "return _runSSE('', symbols, on_data, token, version)", "docstring": "The Exchange may suspend trading of one or more securities on IEX for operational reasons and indicates such operational halt using the Operational halt status message.\n\n IEX disseminates a full pre-market spin of Operational halt status messages indicating the operational halt status of all securities.\n In the spin, IEX will send out an Operational Halt Message with \u201cN\u201d (Not operationally halted on IEX) for all securities that are eligible for trading at the start of the Pre-Market Session.\n If a security is absent from the dissemination, firms should assume that the security is being treated as operationally halted in the IEX Trading System at the start of the Pre-Market Session.\n\n After the pre-market spin, IEX will use the Operational halt status message to relay changes in operational halt status for an individual security.\n\n 
https://iexcloud.io/docs/api/#deep-operational-halt-status\n\n Args:\n symbols (string); Tickers to request\n on_data (function): Callback on data\n token (string); Access token\n version (string); API version", "id": "f2332:m7"} {"signature": "def deepSSE(symbols=None, channels=None, on_data=None, token='', version=''):", "body": "symbols = _strCommaSeparatedString(symbols)channels = channels or []if isinstance(channels, str):if channels not in DeepChannelsSSE.options():raise PyEXception('', type(channels))channels = [channels]elif isinstance(channels, DeepChannelsSSE):channels = [channels.value]elif isinstance(channels, list):for i, c in enumerate(channels):if isinstance(c, DeepChannelsSSE):channels[i] = c.valueelif not isinstance(c, str) or isinstance(c, str) and c not in DeepChannelsSSE.options():raise PyEXception('', c)channels = _strCommaSeparatedString(channels)return _streamSSE(_SSE_DEEP_URL_PREFIX.format(symbols=symbols, channels=channels, token=token, version=version), on_data)", "docstring": "DEEP is used to receive real-time depth of book quotations direct from IEX.\n The depth of book quotations received via DEEP provide an aggregated size of resting displayed orders at a price and side,\n and do not indicate the size or number of individual orders at any price level.\n Non-displayed orders and non-displayed portions of reserve orders are not represented in DEEP.\n\n DEEP also provides last trade price and size information. Trades resulting from either displayed or non-displayed orders matching on IEX will be reported. Routed executions will not be reported.\n\n https://iexcloud.io/docs/api/#deep\n\n Args:\n symbols (string); Tickers to request\n on_data (function): Callback on data\n token (string); Access token\n version (string); API version", "id": "f2332:m3"} {"signature": "def deep(symbol=None, token='', version=''):", "body": "_raiseIfNotStr(symbol)if symbol:return _getJson('' + symbol, token, version)return _getJson('', token, version)", "docstring": "DEEP is used to receive real-time depth of book quotations direct from IEX.\n The depth of book quotations received via DEEP provide an aggregated size of resting displayed orders at a price and side,\n and do not indicate the size or number of individual orders at any price level.\n Non-displayed orders and non-displayed portions of reserve orders are not represented in DEEP.\n\n DEEP also provides last trade price and size information. Trades resulting from either displayed or non-displayed orders matching on IEX will be reported. 
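`deepSSE` above accepts a single channel name, a `DeepChannelsSSE` enum member, or a mixed list, and normalizes everything into one comma-separated string while rejecting unknown channels. A simplified, self-contained version of that normalization (the real option list lives in `DeepChannelsSSE`; the three members here are illustrative):

```python
from enum import Enum

class DeepChannels(Enum):
    TRADES = "trades"
    BOOK = "book"
    AUCTION = "auction"

def normalize_channels(channels):
    # Promote a lone string or enum member to a one-element list.
    if isinstance(channels, (str, DeepChannels)):
        channels = [channels]
    names = []
    for c in channels or []:
        name = c.value if isinstance(c, DeepChannels) else c
        if name not in {m.value for m in DeepChannels}:
            raise ValueError("unknown DEEP channel: %r" % (name,))
        names.append(name)
    return ",".join(names)

print(normalize_channels([DeepChannels.TRADES, "book"]))   # trades,book
```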
Routed executions will not be reported.\n\n https://iexcloud.io/docs/api/#deep\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2333:m4"} {"signature": "def tradingStatusDF(symbol=None, token='', version=''):", "body": "x = tradingStatus(symbol, token, version)data = []for key in x:d = x[key]d[''] = keydata.append(d)df = pd.DataFrame(data)_toDatetime(df)return df", "docstring": "The Trading status message is used to indicate the current trading status of a security.\n For IEX-listed securities, IEX acts as the primary market and has the authority to institute a trading halt or trading pause in a security due to news dissemination or regulatory reasons.\n For non-IEX-listed securities, IEX abides by any regulatory trading halts and trading pauses instituted by the primary or listing market, as applicable.\n\n IEX disseminates a full pre-market spin of Trading status messages indicating the trading status of all securities.\n In the spin, IEX will send out a Trading status message with \u201cT\u201d (Trading) for all securities that are eligible for trading at the start of the Pre-Market Session.\n If a security is absent from the dissemination, firms should assume that the security is being treated as operationally halted in the IEX Trading System.\n\n After the pre-market spin, IEX will use the Trading status message to relay changes in trading status for an individual security. Messages will be sent when a security is:\n\n Halted\n Paused*\n Released into an Order Acceptance Period*\n Released for trading\n *The paused and released into an Order Acceptance Period status will be disseminated for IEX-listed securities only. Trading pauses on non-IEX-listed securities will be treated simply as a halt.\n\n https://iexcloud.io/docs/api/#deep-trading-status\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2333:m25"} {"signature": "def bookDF(symbol=None, token='', version=''):", "body": "x = book(symbol, token, version)data = []for key in x:d = x[key]d[''] = keydata.append(d)df = pd.io.json.json_normalize(data)_toDatetime(df)return df", "docstring": "Book shows IEX\u2019s bids and asks for given symbols.\n\n https://iexcloud.io/docs/api/#deep-book\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2333:m9"} {"signature": "def tradeBreak(symbol=None, token='', version=''):", "body": "_raiseIfNotStr(symbol)if symbol:return _getJson('' + symbol, token, version)return _getJson('', token, version)", "docstring": "Trade break messages are sent when an execution on IEX is broken on that same trading day. 
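`tradingStatusDF` above (and `opHaltStatusDF` below) flatten a symbol-keyed dict of records into one row per symbol before building the DataFrame; the name of the key column is elided in this dump, so `"symbol"` below is an assumption:

```python
import pandas as pd

def keyed_dict_to_df(payload, key_col="symbol"):
    """Turn {key: record_dict} into a DataFrame with one row per key."""
    rows = []
    for key, record in payload.items():
        row = dict(record)
        row[key_col] = key       # what the elided d[''] = key assignment does
        rows.append(row)
    return pd.DataFrame(rows)

status = {"AAPL": {"status": "T"}, "IEXG": {"status": "H"}}
print(keyed_dict_to_df(status))
```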
Trade breaks are rare and only affect applications that rely upon IEX execution based data.\n\n https://iexcloud.io/docs/api/#deep-trade-break\n\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2333:m22"} {"signature": "def topsDF(symbols=None, token='', version=''):", "body": "df = pd.io.json.json_normalize(tops(symbols, token, version))_toDatetime(df)_reindex(df, '')return df", "docstring": "TOPS provides IEX\u2019s aggregated best quoted bid and offer position in near real time for all securities on IEX\u2019s displayed limit order book.\n TOPS is ideal for developers needing both quote and trade data.\n\n https://iexcloud.io/docs/api/#tops\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2333:m1"} {"signature": "def opHaltStatusDF(symbol=None, token='', version=''):", "body": "x = opHaltStatus(symbol, token, version)data = []for key in x:d = x[key]d[''] = keydata.append(d)df = pd.DataFrame(data)_toDatetime(df)return df", "docstring": "The Exchange may suspend trading of one or more securities on IEX for operational reasons and indicates such operational halt using the Operational halt status message.\n\n IEX disseminates a full pre-market spin of Operational halt status messages indicating the operational halt status of all securities.\n In the spin, IEX will send out an Operational Halt Message with \u201cN\u201d (Not operationally halted on IEX) for all securities that are eligible for trading at the start of the Pre-Market Session.\n If a security is absent from the dissemination, firms should assume that the security is being treated as operationally halted in the IEX Trading System at the start of the Pre-Market Session.\n\n After the pre-market spin, IEX will use the Operational halt status message to relay changes in operational halt status for an individual security.\n\n https://iexcloud.io/docs/api/#deep-operational-halt-status\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2333:m11"} {"signature": "def lastDF(symbols=None, token='', version=''):", "body": "df = pd.io.json.json_normalize(last(symbols, token, version))_toDatetime(df)_reindex(df, '')return df", "docstring": "Last provides trade data for executions on IEX. 
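`topsDF` above and `lastDF` below take a different path: they normalize the JSON payload and then reindex on the symbol column via the `_reindex` helper shown further down. A compact equivalent, assuming the reindex column is `"symbol"` (elided in the dump) and using the modern `pd.json_normalize` spelling of the older `pd.io.json.json_normalize`:

```python
import pandas as pd

quotes = [
    {"symbol": "AAPL", "bidPrice": 200.01, "askPrice": 200.05},
    {"symbol": "SPY", "bidPrice": 280.10, "askPrice": 280.12},
]

df = pd.json_normalize(quotes)
if "symbol" in df.columns:               # what _reindex(df, col) does
    df.set_index("symbol", inplace=True)
print(df)
```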
It is a near real time, intraday API that provides IEX last sale price, size and time.\n Last is ideal for developers that need a lightweight stock quote.\n\n https://iexcloud.io/docs/api/#last\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2333:m3"} {"signature": "def ssrStatus(symbol=None, token='', version=''):", "body": "_raiseIfNotStr(symbol)if symbol:return _getJson('' + symbol, token, version)return _getJson('', token, version)", "docstring": "In association with Rule 201 of Regulation SHO, the Short Sale Price Test Message is used to indicate when a short sale price test restriction is in effect for a security.\n\n IEX disseminates a full pre-market spin of Short sale price test status messages indicating the Rule 201 status of all securities.\n After the pre-market spin, IEX will use the Short sale price test status message in the event of an intraday status change.\n\n The IEX Trading System will process orders based on the latest short sale price test restriction status.\n\n https://iexcloud.io/docs/api/#deep-short-sale-price-test-status\n\n Args:\n symbol (string); Ticker to request\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2333:m16"} {"signature": "def systemEventDF(token='', version=''):", "body": "df = pd.io.json.json_normalize(systemEvent(token, version))_toDatetime(df)return df", "docstring": "The System event message is used to indicate events that apply to the market or the data feed.\n\n There will be a single message disseminated per channel for each System Event type within a given trading session.\n\n https://iexcloud.io/docs/api/#deep-system-event\n\n Args:\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result", "id": "f2333:m19"} {"signature": "@deprecated(details='')def tradingStatusWS(symbols=None, on_data=None):", "body": "symbols = _strToList(symbols)sendinit = ({'': symbols, '': ['']},)return _stream(_wsURL(''), sendinit, on_data)", "docstring": "https://iextrading.com/developer/docs/#trading-status", "id": "f2334:m6"} {"signature": "@deprecated(details='')def tradesWS(symbols=None, on_data=None):", "body": "symbols = _strToList(symbols)sendinit = ({'': symbols, '': ['']},)return _stream(_wsURL(''), sendinit, on_data)", "docstring": "https://iextrading.com/developer/docs/#trades", "id": "f2334:m4"} {"signature": "@deprecated(details='')def opHaltStatusWS(symbols=None, on_data=None):", "body": "symbols = _strToList(symbols)sendinit = ({'': symbols, '': ['']},)return _stream(_wsURL(''), sendinit, on_data)", "docstring": "https://iextrading.com/developer/docs/#operational-halt-status", "id": "f2334:m7"} {"signature": "@deprecated(details='')def bookWS(symbols=None, on_data=None):", "body": "symbols = _strToList(symbols)sendinit = ({'': symbols, '': ['']},)return _stream(_wsURL(''), sendinit, on_data)", "docstring": "https://iextrading.com/developer/docs/#book51", "id": "f2334:m3"} {"signature": "@deprecated(details='')def auctionWS(symbols=None, on_data=None):", "body": "symbols = _strToList(symbols)sendinit = ({'': symbols, '': ['']},)return _stream(_wsURL(''), sendinit, on_data)", "docstring": "https://iextrading.com/developer/docs/#auction", "id": "f2334:m11"} {"signature": "@deprecated(details='')def marketsDF(token='', version=''):", "body": "df = pd.DataFrame(markets(token, version))_toDatetime(df)return df", "docstring": 
"https://iextrading.com/developer/docs/#intraday", "id": "f2337:m1"} {"signature": "def __init__(self, addr, sendinit=None, on_data=None, on_open=None, on_close=None, raw=True):", "body": "self.addr = addrself.sendinit = sendiniton_data = on_data or printclass Namespace(BaseNamespace):def on_connect(self, *data):if on_open:on_open(_tryJson(data, raw))def on_disconnect(self, *data):if on_close:on_close(_tryJson(data, raw))def on_message(self, data):on_data(_tryJson(data, raw))self._Namespace = Namespace", "docstring": "addr: path to sio\nsendinit: tuple to emit\non_data, on_open, on_close: functions to call", "id": "f2338:c1:m0"} {"signature": "def _raiseIfNotStr(s):", "body": "if s is not None and not isinstance(s, string_types):raise PyEXception('' % str(type(s)))", "docstring": "internal", "id": "f2338:m7"} {"signature": "def _stream(url, sendinit=None, on_data=print):", "body": "cl = WSClient(url, sendinit=sendinit, on_data=on_data)return cl", "docstring": "internal", "id": "f2338:m9"} {"signature": "def _streamSSE(url, on_data=print, accrue=False):", "body": "messages = SSEClient(url)if accrue:ret = []for msg in messages:data = msg.dataon_data(json.loads(data))if accrue:ret.append(msg)return ret", "docstring": "internal", "id": "f2338:m10"} {"signature": "def _strToList(st):", "body": "if isinstance(st, string_types):return [st]return st", "docstring": "internal", "id": "f2338:m4"} {"signature": "def _tryJson(data, raw=True):", "body": "if raw:return datatry:return json.loads(data)except ValueError:return data", "docstring": "internal", "id": "f2338:m8"} {"signature": "def _reindex(df, col):", "body": "if col in df.columns:df.set_index(col, inplace=True)", "docstring": "internal", "id": "f2338:m11"} {"signature": "def summary(date=None, token='', version=''):", "body": "if date:if isinstance(date, str):return _getJson('' + date, token, version)elif isinstance(date, datetime):return _getJson('' + date.strftime(''), token, version)else:raise PyEXception(\"\" % str(type(date)), token, version)return _getJson('', token, version)", "docstring": "https://iexcloud.io/docs/api/#stats-historical-summary\n\n Args:\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2339:m6"} {"signature": "def daily(date=None, last='', token='', version=''):", "body": "if date:date = _strOrDate(date)return _getJson('' + date, token, version)elif last:return _getJson('' + last, token, version)return _getJson('', token, version)", "docstring": "https://iexcloud.io/docs/api/#stats-historical-daily\n\n Args:\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2339:m8"} {"signature": "def records(token='', version=''):", "body": "return _getJson('', token, version)", "docstring": "https://iexcloud.io/docs/api/#stats-records\n\n Args:\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2339:m4"} {"signature": "def stats(token='', version=''):", "body": "return _getJson('', token, version)", "docstring": "https://iexcloud.io/docs/api/#stats-intraday\n\n Args:\n token (string); Access token\n version (string); API version\n\n Returns:\n dict: result", "id": "f2339:m0"} {"signature": "def get_config_bool(name):", "body": "cli_config = CLIConfig(SF_CLI_CONFIG_DIR, SF_CLI_ENV_VAR_PREFIX)return cli_config.getboolean('', name, False)", "docstring": "Checks if a config value is set to a valid bool value.", "id": "f2344:m1"} {"signature": "def aad_cache():", "body": "return 
jsonpickle.decode(get_config_value('', fallback=None)),jsonpickle.decode(get_config_value('', fallback=None))", "docstring": "AAD token cache.", "id": "f2344:m11"} {"signature": "def client_endpoint():", "body": "return get_config_value('', None)", "docstring": "Cluster HTTP gateway endpoint address and port, represented as a URL.", "id": "f2344:m3"} {"signature": "def set_auth(pem=None, cert=None, key=None, aad=False):", "body": "if any([cert, key]) and pem:raise ValueError('')if any([cert, key]) and not all([cert, key]):raise ValueError('')if pem:set_config_value('', '')set_config_value('', pem)elif cert or key:set_config_value('', '')set_config_value('', cert)set_config_value('', key)elif aad:set_config_value('', '')else:set_config_value('', '')", "docstring": "Set certificate usage paths", "id": "f2344:m15"} {"signature": "def set_aad_metadata(uri, resource, client):", "body": "set_config_value('', uri)set_config_value('', resource)set_config_value('', client)", "docstring": "Set AAD metadata.", "id": "f2344:m14"} {"signature": "def set_aad_cache(token, cache):", "body": "set_config_value('', jsonpickle.encode(token))set_config_value('', jsonpickle.encode(cache))", "docstring": "Set AAD token cache.", "id": "f2344:m12"} {"signature": "def set_no_verify(no_verify):", "body": "if no_verify:set_config_value('', '')else:set_config_value('', '')", "docstring": "Configure if cert verification should be skipped.", "id": "f2344:m7"} {"signature": "def no_verify_setting():", "body": "return get_config_bool('')", "docstring": "True to skip certificate SSL validation and verification", "id": "f2344:m6"} {"signature": "def set_ca_cert(ca_path=None):", "body": "if ca_path:set_config_value('', ca_path)set_config_value('', '')else:set_config_value('', '')", "docstring": "Configure paths to CA cert(s).", "id": "f2344:m9"} {"signature": "def cert_info():", "body": "sec_type = security_type()if sec_type == '':return get_config_value('', fallback=None)if sec_type == '':cert_path = get_config_value('', fallback=None)key_path = get_config_value('', fallback=None)return cert_path, key_pathreturn None", "docstring": "Path to certificate related files, either a single file path or a\n tuple. In the case of no security, returns None.", "id": "f2344:m10"} {"signature": "def select(endpoint, cert=None, key=None, pem=None, ca=None, aad=False, no_verify=False):", "body": "from sfctl.config import (set_ca_cert, set_auth, set_aad_cache,set_cluster_endpoint,set_no_verify)from msrest import ServiceClient, Configurationfrom sfctl.auth import ClientCertAuthentication, AdalAuthenticationselect_arg_verify(endpoint, cert, key, pem, ca, aad, no_verify)if aad:new_token, new_cache = get_aad_token(endpoint, no_verify)set_aad_cache(new_token, new_cache)rest_client = ServiceClient(AdalAuthentication(no_verify),Configuration(endpoint))rest_client.send(rest_client.get('')).raise_for_status()else:client_cert = Noneif pem:client_cert = pemelif cert:client_cert = (cert, key)rest_client = ServiceClient(ClientCertAuthentication(client_cert, ca, no_verify),Configuration(endpoint))rest_client.send(rest_client.get('')).raise_for_status()set_cluster_endpoint(endpoint)set_no_verify(no_verify)set_ca_cert(ca)set_auth(pem, cert, key, aad)", "docstring": "Connects to a Service Fabric cluster endpoint.\nIf connecting to secure cluster specify an absolute path to a cert (.crt)\nand key file (.key) or a single file with both (.pem). 
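`set_auth` above enforces that a combined .pem file is mutually exclusive with a separate cert/key pair, and that cert and key must always come together. That validation reduces to two checks, sketched here standalone with the elided error strings filled in by plausible wording:

```python
def validate_auth(pem=None, cert=None, key=None):
    """Mirror set_auth's argument validation, minus the config writes."""
    if any([cert, key]) and pem:
        raise ValueError("specify either a .pem file or a cert/key pair, not both")
    if any([cert, key]) and not all([cert, key]):
        raise ValueError("a client certificate requires both cert and key paths")

validate_auth(pem="client.pem")            # ok
validate_auth(cert="c.crt", key="c.key")   # ok
```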
Do not specify both.\nOptionally, if connecting to a secure cluster, specify also an absolute\npath to a CA bundle file or directory of trusted CA certs.\n:param str endpoint: Cluster endpoint URL, including port and HTTP or HTTPS\nprefix\n:param str cert: Absolute path to a client certificate file\n:param str key: Absolute path to client certificate key file\n:param str pem: Absolute path to client certificate, as a .pem file\n:param str ca: Absolute path to CA certs directory to treat as valid\nor CA bundle\nfile\n:param bool aad: Use Azure Active Directory for authentication\n:param bool no_verify: Disable verification for certificates when using\nHTTPS, note: this is an insecure option and should not be used for\nproduction environments", "id": "f2345:m1"} {"signature": "def get_aad_token(endpoint, no_verify):", "body": "from azure.servicefabric.service_fabric_client_ap_is import (ServiceFabricClientAPIs)from sfctl.auth import ClientCertAuthenticationfrom sfctl.config import set_aad_metadataauth = ClientCertAuthentication(None, None, no_verify)client = ServiceFabricClientAPIs(auth, base_url=endpoint)aad_metadata = client.get_aad_metadata()if aad_metadata.type != \"\":raise CLIError(\"\")aad_resource = aad_metadata.metadatatenant_id = aad_resource.tenantauthority_uri = aad_resource.login + '' + tenant_idcontext = adal.AuthenticationContext(authority_uri,api_version=None)cluster_id = aad_resource.clusterclient_id = aad_resource.clientset_aad_metadata(authority_uri, cluster_id, client_id)code = context.acquire_user_code(cluster_id, client_id)print(code[''])token = context.acquire_token_with_device_code(cluster_id, code, client_id)print(\"\")return token, context.cache", "docstring": "Get AAD token", "id": "f2345:m2"} {"signature": "def launch():", "body": "cli_env = cli()return cli_env.invoke(sys.argv[:])", "docstring": "Entry point for Service Fabric CLI.\n\n Configures and invokes CLI with arguments passed during the time the python\n session is launched", "id": "f2346:m1"} {"signature": "def cli():", "body": "return VersionedCLI(cli_name=SF_CLI_NAME,config_dir=SF_CLI_CONFIG_DIR,config_env_var_prefix=SF_CLI_ENV_VAR_PREFIX,commands_loader_cls=SFCommandLoader,help_cls=SFCommandHelp)", "docstring": "Create CLI environment", "id": "f2346:m0"} {"signature": "def signed_session(self, session=None):", "body": "if session:session = super(ClientCertAuthentication, self).signed_session(session)else:session = super(ClientCertAuthentication, self).signed_session()if self.cert is not None:session.cert = self.certif self.ca_cert is not None:session.verify = self.ca_certif self.no_verify:session.verify = Falsereturn session", "docstring": "Create requests session with any required auth headers\n applied.\n\n :rtype: requests.Session.", "id": "f2347:c0:m1"} {"signature": "def signed_session(self, session=None):", "body": "from sfctl.config import (aad_metadata, aad_cache)if session:session = super(AdalAuthentication, self).signed_session(session)else:session = super(AdalAuthentication, self).signed_session()if self.no_verify:session.verify = Falseauthority_uri, cluster_id, client_id = aad_metadata()existing_token, existing_cache = aad_cache()context = adal.AuthenticationContext(authority_uri,cache=existing_cache)new_token = context.acquire_token(cluster_id,existing_token[''], client_id)header = \"\".format(\"\", new_token[''])session.headers[''] = headerreturn session", "docstring": "Create requests session with AAD auth headers\n\n :rtype: requests.Session.", "id": "f2347:c1:m1"} {"signature": "def 
load_command_table(self, args): ", "body": "with CommandSuperGroup(__name__, self,'') as super_group:with super_group.group('') as group:group.command('', '')with CommandSuperGroup(__name__, self, '',client_factory=client_create) as super_group: with super_group.group('') as group:group.command('', '')group.command('', '')group.command('', '')group.command('', '')group.command('', '')with ArgumentsContext(self, '') as ac:ac.argument('', options_list=['', ''])ac.argument('', options_list=['', ''])ac.argument('', options_list=['', ''])ac.argument('', options_list=['', ''])ac.argument('', options_list=['', ''])ac.argument('', options_list=['', ''])ac.argument('', options_list=['', ''])return OrderedDict(self.command_table)", "docstring": "Load all Service Fabric commands", "id": "f2349:c1:m0"} {"signature": "def get_reliabledictionary_type_schema(client, application_name, service_name, dictionary_name, type_name, output_file=None):", "body": "cluster = Cluster.from_sfclient(client)dictionary = cluster.get_application(application_name).get_service(service_name).get_dictionary(dictionary_name)result = json.dumps(dictionary.get_complex_type(type_name), indent=)if (output_file == None):output_file = \"\".format(application_name, service_name, dictionary_name, type_name)with open(output_file, \"\") as output:output.write(result)print('' + output_file)print(result)", "docstring": "Query complex type information existing reliable dictionaries for given application and service. Make sure to provide entire namespace for your type if necessary.\n\n :param application_name: Name of the application.\n :type application_name: str\n :param service_name: Name of the service.\n :type service_name: str\n :param dictionary_name: Name of the reliable dictionary.\n :type dictionary_name: str\n :param type_name: Name of the complex type.\n :type type_name: str\n :param output_file: Optional file to save the schema.", "id": "f2353:m2"} {"signature": "def get_reliabledictionary_list(client, application_name, service_name):", "body": "cluster = Cluster.from_sfclient(client)service = cluster.get_application(application_name).get_service(service_name)for dictionary in service.get_dictionaries():print(dictionary.name)", "docstring": "List existing reliable dictionaries.\n\n List existing reliable dictionaries and respective schema for given application and service.\n\n :param application_name: Name of the application.\n :type application_name: str\n :param service_name: Name of the service.\n :type service_name: str", "id": "f2353:m0"} {"signature": "def query_reliabledictionary(client, application_name, service_name, dictionary_name, query_string, partition_key=None, partition_id=None, output_file=None):", "body": "cluster = Cluster.from_sfclient(client)dictionary = cluster.get_application(application_name).get_service(service_name).get_dictionary(dictionary_name)start = time.time()if (partition_id != None):result = dictionary.query(query_string, PartitionLookup.ID, partition_id)elif (partition_key != None):result = dictionary.query(query_string, PartitionLookup.KEY, partition_key)else:result = dictionary.query(query_string)if type(result) is str:print(result)returnelse:result = json.dumps(result.get(\"\"), indent=)print(\"\" + str(time.time() - start) + \"\")if (output_file == None):output_file = \"\".format(application_name, service_name, dictionary_name)with open(output_file, \"\") as output:output.write(result)print()print('' + output_file)print(result)", "docstring": "Query existing reliable dictionary.\n\n Query 
existing reliable dictionaries for given application and service.\n\n :param application_name: Name of the application.\n :type application_name: str\n :param service_name: Name of the service.\n :type service_name: str\n :param dictionary_name: Name of the reliable dictionary.\n :type dictionary_name: str\n :param query_string: An OData query string. For example $top=10. Check https://www.odata.org/documentation/ for more information.\n :type query_string: str\n :param partition_key: Optional partition key of the desired partition, either a string if named schema or int if Int64 schema\n :type partition_id: str\n :param partition_id: Optional partition GUID of the owning reliable dictionary.\n :type partition_id: str\n :param output_file: Optional file to save the schema.", "id": "f2353:m3"} {"signature": "def find(self, y):", "body": "node = self.rootwhile True:edge = self._edgeLabel(node, node.parent)if edge.startswith(y):return node.idxi = while(i < len(edge) and edge[i] == y[]):y = y[:]i += if i != :if i == len(edge) and y != '':passelse:return -node = node._get_transition_link(y[])if not node:return -", "docstring": "Returns starting position of the substring y in the string used for\n building the Suffix tree.\n\n :param y: String\n :return: Index of the starting position of string y in the string used for building the Suffix tree\n -1 if y is not a substring.", "id": "f2362:c0:m15"} {"signature": "def _terminalSymbolsGenerator(self):", "body": "py2 = sys.version[] < ''UPPAs = list(list(range(,+)) + list(range(,+)) + list(range(, +)))for i in UPPAs:if py2:yield(unichr(i))else:yield(chr(i))raise ValueError(\"\")", "docstring": "Generator of unique terminal symbols used for building the Generalized Suffix Tree.\n Unicode Private Use Area U+E000..U+F8FF is used to ensure that terminal symbols\n are not part of the input string.", "id": "f2362:c0:m18"} {"signature": "def _find_lcs(self, node, stringIdxs):", "body": "nodes = [self._find_lcs(n, stringIdxs)for (n,_) in node.transition_linksif n.generalized_idxs.issuperset(stringIdxs)]if nodes == []:return nodedeepestNode = max(nodes, key=lambda n: n.depth)return deepestNode", "docstring": "Helper method that finds LCS by traversing the labeled GSD.", "id": "f2362:c0:m13"} {"signature": "def _build_McCreight(self, x):", "body": "u = self.rootd = for i in range(len(x)):while u.depth == d and u._has_transition(x[d+i]):u = u._get_transition_link(x[d+i])d = d + while d < u.depth and x[u.idx + d] == x[i + d]:d = d + if d < u.depth:u = self._create_node(x, u, d)self._create_leaf(x, i, u, d)if not u._get_suffix_link():self._compute_slink(x, u)u = u._get_suffix_link()d = d - if d < :d = ", "docstring": "Builds a Suffix tree using McCreight O(n) algorithm.\n\n Algorithm based on:\n McCreight, Edward M. 
\"A space-economical suffix tree construction algorithm.\" - ACM, 1976.\n Implementation based on:\n UH CS - 58093 String Processing Algorithms Lecture Notes", "id": "f2362:c0:m4"} {"signature": "def _build_generalized(self, xs):", "body": "terminal_gen = self._terminalSymbolsGenerator()_xs = ''.join([x + next(terminal_gen) for x in xs])self.word = _xsself._generalized_word_starts(xs)self._build(_xs)self.root._traverse(self._label_generalized)", "docstring": "Builds a Generalized Suffix Tree (GST) from the array of strings provided.", "id": "f2362:c0:m9"} {"signature": "def _generalized_word_starts(self, xs):", "body": "self.word_starts = []i = for n in range(len(xs)):self.word_starts.append(i)i += len(xs[n]) + ", "docstring": "Helper method returns the starting indexes of strings in GST", "id": "f2362:c0:m14"} {"signature": "def _edgeLabel(self, node, parent):", "body": "return self.word[node.idx + parent.depth : node.idx + node.depth]", "docstring": "Helper method, returns the edge label between a node and it's parent", "id": "f2362:c0:m17"} {"signature": "def _label_generalized(self, node):", "body": "if node.is_leaf():x = {self._get_word_start_index(node.idx)}else:x = {n for ns in node.transition_links for n in ns[].generalized_idxs}node.generalized_idxs = x", "docstring": "Helper method that labels the nodes of GST with indexes of strings\n found in their descendants.", "id": "f2362:c0:m10"} {"signature": "def unsafe_method(self, x):", "body": "pass", "docstring": "Docstring.", "id": "f2370:c10:m4"} {"signature": "def execute_undo(self, message):", "body": "info(\"\")with self.world._unlock_temporarily():message._undo(self.world)self.world._react_to_undo_response(message)for actor in self.actors:actor._react_to_undo_response(message)", "docstring": "Manage the response when the server rejects a message.\n\nAn undo is when required this client sends a message that the server \nrefuses to pass on to the other clients playing the game. When this \nhappens, the client must undo the changes that the message made to the \nworld before being sent or crash. Note that unlike sync requests, undo \nrequests are only reported to the client that sent the offending \nmessage.", "id": "f2375:c0:m5"} {"signature": "def update (self):", "body": "delta_velocity = Vector.null()target_position = self.target.get_position()sprite_position = self.sprite.get_position()desired_direction = target_position - sprite_positionif == self.los or desired_direction.magnitude <= self.los:desired_normal = desired_direction.normaldesired_velocity = desired_normal * self.sprite.get_max_velocity()delta_velocity = desired_velocity - self.sprite.get_velocity()self.last_delta_velocity = delta_velocityreturn delta_velocity, self.power", "docstring": "Calculate what the desired change in velocity is. \n delta_velocity = acceleration * delta_time\n Time will be dealt with by the sprite.", "id": "f2380:c4:m1"} {"signature": "@read_onlydef get_last_id(self):", "body": "return max(self._tokens)", "docstring": "Return the largest token id registered with the world. If no tokens \nhave been added to the world, the id for the world itself (0) is \nreturned. This means that the first \"real\" token id is 1.", "id": "f2381:c3:m8"} {"signature": "def _set_actors(self, actors):", "body": "self._actors = actors", "docstring": "Tell the world which actors are running on this machine. 
This \ninformation is used to create extensions for new tokens.", "id": "f2381:c3:m19"} {"signature": "@debug_onlydef require_token(object):", "body": "require_instance(Token(), object)", "docstring": "Raise an ApiUsageError if the given object is not a fully constructed \ninstance of a Token subclass.", "id": "f2381:m2"} {"signature": "@read_onlydef get_token(self, id):", "body": "return self._tokens[id]", "docstring": "Return the token with the given id. If no token with the given id is \nregistered to the world, an IndexError is thrown.", "id": "f2381:c3:m7"} {"signature": "@read_onlydef watch_method(self, method_name, callback):", "body": "try:method = getattr(self, method_name)except AttributeError:raise ApiUsageError(\"\"\"\"\"\")if not isinstance(method, Token.WatchedMethod):setattr(self, method_name, Token.WatchedMethod(method))method = getattr(self, method_name)method.add_watcher(callback)", "docstring": "Register the given callback to be called whenever the method with the \ngiven name is called. You can easily take advantage of this feature in \ntoken extensions by using the @watch_token decorator.", "id": "f2381:c2:m12"} {"signature": "def watch_token(method):", "body": "method._kxg_watch_token = Truereturn method", "docstring": "Mark a token extension method that should automatically be called when a \ntoken method of the same name is called.\n\nThis decorator must only be used on TokenExtension methods, otherwise it \nwill silently do nothing. The reason is that the decorator itself can't do \nanything but label the given method, because at the time of decoration the \ntoken to watch isn't known. The method is actually setup to watch a token \nin the TokenExtension constructor, which searches for the label added here. \nBut other classes won't make this search and will silently do nothing.", "id": "f2381:m1"} {"signature": "@staticmethoddef add_safety_check(member_name, member_value):", "body": "import functoolsfrom types import FunctionTypeis_method = isinstance(member_value, FunctionType)is_read_only = hasattr(member_value, '')is_private = member_name.startswith('')if not is_method or is_read_only or is_private:return member_valuedef safety_checked_method(self, *args, **kwargs):\"\"\"\"\"\"world = getattr(self, '', None)if world and world.is_locked():nonlocal member_nameraise ApiUsageError(\"\"\"\"\"\")return member_value(self, *args, **kwargs)functools.update_wrapper(safety_checked_method, member_value,assigned=functools.WRAPPER_ASSIGNMENTS + ('','','',))return safety_checked_method", "docstring": "If the given member is a method that is public (i.e. doesn't start with \nan underscore) and hasn't been marked as read-only, replace it with a \nversion that will check to make sure the world is locked. This ensures \nthat methods that alter the token are only called from update methods \nor messages.", "id": "f2381:c0:m2"} {"signature": "def _check_if_forum_observation_enabled(self):", "body": "try:super()._check_if_forum_observation_enabled()except ApiUsageError:raise ApiUsageError(\"\"\"\"\"\")", "docstring": "Give a helpful error if the user attempts to subscribe or unsubscribe \nfrom messages while the token is not registered with a world. This can \neasily happen if the user attempts to subscribe to messages in the \nconstructor. 
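`add_safety_check` above wraps every public, non-read-only `Token` method so that mutating calls are rejected while the world is locked. The guard itself is small; a reduced sketch with a stand-in `World` object and `ValueError`/`RuntimeError` in place of kxg's `ApiUsageError`:

```python
import functools

def safety_checked(method):
    """Refuse to run a mutating method while the owning world is locked."""
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        world = getattr(self, "world", None)
        if world is not None and world.is_locked():
            raise RuntimeError(
                "may not call %s() while the world is locked" % method.__name__)
        return method(self, *args, **kwargs)
    return wrapper

class World:
    def is_locked(self):
        return True

class Token:
    def __init__(self, world):
        self.world = world

    @safety_checked
    def move(self):
        print("moved")

try:
    Token(World()).move()
except RuntimeError as err:
    print(err)    # may not call move() while the world is locked
```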
However, because the constructor is only called on one \nclient and message handlers cannot be pickled, subscribing at this time \nwould create hard-to-find synchronization bugs.", "id": "f2381:c2:m18"} {"signature": "@debug_onlydef require_active_token(object):", "body": "require_token(object)token = objectif not token.has_id:raise ApiUsageError(\"\"\"\"\"\")if not token.has_world:raise ApiUsageError(\"\"\"\"\"\")", "docstring": "Raise an ApiUsageError if the given object is not a token that is currently \nparticipating in the game. To be participating in the game, the given \ntoken must have an id number and be associated with the world.", "id": "f2381:m3"} {"signature": "def on_exit_stage(self):", "body": "pass", "docstring": "Give the stage a chance to react before it is stopped and the next \nstage is started.\n\nYou can define the next stage by setting the Stage.successor attribute. \nIf the successor is static, you can just set it in the constructor. \nBut if it will differ depending on the context, this method may be a \ngood place to calculate it because it is called only once and just \nbefore the theater queries for the successor.", "id": "f2382:c2:m6"} {"signature": "def _assign_token_ids(self, id_factory):", "body": "for token in self.tokens_to_add():token._give_id(id_factory)", "docstring": "Assign id numbers to any tokens that will be added to the world by this \nmessage.\n\nThis method is called by Actor but not by ServerActor, so it's \nguaranteed to be called exactly once. In fact, this method is not \nreally different from the constructor, except that the id_factory \nobject is nicely provided. That's useful for assigning ids to tokens \nbut probably nothing else. This method is called before _check() so \nthat _check() can make sure that valid ids were assigned (although by \ndefault it doesn't).", "id": "f2384:c0:m17"} {"signature": "def _run_supervisor(self):", "body": "import timestill_supervising = lambda: (multiprocessing.active_children()or not self.log_queue.empty()or not self.exception_queue.empty())try:while still_supervising():try:record = self.log_queue.get_nowait()logger = logging.getLogger(record.name)logger.handle(record)except queue.Empty:passtry:exception = self.exception_queue.get_nowait()except queue.Empty:passelse:raise exceptiontime.sleep(/self.frame_rate)self.elapsed_time += /self.frame_rateif self.time_limit and self.elapsed_time > self.time_limit:raise RuntimeError(\"\")finally:for process in multiprocessing.active_children():process.terminate()", "docstring": "Poll the queues that the worker can use to communicate with the \nsupervisor, until all the workers are done and all the queues are \nempty. 
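`require_active_token` above layers two checks on top of the basic type check: the token must already have an id and must be attached to a world. A standalone rendering of that predicate, with `ValueError` standing in for kxg's `ApiUsageError` and the elided messages filled in with plausible wording:

```python
def require_active_token(token):
    """A token participates in the game only once it has an id and a world."""
    if not token.has_id:
        raise ValueError("token has not been given an id yet")
    if not token.has_world:
        raise ValueError("token has not been added to the world")

class StubToken:
    has_id = True
    has_world = True

require_active_token(StubToken())   # passes silently
```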
Handle messages as they appear.", "id": "f2386:c3:m5"} {"signature": "def main(world_cls, referee_cls, gui_cls, gui_actor_cls, ai_actor_cls,theater_cls=PygletTheater, default_host=DEFAULT_HOST,default_port=DEFAULT_PORT, argv=None):", "body": "import sys, os, docopt, nonstdlibexe_name = os.path.basename(sys.argv[])usage = main.__doc__.format(**locals()).strip()args = docopt.docopt(usage, argv or sys.argv[:])num_guis = int(args[''] or )num_ais = int(args[''] or )host, port = args[''], int(args[''])logging.basicConfig(format='',level=nonstdlib.verbosity(args['']),)if args['']:print(\"\"\"\"\"\")game = MultiplayerDebugger(world_cls, referee_cls, gui_cls, gui_actor_cls, num_guis,ai_actor_cls, num_ais, theater_cls, host, port)else:game = theater_cls()ai_actors = [ai_actor_cls() for i in range(num_ais)]if args['']:game.gui = gui_cls()game.initial_stage = UniplayerGameStage(world_cls(), referee_cls(), gui_actor_cls(), ai_actors)game.initial_stage.successor = PostgameSplashStage()if args['']:game.gui = gui_cls()game.initial_stage = ClientConnectionStage(world_cls(), gui_actor_cls(), host, port)if args['']:game.initial_stage = ServerConnectionStage(world_cls(), referee_cls(), num_guis, ai_actors,host, port)game.play()", "docstring": "Run a game being developed with the kxg game engine.\n\nUsage:\n {exe_name} sandbox [] [-v...]\n {exe_name} client [--host HOST] [--port PORT] [-v...]\n {exe_name} server [] [--host HOST] [--port PORT] [-v...] \n {exe_name} debug [] [--host HOST] [--port PORT] [-v...]\n {exe_name} --help\n\nCommands:\n sandbox\n Play a single-player game with the specified number of AIs. None of \n the multiplayer machinery will be used.\n\n client\n Launch a client that will try to connect to a server on the given host \n and port. Once it connects and the game starts, the client will allow \n you to play the game against any other connected clients.\n\n server\n Launch a server that will manage a game between the given number of \n human and AI players. The human players must connect using this \n command's client mode.\n\n debug\n Debug a multiplayer game locally. This command launches a server and \n the given number of clients all in different processes, and configures \n the logging system such that the output from each process can be easily \n distinguished.\n\nArguments:\n \n The number of human players that will be playing the game. Only needed \n by commands that will launch some sort of multiplayer server.\n\n \n The number of AI players that will be playing the game. Only needed by \n commands that will launch single-player games or multiplayer servers.\n\nOptions:\n -x --host HOST [default: {default_host}]\n The address of the machine running the server. Must be accessible from \n the machines running the clients.\n\n -p --port PORT [default: {default_port}]\n The port that the server should listen on. Don't specify a value less \n than 1024 unless the server is running with root permissions.\n\n -v --verbose \n Have the game engine log more information about what it's doing. You \n can specify this option several times to get more and more information.\n\nThis command is provided so that you can start writing your game with the least \npossible amount of boilerplate code. However, the clients and servers provided \nby this command are not capable of running a production game. 
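`_run_supervisor` above also terminates child processes and enforces a time limit, but its core is draining two inter-process queues, one carrying logging records and one carrying worker exceptions. The drain step for a single loop iteration, sketched with plain `queue.Queue` objects instead of multiprocessing queues:

```python
import logging
import queue

def drain_once(log_queue, exception_queue):
    """One supervisor iteration: forward a log record, re-raise any exception."""
    try:
        record = log_queue.get_nowait()
        logging.getLogger(record.name).handle(record)
    except queue.Empty:
        pass
    try:
        raise exception_queue.get_nowait()   # surface worker failures here
    except queue.Empty:
        pass

drain_once(queue.Queue(), queue.Queue())     # both empty: a no-op
```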
Once you have \nwritten your game and want to give it a polished set of menus and options, \nyou'll have to write new Stage subclasses encapsulating that logic and you'll \nhave to call those stages yourself by interacting more directly with the \nTheater class. The online documentation has more information on this process.", "id": "f2386:m0"} {"signature": "def pre_poll(self):", "body": "pass", "docstring": "Called before polling for process status", "id": "f2388:c0:m1"} {"signature": "def input(self, input, song):", "body": "try:cmd = getattr(self, self.CMD_MAP[input][])except (IndexError, KeyError):return self.screen.print_error(\"\".format(input))cmd(song)", "docstring": "Input callback, handles key presses", "id": "f2388:c1:m17"} {"signature": "def post_poll(self):", "body": "pass", "docstring": "Called after polling for process status", "id": "f2388:c0:m2"} {"signature": "def input(self, value, song):", "body": "pass", "docstring": "Called after user input during song playback", "id": "f2388:c0:m3"} {"signature": "def play(self, song):", "body": "pass", "docstring": "Called once when a song starts playing", "id": "f2388:c0:m0"} {"signature": "def _post_start(self):", "body": "return", "docstring": "Optionally, do something after the audio backend is started", "id": "f2389:c3:m6"} {"signature": "def _loop_hook(self):", "body": "return", "docstring": "Optionally, do something each main loop iteration", "id": "f2389:c3:m7"} {"signature": "def _send_cmd(self, cmd):", "body": "self._process.stdin.write(\"\".format(cmd).encode(\"\"))self._process.stdin.flush()", "docstring": "Write command to remote process", "id": "f2389:c3:m9"} {"signature": "def _read_from_process(self, handle):", "body": "return handle.readline().strip()", "docstring": "Read a line from the process and clean it\n\n Different audio backends return text in different formats so provides a\n hook for each subclass to customize reader behaviour.", "id": "f2389:c3:m8"} {"signature": "def __init__(self, callbacks, control_channel):", "body": "self._control_channel = control_channelself._control_fd = control_channel.fileno()self._callbacks = callbacksself._process = Noneself._cmd = [self._find_path()]", "docstring": "Constructor\n\n Will attempt to find the player binary on construction and fail if it\n is not found. Subclasses should append any additional arguments to\n _cmd.", "id": "f2389:c3:m0"} {"signature": "def _player_stopped(self, value):", "body": "raise NotImplementedError", "docstring": "Determine if player has stopped", "id": "f2389:c3:m3"} {"signature": "def play(self, song):", "body": "self._callbacks.play(song)self._load_track(song)time.sleep() while True:try:self._callbacks.pre_poll()self._ensure_started()self._loop_hook()readers, _, _ = select.select(self._get_select_readers(), [], [], )for handle in readers:if handle.fileno() == self._control_fd:self._callbacks.input(handle.readline().strip(), song)else:value = self._read_from_process(handle)if self._player_stopped(value):returnfinally:self._callbacks.post_poll()", "docstring": "Play a new song from a Pandora model\n\n Returns once the stream starts but does not shut down the remote audio\n output backend process. 
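The `input` callback in the player records below resolves a pressed key through `CMD_MAP` to a method name and invokes it, reporting unknown keys instead of crashing. A self-contained sketch of that dispatch table — the key names are invented, and the tuple layout (description first, method name second) is an assumption since the dump elides the index:

```python
class Player:
    CMD_MAP = {
        "p": ("pause the player", "pause"),
        "n": ("play next song", "skip"),
    }

    def pause(self, song):
        print("paused", song)

    def skip(self, song):
        print("skipped", song)

    def input(self, key, song):
        try:
            handler = getattr(self, self.CMD_MAP[key][1])
        except (IndexError, KeyError):
            print("unknown command: %r" % key)
            return
        handler(song)

Player().input("p", "song-1")   # paused song-1
Player().input("x", "song-1")   # unknown command: 'x'
```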
Calls the input callback when the user has\n input.", "id": "f2389:c3:m16"} {"signature": "def lower_volume(self):", "body": "raise NotImplementedError", "docstring": "Lower the volume of the audio output\n\n The player backend may not support this functionality in which case it\n should not override this method.", "id": "f2389:c3:m5"} {"signature": "def pause(self):", "body": "self._send_cmd(\"\")", "docstring": "Pause the player", "id": "f2389:c3:m11"} {"signature": "def end_station(self):", "body": "raise StopIteration", "docstring": "Stop playing the station", "id": "f2389:c3:m17"} {"signature": "def iterate_forever(func, *args, **kwargs):", "body": "output = func(*args, **kwargs)while True:try:playlist_item = next(output)playlist_item.prepare_playback()yield playlist_itemexcept StopIteration:output = func(*args, **kwargs)", "docstring": "Iterate over a finite iterator forever\n\n When the iterator is exhausted will call the function again to generate a\n new iterator and keep iterating.", "id": "f2390:m0"} {"signature": "@staticmethoddef get_integer(prompt):", "body": "while True:try:return int(input(prompt).strip())except ValueError:print(Colors.red(\"\"))", "docstring": "Gather user input and convert it to an integer\n\n Will keep trying till the user enters an interger or until they ^C the\n program.", "id": "f2390:c4:m7"} {"signature": "def formatter(self, api_client, data, newval):", "body": "raise NotImplementedError", "docstring": "Format Value for Model\n\n The return value of this method is used as a value for the field in the\n model of which this field is a member\n\n api_client\n instance of a Pandora API client\n data\n complete JSON data blob for the parent model of which this field is\n a member\n newval\n the value of this field as retrieved from the JSON data after\n having resolved default value logic", "id": "f2402:c1:m0"} {"signature": "@classmethoddef from_json_list(cls, api_client, data):", "body": "return [cls.from_json(api_client, item) for item in data]", "docstring": "Convert a list of JSON values to a list of models", "id": "f2402:c4:m0"} {"signature": "def prepare_playback(self):", "body": "return self", "docstring": "Prepare Track for Playback\n\n This method must be called by clients before beginning playback\n otherwise the track recieved may not be playable.", "id": "f2404:c4:m1"} {"signature": "def retries(max_tries, exceptions=(Exception,)):", "body": "def decorator(func):def function(*args, **kwargs):retries_left = max_trieswhile retries_left > :try:retries_left -= return func(*args, **kwargs)except exceptions as exc:if isinstance(exc, PandoraException):raiseif retries_left > :time.sleep(delay_exponential(, , max_tries - retries_left))else:raisereturn functionreturn decorator", "docstring": "Function decorator implementing retrying logic.\n\n exceptions: A tuple of exception classes; default (Exception,)\n\n The decorator will call the function up to max_tries times if it raises\n an exception.\n\n By default it catches instances of the Exception class and subclasses.\n This will recover after all but the most fatal errors. 
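`iterate_forever` above restarts the playlist generator whenever it is exhausted, so callers see an endless stream of tracks. The control flow, minus the `prepare_playback()` step:

```python
def iterate_forever(make_iter, *args, **kwargs):
    """Yield from the iterator; rebuild it whenever it runs dry."""
    it = make_iter(*args, **kwargs)
    while True:
        try:
            yield next(it)
        except StopIteration:
            it = make_iter(*args, **kwargs)   # refill and keep going

stream = iterate_forever(lambda: iter([1, 2]))
print([next(stream) for _ in range(5)])        # [1, 2, 1, 2, 1]
```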
You may specify a\n custom tuple of exception classes with the 'exceptions' argument; the\n function will only be retried if it raises one of the specified\n exceptions.", "id": "f2408:m0"} {"signature": "def __init__(self, headers=None, data=None):", "body": "if headers is None:headers = {'': http_client.OK}self.data = dataself.response_headers = headersself.headers = Noneself.uri = Noneself.method = Noneself.body = Noneself.headers = Noneself.requests = ", "docstring": "HttpMock constructor.\n\n Args:\n headers: dict, header to return with response", "id": "f2444:c1:m0"} {"signature": "def __init__(self, iterable):", "body": "self._iterable = iterableself.requests = []", "docstring": "HttpMockSequence constructor.\n\n Args:\n iterable: iterable, a sequence of pairs of (headers, body)", "id": "f2444:c2:m0"} {"signature": "def _parse_pem_key(raw_key_input):", "body": "offset = raw_key_input.find(b'')if offset != -:return raw_key_input[offset:]", "docstring": "Identify and extract PEM keys.\n\n Determines whether the given key is in the format of PEM key, and extracts\n the relevant part of the key if it is.\n\n Args:\n raw_key_input: The contents of a private key file (either PEM or\n PKCS12).\n\n Returns:\n string, The actual key if the contents are from a PEM file, or\n else None.", "id": "f2445:m7"} {"signature": "def scopes_to_string(scopes):", "body": "if isinstance(scopes, six.string_types):return scopeselse:return ''.join(scopes)", "docstring": "Converts scope value to a string.\n\n If scopes is a string then it is simply passed through. If scopes is an\n iterable then a string is returned that is all the individual scopes\n concatenated with spaces.\n\n Args:\n scopes: string or iterable of strings, the scopes.\n\n Returns:\n The scopes formatted as a single string.", "id": "f2445:m1"} {"signature": "def _from_bytes(value):", "body": "result = (value.decode('')if isinstance(value, six.binary_type) else value)if isinstance(result, six.text_type):return resultelse:raise ValueError(''.format(value))", "docstring": "Converts bytes to a string value, if necessary.\n\n Args:\n value: The string/bytes value to be converted.\n\n Returns:\n The original value converted to unicode (if bytes) or as passed in\n if it started out as unicode.\n\n Raises:\n ValueError if the value could not be converted to unicode.", "id": "f2445:m10"} {"signature": "def update_query_params(uri, params):", "body": "parts = urllib.parse.urlparse(uri)query_params = parse_unique_urlencoded(parts.query)query_params.update(params)new_query = urllib.parse.urlencode(query_params)new_parts = parts._replace(query=new_query)return urllib.parse.urlunparse(new_parts)", "docstring": "Updates a URI with new query parameters.\n\n If a given key from ``params`` is repeated in the ``uri``, then\n the URI will be considered invalid and an error will occur.\n\n If the URI is valid, then each value from ``params`` will\n replace the corresponding value in the query parameters (if\n it exists).\n\n Args:\n uri: string, A valid URI, with potential existing query parameters.\n params: dict, A dictionary of query parameters.\n\n Returns:\n The same URI but with the new query parameters added.", "id": "f2445:m4"} {"signature": "def parse_unique_urlencoded(content):", "body": "urlencoded_params = urllib.parse.parse_qs(content)params = {}for key, value in six.iteritems(urlencoded_params):if len(value) != :msg = ('''' % (key, ''.join(value)))raise ValueError(msg)params[key] = value[]return params", "docstring": "Parses unique key-value 
parameters from urlencoded content.\n\n Args:\n content: string, URL-encoded key-value pairs.\n\n Returns:\n dict, The key-value pairs from ``content``.\n\n Raises:\n ValueError: if one of the keys is repeated.", "id": "f2445:m3"} {"signature": "def positional(max_positional_args):", "body": "def positional_decorator(wrapped):@functools.wraps(wrapped)def positional_wrapper(*args, **kwargs):if len(args) > max_positional_args:plural_s = ''if max_positional_args != :plural_s = ''message = (''''.format(function=wrapped.__name__,args_max=max_positional_args,args_given=len(args),plural=plural_s))if positional_parameters_enforcement == POSITIONAL_EXCEPTION:raise TypeError(message)elif positional_parameters_enforcement == POSITIONAL_WARNING:logger.warning(message)return wrapped(*args, **kwargs)return positional_wrapperif isinstance(max_positional_args, six.integer_types):return positional_decoratorelse:args, _, _, defaults = inspect.getargspec(max_positional_args)return positional(len(args) - len(defaults))(max_positional_args)", "docstring": "A decorator to declare that only the first N arguments my be positional.\n\n This decorator makes it easy to support Python 3 style keyword-only\n parameters. For example, in Python 3 it is possible to write::\n\n def fn(pos1, *, kwonly1=None, kwonly1=None):\n ...\n\n All named parameters after ``*`` must be a keyword::\n\n fn(10, 'kw1', 'kw2') # Raises exception.\n fn(10, kwonly1='kw1') # Ok.\n\n Example\n ^^^^^^^\n\n To define a function like above, do::\n\n @positional(1)\n def fn(pos1, kwonly1=None, kwonly2=None):\n ...\n\n If no default value is provided to a keyword argument, it becomes a\n required keyword argument::\n\n @positional(0)\n def fn(required_kw):\n ...\n\n This must be called with the keyword parameter::\n\n fn() # Raises exception.\n fn(10) # Raises exception.\n fn(required_kw=10) # Ok.\n\n When defining instance or class methods always remember to account for\n ``self`` and ``cls``::\n\n class MyClass(object):\n\n @positional(2)\n def my_method(self, pos1, kwonly1=None):\n ...\n\n @classmethod\n @positional(2)\n def my_method(cls, pos1, kwonly1=None):\n ...\n\n The positional decorator behavior is controlled by\n ``_helpers.positional_parameters_enforcement``, which may be set to\n ``POSITIONAL_EXCEPTION``, ``POSITIONAL_WARNING`` or\n ``POSITIONAL_IGNORE`` to raise an exception, log a warning, or do\n nothing, respectively, if a declaration is violated.\n\n Args:\n max_positional_arguments: Maximum number of positional arguments. All\n parameters after the this index must be\n keyword only.\n\n Returns:\n A decorator that prevents using arguments after max_positional_args\n from being used as positional parameters.\n\n Raises:\n TypeError: if a key-word only argument is provided as a positional\n parameter, but only if\n _helpers.positional_parameters_enforcement is set to\n POSITIONAL_EXCEPTION.", "id": "f2445:m0"} {"signature": "def _to_bytes(value, encoding=''):", "body": "result = (value.encode(encoding)if isinstance(value, six.text_type) else value)if isinstance(result, six.binary_type):return resultelse:raise ValueError(''.format(value))", "docstring": "Converts a string value to bytes, if necessary.\n\n Unfortunately, ``six.b`` is insufficient for this task since in\n Python2 it does not modify ``unicode`` objects.\n\n Args:\n value: The string/bytes value to be converted.\n encoding: The encoding to use to convert unicode to bytes. Defaults\n to \"ascii\", which will not allow any characters from ordinals\n larger than 127. 
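`update_query_params` above merges new parameters into a URI, first insisting (via `parse_unique_urlencoded`) that no key in the existing query string is repeated. A condensed, runnable equivalent of the two helpers combined:

```python
from urllib import parse as urlparse

def update_query_params(uri, params):
    """Replace/extend query parameters, rejecting repeated existing keys."""
    parts = urlparse.urlparse(uri)
    existing = urlparse.parse_qs(parts.query)
    for key, values in existing.items():
        if len(values) != 1:
            raise ValueError("repeated query parameter: %s" % key)
    merged = {k: v[0] for k, v in existing.items()}
    merged.update(params)
    return urlparse.urlunparse(parts._replace(query=urlparse.urlencode(merged)))

print(update_query_params("https://example.com/auth?scope=email", {"state": "xyz"}))
```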
Other useful values are \"latin-1\", which\n which will only allows byte ordinals (up to 255) and \"utf-8\",\n which will encode any unicode that needs to be.\n\n Returns:\n The original value converted to bytes (if unicode) or as passed in\n if it started out as bytes.\n\n Raises:\n ValueError if the value could not be converted to bytes.", "id": "f2445:m9"} {"signature": "def sign(self, message):", "body": "message = _helpers._to_bytes(message, encoding='')return PKCS1_v1_5.new(self._key).sign(SHA256.new(message))", "docstring": "Signs a message.\n\n Args:\n message: string, Message to be signed.\n\n Returns:\n string, The signature of the message for the given key.", "id": "f2446:c1:m1"} {"signature": "@staticmethoddef from_string(key_pem, is_x509_cert):", "body": "if is_x509_cert:key_pem = _helpers._to_bytes(key_pem)pemLines = key_pem.replace(b'', b'').split()certDer = _helpers._urlsafe_b64decode(b''.join(pemLines[:-]))certSeq = DerSequence()certSeq.decode(certDer)tbsSeq = DerSequence()tbsSeq.decode(certSeq[])pubkey = RSA.importKey(tbsSeq[])else:pubkey = RSA.importKey(key_pem)return PyCryptoVerifier(pubkey)", "docstring": "Construct a Verified instance from a string.\n\n Args:\n key_pem: string, public key in PEM format.\n is_x509_cert: bool, True if key_pem is an X509 cert, otherwise it\n is expected to be an RSA key in PEM format.\n\n Returns:\n Verifier instance.", "id": "f2446:c0:m2"} {"signature": "@staticmethoddef from_string(key, password=''):", "body": "parsed_pem_key = _helpers._parse_pem_key(_helpers._to_bytes(key))if parsed_pem_key:pkey = RSA.importKey(parsed_pem_key)else:raise NotImplementedError('''''')return PyCryptoSigner(pkey)", "docstring": "Construct a Signer instance from a string.\n\n Args:\n key: string, private key in PEM format.\n password: string, password for private key file. 
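`PyCryptoSigner.sign` above hashes the message with SHA-256 and signs the digest with PKCS#1 v1.5, and `PyCryptoVerifier` checks such signatures. A round-trip sketch using the same PyCrypto API these records target (requires the legacy `pycrypto` package or its drop-in successor `pycryptodome`; key generation here replaces the PEM parsing the real code does):

```python
from Crypto.Hash import SHA256
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5

key = RSA.generate(2048)          # stand-in for a key loaded from PEM
message = b"token payload"

signature = PKCS1_v1_5.new(key).sign(SHA256.new(message))
ok = PKCS1_v1_5.new(key.publickey()).verify(SHA256.new(message), signature)
print(ok)    # True
```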
Unused for PEM\n files.\n\n Returns:\n Signer instance.\n\n Raises:\n NotImplementedError if the key isn't in PEM format.", "id": "f2446:c1:m2"} {"signature": "def __init__(self, pkey):", "body": "self._key = pkey", "docstring": "Constructor.\n\n Args:\n pkey, OpenSSL.crypto.PKey (or equiv), The private key to sign with.", "id": "f2446:c1:m0"} {"signature": "def __init__(self, pubkey):", "body": "self._pubkey = pubkey", "docstring": "Constructor.\n\n Args:\n pubkey: OpenSSL.crypto.PKey (or equiv), The public key to verify\n with.", "id": "f2446:c0:m0"} {"signature": "def oauth2_callback(request):", "body": "if '' in request.GET:reason = request.GET.get('', request.GET.get('', ''))reason = html.escape(reason)return http.HttpResponseBadRequest(''.format(reason))try:encoded_state = request.GET['']code = request.GET['']except KeyError:return http.HttpResponseBadRequest('')try:server_csrf = request.session[_CSRF_KEY]except KeyError:return http.HttpResponseBadRequest('')try:state = json.loads(encoded_state)client_csrf = state['']return_url = state['']except (ValueError, KeyError):return http.HttpResponseBadRequest('')if client_csrf != server_csrf:return http.HttpResponseBadRequest('')flow = _get_flow_for_token(client_csrf, request)if not flow:return http.HttpResponseBadRequest('')try:credentials = flow.step2_exchange(code)except client.FlowExchangeError as exchange_error:return http.HttpResponseBadRequest(''.format(exchange_error))get_storage(request).put(credentials)signals.oauth2_authorized.send(sender=signals.oauth2_authorized,request=request, credentials=credentials)return shortcuts.redirect(return_url)", "docstring": "View that handles the user's return from OAuth2 provider.\n\n This view verifies the CSRF state and OAuth authorization code, and on\n success stores the credentials obtained in the storage provider,\n and redirects to the return_url specified in the authorize view and\n stored in the session.\n\n Args:\n request: Django request.\n\n Returns:\n A redirect response back to the return_url.", "id": "f2449:m2"} {"signature": "def __init__(self, model_class, key_name, key_value, property_name):", "body": "super(DjangoORMStorage, self).__init__()self.model_class = model_classself.key_name = key_nameself.key_value = key_valueself.property_name = property_name", "docstring": "Constructor for Storage.\n\n Args:\n model: string, fully qualified name of db.Model model class.\n key_name: string, key name for the entity that has the credentials\n key_value: string, key value for the entity that has the\n credentials.\n property_name: string, name of the property that is an\n CredentialsProperty.", "id": "f2450:c0:m0"} {"signature": "def locked_delete(self):", "body": "query = {self.key_name: self.key_value}self.model_class.objects.filter(**query).delete()", "docstring": "Delete Credentials from the datastore.", "id": "f2450:c0:m3"} {"signature": "def _get_scopes(self):", "body": "if _credentials_from_request(self.request):return (self._scopes |_credentials_from_request(self.request).scopes)else:return self._scopes", "docstring": "Returns the scopes associated with this object, kept up to\n date for incremental auth.", "id": "f2452:c1:m3"} {"signature": "@propertydef credentials(self):", "body": "return _credentials_from_request(self.request)", "docstring": "Gets the authorized credentials for this flow, if they exist.", "id": "f2452:c1:m5"} {"signature": "def has_credentials(self):", "body": "credentials = _credentials_from_request(self.request)return (credentials and not 
credentials.invalid andcredentials.has_scopes(self._get_scopes()))", "docstring": "Returns True if there are valid credentials for the current user\n and required scopes.", "id": "f2452:c1:m2"} {"signature": "@propertydef scopes(self):", "body": "return self._get_scopes()", "docstring": "Returns the scopes associated with this OAuth2 object.", "id": "f2452:c1:m4"} {"signature": "def get_authorize_redirect(self):", "body": "get_params = {'': self.return_url,'': self._get_scopes()}return _redirect_with_params('', **get_params)", "docstring": "Creates a URL to start the OAuth2 authorization flow.", "id": "f2452:c1:m1"} {"signature": "def _redirect_with_params(url_name, *args, **kwargs):", "body": "url = urlresolvers.reverse(url_name, args=args)params = parse.urlencode(kwargs, True)return \"\".format(url, params)", "docstring": "Helper method to create a redirect response with URL params.\n\n This builds a redirect string that converts kwargs into a\n query string.\n\n Args:\n url_name: The name of the url to redirect to.\n kwargs: the query string param and their values to build.\n\n Returns:\n A properly formatted redirect string.", "id": "f2452:m4"} {"signature": "def _get_storage_model():", "body": "storage_model_settings = getattr(django.conf.settings,'', None)if storage_model_settings is not None:return (storage_model_settings[''],storage_model_settings[''],storage_model_settings[''])else:return None, None, None", "docstring": "This configures whether the credentials will be stored in the session\n or the Django ORM based on the settings. By default, the credentials\n will be stored in the session, unless `GOOGLE_OAUTH2_STORAGE_MODEL`\n is found in the settings. Usually, the ORM storage is used to integrate\n credentials into an existing Django user system.\n\n Returns:\n A tuple containing three strings, or None. If\n ``GOOGLE_OAUTH2_STORAGE_MODEL`` is configured, the tuple\n will contain the fully qualified path of the `django.db.model`,\n the name of the ``django.contrib.auth.models.User`` field on the\n model, and the name of the\n :class:`oauth2client.contrib.django_util.models.CredentialsField`\n field on the model. 
If Django ORM storage is not configured,\n this function returns None.", "id": "f2452:m2"} {"signature": "def get_storage(request):", "body": "storage_model = oauth2_settings.storage_modeluser_property = oauth2_settings.storage_model_user_propertycredentials_property = oauth2_settings.storage_model_credentials_propertyif storage_model:module_name, class_name = storage_model.rsplit('', )module = importlib.import_module(module_name)storage_model_class = getattr(module, class_name)return storage.DjangoORMStorage(storage_model_class,user_property,request.user,credentials_property)else:return dictionary_storage.DictionaryStorage(request.session, key=_CREDENTIALS_KEY)", "docstring": "Gets a Credentials storage object provided by the Django OAuth2 Helper\n object.\n\n Args:\n request: Reference to the current request object.\n\n Returns:\n An :class:`oauth2.client.Storage` object.", "id": "f2452:m3"} {"signature": "@propertydef http(self):", "body": "if self.has_credentials():return self.credentials.authorize(transport.get_http_object())return None", "docstring": "Helper: create HTTP client authorized with OAuth2 credentials.", "id": "f2452:c1:m6"} {"signature": "def _credentials_from_request(request):", "body": "if (oauth2_settings.storage_model is None orrequest.user.is_authenticated()):return get_storage(request).get()else:return None", "docstring": "Gets the authorized credentials for this flow, if they exist.", "id": "f2452:m5"} {"signature": "def from_db_value(self, value, expression, connection, context):", "body": "return self.to_python(value)", "docstring": "Overrides ``models.Field`` method. This converts the value\n returned from the database to an instance of this class.", "id": "f2453:c0:m2"} {"signature": "def get_prep_value(self, value):", "body": "if value is None:return Noneelse:return encoding.smart_text(base64.b64encode(jsonpickle.encode(value).encode()))", "docstring": "Overrides ``models.Field`` method. This is used to convert\n the value from an instance of this class to bytes that can be\n inserted into the database.", "id": "f2453:c0:m4"} {"signature": "def oauth_required(decorated_function=None, scopes=None, **decorator_kwargs):", "body": "def curry_wrapper(wrapped_function):@wraps(wrapped_function)def required_wrapper(request, *args, **kwargs):if not (django_util.oauth2_settings.storage_model is None orrequest.user.is_authenticated()):redirect_str = ''.format(django.conf.settings.LOGIN_URL,parse.quote(request.path))return shortcuts.redirect(redirect_str)return_url = decorator_kwargs.pop('',request.get_full_path())user_oauth = django_util.UserOAuth2(request, scopes, return_url)if not user_oauth.has_credentials():return shortcuts.redirect(user_oauth.get_authorize_redirect())setattr(request, django_util.oauth2_settings.request_prefix,user_oauth)return wrapped_function(request, *args, **kwargs)return required_wrapperif decorated_function:return curry_wrapper(decorated_function)else:return curry_wrapper", "docstring": "Decorator to require OAuth2 credentials for a view.\n\n\n .. 
code-block:: python\n :caption: views.py\n :name: views_required_2\n\n\n from oauth2client.django_util.decorators import oauth_required\n\n @oauth_required\n def requires_default_scopes(request):\n email = request.credentials.id_token['email']\n service = build(serviceName='calendar', version='v3',\n http=request.oauth.http,\n developerKey=API_KEY)\n events = service.events().list(\n calendarId='primary').execute()['items']\n return HttpResponse(\n \"email: {0}, calendar: {1}\".format(email, str(events)))\n\n Args:\n decorated_function: View function to decorate, must have the Django\n request object as the first argument.\n scopes: Scopes to require, will default.\n decorator_kwargs: Can include ``return_url`` to specify the URL to\n return to after OAuth2 authorization is complete.\n\n Returns:\n An OAuth2 Authorize view if credentials are not found or if the\n credentials are missing the required scopes. Otherwise,\n the decorated view.", "id": "f2454:m0"} {"signature": "def oauth_enabled(decorated_function=None, scopes=None, **decorator_kwargs):", "body": "def curry_wrapper(wrapped_function):@wraps(wrapped_function)def enabled_wrapper(request, *args, **kwargs):return_url = decorator_kwargs.pop('',request.get_full_path())user_oauth = django_util.UserOAuth2(request, scopes, return_url)setattr(request, django_util.oauth2_settings.request_prefix,user_oauth)return wrapped_function(request, *args, **kwargs)return enabled_wrapperif decorated_function:return curry_wrapper(decorated_function)else:return curry_wrapper", "docstring": "Decorator to enable OAuth Credentials if authorized, and setup\n the oauth object on the request object to provide helper functions\n to start the flow otherwise.\n\n .. code-block:: python\n :caption: views.py\n :name: views_enabled3\n\n from oauth2client.django_util.decorators import oauth_enabled\n\n @oauth_enabled\n def optional_oauth2(request):\n if request.oauth.has_credentials():\n # this could be passed into a view\n # request.oauth.http is also initialized\n return HttpResponse(\"User email: {0}\".format(\n request.oauth.credentials.id_token['email'])\n else:\n return HttpResponse('Here is an OAuth Authorize link:\n Authorize'.format(\n request.oauth.get_authorize_redirect()))\n\n\n Args:\n decorated_function: View function to decorate.\n scopes: Scopes to require, will default.\n decorator_kwargs: Can include ``return_url`` to specify the URL to\n return to after OAuth2 authorization is complete.\n\n Returns:\n The decorated view function.", "id": "f2454:m1"} {"signature": "def locked_put(self, credentials):", "body": "filters = {self.key_name: self.key_value}query = self.session.query(self.model_class).filter_by(**filters)entity = query.first()if not entity:entity = self.model_class(**filters)setattr(entity, self.property_name, credentials)self.session.add(entity)", "docstring": "Write a credentials to the SQLAlchemy datastore.\n\n Args:\n credentials: :class:`oauth2client.Credentials`", "id": "f2455:c1:m2"} {"signature": "def __init__(self, session, model_class, key_name,key_value, property_name):", "body": "super(Storage, self).__init__()self.session = sessionself.model_class = model_classself.key_name = key_nameself.key_value = key_valueself.property_name = property_name", "docstring": "Constructor for Storage.\n\n Args:\n session: An instance of :class:`sqlalchemy.orm.Session`.\n model_class: SQLAlchemy declarative mapping.\n key_name: string, key name for the entity that has the credentials\n key_value: key value for the entity that has the 
credentials\n property_name: A string indicating which property on the\n ``model_class`` to store the credentials.\n This property must be a\n :class:`CredentialsType` column.", "id": "f2455:c1:m0"} {"signature": "def locked_delete(self):", "body": "filters = {self.key_name: self.key_value}self.session.query(self.model_class).filter_by(**filters).delete()", "docstring": "Delete credentials from the SQLAlchemy datastore.", "id": "f2455:c1:m3"} {"signature": "@_helpers.positional()def generate_token(key, user_id, action_id='', when=None):", "body": "digester = hmac.new(_helpers._to_bytes(key, encoding=''))digester.update(_helpers._to_bytes(str(user_id), encoding=''))digester.update(DELIMITER)digester.update(_helpers._to_bytes(action_id, encoding=''))digester.update(DELIMITER)when = _helpers._to_bytes(str(when or int(time.time())), encoding='')digester.update(when)digest = digester.digest()token = base64.urlsafe_b64encode(digest + DELIMITER + when)return token", "docstring": "Generates a URL-safe token for the given user, action, time tuple.\n\n Args:\n key: secret key to use.\n user_id: the user ID of the authenticated user.\n action_id: a string identifier of the action they requested\n authorization for.\n when: the time in seconds since the epoch at which the user was\n authorized for this action. If not set the current time is used.\n\n Returns:\n A string XSRF protection token.", "id": "f2456:m0"} {"signature": "@_helpers.positional()def validate_token(key, token, user_id, action_id=\"\", current_time=None):", "body": "if not token:return Falsetry:decoded = base64.urlsafe_b64decode(token)token_time = int(decoded.split(DELIMITER)[-])except (TypeError, ValueError, binascii.Error):return Falseif current_time is None:current_time = time.time()if current_time - token_time > DEFAULT_TIMEOUT_SECS:return Falseexpected_token = generate_token(key, user_id, action_id=action_id,when=token_time)if len(token) != len(expected_token):return Falsedifferent = for x, y in zip(bytearray(token), bytearray(expected_token)):different |= x ^ yreturn not different", "docstring": "Validates that the given token authorizes the user for the action.\n\n Tokens are invalid if the time of issue is too old or if the token\n does not match what generateToken outputs (i.e. the token was forged).\n\n Args:\n key: secret key to use.\n token: a string of the token generated by generateToken.\n user_id: the user ID of the authenticated user.\n action_id: a string identifier of the action they requested\n authorization for.\n\n Returns:\n A boolean - True if the user is authorized for the action, False\n otherwise.", "id": "f2456:m1"} {"signature": "def __init__(self, dictionary, key, lock=None):", "body": "super(DictionaryStorage, self).__init__(lock=lock)self._dictionary = dictionaryself._key = key", "docstring": "Construct a DictionaryStorage instance.", "id": "f2457:c0:m0"} {"signature": "def locked_delete(self):", "body": "self._dictionary.pop(self._key, None)", "docstring": "Remove the credentials from the dictionary, if they exist.", "id": "f2457:c0:m3"} {"signature": "def retrieve_scopes(self, http):", "body": "self._retrieve_info(http)return self.scopes", "docstring": "Retrieves the canonical list of scopes for this access token.\n\n Overrides client.Credentials.retrieve_scopes. 
Fetches scopes info\n from the metadata server.\n\n Args:\n http: httplib2.Http, an http object to be used to make the refresh\n request.\n\n Returns:\n A set of strings containing the canonical list of scopes.", "id": "f2458:c0:m3"} {"signature": "def sign_blob(self, blob):", "body": "raise NotImplementedError('')", "docstring": "Cryptographically sign a blob (of bytes).\n\n This method is provided to support a common interface, but\n the actual key used for a Google Compute Engine service account\n is not available, so it can't be used to sign content.\n\n Args:\n blob: bytes, Message to be signed.\n\n Raises:\n NotImplementedError, always.", "id": "f2458:c0:m8"} {"signature": "def _refresh(self, http):", "body": "try:self._retrieve_info(http)self.access_token, self.token_expiry = _metadata.get_token(http, service_account=self.service_account_email)except http_client.HTTPException as err:raise client.HttpAccessTokenRefreshError(str(err))", "docstring": "Refreshes the access token.\n\n Skip all the storage hoops and just refresh using the API.\n\n Args:\n http: an object to be used to make HTTP requests.\n\n Raises:\n HttpAccessTokenRefreshError: When the refresh fails.", "id": "f2458:c0:m5"} {"signature": "@classmethoddef _get_kind(cls):", "body": "return ''", "docstring": "Return the kind name for this class.", "id": "f2459:c3:m0"} {"signature": "def _validate(self, value):", "body": "_LOGGER.info('', type(value))if value is not None and not isinstance(value, client.Credentials):raise TypeError(''''.format(self._name, value))", "docstring": "Validates a value as a proper credentials object.\n\n Args:\n value: A value to be set on the property.\n\n Raises:\n TypeError if the value is not an instance of Credentials.", "id": "f2459:c2:m0"} {"signature": "def _from_base_type(self, value):", "body": "if not value:return Nonetry:credentials = client.Credentials.new_from_json(value)except ValueError:credentials = Nonereturn credentials", "docstring": "Converts our stored JSON string back to the desired type.\n\n Args:\n value: A value from the datastore to be converted to the\n desired type.\n\n Returns:\n A deserialized Credentials (or subclass) object, else None if\n the value can't be parsed.", "id": "f2459:c2:m2"} {"signature": "def _validate(self, value):", "body": "_LOGGER.info('', type(value))if value is not None and not isinstance(value, client.Flow):raise TypeError(''''.format(self._name, value))", "docstring": "Validates a value as a proper Flow object.\n\n Args:\n value: A value to be set on the property.\n\n Raises:\n TypeError if the value is not an instance of Flow.", "id": "f2459:c1:m0"} {"signature": "def _load_credentials(self):", "body": "if not self._file:returnloaded_credentials = _load_credentials_file(self._file)self._credentials.update(loaded_credentials)logger.debug('')", "docstring": "(Re-)loads the credentials from the file.", "id": "f2461:c0:m1"} {"signature": "def locked_put(self, credentials):", "body": "return self._backend.locked_put(self._key, credentials)", "docstring": "Writes the given credentials to the store.\n\n Args:\n credentials: an instance of\n :class:`oauth2client.client.Credentials`.", "id": "f2461:c1:m4"} {"signature": "def has_credentials(self):", "body": "return self.credentials is not None and not self.credentials.invalid", "docstring": "True if for the logged in user there are valid access Credentials.\n\n Must only be called from within a webapp.RequestHandler subclassed method\n that had been decorated with either @oauth_required or 
@oauth_aware.", "id": "f2463:c6:m9"} {"signature": "def _is_ndb(self):", "body": "if isinstance(self._model, type):if _NDB_MODEL is not None and issubclass(self._model, _NDB_MODEL):return Trueelif issubclass(self._model, db.Model):return Falseraise TypeError(''.format(self._model))", "docstring": "Determine whether the model of the instance is an NDB model.\n\n Returns:\n Boolean indicating whether or not the model is an NDB or DB model.", "id": "f2463:c4:m1"} {"signature": "@_helpers.positional()def __init__(self, model, key_name, property_name, cache=None, user=None):", "body": "super(StorageByKeyName, self).__init__()if key_name is None:if user is None:raise ValueError('''')key_name = user.user_id()self._model = modelself._key_name = key_nameself._property_name = property_nameself._cache = cache", "docstring": "Constructor for Storage.\n\n Args:\n model: db.Model or ndb.Model, model class\n key_name: string, key name for the entity that has the credentials\n property_name: string, name of the property that is a\n CredentialsProperty or CredentialsNDBProperty.\n cache: memcache, a write-through cache to put in front of the\n datastore. If the model you are using is an NDB model, using\n a cache will be redundant since the model uses an instance\n cache and memcache for you.\n user: users.User object, optional. Can be used to grab user ID as a\n key_name if no key name is specified.", "id": "f2463:c4:m0"} {"signature": "@db.non_transactional(allow_existing=True)def locked_put(self, credentials):", "body": "entity = self._model.get_or_insert(self._key_name)setattr(entity, self._property_name, credentials)entity.put()if self._cache:self._cache.set(self._key_name, credentials.to_json())", "docstring": "Write a Credentials to the datastore.\n\n Args:\n credentials: Credentials, the credentials to store.", "id": "f2463:c4:m5"} {"signature": "@db.non_transactional(allow_existing=True)def locked_delete(self):", "body": "if self._cache:self._cache.delete(self._key_name)self._delete_entity()", "docstring": "Delete Credential from datastore.", "id": "f2463:c4:m6"} {"signature": "def get_flow(self):", "body": "return getattr(self._tls, '', None)", "docstring": "A thread local Flow object.\n\n Returns:\n A credentials.Flow object, or None if the flow hasn't been set in\n this thread yet, which happens in _create_flow() since Flows are\n created lazily.", "id": "f2463:c6:m3"} {"signature": "def oauth_aware(self, method):", "body": "def setup_oauth(request_handler, *args, **kwargs):if self._in_error:self._display_error_message(request_handler)returnuser = users.get_current_user()if not user:request_handler.redirect(users.create_login_url(request_handler.request.uri))returnself._create_flow(request_handler)self.flow.params[''] = _build_state_value(request_handler,user)self.credentials = self._storage_class(self._credentials_class, None,self._credentials_property_name, user=user).get()try:resp = method(request_handler, *args, **kwargs)finally:self.credentials = Nonereturn respreturn setup_oauth", "docstring": "Decorator that sets up for OAuth 2.0 dance, but doesn't do it.\n\n Does all the setup for the OAuth dance, but doesn't initiate it.\n This decorator is useful if you want to create a page that knows\n whether or not the user has granted access to this application.\n From within a method decorated with @oauth_aware the has_credentials()\n and authorize_url() methods can be called.\n\n Args:\n method: callable, to be decorated method of a webapp.RequestHandler\n instance.", "id": "f2463:c6:m8"} 
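The App Engine records above (the f2463 ids) all serve the decorator pattern; a minimal sketch of how they compose follows. This sketch is not part of the corpus: webapp2 is assumed as the handler framework, and the client_id, client_secret, scope, and handler name are placeholders, while the methods called (oauth_aware, has_credentials, authorize_url, http) are the ones documented in the surrounding records.

.. code-block:: python

    import webapp2
    from oauth2client.contrib.appengine import OAuth2Decorator

    # Placeholder client settings -- replace with real values.
    decorator = OAuth2Decorator(
        client_id='hypothetical-client-id',
        client_secret='hypothetical-client-secret',
        scope='https://www.googleapis.com/auth/userinfo.email')


    class MainHandler(webapp2.RequestHandler):

        @decorator.oauth_aware
        def get(self):
            # oauth_aware only sets up the flow, so the handler can branch
            # on whether the user has already granted access (see the
            # has_credentials and authorize_url records above).
            if decorator.has_credentials():
                # decorator.http() returns an httplib2.Http authorized
                # with the stored credentials.
                self.response.write('Already authorized.')
            else:
                self.response.write('<a href="{}">Authorize</a>'.format(
                    decorator.authorize_url()))


    app = webapp2.WSGIApplication([('/', MainHandler)])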
{"signature": "@_helpers.positional()def __init__(self, scope, **kwargs):", "body": "self.scope = _helpers.scopes_to_string(scope)self._kwargs = kwargsself.service_account_id = kwargs.get('', None)self._service_account_email = Nonesuper(AppAssertionCredentials, self).__init__(None)", "docstring": "Constructor for AppAssertionCredentials\n\n Args:\n scope: string or iterable of strings, scope(s) of the credentials\n being requested.\n **kwargs: optional keyword args, including:\n service_account_id: service account id of the application. If None\n or unspecified, the default service account for\n the app is used.", "id": "f2463:c1:m0"} {"signature": "@_helpers.positional()def oauth2decorator_from_clientsecrets(filename, scope,message=None, cache=None):", "body": "return OAuth2DecoratorFromClientSecrets(filename, scope,message=message, cache=cache)", "docstring": "Creates an OAuth2Decorator populated from a clientsecrets file.\n\n Args:\n filename: string, File name of client secrets.\n scope: string or list of strings, scope(s) of the credentials being\n requested.\n message: string, A friendly string to display to the user if the\n clientsecrets file is missing or invalid. The message may\n contain HTML and will be presented on the web interface for\n any method that uses the decorator.\n cache: An optional cache service client that implements get() and set()\n methods. See clientsecrets.loadfile() for details.\n\n Returns: An OAuth2Decorator", "id": "f2463:m5"} {"signature": "def has_credentials(self):", "body": "if not self.credentials:return Falseelif (self.credentials.access_token_expired andnot self.credentials.refresh_token):return Falseelse:return True", "docstring": "Returns True if there are valid credentials for the current user.", "id": "f2464:c0:m9"} {"signature": "def _get_flow_for_token(csrf_token):", "body": "flow_pickle = session.pop(_FLOW_KEY.format(csrf_token), None)if flow_pickle is None:return Noneelse:return pickle.loads(flow_pickle)", "docstring": "Retrieves the flow instance associated with a given CSRF token from\n the Flask session.", "id": "f2464:m0"} {"signature": "def authorize_view(self):", "body": "args = request.args.to_dict()args[''] = request.args.getlist('')return_url = args.pop('', None)if return_url is None:return_url = request.referrer or ''flow = self._make_flow(return_url=return_url, **args)auth_url = flow.step1_get_authorize_url()return redirect(auth_url)", "docstring": "Flask view that starts the authorization flow.\n\n Starts flow by redirecting the user to the OAuth2 provider.", "id": "f2464:c0:m6"} {"signature": "@propertydef credentials(self):", "body": "ctx = _app_ctx_stack.topif not hasattr(ctx, _CREDENTIALS_KEY):ctx.google_oauth2_credentials = self.storage.get()return ctx.google_oauth2_credentials", "docstring": "The credentials for the current user or None if unavailable.", "id": "f2464:c0:m8"} {"signature": "def authorize_url(self, return_url, **kwargs):", "body": "return url_for('', return_url=return_url, **kwargs)", "docstring": "Creates a URL that can be used to start the authorization flow.\n\n When the user is directed to the URL, the authorization flow will\n begin. 
Once complete, the user will be redirected to the specified\n return URL.\n\n Any kwargs are passed into the flow constructor.", "id": "f2464:c0:m12"} {"signature": "def __init__(self, service_name, user_name):", "body": "super(Storage, self).__init__(lock=threading.Lock())self._service_name = service_nameself._user_name = user_name", "docstring": "Constructor.\n\n Args:\n service_name: string, The name of the service under which the\n credentials are stored.\n user_name: string, The name of the user to store credentials for.", "id": "f2465:c0:m0"} {"signature": "def locked_delete(self):", "body": "keyring.set_password(self._service_name, self._user_name, '')", "docstring": "Delete the stored Credentials.", "id": "f2465:c0:m3"} {"signature": "def verify(self, message, signature):", "body": "message = _helpers._to_bytes(message, encoding='')try:return rsa.pkcs1.verify(message, signature, self._pubkey)except (ValueError, rsa.pkcs1.VerificationError):return False", "docstring": "Verifies a message against a signature.\n\n Args:\n message: string or bytes, The message to verify. If string, will be\n encoded to bytes as utf-8.\n signature: string or bytes, The signature on the message. If\n string, will be encoded to bytes as utf-8.\n\n Returns:\n True if message was signed by the private key associated with the\n public key that this object was constructed with.", "id": "f2467:c0:m1"} {"signature": "@classmethoddef from_string(cls, key, password=''):", "body": "key = _helpers._from_bytes(key) marker_id, key_bytes = pem.readPemBlocksFromFile(six.StringIO(key), _PKCS1_MARKER, _PKCS8_MARKER)if marker_id == :pkey = rsa.key.PrivateKey.load_pkcs1(key_bytes,format='')elif marker_id == :key_info, remaining = decoder.decode(key_bytes, asn1Spec=_PKCS8_SPEC)if remaining != b'':raise ValueError('', remaining)pkey_info = key_info.getComponentByName('')pkey = rsa.key.PrivateKey.load_pkcs1(pkey_info.asOctets(),format='')else:raise ValueError('')return cls(pkey)", "docstring": "Construct an RsaSigner instance from a string.\n\n Args:\n key: string, private key in PEM format.\n password: string, password for private key file. Unused for PEM\n files.\n\n Returns:\n RsaSigner instance.\n\n Raises:\n ValueError if the key cannot be parsed as PKCS#1 or PKCS#8 in\n PEM format.", "id": "f2467:c1:m2"} {"signature": "def verify_signed_jwt_with_certs(jwt, certs, audience=None):", "body": "jwt = _helpers._to_bytes(jwt)if jwt.count(b'') != :raise AppIdentityError(''.format(jwt))header, payload, signature = jwt.split(b'')message_to_sign = header + b'' + payloadsignature = _helpers._urlsafe_b64decode(signature)payload_bytes = _helpers._urlsafe_b64decode(payload)try:payload_dict = json.loads(_helpers._from_bytes(payload_bytes))except:raise AppIdentityError(''.format(payload_bytes))_verify_signature(message_to_sign, signature, certs.values())_verify_time_range(payload_dict)_check_audience(payload_dict, audience)return payload_dict", "docstring": "Verify a JWT against public certs.\n\n See http://self-issued.info/docs/draft-jones-json-web-token.html.\n\n Args:\n jwt: string, A JWT.\n certs: dict, Dictionary where the values are public keys in PEM\n format.\n audience: string, The audience, 'aud', that this JWT should contain. 
If\n None then the JWT's 'aud' parameter is not verified.\n\n Returns:\n dict, The deserialized JSON payload in the JWT.\n\n Raises:\n AppIdentityError: if any checks are failed.", "id": "f2468:m5"} {"signature": "def _verify_signature(message, signature, certs):", "body": "for pem in certs:verifier = Verifier.from_string(pem, is_x509_cert=True)if verifier.verify(message, signature):returnraise AppIdentityError('')", "docstring": "Verifies signed content using a list of certificates.\n\n Args:\n message: string or bytes, The message to verify.\n signature: string or bytes, The signature on the message.\n certs: iterable, certificates in PEM format.\n\n Raises:\n AppIdentityError: If none of the certificates can verify the message\n against the signature.", "id": "f2468:m2"} {"signature": "def loadfile(filename, cache=None):", "body": "_SECRET_NAMESPACE = ''if not cache:return _loadfile(filename)obj = cache.get(filename, namespace=_SECRET_NAMESPACE)if obj is None:client_type, client_info = _loadfile(filename)obj = {client_type: client_info}cache.set(filename, obj, namespace=_SECRET_NAMESPACE)return next(six.iteritems(obj))", "docstring": "Loading of client_secrets JSON file, optionally backed by a cache.\n\n Typical cache storage would be App Engine memcache service,\n but you can pass in any other cache client that implements\n these methods:\n\n * ``get(key, namespace=ns)``\n * ``set(key, value, namespace=ns)``\n\n Usage::\n\n # without caching\n client_type, client_info = loadfile('secrets.json')\n # using App Engine memcache service\n from google.appengine.api import memcache\n client_type, client_info = loadfile('secrets.json', cache=memcache)\n\n Args:\n filename: string, Path to a client_secrets.json file on a filesystem.\n cache: An optional cache service client that implements get() and set()\n methods. If not specified, the file is always being loaded from\n a filesystem.\n\n Raises:\n InvalidClientSecretsError: In case of a validation error or some\n I/O failure. Can happen only on cache miss.\n\n Returns:\n (client_type, client_info) tuple, as _loadfile() normally would.\n JSON contents is validated only during first load. Cache hits are not\n validated.", "id": "f2469:m4"} {"signature": "def code_challenge(verifier):", "body": "digest = hashlib.sha256(verifier).digest()return base64.urlsafe_b64encode(digest).rstrip(b'')", "docstring": "Creates a 'code_challenge' as described in section 4.2 of RFC 7636\nby taking the sha256 hash of the verifier and then urlsafe\nbase64-encoding it.\n\nArgs:\n verifier: bytestring, representing a code_verifier as generated by\n code_verifier().\n\nReturns:\n Bytestring, representing a urlsafe base64-encoded sha256 hash digest,\n without '=' padding.", "id": "f2470:m1"} {"signature": "def code_verifier(n_bytes=):", "body": "verifier = base64.urlsafe_b64encode(os.urandom(n_bytes)).rstrip(b'')if len(verifier) < :raise ValueError(\"\")elif len(verifier) > :raise ValueError(\"\")else:return verifier", "docstring": "Generates a 'code_verifier' as described in section 4.1 of RFC 7636.\n\nThis is a 'high-entropy cryptographic random string' that will be\nimpractical for an attacker to guess.\n\nArgs:\n n_bytes: integer between 31 and 96, inclusive. 
default: 64\n number of bytes of entropy to include in verifier.\n\nReturns:\n Bytestring, representing urlsafe base64-encoded random data.", "id": "f2470:m0"} {"signature": "def sign(self, message):", "body": "message = _helpers._to_bytes(message, encoding='')return crypto.sign(self._key, message, '')", "docstring": "Signs a message.\n\n Args:\n message: bytes, Message to be signed.\n\n Returns:\n string, The signature of the message for the given key.", "id": "f2471:c1:m1"} {"signature": "def __init__(self, pkey):", "body": "self._key = pkey", "docstring": "Constructor.\n\n Args:\n pkey: OpenSSL.crypto.PKey (or equiv), The private key to sign with.", "id": "f2471:c1:m0"} {"signature": "def verify(self, message, signature):", "body": "message = _helpers._to_bytes(message, encoding='')signature = _helpers._to_bytes(signature, encoding='')try:crypto.verify(self._pubkey, signature, message, '')return Trueexcept crypto.Error:return False", "docstring": "Verifies a message against a signature.\n\n Args:\n message: string or bytes, The message to verify. If string, will be\n encoded to bytes as utf-8.\n signature: string or bytes, The signature on the message. If string,\n will be encoded to bytes as utf-8.\n\n Returns:\n True if message was signed by the private key associated with the\n public key that this object was constructed with.", "id": "f2471:c0:m1"} {"signature": "def __init__(self, pubkey):", "body": "self._pubkey = pubkey", "docstring": "Constructor.\n\n Args:\n pubkey: OpenSSL.crypto.PKey, The public key to verify with.", "id": "f2471:c0:m0"} {"signature": "def save_to_well_known_file(credentials, well_known_file=None):", "body": "if well_known_file is None:well_known_file = _get_well_known_file()config_dir = os.path.dirname(well_known_file)if not os.path.isdir(config_dir):raise OSError(''.format(config_dir))credentials_data = credentials.serialization_data_save_private_file(well_known_file, credentials_data)", "docstring": "Save the provided GoogleCredentials to the well known file.\n\n Args:\n credentials: the credentials to be saved to the well known file;\n it should be an instance of GoogleCredentials\n well_known_file: the name of the file where the credentials are to be\n saved; this parameter is supposed to be used for\n testing only", "id": "f2472:m5"} {"signature": "@_helpers.positional()def __init__(self, access_token, client_id, client_secret, refresh_token,token_expiry, token_uri, user_agent, revoke_uri=None,id_token=None, token_response=None, scopes=None,token_info_uri=None, id_token_jwt=None):", "body": "self.access_token = access_tokenself.client_id = client_idself.client_secret = client_secretself.refresh_token = refresh_tokenself.store = Noneself.token_expiry = token_expiryself.token_uri = token_uriself.user_agent = user_agentself.revoke_uri = revoke_uriself.id_token = id_tokenself.id_token_jwt = id_token_jwtself.token_response = token_responseself.scopes = set(_helpers.string_to_scopes(scopes or []))self.token_info_uri = token_info_uriself.invalid = False", "docstring": "Create an instance of OAuth2Credentials.\n\n This constructor is not usually called by the user, instead\n OAuth2Credentials objects are instantiated by the OAuth2WebServerFlow.\n\n Args:\n access_token: string, access token.\n client_id: string, client identifier.\n client_secret: string, client secret.\n refresh_token: string, refresh token.\n token_expiry: datetime, when the access_token expires.\n token_uri: string, URI of token endpoint.\n user_agent: string, The HTTP User-Agent to provide 
for this\n application.\n revoke_uri: string, URI for revoke endpoint. Defaults to None; a\n token can't be revoked if this is None.\n id_token: object, The identity of the resource owner.\n token_response: dict, the decoded response to the token request.\n None if a token hasn't been requested yet. Stored\n because some providers (e.g. wordpress.com) include\n extra fields that clients may want.\n scopes: list, authorized scopes for these credentials.\n token_info_uri: string, the URI for the token info endpoint.\n Defaults to None; scopes can not be refreshed if\n this is None.\n id_token_jwt: string, the encoded and signed identity JWT. The\n decoded version of this is stored in id_token.\n\n Notes:\n store: callable, A callable that when passed a Credential\n will store the credential back to where it came from.\n This is needed to store the latest access_token if it\n has expired and been refreshed.", "id": "f2472:c16:m0"} {"signature": "def retrieve_scopes(self, http):", "body": "self._retrieve_scopes(http)return self.scopes", "docstring": "Retrieves the canonical list of scopes for this access token.\n\n Gets the scopes from the OAuth2 provider.\n\n Args:\n http: httplib2.Http, an http object to be used to make the refresh\n request.\n\n Returns:\n A set of strings containing the canonical list of scopes.", "id": "f2472:c16:m6"} {"signature": "def _updateFromCredential(self, other):", "body": "self.__dict__.update(other.__getstate__())", "docstring": "Update this Credential from another instance.", "id": "f2472:c16:m12"} {"signature": "@_helpers.positional()def __init__(self, assertion_type, user_agent=None,token_uri=oauth2client.GOOGLE_TOKEN_URI,revoke_uri=oauth2client.GOOGLE_REVOKE_URI,**unused_kwargs):", "body": "super(AssertionCredentials, self).__init__(None,None,None,None,None,token_uri,user_agent,revoke_uri=revoke_uri)self.assertion_type = assertion_type", "docstring": "Constructor for AssertionCredentials.\n\n Args:\n assertion_type: string, assertion type that will be declared to the\n auth server\n user_agent: string, The HTTP User-Agent to provide for this\n application.\n token_uri: string, URI for token endpoint. 
For convenience defaults\n to Google's endpoints but any OAuth 2.0 provider can be\n used.\n revoke_uri: string, URI for revoke endpoint.", "id": "f2472:c19:m0"} {"signature": "@staticmethoddef from_stream(credential_filename):", "body": "if credential_filename and os.path.isfile(credential_filename):try:return _get_application_default_credential_from_file(credential_filename)except (ApplicationDefaultCredentialsError, ValueError) as error:extra_help = ('''')_raise_exception_for_reading_json(credential_filename,extra_help,error)else:raise ApplicationDefaultCredentialsError('''')", "docstring": "Create a Credentials object by reading information from a file.\n\n It returns an object of type GoogleCredentials.\n\n Args:\n credential_filename: the path to the file from where the\n credentials are to be read\n\n Raises:\n ApplicationDefaultCredentialsError: raised when the credentials\n fail to be retrieved.", "id": "f2472:c18:m10"} {"signature": "def _do_revoke(self, http, token):", "body": "logger.info('')query_params = {'': token}token_revoke_uri = _helpers.update_query_params(self.revoke_uri, query_params)resp, content = transport.request(http, token_revoke_uri)if resp.status == http_client.METHOD_NOT_ALLOWED:body = urllib.parse.urlencode(query_params)resp, content = transport.request(http, token_revoke_uri,method='', body=body)if resp.status == http_client.OK:self.invalid = Trueelse:error_msg = ''.format(resp.status)try:d = json.loads(_helpers._from_bytes(content))if '' in d:error_msg = d['']except (TypeError, ValueError):passraise TokenRevokeError(error_msg)if self.store:self.store.delete()", "docstring": "Revokes this credential and deletes the stored copy (if it exists).\n\n Args:\n http: an object to be used to make HTTP requests.\n token: A string used as the token to be revoked. Can be either an\n access_token or refresh_token.\n\n Raises:\n TokenRevokeError: If the revoke request does not return with a\n 200 OK.", "id": "f2472:c16:m20"} {"signature": "def _parse_exchange_token_response(content):", "body": "resp = {}content = _helpers._from_bytes(content)try:resp = json.loads(content)except Exception:resp = _helpers.parse_unique_urlencoded(content)if resp and '' in resp:resp[''] = resp.pop('')return resp", "docstring": "Parses response of an exchange token request.\n\n Most providers return JSON but some (e.g. Facebook) return a\n url-encoded string.\n\n Args:\n content: The body of a response\n\n Returns:\n Content as a dictionary object. Note that the dict could be empty,\n i.e. {}. 
That basically indicates a failure.", "id": "f2472:m16"} {"signature": "def _get_application_default_credential_from_file(filename):", "body": "with open(filename) as file_obj:client_credentials = json.load(file_obj)credentials_type = client_credentials.get('')if credentials_type == AUTHORIZED_USER:required_fields = set(['', '', ''])elif credentials_type == SERVICE_ACCOUNT:required_fields = set(['', '', '',''])else:raise ApplicationDefaultCredentialsError(\"\" +AUTHORIZED_USER + \"\" + SERVICE_ACCOUNT + \"\")missing_fields = required_fields.difference(client_credentials.keys())if missing_fields:_raise_exception_for_missing_fields(missing_fields)if client_credentials[''] == AUTHORIZED_USER:return GoogleCredentials(access_token=None,client_id=client_credentials[''],client_secret=client_credentials[''],refresh_token=client_credentials[''],token_expiry=None,token_uri=oauth2client.GOOGLE_TOKEN_URI,user_agent='')else: from oauth2client import service_accountreturn service_account._JWTAccessCredentials.from_json_keyfile_dict(client_credentials)", "docstring": "Build the Application Default Credentials from file.", "id": "f2472:m8"} {"signature": "def authorize(self, http):", "body": "raise NotImplementedError", "docstring": "Takes an httplib2.Http instance (or equivalent) and authorizes it.\n\n Authorizes it for the set of credentials, usually by replacing\n http.request() with a method that adds in the appropriate headers and\n then delegates to the original Http.request() method.\n\n Args:\n http: httplib2.Http, an http object to be used to make the refresh\n request.", "id": "f2472:c13:m0"} {"signature": "def _get_well_known_file():", "body": "default_config_dir = os.getenv(_CLOUDSDK_CONFIG_ENV_VAR)if default_config_dir is None:if os.name == '':try:default_config_dir = os.path.join(os.environ[''],_CLOUDSDK_CONFIG_DIRECTORY)except KeyError:drive = os.environ.get('', '')default_config_dir = os.path.join(drive, '',_CLOUDSDK_CONFIG_DIRECTORY)else:default_config_dir = os.path.join(os.path.expanduser(''),'',_CLOUDSDK_CONFIG_DIRECTORY)return os.path.join(default_config_dir, _WELL_KNOWN_CREDENTIALS_FILE)", "docstring": "Get the well known file produced by command 'gcloud auth login'.", "id": "f2472:m7"} {"signature": "@_helpers.positional()def flow_from_clientsecrets(filename, scope, redirect_uri=None,message=None, cache=None, login_hint=None,device_uri=None, pkce=None, code_verifier=None,prompt=None):", "body": "try:client_type, client_info = clientsecrets.loadfile(filename,cache=cache)if client_type in (clientsecrets.TYPE_WEB,clientsecrets.TYPE_INSTALLED):constructor_kwargs = {'': redirect_uri,'': client_info[''],'': client_info[''],'': login_hint,}revoke_uri = client_info.get('')optional = ('','','','','')for param in optional:if locals()[param] is not None:constructor_kwargs[param] = locals()[param]return OAuth2WebServerFlow(client_info[''], client_info[''],scope, **constructor_kwargs)except clientsecrets.InvalidClientSecretsError as e:if message is not None:if e.args:message = (''''.format(e, message))sys.exit(message)else:raiseelse:raise UnknownClientSecretsFlowError(''.format(client_type))", "docstring": "Create a Flow from a clientsecrets file.\n\n Will create the right kind of Flow based on the contents of the\n clientsecrets file or will raise InvalidClientSecretsError for unknown\n types of Flows.\n\n Args:\n filename: string, File name of client secrets.\n scope: string or iterable of strings, scope(s) to request.\n redirect_uri: string, Either the string 'urn:ietf:wg:oauth:2.0:oob' for\n a 
non-web-based application, or a URI that handles the\n callback from the authorization server.\n message: string, A friendly string to display to the user if the\n clientsecrets file is missing or invalid. If message is\n provided then sys.exit will be called in the case of an error.\n If message is not provided then\n clientsecrets.InvalidClientSecretsError will be raised.\n cache: An optional cache service client that implements get() and set()\n methods. See clientsecrets.loadfile() for details.\n login_hint: string, Either an email address or domain. Passing this\n hint will either pre-fill the email box on the sign-in form\n or select the proper multi-login session, thereby\n simplifying the login flow.\n device_uri: string, URI for device authorization endpoint. For\n convenience defaults to Google's endpoints but any\n OAuth 2.0 provider can be used.\n\n Returns:\n A Flow object.\n\n Raises:\n UnknownClientSecretsFlowError: if the file describes an unknown kind of\n Flow.\n clientsecrets.InvalidClientSecretsError: if the clientsecrets file is\n invalid.", "id": "f2472:m20"} {"signature": "def revoke(self, http):", "body": "self._revoke(http)", "docstring": "Revokes a refresh_token and makes the credentials void.\n\n Args:\n http: httplib2.Http, an http object to be used to make the revoke\n request.", "id": "f2472:c16:m3"} {"signature": "@_helpers.positional()def step2_exchange(self, code=None, http=None, device_flow_info=None):", "body": "if code is None and device_flow_info is None:raise ValueError('')if code is not None and device_flow_info is not None:raise ValueError('')if code is None:code = device_flow_info.device_codeelif not isinstance(code, (six.string_types, six.binary_type)):if '' not in code:raise FlowExchangeError(code.get('', ''))code = code['']post_data = {'': self.client_id,'': code,'': self.scope,}if self.client_secret is not None:post_data[''] = self.client_secretif self._pkce:post_data[''] = self.code_verifierif device_flow_info is not None:post_data[''] = ''else:post_data[''] = ''post_data[''] = self.redirect_uribody = urllib.parse.urlencode(post_data)headers = {'': '',}if self.authorization_header is not None:headers[''] = self.authorization_headerif self.user_agent is not None:headers[''] = self.user_agentif http is None:http = transport.get_http_object()resp, content = transport.request(http, self.token_uri, method='', body=body, headers=headers)d = _parse_exchange_token_response(content)if resp.status == http_client.OK and '' in d:access_token = d['']refresh_token = d.get('', None)if not refresh_token:logger.info(''\"\")token_expiry = Noneif '' in d:delta = datetime.timedelta(seconds=int(d['']))token_expiry = delta + _UTCNOW()extracted_id_token = Noneid_token_jwt = Noneif '' in d:extracted_id_token = _extract_id_token(d[''])id_token_jwt = d['']logger.info('')return OAuth2Credentials(access_token, self.client_id, self.client_secret,refresh_token, token_expiry, self.token_uri, self.user_agent,revoke_uri=self.revoke_uri, id_token=extracted_id_token,id_token_jwt=id_token_jwt, token_response=d, scopes=self.scope,token_info_uri=self.token_info_uri)else:logger.info('', content)if '' in d:error_msg = (str(d['']) +str(d.get('', '')))else:error_msg = ''.format(str(resp.status))raise FlowExchangeError(error_msg)", "docstring": "Exchanges a code for OAuth2Credentials.\n\n Args:\n code: string, a dict-like object, or None. For a non-device\n flow, this is either the response code as a string, or a\n dictionary of query parameters to the redirect_uri. 
For a\n device flow, this should be None.\n http: httplib2.Http, optional http instance to use when fetching\n credentials.\n device_flow_info: DeviceFlowInfo, return value from step1 in the\n case of a device flow.\n\n Returns:\n An OAuth2Credentials object that can be used to authorize requests.\n\n Raises:\n FlowExchangeError: if a problem occurred exchanging the code for a\n refresh_token.\n ValueError: if code and device_flow_info are both provided or both\n missing.", "id": "f2472:c21:m3"} {"signature": "def apply(self, headers):", "body": "headers[''] = '' + self.access_token", "docstring": "Add the authorization to the headers.\n\n Args:\n headers: dict, the headers to add the Authorization header to.", "id": "f2472:c16:m4"} {"signature": "def _to_json(self, strip, to_serialize=None):", "body": "curr_type = self.__class__if to_serialize is None:to_serialize = copy.copy(self.__dict__)else:to_serialize = copy.copy(to_serialize)for member in strip:if member in to_serialize:del to_serialize[member]to_serialize[''] = _parse_expiry(to_serialize.get(''))to_serialize[''] = curr_type.__name__to_serialize[''] = curr_type.__module__for key, val in to_serialize.items():if isinstance(val, bytes):to_serialize[key] = val.decode('')if isinstance(val, set):to_serialize[key] = list(val)return json.dumps(to_serialize)", "docstring": "Utility function that creates JSON repr. of a Credentials object.\n\n Args:\n strip: array, An array of names of members to exclude from the\n JSON.\n to_serialize: dict, (Optional) The properties for this object\n that will be serialized. This allows callers to\n modify before serializing.\n\n Returns:\n string, a JSON representation of this instance, suitable to pass to\n from_json().", "id": "f2472:c13:m4"} {"signature": "def _in_gce_environment():", "body": "if SETTINGS.env_name is not None:return SETTINGS.env_name == ''if NO_GCE_CHECK != '' and _detect_gce_environment():SETTINGS.env_name = ''return Truereturn False", "docstring": "Detect if the code is running in the Compute Engine environment.\n\n Returns:\n True if running in the GCE environment, False otherwise.", "id": "f2472:m3"} {"signature": "def _detect_gce_environment():", "body": "http = transport.get_http_object(timeout=GCE_METADATA_TIMEOUT)try:response, _ = transport.request(http, _GCE_METADATA_URI, headers=_GCE_HEADERS)return (response.status == http_client.OK andresponse.get(_METADATA_FLAVOR_HEADER) == _DESIRED_METADATA_FLAVOR)except socket.error: logger.info('')return False", "docstring": "Determine if the current environment is Compute Engine.\n\n Returns:\n Boolean indicating whether or not the current environment is Google\n Compute Engine.", "id": "f2472:m1"} {"signature": "def create_scoped_required(self):", "body": "return False", "docstring": "Whether this Credentials object is scopeless.\n\n create_scoped(scopes) method needs to be called in order to create\n a Credentials object for API calls.", "id": "f2472:c18:m1"} {"signature": "def _oauth2_web_server_flow_params(kwargs):", "body": "params = {'': '','': '',}params.update(kwargs)approval_prompt = params.get('')if approval_prompt is not None:logger.warning('''')if approval_prompt == '':logger.warning('''')params[''] = ''del params['']return params", "docstring": "Configures redirect URI parameters for OAuth2WebServerFlow.", "id": "f2472:m19"} {"signature": "def _do_refresh_request(self, http):", "body": "body = self._generate_refresh_request_body()headers = self._generate_refresh_request_headers()logger.info('')resp, content = 
transport.request(http, self.token_uri, method='',body=body, headers=headers)content = _helpers._from_bytes(content)if resp.status == http_client.OK:d = json.loads(content)self.token_response = dself.access_token = d['']self.refresh_token = d.get('', self.refresh_token)if '' in d:delta = datetime.timedelta(seconds=int(d['']))self.token_expiry = delta + _UTCNOW()else:self.token_expiry = Noneif '' in d:self.id_token = _extract_id_token(d[''])self.id_token_jwt = d['']else:self.id_token = Noneself.id_token_jwt = Noneself.invalid = Falseif self.store:self.store.locked_put(self)else:logger.info('', content)error_msg = ''.format(resp.status)try:d = json.loads(content)if '' in d:error_msg = d['']if '' in d:error_msg += '' + d['']self.invalid = Trueif self.store is not None:self.store.locked_put(self)except (TypeError, ValueError):passraise HttpAccessTokenRefreshError(error_msg, status=resp.status)", "docstring": "Refresh the access_token using the refresh_token.\n\n Args:\n http: an object to be used to make HTTP requests.\n\n Raises:\n HttpAccessTokenRefreshError: When the refresh fails.", "id": "f2472:c16:m18"} {"signature": "def _revoke(self, http):", "body": "self._do_revoke(http, self.refresh_token or self.access_token)", "docstring": "Revokes this credential and deletes the stored copy (if it exists).\n\n Args:\n http: an object to be used to make HTTP requests.", "id": "f2472:c16:m19"} {"signature": "@_helpers.positional()def credentials_from_clientsecrets_and_code(filename, scope, code,message=None,redirect_uri='',http=None,cache=None,device_uri=None):", "body": "flow = flow_from_clientsecrets(filename, scope, message=message,cache=cache, redirect_uri=redirect_uri,device_uri=device_uri)credentials = flow.step2_exchange(code, http=http)return credentials", "docstring": "Returns OAuth2Credentials from a clientsecrets file and an auth code.\n\n Will create the right kind of Flow based on the contents of the\n clientsecrets file or will raise InvalidClientSecretsError for unknown\n types of Flows.\n\n Args:\n filename: string, File name of clientsecrets.\n scope: string or iterable of strings, scope(s) to request.\n code: string, An authorization code, most likely passed down from\n the client.\n message: string, A friendly string to display to the user if the\n clientsecrets file is missing or invalid. If message is\n provided then sys.exit will be called in the case of an error.\n If message is not provided then\n clientsecrets.InvalidClientSecretsError will be raised.\n redirect_uri: string, this is generally set to 'postmessage' to match\n the redirect_uri that the client specified\n http: httplib2.Http, optional http instance to use to do the fetch\n cache: An optional cache service client that implements get() and set()\n methods. See clientsecrets.loadfile() for details.\n device_uri: string, OAuth 2.0 device authorization endpoint\n pkce: boolean, default: False, Generate and include a \"Proof Key\n for Code Exchange\" (PKCE) with your authorization and token\n requests. This adds security for installed applications that\n cannot protect a client_secret. See RFC 7636 for details.\n code_verifier: bytestring or None, default: None, parameter passed\n as part of the code exchange when pkce=True. If\n None, a code_verifier will automatically be\n generated as part of step1_get_authorize_url(). 
See\n RFC 7636 for details.\n\n Returns:\n An OAuth2Credentials object.\n\n Raises:\n FlowExchangeError: if the authorization code cannot be exchanged for an\n access token\n UnknownClientSecretsFlowError: if the file describes an unknown kind\n of Flow.\n clientsecrets.InvalidClientSecretsError: if the clientsecrets file is\n invalid.", "id": "f2472:m18"} {"signature": "def __init__(self, access_token, user_agent, revoke_uri=None):", "body": "super(AccessTokenCredentials, self).__init__(access_token,None,None,None,None,None,user_agent,revoke_uri=revoke_uri)", "docstring": "Create an instance of OAuth2Credentials.\n\n This is one of the few types of Credentials that you should construct;\n Credentials objects are usually instantiated by a Flow.\n\n Args:\n access_token: string, access token.\n user_agent: string, The HTTP User-Agent to provide for this\n application.\n revoke_uri: string, URI for revoke endpoint. Defaults to None; a\n token can't be revoked if this is None.", "id": "f2472:c17:m0"} {"signature": "def _in_gae_environment():", "body": "if SETTINGS.env_name is not None:return SETTINGS.env_name in ('', '')try:import google.appengine except ImportError:passelse:server_software = os.environ.get(_SERVER_SOFTWARE, '')if server_software.startswith(''):SETTINGS.env_name = ''return Trueelif server_software.startswith(''):SETTINGS.env_name = ''return Truereturn False", "docstring": "Detects if the code is running in the App Engine environment.\n\n Returns:\n True if running in the GAE environment, False otherwise.", "id": "f2472:m2"} {"signature": "def locked_get(self):", "body": "raise NotImplementedError", "docstring": "Retrieve credential.\n\n The Storage lock must be held when this is called.\n\n Returns:\n oauth2client.client.Credentials", "id": "f2472:c15:m3"} {"signature": "def __init__(self, access_token, client_id, client_secret, refresh_token,token_expiry, token_uri, user_agent,revoke_uri=oauth2client.GOOGLE_REVOKE_URI):", "body": "super(GoogleCredentials, self).__init__(access_token, client_id, client_secret, refresh_token,token_expiry, token_uri, user_agent, revoke_uri=revoke_uri)", "docstring": "Create an instance of GoogleCredentials.\n\n This constructor is not usually called by the user, instead\n GoogleCredentials objects are instantiated by\n GoogleCredentials.from_stream() or\n GoogleCredentials.get_application_default().\n\n Args:\n access_token: string, access token.\n client_id: string, client identifier.\n client_secret: string, client secret.\n refresh_token: string, refresh token.\n token_expiry: datetime, when the access_token expires.\n token_uri: string, URI of token endpoint.\n user_agent: string, The HTTP User-Agent to provide for this\n application.\n revoke_uri: string, URI for revoke endpoint. Defaults to\n oauth2client.GOOGLE_REVOKE_URI; a token can't be\n revoked if this is None.", "id": "f2472:c18:m0"} {"signature": "def apply(self, headers):", "body": "raise NotImplementedError", "docstring": "Add the authorization to the headers.\n\n Args:\n headers: dict, the headers to add the Authorization header to.", "id": "f2472:c13:m3"} {"signature": "def __init__(self, lock=None):", "body": "self._lock = lock", "docstring": "Create a Storage instance.\n\n Args:\n lock: An optional threading.Lock-like object. Must implement at\n least acquire() and release(). 
Does not need to be\n re-entrant.", "id": "f2472:c15:m0"} {"signature": "def get(self):", "body": "self.acquire_lock()try:return self.locked_get()finally:self.release_lock()", "docstring": "Retrieve credential.\n\n The Storage lock must *not* be held when this is called.\n\n Returns:\n oauth2client.client.Credentials", "id": "f2472:c15:m6"} {"signature": "def set_store(self, store):", "body": "self.store = store", "docstring": "Set the Storage for the credential.\n\n Args:\n store: Storage, an implementation of Storage object.\n This is needed to store the latest access_token if it\n has expired and been refreshed. This implementation uses\n locking to check for updates before updating the\n access_token.", "id": "f2472:c16:m10"} {"signature": "def _do_retrieve_scopes(self, http, token):", "body": "logger.info('')query_params = {'': token, '': ''}token_info_uri = _helpers.update_query_params(self.token_info_uri, query_params)resp, content = transport.request(http, token_info_uri)content = _helpers._from_bytes(content)if resp.status == http_client.OK:d = json.loads(content)self.scopes = set(_helpers.string_to_scopes(d.get('', '')))else:error_msg = ''.format(resp.status)try:d = json.loads(content)if '' in d:error_msg = d['']except (TypeError, ValueError):passraise Error(error_msg)", "docstring": "Retrieves the list of authorized scopes from the OAuth2 provider.\n\n Args:\n http: an object to be used to make HTTP requests.\n token: A string used as the token to identify the credentials to\n the provider.\n\n Raises:\n Error: When refresh fails, indicating that the access token is\n invalid.", "id": "f2472:c16:m22"} {"signature": "def acquire_lock(self):", "body": "if self._lock is not None:self._lock.acquire()", "docstring": "Acquires any lock necessary to access this Storage.\n\n This lock is not reentrant.", "id": "f2472:c15:m1"} {"signature": "def authorize(self, http):", "body": "transport.wrap_http_for_auth(self, http)return http", "docstring": "Authorize an httplib2.Http instance with these credentials.\n\n The modified http.request method will add authentication headers to\n each request and will refresh access_tokens when a 401 is received on a\n request. In addition the http.request method has a credentials\n property, http.request.credentials, which is the Credentials object\n that authorized it.\n\n Args:\n http: An instance of ``httplib2.Http`` or something that acts\n like it.\n\n Returns:\n A modified instance of http that was passed in.\n\n Example::\n\n h = httplib2.Http()\n h = credentials.authorize(h)\n\n You can't create a new OAuth subclass of httplib2.Authentication\n because it never gets passed the absolute URI, which is needed for\n signing. 
So instead we have to overload 'request' with a closure\n that adds in the Authorization header and then calls the original\n version of 'request()'.", "id": "f2472:c16:m1"} {"signature": "def release_lock(self):", "body": "if self._lock is not None:self._lock.release()", "docstring": "Release the Storage lock.\n\n Trying to release a lock that isn't held will result in a\n RuntimeError in the case of a threading.Lock or multiprocessing.Lock.", "id": "f2472:c15:m2"} {"signature": "def create_scoped(self, scopes):", "body": "return self", "docstring": "Create a Credentials object for the given scopes.\n\n The Credentials type is preserved.", "id": "f2472:c18:m2"} {"signature": "def sign_blob(self, blob):", "body": "raise NotImplementedError('')", "docstring": "Cryptographically sign a blob (of bytes).\n\n Args:\n blob: bytes, Message to be signed.\n\n Returns:\n tuple, A pair of the private key ID used to sign the blob and\n the signed contents.", "id": "f2472:c19:m4"} {"signature": "def revoke(self, http):", "body": "raise NotImplementedError", "docstring": "Revokes a refresh_token and makes the credentials void.\n\n Args:\n http: httplib2.Http, an http object to be used to make the revoke\n request.", "id": "f2472:c13:m2"} {"signature": "def to_json(self):", "body": "return self._to_json(self.NON_SERIALIZED_MEMBERS)", "docstring": "Creating a JSON representation of an instance of Credentials.\n\n Returns:\n string, a JSON representation of this instance, suitable to pass to\n from_json().", "id": "f2472:c13:m5"} {"signature": "@_helpers.positional()def verify_id_token(id_token, audience, http=None,cert_uri=ID_TOKEN_VERIFICATION_CERTS):", "body": "_require_crypto_or_die()if http is None:http = transport.get_cached_http()resp, content = transport.request(http, cert_uri)if resp.status == http_client.OK:certs = json.loads(_helpers._from_bytes(content))return crypt.verify_signed_jwt_with_certs(id_token, certs, audience)else:raise VerifyJwtTokenError(''.format(resp.status))", "docstring": "Verifies a signed JWT id_token.\n\n This function requires PyOpenSSL and because of that it does not work on\n App Engine.\n\n Args:\n id_token: string, A Signed JWT.\n audience: string, The audience 'aud' that the token should be for.\n http: httplib2.Http, instance to use to make the HTTP request. 
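For example, verifying an ID token with the client ID as the expected audience (the client ID value is a placeholder):

    from oauth2client import client, crypt

    try:
        payload = client.verify_id_token(id_token, 'my-client-id.apps.googleusercontent.com')
        user_id = payload['sub']  # stable Google account identifier
    except crypt.AppIdentityError:
        user_id = None  # signature, audience, or expiry check failed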
Callers\n should supply an instance that has caching enabled.\n cert_uri: string, URI of the certificates in JSON format to\n verify the JWT against.\n\n Returns:\n The deserialized JSON in the JWT.\n\n Raises:\n oauth2client.crypt.AppIdentityError: if the JWT fails to verify.\n CryptoUnavailableError: if no crypto library is available.", "id": "f2472:m14"} {"signature": "@_helpers.positional()def step1_get_device_and_user_codes(self, http=None):", "body": "if self.device_uri is None:raise ValueError('')body = urllib.parse.urlencode({'': self.client_id,'': self.scope,})headers = {'': '',}if self.user_agent is not None:headers[''] = self.user_agentif http is None:http = transport.get_http_object()resp, content = transport.request(http, self.device_uri, method='', body=body, headers=headers)content = _helpers._from_bytes(content)if resp.status == http_client.OK:try:flow_info = json.loads(content)except ValueError as exc:raise OAuth2DeviceCodeError(''''.format(content, exc))return DeviceFlowInfo.FromResponse(flow_info)else:error_msg = ''.format(resp.status)try:error_dict = json.loads(content)if '' in error_dict:error_msg += ''.format(error_dict[''])except ValueError:passraise OAuth2DeviceCodeError(error_msg)", "docstring": "Returns a user code and the verification URL at which to enter it\n\n Returns:\n A user code as a string for the user to authorize the application\n A URL as a string where the user has to enter the code", "id": "f2472:c21:m2"} {"signature": "def wrap_http_for_auth(credentials, http):", "body": "orig_request_method = http.requestdef new_request(uri, method='', body=None, headers=None,redirections=httplib2.DEFAULT_MAX_REDIRECTS,connection_type=None):if not credentials.access_token:_LOGGER.info('''')credentials._refresh(orig_request_method)headers = _initialize_headers(headers)credentials.apply(headers)_apply_user_agent(headers, credentials.user_agent)body_stream_position = Noneif all(getattr(body, stream_prop, None) for stream_prop in_STREAM_PROPERTIES):body_stream_position = body.tell()resp, content = request(orig_request_method, uri, method, body,clean_headers(headers),redirections, connection_type)max_refresh_attempts = for refresh_attempt in range(max_refresh_attempts):if resp.status not in REFRESH_STATUS_CODES:break_LOGGER.info('',resp.status, refresh_attempt + ,max_refresh_attempts)credentials._refresh(orig_request_method)credentials.apply(headers)if body_stream_position is not None:body.seek(body_stream_position)resp, content = request(orig_request_method, uri, method, body,clean_headers(headers),redirections, connection_type)return resp, contenthttp.request = new_requesthttp.request.credentials = credentials", "docstring": "Prepares an HTTP object's request method for auth.\n\n Wraps HTTP requests with logic to catch auth failures (typically\n identified via a 401 status code). 
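The device-code entry point above is typically paired with a polling exchange; a simplified sketch (assuming an OAuth2WebServerFlow instance named flow, and that step2_exchange accepts device_flow_info as in the real library):

    import time

    flow_info = flow.step1_get_device_and_user_codes()
    print('Visit %s and enter code %s' % (flow_info.verification_url,
                                          flow_info.user_code))
    while True:
        time.sleep(flow_info.interval or 5)
        try:
            credentials = flow.step2_exchange(device_flow_info=flow_info)
            break
        except client.FlowExchangeError:
            pass  # authorization still pending; keep polling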
In the event of failure, tries\n to refresh the token used and then retry the original request.\n\n Args:\n credentials: Credentials, the credentials used to identify\n the authenticated user.\n http: httplib2.Http, an http object to be used to make\n auth requests.", "id": "f2474:m5"} {"signature": "def get_http_object(*args, **kwargs):", "body": "return httplib2.Http(*args, **kwargs)", "docstring": "Return a new HTTP object.\n\n Args:\n *args: tuple, The positional arguments to be passed when\n constructing a new HTTP object.\n **kwargs: dict, The keyword arguments to be passed when\n constructing a new HTTP object.\n\n Returns:\n httplib2.Http, an HTTP object.", "id": "f2474:m1"} {"signature": "def request(http, uri, method='', body=None, headers=None,redirections=httplib2.DEFAULT_MAX_REDIRECTS,connection_type=None):", "body": "http_callable = getattr(http, '', http)return http_callable(uri, method=method, body=body, headers=headers,redirections=redirections,connection_type=connection_type)", "docstring": "Make an HTTP request with an HTTP object and arguments.\n\n Args:\n http: httplib2.Http, an http object to be used to make requests.\n uri: string, The URI to be requested.\n method: string, The HTTP method to use for the request. Defaults\n to 'GET'.\n body: string, The payload / body in HTTP request. By default\n there is no payload.\n headers: dict, Key-value pairs of request headers. By default\n there are no headers.\n redirections: int, The number of allowed redirects for\n the request. Defaults to 5.\n connection_type: httplib.HTTPConnection, a subclass to be used for\n establishing connection. If not set, the type\n will be determined from the ``uri``.\n\n Returns:\n tuple, a pair of a httplib2.Response with the status code and other\n headers and the bytes of the content returned.", "id": "f2474:m7"} {"signature": "def clean_headers(headers):", "body": "clean = {}try:for k, v in six.iteritems(headers):if not isinstance(k, six.binary_type):k = str(k)if not isinstance(v, six.binary_type):v = str(v)clean[_helpers._to_bytes(k)] = _helpers._to_bytes(v)except UnicodeEncodeError:from oauth2client.client import NonAsciiHeaderErrorraise NonAsciiHeaderError(k, '', v)return clean", "docstring": "Forces header keys and values to be strings, i.e. not unicode.\n\n The httplib module just concatenates the header keys and values in a way that\n may make the message header a unicode string, which, if it then tries to\n concatenate to a binary request body may result in a unicode decode error.\n\n Args:\n headers: dict, A dictionary of headers.\n\n Returns:\n The same dictionary but with all the keys converted to strings.", "id": "f2474:m4"} {"signature": "def wrap_http_for_jwt_access(credentials, http):", "body": "orig_request_method = http.requestwrap_http_for_auth(credentials, http)authenticated_request_method = http.requestdef new_request(uri, method='', body=None, headers=None,redirections=httplib2.DEFAULT_MAX_REDIRECTS,connection_type=None):if '' in credentials._kwargs:if (credentials.access_token is None orcredentials.access_token_expired):credentials.refresh(None)return request(authenticated_request_method, uri,method, body, headers, redirections,connection_type)else:headers = _initialize_headers(headers)_apply_user_agent(headers, credentials.user_agent)uri_root = uri.split('', )[]token, unused_expiry = credentials._create_token({'': uri_root})headers[''] = '' + tokenreturn request(orig_request_method, uri, method, body,clean_headers(headers),redirections, connection_type)http.request = 
new_requesthttp.request.credentials = credentials", "docstring": "Prepares an HTTP object's request method for JWT access.\n\n Wraps HTTP requests with logic to catch auth failures (typically\n identified via a 401 status code). In the event of failure, tries\n to refresh the token used and then retry the original request.\n\n Args:\n credentials: _JWTAccessCredentials, the credentials used to identify\n a service account that uses JWT access tokens.\n http: httplib2.Http, an http object to be used to make\n auth requests.", "id": "f2474:m6"} {"signature": "def _apply_user_agent(headers, user_agent):", "body": "if user_agent is not None:if '' in headers:headers[''] = (user_agent + '' + headers[''])else:headers[''] = user_agentreturn headers", "docstring": "Adds a user-agent to the headers.\n\n Args:\n headers: dict, request headers to add / modify user\n agent within.\n user_agent: str, the user agent to add.\n\n Returns:\n dict, the original headers passed in, but modified if the\n user agent is not None.", "id": "f2474:m3"} {"signature": "def _create_file_if_needed(self):", "body": "if not os.path.exists(self._filename):old_umask = os.umask()try:open(self._filename, '').close()finally:os.umask(old_umask)", "docstring": "Create an empty file if necessary.\n\n This method will not initialize the file. Instead it implements a\n simple version of \"touch\" to ensure the file has been created.", "id": "f2475:c0:m2"} {"signature": "def locked_delete(self):", "body": "os.unlink(self._filename)", "docstring": "Delete the Credentials file.", "id": "f2475:c0:m4"} {"signature": "def do_GET(self):", "body": "self.send_response(http_client.OK)self.send_header('', '')self.end_headers()parts = urllib.parse.urlparse(self.path)query = _helpers.parse_unique_urlencoded(parts.query)self.server.query_params = queryself.wfile.write(b'')self.wfile.write(b'')self.wfile.write(b'')", "docstring": "Handle a GET request.\n\n Parses the query parameters and prints a message\n if the flow has completed. 
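The refresh-and-retry behaviour installed by wrap_http_for_auth and wrap_http_for_jwt_access can be summarized by this standalone sketch (simplified to a single retry, with no body-stream rewinding; the function name is illustrative):

    def request_with_refresh(credentials, http_request, uri, headers):
        credentials.apply(headers)                # attach the current access token
        resp, content = http_request(uri, headers=headers)
        if resp.status == 401:                    # REFRESH_STATUS_CODES in the real module
            credentials._refresh(http_request)    # fetch a new token
            credentials.apply(headers)            # re-apply and retry once
            resp, content = http_request(uri, headers=headers)
        return resp, content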
Note that we can't detect\n if an error occurred.", "id": "f2476:c1:m0"} {"signature": "@_helpers.positional()def run_flow(flow, storage, flags=None, http=None):", "body": "if flags is None:flags = argparser.parse_args()logging.getLogger().setLevel(getattr(logging, flags.logging_level))if not flags.noauth_local_webserver:success = Falseport_number = for port in flags.auth_host_port:port_number = porttry:httpd = ClientRedirectServer((flags.auth_host_name, port),ClientRedirectHandler)except socket.error:passelse:success = Truebreakflags.noauth_local_webserver = not successif not success:print(_FAILED_START_MESSAGE)if not flags.noauth_local_webserver:oauth_callback = ''.format(host=flags.auth_host_name, port=port_number)else:oauth_callback = client.OOB_CALLBACK_URNflow.redirect_uri = oauth_callbackauthorize_url = flow.step1_get_authorize_url()if not flags.noauth_local_webserver:import webbrowserwebbrowser.open(authorize_url, new=, autoraise=True)print(_BROWSER_OPENED_MESSAGE.format(address=authorize_url))else:print(_GO_TO_LINK_MESSAGE.format(address=authorize_url))code = Noneif not flags.noauth_local_webserver:httpd.handle_request()if '' in httpd.query_params:sys.exit('')if '' in httpd.query_params:code = httpd.query_params['']else:print('''')sys.exit('')else:code = input('').strip()try:credential = flow.step2_exchange(code, http=http)except client.FlowExchangeError as e:sys.exit(''.format(e))storage.put(credential)credential.set_store(storage)print('')return credential", "docstring": "Core code for a command-line application.\n\n The ``run()`` function is called from your application and runs\n through all the steps to obtain credentials. It takes a ``Flow``\n argument and attempts to open an authorization server page in the\n user's default web browser. The server asks the user to grant your\n application access to the user's data. If the user grants access,\n the ``run()`` function returns new credentials. The new credentials\n are also stored in the ``storage`` argument, which updates the file\n associated with the ``Storage`` object.\n\n It presumes it is run from a command-line application and supports the\n following flags:\n\n ``--auth_host_name`` (string, default: ``localhost``)\n Host name to use when running a local web server to handle\n redirects during OAuth authorization.\n\n ``--auth_host_port`` (integer, default: ``[8080, 8090]``)\n Port to use when running a local web server to handle redirects\n during OAuth authorization. Repeat this option to specify a list\n of values.\n\n ``--[no]auth_local_webserver`` (boolean, default: ``True``)\n Run a local web server to handle redirects during OAuth\n authorization.\n\n The tools module defines an ``ArgumentParser`` that already contains the\n flag definitions that ``run()`` requires. You can pass that\n ``ArgumentParser`` to your ``ArgumentParser`` constructor::\n\n parser = argparse.ArgumentParser(\n description=__doc__,\n formatter_class=argparse.RawDescriptionHelpFormatter,\n parents=[tools.argparser])\n flags = parser.parse_args(argv)\n\n Args:\n flow: Flow, an OAuth 2.0 Flow to step through.\n storage: Storage, a ``Storage`` to store the credential in.\n flags: ``argparse.Namespace``, (Optional) The command-line flags. This\n is the object returned from calling ``parse_args()`` on\n ``argparse.ArgumentParser`` as described above. 
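End to end, run_flow is typically driven like this (file names and scope are placeholders):

    import argparse
    from oauth2client import client, tools
    from oauth2client.file import Storage

    parser = argparse.ArgumentParser(parents=[tools.argparser])
    flags = parser.parse_args()

    flow = client.flow_from_clientsecrets(
        'client_secrets.json',
        scope='https://www.googleapis.com/auth/drive.readonly')
    storage = Storage('credentials.dat')
    credentials = tools.run_flow(flow, storage, flags)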
Defaults\n to ``argparser.parse_args()``.\n http: An instance of ``httplib2.Http.request`` or something that\n acts like it.\n\n Returns:\n Credentials, the obtained credential.", "id": "f2476:m1"} {"signature": "def revoke(self, http):", "body": "pass", "docstring": "Cannot revoke JWTAccessCredentials tokens.", "id": "f2477:c1:m3"} {"signature": "@classmethoddef _from_p12_keyfile_contents(cls, service_account_email,private_key_pkcs12,private_key_password=None, scopes='',token_uri=oauth2client.GOOGLE_TOKEN_URI,revoke_uri=oauth2client.GOOGLE_REVOKE_URI):", "body": "if private_key_password is None:private_key_password = _PASSWORD_DEFAULTif crypt.Signer is not crypt.OpenSSLSigner:raise NotImplementedError(_PKCS12_ERROR)signer = crypt.Signer.from_string(private_key_pkcs12,private_key_password)credentials = cls(service_account_email, signer, scopes=scopes,token_uri=token_uri, revoke_uri=revoke_uri)credentials._private_key_pkcs12 = private_key_pkcs12credentials._private_key_password = private_key_passwordreturn credentials", "docstring": "Factory constructor from PKCS#12 keyfile contents.\n\n Args:\n service_account_email: string, The email associated with the\n service account.\n private_key_pkcs12: string, The contents of a PKCS#12 keyfile.\n private_key_password: string, (Optional) Password for PKCS#12\n private key. Defaults to ``notasecret``.\n scopes: List or string, (Optional) Scopes to use when acquiring an\n access token.\n token_uri: string, URI for token endpoint. For convenience defaults\n to Google's endpoints but any OAuth 2.0 provider can be\n used.\n revoke_uri: string, URI for revoke endpoint. For convenience\n defaults to Google's endpoints but any OAuth 2.0\n provider can be used.\n\n Returns:\n ServiceAccountCredentials, a credentials object created from\n the keyfile.\n\n Raises:\n NotImplementedError if pyOpenSSL is not installed / not the\n active crypto library.", "id": "f2477:c0:m5"} {"signature": "def get_access_token(self, http=None, additional_claims=None):", "body": "if additional_claims is None:if self.access_token is None or self.access_token_expired:self.refresh(None)return client.AccessTokenInfo(access_token=self.access_token, expires_in=self._expires_in())else:token, unused_expiry = self._create_token(additional_claims)return client.AccessTokenInfo(access_token=token, expires_in=self._MAX_TOKEN_LIFETIME_SECS)", "docstring": "Create a signed jwt.\n\n Args:\n http: unused\n additional_claims: dict, additional claims to add to\n the payload of the JWT.\n Returns:\n An AccessTokenInfo with the signed jwt", "id": "f2477:c1:m2"} {"signature": "@classmethoddef from_p12_keyfile(cls, service_account_email, filename,private_key_password=None, scopes='',token_uri=oauth2client.GOOGLE_TOKEN_URI,revoke_uri=oauth2client.GOOGLE_REVOKE_URI):", "body": "with open(filename, '') as file_obj:private_key_pkcs12 = file_obj.read()return cls._from_p12_keyfile_contents(service_account_email, private_key_pkcs12,private_key_password=private_key_password, scopes=scopes,token_uri=token_uri, revoke_uri=revoke_uri)", "docstring": "Factory constructor from PKCS#12 keyfile.\n\n Args:\n service_account_email: string, The email associated with the\n service account.\n filename: string, The location of the PKCS#12 keyfile.\n private_key_password: string, (Optional) Password for PKCS#12\n private key. Defaults to ``notasecret``.\n scopes: List or string, (Optional) Scopes to use when acquiring an\n access token.\n token_uri: string, URI for token endpoint. 
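Usage of the PKCS#12 factory above is a one-liner (email, path, and scope are placeholders; pyOpenSSL must be the active crypto library, per the Raises note):

    from oauth2client.service_account import ServiceAccountCredentials

    credentials = ServiceAccountCredentials.from_p12_keyfile(
        'my-robot@my-project.iam.gserviceaccount.com',
        'key.p12',
        scopes='https://www.googleapis.com/auth/devstorage.read_only')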
For convenience defaults\n to Google's endpoints but any OAuth 2.0 provider can be\n used.\n revoke_uri: string, URI for revoke endpoint. For convenience\n defaults to Google's endpoints but any OAuth 2.0\n provider can be used.\n\n Returns:\n ServiceAccountCredentials, a credentials object created from\n the keyfile.\n\n Raises:\n NotImplementedError if pyOpenSSL is not installed / not the\n active crypto library.", "id": "f2477:c0:m6"} {"signature": "@classmethoddef _from_parsed_json_keyfile(cls, keyfile_dict, scopes,token_uri=None, revoke_uri=None):", "body": "creds_type = keyfile_dict.get('')if creds_type != client.SERVICE_ACCOUNT:raise ValueError('', creds_type,'', client.SERVICE_ACCOUNT)service_account_email = keyfile_dict['']private_key_pkcs8_pem = keyfile_dict['']private_key_id = keyfile_dict['']client_id = keyfile_dict['']if not token_uri:token_uri = keyfile_dict.get('',oauth2client.GOOGLE_TOKEN_URI)if not revoke_uri:revoke_uri = keyfile_dict.get('',oauth2client.GOOGLE_REVOKE_URI)signer = crypt.Signer.from_string(private_key_pkcs8_pem)credentials = cls(service_account_email, signer, scopes=scopes,private_key_id=private_key_id,client_id=client_id, token_uri=token_uri,revoke_uri=revoke_uri)credentials._private_key_pkcs8_pem = private_key_pkcs8_pemreturn credentials", "docstring": "Helper for factory constructors from JSON keyfile.\n\n Args:\n keyfile_dict: dict-like object, The parsed dictionary-like object\n containing the contents of the JSON keyfile.\n scopes: List or string, Scopes to use when acquiring an\n access token.\n token_uri: string, URI for OAuth 2.0 provider token endpoint.\n If unset and not present in keyfile_dict, defaults\n to Google's endpoints.\n revoke_uri: string, URI for OAuth 2.0 provider revoke endpoint.\n If unset and not present in keyfile_dict, defaults\n to Google's endpoints.\n\n Returns:\n ServiceAccountCredentials, a credentials object created from\n the keyfile contents.\n\n Raises:\n ValueError, if the credential type is not :data:`SERVICE_ACCOUNT`.\n KeyError, if one of the expected keys is not present in\n the keyfile.", "id": "f2477:c0:m2"} {"signature": "@classmethoddef from_json_keyfile_dict(cls, keyfile_dict, scopes='',token_uri=None, revoke_uri=None):", "body": "return cls._from_parsed_json_keyfile(keyfile_dict, scopes,token_uri=token_uri,revoke_uri=revoke_uri)", "docstring": "Factory constructor from parsed JSON keyfile.\n\n Args:\n keyfile_dict: dict-like object, The parsed dictionary-like object\n containing the contents of the JSON keyfile.\n scopes: List or string, (Optional) Scopes to use when acquiring an\n access token.\n token_uri: string, URI for OAuth 2.0 provider token endpoint.\n If unset and not present in keyfile_dict, defaults\n to Google's endpoints.\n revoke_uri: string, URI for OAuth 2.0 provider revoke endpoint.\n If unset and not present in keyfile_dict, defaults\n to Google's endpoints.\n\n Returns:\n ServiceAccountCredentials, a credentials object created from\n the keyfile.\n\n Raises:\n ValueError, if the credential type is not :data:`SERVICE_ACCOUNT`.\n KeyError, if one of the expected keys is not present in\n the keyfile.", "id": "f2477:c0:m4"} {"signature": "@classmethoddef from_p12_keyfile_buffer(cls, service_account_email, file_buffer,private_key_password=None, scopes='',token_uri=oauth2client.GOOGLE_TOKEN_URI,revoke_uri=oauth2client.GOOGLE_REVOKE_URI):", "body": "private_key_pkcs12 = file_buffer.read()return cls._from_p12_keyfile_contents(service_account_email, 
private_key_pkcs12,private_key_password=private_key_password, scopes=scopes,token_uri=token_uri, revoke_uri=revoke_uri)", "docstring": "Factory constructor from PKCS#12 keyfile buffer.\n\n Args:\n service_account_email: string, The email associated with the\n service account.\n file_buffer: stream, A buffer that implements ``read()``\n and contains the PKCS#12 key contents.\n private_key_password: string, (Optional) Password for PKCS#12\n private key. Defaults to ``notasecret``.\n scopes: List or string, (Optional) Scopes to use when acquiring an\n access token.\n token_uri: string, URI for token endpoint. For convenience defaults\n to Google's endpoints but any OAuth 2.0 provider can be\n used.\n revoke_uri: string, URI for revoke endpoint. For convenience\n defaults to Google's endpoints but any OAuth 2.0\n provider can be used.\n\n Returns:\n ServiceAccountCredentials, a credentials object created from\n the keyfile.\n\n Raises:\n NotImplementedError if pyOpenSSL is not installed / not the\n active crypto library.", "id": "f2477:c0:m7"} {"signature": "def create_delegated(self, sub):", "body": "return self.create_with_claims({'': sub})", "docstring": "Create credentials that act as domain-wide delegation of authority.\n\n Use the ``sub`` parameter as the subject to delegate on behalf of\n that user.\n\n For example::\n\n >>> account_sub = 'foo@email.com'\n >>> delegate_creds = creds.create_delegated(account_sub)\n\n Args:\n sub: string, An email address that this service account will\n act on behalf of (via domain-wide delegation).\n\n Returns:\n ServiceAccountCredentials, a copy of the current service account\n updated to act on behalf of ``sub``.", "id": "f2477:c0:m16"} {"signature": "def discover_modules(self):", "body": "modules = [self.package_name]for dirpath, dirnames, filenames in os.walk(self.root_path):root_uri = self._path2uri(os.path.join(self.root_path,dirpath))filenames = [f[:-] for f in filenames iff.endswith('') and not f.startswith('')]for filename in filenames:package_uri = ''.join((dirpath, filename))for subpkg_name in dirnames + filenames:package_uri = ''.join((root_uri, subpkg_name))package_path = self._uri2path(package_uri)if (package_path andself._survives_exclude(package_uri, '')):modules.append(package_uri)return sorted(modules)", "docstring": "Return module sequence discovered from ``self.package_name``\n\n\n Parameters\n ----------\n None\n\n Returns\n -------\n mods : sequence\n Sequence of module names within ``self.package_name``\n\n Examples\n --------\n >>> dw = ApiDocWriter('sphinx')\n >>> mods = dw.discover_modules()\n >>> 'sphinx.util' in mods\n True\n >>> dw.package_skip_patterns.append('\\.util$')\n >>> 'sphinx.util' in dw.discover_modules()\n False\n >>>", "id": "f2494:c0:m12"} {"signature": "def _get_object_name(self, line):", "body": "name = line.split()[].split('')[].strip()return name.rstrip('')", "docstring": "Get second token in line\n >>> docwriter = ApiDocWriter('sphinx')\n >>> docwriter._get_object_name(\" def func(): \")\n 'func'\n >>> docwriter._get_object_name(\" class Klass(object): \")\n 'Klass'\n >>> docwriter._get_object_name(\" class Klass: \")\n 'Klass'", "id": "f2494:c0:m4"} {"signature": "def _parse_lines(self, linesource):", "body": "functions = []classes = []for line in linesource:if line.startswith('') and line.count(''):name = self._get_object_name(line)if not name.startswith(''):functions.append(name)elif line.startswith(''):name = self._get_object_name(line)if not 
name.startswith(''):classes.append(name)else:passfunctions.sort()classes.sort()return functions, classes", "docstring": "Parse lines of text for functions and classes", "id": "f2494:c0:m9"} {"signature": "def write_index(self, outdir, froot='', relative_to=None):", "body": "if self.written_modules is None:raise ValueError('')path = os.path.join(outdir, froot+self.rst_extension)if relative_to is not None:relpath = (outdir + os.path.sep).replace(relative_to + os.path.sep, '')else:relpath = outdiridx = open(path,'')w = idx.writew('')title = \"\"w(title + \"\")w(\"\" * len(title) + \"\")w('')for f in self.written_modules:w('' % os.path.join(relpath,f))idx.close()", "docstring": "Make a reST API index file from written files\n\n Parameters\n ----------\n path : string\n Filename to write index to\n outdir : string\n Directory to which to write generated index file\n froot : string, optional\n root (filename without extension) of filename to write to\n Defaults to 'gen'. We add ``self.rst_extension``.\n relative_to : string\n path to which written filenames are relative. This\n component of the written file path will be removed from\n outdir, in the generated index. Default is None, meaning,\n leave path as it is.", "id": "f2494:c0:m15"} {"signature": "def _import(self, name):", "body": "mod = __import__(name)components = name.split('')for comp in components[:]:mod = getattr(mod, comp)return mod", "docstring": "Import namespace package", "id": "f2494:c0:m3"} {"signature": "def set_package_name(self, package_name):", "body": "self._package_name = package_nameroot_module = self._import(package_name)self.root_path = root_module.__path__[-]self.written_modules = None", "docstring": "Set package_name\n\n >>> docwriter = ApiDocWriter('sphinx')\n >>> import sphinx\n >>> docwriter.root_path == sphinx.__path__[0]\n True\n >>> docwriter.package_name = 'docutils'\n >>> import docutils\n >>> docwriter.root_path == docutils.__path__[0]\n True", "id": "f2494:c0:m2"} {"signature": "def _bias_correction(V_IJ, inbag, pred_centered, n_trees):", "body": "n_train_samples = inbag.shape[]n_var = np.mean(np.square(inbag[:n_trees]).mean(axis=).T.view() -np.square(inbag[:n_trees].mean(axis=)).T.view())boot_var = np.square(pred_centered).sum(axis=) / n_treesbias_correction = n_train_samples * n_var * boot_var / n_treesV_IJ_unbiased = V_IJ - bias_correctionreturn V_IJ_unbiased", "docstring": "Helper functions that implements bias correction\n\nParameters\n----------\nV_IJ : ndarray\n Intermediate result in the computation.\n\ninbag : ndarray\n The inbag matrix that fit the data. If set to `None` (default) it\n will be inferred from the forest. However, this only works for trees\n for which bootstrapping was set to `True`. That is, if sampling was\n done with replacement. 
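The inbag matrix referred to here can be recovered from a fitted forest with calc_inbag (defined in the next record); a sketch, where X_train and y_train are placeholders for the training data:

    from sklearn.ensemble import RandomForestRegressor

    forest = RandomForestRegressor(n_estimators=100, bootstrap=True)
    forest.fit(X_train, y_train)
    # Entry (i, t) counts how often training sample i was drawn into the
    # bootstrap sample used to grow tree t.
    inbag = calc_inbag(X_train.shape[0], forest)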
Otherwise, users need to provide their own\n inbag matrix.\n\npred_centered : ndarray\n Centered predictions that are an intermediate result in the\n computation.\n\nn_trees : int\n The number of trees in the forest object.", "id": "f2501:m2"} {"signature": "def calc_inbag(n_samples, forest):", "body": "if not forest.bootstrap:e_s = \"\"e_s = \"\"raise ValueError(e_s)n_trees = forest.n_estimatorsinbag = np.zeros((n_samples, n_trees))sample_idx = []for t_idx in range(n_trees):sample_idx.append(_generate_sample_indices(forest.estimators_[t_idx].random_state,n_samples))inbag[:, t_idx] = np.bincount(sample_idx[-], minlength=n_samples)return inbag", "docstring": "Derive samples used to create trees in scikit-learn RandomForest objects.\n\nRecovers the samples in each tree from the random state of that tree using\n:func:`forest._generate_sample_indices`.\n\nParameters\n----------\nn_samples : int\n The number of samples used to fit the scikit-learn RandomForest object.\n\nforest : RandomForest\n Regressor or Classifier object that is already fit by scikit-learn.\n\nReturns\n-------\nArray that records how many times a data point was placed in a tree.\nColumns are individual trees. Rows are the number of times a sample was\nused in a tree.", "id": "f2501:m0"} {"signature": "def gbayes(x0, g_est, sigma):", "body": "Kx = norm().pdf((g_est[] - x0) / sigma)post = Kx * g_est[]post /= sum(post)return sum(post * g_est[])", "docstring": "Estimate Bayes posterior with Gaussian noise [Efron2014]_.\n\nParameters\n----------\nx0: ndarray\n an observation\ng_est: float\n a prior density, as returned by gfit\nsigma: int\n noise estimate\n\nReturns\n-------\nAn array of the posterior estimate E[mu | x0]", "id": "f2502:m1"} {"signature": "def gfit(X, sigma, p=, nbin=, unif_fraction=):", "body": "min_x = min(min(X) - * np.std(X, ddof=), )max_x = max(max(X) + * np.std(X, ddof=),np.std(X, ddof=))xvals = np.linspace(min_x, max_x, nbin)binw = (max_x - min_x) / (nbin - )zero_idx = max(np.where(xvals <= )[])noise_kernel = norm().pdf(xvals / sigma) * binw / sigmaif zero_idx > :noise_rotate = noise_kernel[list(np.arange(zero_idx, len(xvals))) +list(np.arange(, zero_idx))]else:noise_rotate = noise_kernelXX = np.zeros((p, len(xvals)), dtype=np.float)for ind, exp in enumerate(range(, p+)):mask = np.ones_like(xvals)mask[np.where(xvals <= )[]] = XX[ind, :] = pow(xvals, exp) * maskXX = XX.Tdef neg_loglik(eta):mask = np.ones_like(xvals)mask[np.where(xvals <= )[]] = g_eta_raw = np.exp(np.dot(XX, eta)) * maskif ((np.sum(g_eta_raw) == np.inf) |(np.sum(g_eta_raw) <= * np.finfo(np.double).tiny)):return ( * (len(X) + sum(eta ** )))g_eta_main = g_eta_raw / sum(g_eta_raw)g_eta = (( - unif_fraction) * g_eta_main +unif_fraction * mask / sum(mask))f_eta = fftconvolve(g_eta, noise_rotate, mode='')return np.sum(np.interp(X, xvals,-np.log(np.maximum(f_eta, ))))eta_hat = minimize(neg_loglik,list(itertools.repeat(-, p))).xg_eta_raw = np.exp(np.dot(XX, eta_hat)) * maskg_eta_main = g_eta_raw / sum(g_eta_raw)g_eta = (( - unif_fraction) * g_eta_main +unif_fraction * mask) / sum(mask)return xvals, g_eta", "docstring": "Fit empirical Bayes prior in the hierarchical model [Efron2014]_.\n\n.. math::\n\n mu ~ G, X ~ N(mu, sigma^2)\n\nParameters\n----------\nX: ndarray\n A 1D array of observations.\nsigma: float\n Noise estimate on X.\np: int\n Number of parameters used to fit G. Default: 5\nnbin: int\n Number of bins used for discrete approximation.\n Default: 200\nunif_fraction: float\n Fraction of G modeled as \"slab\". 
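The two estimators compose directly: fit the prior once, then evaluate the posterior mean at each observation (X and sigma are placeholders for the noisy variance estimates and their noise level):

    import numpy as np

    g_est = gfit(X, sigma)  # (grid, density) pair describing the prior
    posterior = np.array([gbayes(x0, g_est, sigma) for x0 in X])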
Default: 0.1\n\nReturns\n-------\nAn array of the posterior density estimate g.", "id": "f2502:m0"} {"signature": "def calibrateEB(variances, sigma2):", "body": "if (sigma2 <= or min(variances) == max(variances)):return(np.maximum(variances, ))sigma = np.sqrt(sigma2)eb_prior = gfit(variances, sigma)part = functools.partial(gbayes, g_est=eb_prior,sigma=sigma)if len(variances) >= :calib_x = np.percentile(variances,np.arange(, , ))calib_y = list(map(part, calib_x))calib_all = np.interp(variances, calib_x, calib_y)else:calib_all = list(map(part, variances))return np.asarray(calib_all)", "docstring": "Calibrate noisy variance estimates with empirical Bayes.\n\nParameters\n----------\nvars: ndarray\n List of variance estimates.\nsigma2: int\n Estimate of the Monte Carlo noise in vars.\n\nReturns\n-------\nAn array of the calibrated variance estimates", "id": "f2502:m2"} {"signature": "def dcite(self, *args, **kwargs):", "body": "def nondecorating_decorator(func):return funcreturn nondecorating_decorator", "docstring": "If I could cite I would", "id": "f2504:c0:m1"} {"signature": "def repr_failure(self, excinfo):", "body": "return \"\".format(excinfo,self.indent(self.code),excinfo.getrepr(funcargs=True, style=''))", "docstring": "called when self.runtest() raises an exception.", "id": "f2522:c1:m4"} {"signature": "def backend_extras(*requirements):", "body": "return [\"\"] + list(requirements)", "docstring": "Construct list of requirements for backend integration.\n\n All built-in backends depend on PyOpenGL so add it as default requirement.", "id": "f2536:m3"} {"signature": "def __init__(self, shape, cols=[]):", "body": "self._arrays = {}self._names = []self.shape = shapefor colname, value in cols:self.set(colname, value)self._dtype = None", "docstring": "cols is a list of (colname, values), shape has to be 1D.", "id": "f2538:c1:m0"} {"signature": "def read(self, n):", "body": "while len(self.pool) < n:self.cur = self.files.next()self.pool = numpy.append(self.pool,self.fetch(self.cur), axis=)rt = self.pool[:n]if n == len(self.pool):self.pool = self.fetch(None)else:self.pool = self.pool[n:]return rt", "docstring": "return at most n array items, move the cursor.", "id": "f2540:c0:m2"} {"signature": "def flatten_dtype(dtype, _next=None):", "body": "types = []if _next is None: _next = [, '']primary = Trueelse:primary = Falseprefix = _next[]if dtype.names is None:for i in numpy.ndindex(dtype.shape):if dtype.base == dtype:types.append(('' % (prefix, simplerepr(i)), dtype))_next[] += else:_next[] = '' % (prefix, simplerepr(i))types.extend(flatten_dtype(dtype.base, _next))else:for field in dtype.names:typ_fields = dtype.fields[field]if len(prefix) > :_next[] = prefix + '' + fieldelse:_next[] = '' + fieldflat_dt = flatten_dtype(typ_fields[], _next)types.extend(flat_dt)_next[] = prefixif primary:return numpy.dtype(types)else:return types", "docstring": "Unpack a structured data-type.", "id": "f2541:m8"} {"signature": "def __init__(self, func, ins=None, outdtype=None, altreduce=None):", "body": "if isinstance(func, numpy.ufunc):self.ufunc = funcself.nin = func.ninself.ins = (, , , )[:func.nin]self.nout = func.noutself.outdtype = Noneself.altreduce = Noneelse:self.ufunc = funcself.nin = len(ins)self.ins = insself.nout = self.outdtype = outdtypeself.altreduce = altreduceself.__doc__ = func.__doc__if self.nout != :raise TypeError(\"\")", "docstring": "if func is not ufunc, a bit complicated:\n ins tells which positional argument will be striped\n after done, reducefunc is called on the results", "id": 
"f2542:c0:m0"} {"signature": "def __new__(cls, array, start=None, end=None):", "body": "self = array.view(type=cls)if end is None and start is None:start = numpy.array([len(arr) for arr in array], dtype='')array = numpy.concatenate(array)if end is None:sizes = startself.start = numpy.zeros(shape=len(sizes), dtype='')self.end = numpy.zeros(shape=len(sizes), dtype='')self.end[:] = sizes.cumsum()self.start[:] = self.end[:-]else:self.start = startself.end = endself.A = arrayreturn self", "docstring": "if end is none, start contains the sizes. \n if start is also none, array is a list of arrays to concatenate", "id": "f2542:c1:m0"} {"signature": "def total_memory():", "body": "with file('', '') as f:for line in f:words = line.split()if words[].upper() == '':return int(words[]) * raise IOError('')", "docstring": "Returns the the amount of memory available for use.\n\n The memory is obtained from MemTotal entry in /proc/meminfo.\n\n Notes\n =====\n This function is not very useful and not very portable.", "id": "f2546:m2"} {"signature": "def get(self, Q):", "body": "while self.Errors.empty():try:return Q.get(timeout=)except queue.Empty:if not self.is_alive():try:return Q.get(timeout=)except queue.Empty:raise StopProcessGroupelse:continueelse:raise StopProcessGroup", "docstring": "Protected get. Get an item from Q.\n Will block. but if the process group has errors,\n raise an StopProcessGroup exception.\n\n A slave process will terminate upon StopProcessGroup.\n The master process shall read the error from the process group.", "id": "f2546:c3:m8"} {"signature": "def copy(a):", "body": "shared = anonymousmemmap(a.shape, dtype=a.dtype)shared[:] = a[:]return shared", "docstring": "Copy an array to the shared memory. \n\n Notes\n -----\n copy is not always necessary because the private memory is always copy-on-write.\n\n Use :code:`a = copy(a)` to immediately dereference the old 'a' on private memory", "id": "f2546:m9"} {"signature": "def full_like(array, value, dtype=None):", "body": "shared = empty_like(array, dtype)shared[:] = valuereturn shared", "docstring": "Create a shared memory array with the same shape and type as a given array, filled with `value`.", "id": "f2546:m7"} {"signature": "def empty_like(array, dtype=None):", "body": "array = numpy.asarray(array)if dtype is None: dtype = array.dtypereturn anonymousmemmap(array.shape, dtype)", "docstring": "Create a shared memory array from the shape of array.", "id": "f2546:m5"} {"signature": "def start(self):", "body": "self.thread = Thread(target=self.main)self.thread.daemon = Trueself.thread.start()", "docstring": "master only", "id": "f2547:c3:m3"} {"signature": "def slaveraise(self, type, error, traceback):", "body": "message = '' * + pickle.dumps((type,''.join(tb.format_exception(type, error, traceback))))if self.pipe is not None:self.pipe.put(message)", "docstring": "slave only", "id": "f2547:c3:m5"} {"signature": "def abort(self):", "body": "self.mutex.release()self.turnstile.release()self.mutex.release()self.turnstile2.release()", "docstring": "ensure the master exit from Barrier", "id": "f2547:c5:m1"} {"signature": "def haserror(self):", "body": "return self.message is not None", "docstring": "master only", "id": "f2547:c3:m2"} {"signature": "def kill_all(self):", "body": "for pid in self.children:try:os.kill(pid, signal.SIGTRAP)except OSError:continueself.join()", "docstring": "kill all slaves and reap the monitor", "id": "f2547:c2:m3"} {"signature": "@staticmethoddef _get_classes(package_name, base_class):", "body": "classes = 
{}base_dir = os.getcwd()root_module_name = base_dir.split('')[-]package_dir = base_dir + '' % package_nameif os.path.isdir(package_dir):for module_path in os.listdir(package_dir):if not module_path.endswith(''):continuemodule_name = os.path.splitext(module_path)[]module_full_name = '' % (root_module_name, package_name, module_name)__import__(module_full_name)work_module = sys.modules[module_full_name]for module_item in work_module.__dict__.values():if type(module_item) is typeand issubclass(module_item, base_class)and module_item is not base_classand hasattr(module_item, '') and module_item.name:classes.setdefault(module_item.name, []).append(module_item)for work_name, work_modules in classes.items():if len(work_modules) > :raise DuplicatedNameException('' % (''.join(map(str, work_modules)),work_name))return tuple([(work_name, work_modules[]) for work_name, work_modules in classes.items()])", "docstring": "search monits or works classes. Class must have 'name' attribute\n:param package_name: 'monits' or 'works'\n:param base_class: Monit or Work\n:return: tuple of tuples monit/work-name and class", "id": "f2558:c4:m3"} {"signature": "def format_time_point(time_point_string):", "body": "time_point = dateutil.parser.parse(time_point_string)if not is_aware(time_point):time_point = make_aware(time_point)time_point = local_time_point(time_point)return time_point.strftime(\"\")", "docstring": ":param str time_point_string: String representation of a time point\n to format\n:return: Formatted time point\n:rtype: str\n:raises ValueError: If *time_point_string* is not formatted by\n dateutil.parser.parse\n\nSee :py:meth:`datetime.datetime.isoformat` function for supported formats.", "id": "f2570:m1"} {"signature": "def format_pathname(pathname,max_length):", "body": "if max_length <= :raise ValueError(\"\")if len(pathname) > max_length:pathname = \"\".format(pathname[-(max_length-):])return pathname", "docstring": "Format a pathname\n\n:param str pathname: Pathname to format\n:param int max_length: Maximum length of result pathname (> 3)\n:return: Formatted pathname\n:rtype: str\n:raises ValueError: If *max_length* is not larger than 3\n\nThis function formats a pathname so it is not longer than *max_length*\ncharacters. The resulting pathname is returned. It does so by replacing\ncharacters at the start of the *pathname* with three dots, if necessary.\nThe idea is that the end of the *pathname* is the most important part\nto be able to identify the file.", "id": "f2570:m0"} {"signature": "def register(app):", "body": "error_code.register(app)", "docstring": "Register all available error handlers\n\n:param flask.Flask app: Application instance", "id": "f2573:m0"} {"signature": "def response(code,description):", "body": "payload = jsonify({\"\": code,\"\": description})return payload, code", "docstring": "Format a response\n\n:param int code: HTTP error code\n:param str description: Error message\n:return: Tuple of a wrapped JSON snippet and the error code\n:rtype: Tuple of :py:class:`flask.Response` containing a JSON snippet,\n and the error code\n\nThe JSON snippet is formatted like this:\n\n.. 
code-block:: json\n\n {\n \"status_code\": 404,\n \"message\": \"The requested URL was not found on the server\"\n }", "id": "f2574:m0"} {"signature": "def consume_message(method):", "body": "def wrapper(self,channel,method_frame,header_frame,body):sys.stdout.write(\"\".format(body))sys.stdout.flush()try:body = body.decode(\"\")data = json.loads(body)method(self, data)except Exception as exception:sys.stderr.write(\"\".format(traceback.format_exc()))sys.stderr.flush()channel.basic_ack(delivery_tag=method_frame.delivery_tag)return wrapper", "docstring": "Decorator for methods handling requests from RabbitMQ\n\nThe goal of this decorator is to perform the tasks common to all\nmethods handling requests:\n\n- Log the raw message to *stdout*\n- Decode the message into a Python dictionary\n- Log errors to *stderr*\n- Signal the broker that we're done handling the request\n\nThe method passed in will be called with the message body as a\ndictionary. It is assumed here that the message body is a JSON string\nencoded in UTF8.", "id": "f2576:m1"} {"signature": "def consume_message_with_notify(notifier_uri_getter):", "body": "def consume_message_with_notify_decorator(method):@consume_messagedef wrapper(self,data):notifier_uri = notifier_uri_getter(self)client_id = data[\"\"]try:method(self, data)notify_client(notifier_uri, client_id, )except Exception as exception:notify_client(notifier_uri, client_id, , str(exception))raisereturn wrapperreturn consume_message_with_notify_decorator", "docstring": "Decorator for methods handling requests from RabbitMQ\n\nThis decorator builds on the :py:func:`consume_message` decorator. It extends\nit with logic for notifying a client of the result of handling the\nrequest.\n\nThe *notifier_uri_getter* argument must be a callable which accepts\n*self* and returns the uri of the notifier service.", "id": "f2576:m2"} {"signature": "def make_aware(time_point):", "body": "assert not is_aware(time_point)return time_point.replace(tzinfo=UTC)", "docstring": "Return an aware time point\n\n:param datetime.datetime time_point: Unaware time point in UTC\n:return: Aware time point in UTC timezone\n:rtype: datetime.datetime", "id": "f2578:m2"} {"signature": "def utc_now():", "body": "return datetime.now(timezone.utc)", "docstring": "Return an aware :py:class:`datetime.datetime` instance of the current\ndate and time, in UTC timezone\n\n:return: Current date and time, in UTC timezone\n:rtype: datetime.datetime", "id": "f2578:m0"} {"signature": "def predict(list_items):", "body": "return [i* for i in list_items]", "docstring": "Returns the double of the items", "id": "f2580:m0"} {"signature": "@classmethoddef get_data_manager(cls):", "body": "from parsl.dataflow.dflow import DataFlowKernelLoaderdfk = DataFlowKernelLoader.dfk()return dfk.executors['']", "docstring": "Return the DataManager of the currently loaded DataFlowKernel.", "id": "f2586:c0:m0"} {"signature": "def shutdown(self, block=False):", "body": "x = self.executor.shutdown(wait=block)logger.debug(\"\")return x", "docstring": "Shutdown the ThreadPool.\n\n Kwargs:\n - block (bool): To block for confirmations or not", "id": "f2586:c0:m6"} {"signature": "def stage_in(self, file, executor):", "body": "if file.scheme == '':working_dir = self.dfk.executors[executor].working_dirstage_in_app = self._ftp_stage_in_app(executor=executor)app_fut = stage_in_app(working_dir, outputs=[file])return app_fut._outputs[]elif file.scheme == '' or file.scheme == '':working_dir = self.dfk.executors[executor].working_dirstage_in_app = 
self._http_stage_in_app(executor=executor)app_fut = stage_in_app(working_dir, outputs=[file])return app_fut._outputs[]elif file.scheme == '':globus_ep = self._get_globus_endpoint(executor)stage_in_app = self._globus_stage_in_app()app_fut = stage_in_app(globus_ep, outputs=[file])return app_fut._outputs[]else:raise Exception(''.format(file.scheme))", "docstring": "Transport the file from the input source to the executor.\n\n This function returns a DataFuture.\n\n Args:\n - self\n - file (File) : file to stage in\n - executor (str) : an executor the file is going to be staged in to.\n If the executor argument is not specified for a file\n with 'globus' scheme, the file will be staged in to\n the first executor with the \"globus\" key in a config.", "id": "f2586:c0:m10"} {"signature": "@abstractpropertydef script_dir(self):", "body": "pass", "docstring": "This is a property. Returns the directory assigned for storing all internal scripts such as\n scheduler submit scripts. This is usually where error logs from the scheduler would reside on the\n channel destination side.\n\n Args:\n - None\n\n Returns:\n - Channel script dir", "id": "f2589:c0:m1"} {"signature": "@abstractmethoddef close(self):", "body": "pass", "docstring": "Closes the channel. Clean out any auth credentials.\n\n Args:\n None\n\n Returns:\n Bool", "id": "f2589:c0:m4"} {"signature": "@abstractmethoddef abspath(self, path):", "body": "pass", "docstring": "Return the absolute path.\n\n Parameters\n ----------\n path : str\n Path for which the absolute path will be returned.", "id": "f2589:c0:m7"} {"signature": "@abstractmethoddef makedirs(self, path, mode=, exist_ok=False):", "body": "pass", "docstring": "Create a directory.\n\n If intermediate directories do not exist, they will be created.\n\n Parameters\n ----------\n path : str\n Path of directory to create.\n mode : int\n Permissions (posix-style) for the newly-created directory.\n exist_ok : bool\n If False, raise an OSError if the target directory already exists.", "id": "f2589:c0:m5"} {"signature": "@abstractmethoddef execute_no_wait(self, cmd, walltime, envs={}, *args, **kwargs):", "body": "pass", "docstring": "Execute asynchronously without waiting for the exit code\n\n Args:\n - cmd (string): Command string to execute over the channel\n - walltime (int) : Timeout in seconds\n\n KWargs:\n - envs (dict) : Environment variables to push to the remote side\n\n Returns:\n - the type of return value is channel specific", "id": "f2589:c0:m2"} {"signature": "def close(self):", "body": "return False", "docstring": "There's nothing to close here, and this really doesn't do anything\n\n Returns:\n - False, because it really did not \"close\" this channel.", "id": "f2590:c0:m4"} {"signature": "def makedirs(self, path, mode=, exist_ok=False):", "body": "return os.makedirs(path, mode, exist_ok)", "docstring": "Create a directory.\n\n If intermediate directories do not exist, they will be created.\n\n Parameters\n ----------\n path : str\n Path of directory to create.\n mode : int\n Permissions (posix-style) for the newly-created directory.\n exist_ok : bool\n If False, raise an OSError if the target directory already exists.", "id": "f2590:c0:m6"} {"signature": "def isdir(self, path):", "body": "return os.path.isdir(path)", "docstring": "Return true if the path refers to an existing directory.\n\n Parameters\n ----------\n path : str\n Path of directory to check.", "id": "f2590:c0:m5"} {"signature": "def execute_no_wait(self, cmd, walltime, envs={}):", "body": "current_env = 
copy.deepcopy(self._envs)current_env.update(envs)try:proc = subprocess.Popen(cmd,stdout=subprocess.PIPE,stderr=subprocess.PIPE,cwd=self.userhome,env=current_env,shell=True,preexec_fn=os.setpgrp)pid = proc.pidexcept Exception as e:logger.warn(\"\", (cmd, e))raisereturn pid, proc", "docstring": "Asynchronously execute a commandline string on the shell.\n\n Args:\n - cmd (string) : Commandline string to execute\n - walltime (int) : walltime in seconds, this is not really used now.\n\n Returns a tuple containing:\n\n - pid : process id\n - proc : a subprocess.Popen object\n\n Raises:\n None.", "id": "f2590:c0:m2"} {"signature": "def abspath(self, path):", "body": "return os.path.abspath(path)", "docstring": "Return the absolute path.\n\n Parameters\n ----------\n path : str\n Path for which the absolute path will be returned.", "id": "f2590:c0:m7"} {"signature": "def abspath(self, path):", "body": "return self.sftp_client.normalize(path)", "docstring": "Return the absolute path on the remote side.\n\n Parameters\n ----------\n path : str\n Path for which the absolute path will be returned.", "id": "f2592:c1:m9"} {"signature": "def __init__(self, hostname, username=None, password=None, script_dir=None, envs=None, gssapi_auth=False, skip_auth=False, **kwargs):", "body": "self.hostname = hostnameself.username = usernameself.password = passwordself.kwargs = kwargsself.script_dir = script_dirself.skip_auth = skip_authself.gssapi_auth = gssapi_authif self.skip_auth:self.ssh_client = NoAuthSSHClient()else:self.ssh_client = paramiko.SSHClient()self.ssh_client.load_system_host_keys()self.ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())self.envs = {}if envs is not None:self.envs = envstry:self.ssh_client.connect(hostname,username=username,password=password,allow_agent=True,gss_auth=gssapi_auth,gss_kex=gssapi_auth,)t = self.ssh_client.get_transport()self.sftp_client = paramiko.SFTPClient.from_transport(t)except paramiko.BadHostKeyException as e:raise BadHostKeyException(e, self.hostname)except paramiko.AuthenticationException as e:raise AuthException(e, self.hostname)except paramiko.SSHException as e:raise SSHException(e, self.hostname)except Exception as e:raise SSHException(e, self.hostname)", "docstring": "Initialize a persistent connection to the remote system.\n We should know at this point whether ssh connectivity is possible.\n\n Args:\n - hostname (String) : Hostname\n\n KWargs:\n - username (string) : Username on remote system\n - password (string) : Password for remote system\n - script_dir (string) : Full path to a script dir where\n generated scripts could be sent to.\n - envs (dict) : A dictionary of environment variables to be set when executing commands\n\n Raises:", "id": "f2592:c1:m0"} {"signature": "def isdir(self, path):", "body": "result = Truetry:self.sftp_client.lstat(path)except FileNotFoundError:result = Falsereturn result", "docstring": "Return true if the path refers to an existing directory.\n\n Parameters\n ----------\n path : str\n Path of directory on the remote side to check.", "id": "f2592:c1:m7"} {"signature": "def execute_no_wait(self, cmd, walltime=, envs={}):", "body": "stdin, stdout, stderr = self.ssh_client.exec_command(self.prepend_envs(cmd, envs), bufsize=-, timeout=walltime)return None, stdout, stderr", "docstring": "Execute asynchronously without waiting for the exit code\n\n Args:\n - cmd (string): Commandline string to be executed on the remote side\n - walltime (int): timeout to exec_command\n\n KWargs:\n - envs (dict): A dictionary of env 
variables\n\n Returns:\n - None, stdout (readable stream), stderr (readable stream)\n\n Raises:\n - ChannelExecFailed (reason)", "id": "f2592:c1:m3"} {"signature": "def ping_time(ip, n=):", "body": "cmd = \"\".format(ip, n)p = subprocess.Popen(cmd.split(\"\"), stdout=subprocess.PIPE)output = str(p.communicate()[])stats = output.split(\"\")[-].split(\"\")[-].split(\"\")avg_ping_time = float(stats[]) return avg_ping_time * ", "docstring": "Returns the average ping time in microseconds.\n\nNote: This function is inherently platform specific.\nIt currently works on Midway.", "id": "f2620:m0"} {"signature": "def __init__(self,host=None,port=None,logging_server_host='',logging_server_port=):", "body": "self.host = hostself.port = portself.logging_server_host = logging_server_hostself.logging_server_port = logging_server_port", "docstring": "Parameters\n----------\nhost : str\n The hostname for running the visualization interface.\nport : int\n The port for the visualization interface.\nlogging_server_host : str\n The hostname for the logging server.\nlogging_server_port : int\n The port for the logging server.", "id": "f2750:c1:m0"} {"signature": "def __init__(self, monitoring_url, source_id=None, timeout=):", "body": "self.monitoring_url = monitoring_urlself.sock_timeout = timeoutself.source_id = source_idtry:self.scheme, self.ip, port = (x.strip('') for x in monitoring_url.split(''))self.port = int(port)except Exception:raise Exception(\"\".format(monitoring_url))self.sock = socket.socket(socket.AF_INET,socket.SOCK_DGRAM,socket.IPPROTO_UDP) self.sock.settimeout(self.sock_timeout)", "docstring": "Parameters\n----------\n\nmonitoring_url : str\n URL of the form ://:\nmessage : py obj\n Python object to send, this will be pickled\nsource_id : str\n String identifier of the source\ntimeout : int\n timeout, default=10s", "id": "f2764:c0:m0"} {"signature": "def monitor(pid, task_id, monitoring_hub_url, run_id, sleep_dur=):", "body": "import psutilradio = UDPRadio(monitoring_hub_url,source_id=task_id)simple = [\"\", '', '', '', '', '', '', '', '', '', '', '', '']summable_values = ['', '', '']pm = psutil.Process(pid)pm.cpu_percent()first_msg = Truewhile True:try:d = {\"\" + str(k): v for k, v in pm.as_dict().items() if k in simple}d[\"\"] = run_idd[\"\"] = task_idd[''] = sleep_durd[''] = first_msgd[''] = datetime.datetime.now()children = pm.children(recursive=True)d[\"\"] = psutil.cpu_count()d[''] = pm.memory_info().vmsd[''] = pm.memory_info().rssd[''] = pm.cpu_times().userd[''] = pm.cpu_times().systemd[''] = len(children)try:d[''] = pm.io_counters().write_bytesd[''] = pm.io_counters().read_bytesexcept psutil._exceptions.AccessDenied:d[''] = d[''] = for child in children:for k, v in child.as_dict(attrs=summable_values).items():d['' + str(k)] += vd[''] += child.cpu_times().userd[''] += child.cpu_times().systemd[''] += child.memory_info().vmsd[''] += child.memory_info().rsstry:d[''] += child.io_counters().write_bytesd[''] += child.io_counters().read_bytesexcept psutil._exceptions.AccessDenied:d[''] += d[''] += finally:radio.send(MessageType.TASK_INFO, task_id, d)time.sleep(sleep_dur)first_msg = False", "docstring": "Internal\n Monitors the Parsl task's resources by pointing psutil to the task's pid and watching it and its children.", "id": "f2764:m2"} {"signature": "def send(self, message_type, task_id, message):", "body": "x = try:buffer = pickle.dumps((self.source_id, int(time.time()), message_type,message))except Exception as e:print(\"\".format(e))returntry:x = self.sock.sendto(buffer, (self.ip, 
self.port))except socket.timeout:print(\"\")return Falsereturn x", "docstring": "Sends a message to the UDP receiver\n\n Parameter\n ---------\n\n message_type: monitoring.MessageType (enum)\n In this case message type is RESOURCE_INFO most often\n task_id: int\n Task identifier of the task for which resource monitoring is being reported\n message: object\n Arbitrary pickle-able object that is to be sent\n\n Returns:\n # bytes sent", "id": "f2764:c0:m1"} {"signature": "def __init__(self,hub_address,hub_port=None,hub_port_range=(, ),database=None, visualization_server=None, client_address=\"\",client_port=None,monitoring_hub_address=\"\",logdir=\"\",logging_level=logging.DEBUG,atexit_timeout= ):", "body": "try:os.makedirs(logdir)except FileExistsError:passself.logger = start_file_logger(\"\".format(logdir),name=\"\",level=logging_level)self.logger.debug(\"\")if not hub_port:self.logger.critical(\"\")self.hub_port = hub_portself.hub_address = hub_addressself.database = databaseself.visualization_server = visualization_serverself.atexit_timeout = atexit_timeoutself.loop_freq = self.logger.debug(\"\".format(hub_port))try:self.sock = socket.socket(socket.AF_INET,socket.SOCK_DGRAM,socket.IPPROTO_UDP)self.sock.bind(('', hub_port))self.sock.settimeout(self.loop_freq / )except OSError:self.logger.critical(\"\")self.hub_port = -self._context = zmq.Context()self.dfk_channel = self._context.socket(zmq.DEALER)self.dfk_channel.set_hwm()self.dfk_channel.RCVTIMEO = int(self.loop_freq) self.dfk_channel.connect(\"\".format(client_address, client_port))", "docstring": "Initializes a monitoring configuration class.\n\n Parameters\n ----------\n address : str\n IP address of the node on which the monitoring hub will run, this address must be\n reachable from the Parsl client as well as the worker nodes. Eg. ...\n\n port : int\n Used with Elasticsearch logging, the port of where to access Elasticsearch. 
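On the sending side, the radio reduces to a couple of calls (the address, task id, and payload fields are placeholders; MessageType is assumed to be in scope, as in the monitor loop above):

    radio = UDPRadio('udp://127.0.0.1:55055', source_id=0)
    radio.send(MessageType.TASK_INFO, 0, {'run_id': 'demo-run',
                                          'psutil_process_pid': 1234})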
Required when using logging_type = 'elasticsearch'.\n\n logging_endpoint : Endpoint object\n This is generally a database object to which logging data can be pushed from the\n monitoring HUB.\n\n workflow_name : str, optional\n Name to record as the workflow base name, defaults to the name of the parsl script file if left as None.\n\n workflow_version : str, optional\n Optional workflow identification to distinguish between workflows with the same name, not used internally, only for display to the user.\n\n atexit_timeout : float, optional\n The number of seconds to wait, after the last dfk workflow message is received, before terminating the hub if no further messages arrive.", "id": "f2764:c2:m0"} {"signature": "def hash_lookup(self, hashsum):", "body": "return self.memo_lookup_table[hashsum]", "docstring": "Lookup a hash in the memoization table.\n\n Will raise a KeyError if hash is not in the memoization lookup table.\n\n Args:\n - hashsum (str?): The same hashes used to uniquely identify apps+inputs\n\n Returns:\n - Lookup result, this is unlikely to be None, since the hashes are set by this\n library and should not be missing entries in its dict.\n\n Raises:\n - KeyError: if hash not in table", "id": "f2767:c0:m3"} {"signature": "def __init__(self, dfk, memoize=True, checkpoint={}):", "body": "self.dfk = dfkself.memoize = memoizeif self.memoize:logger.info(\"\")self.memo_lookup_table = checkpointelse:logger.info(\"\")self.memo_lookup_table = {}", "docstring": "Initialize the memoizer.\n\n Args:\n - dfk (DFK obj): The DFK object\n\n KWargs:\n - memoize (Bool): enable memoization or not.\n - checkpoint (Dict): A checkpoint loaded as a dict.", "id": "f2767:c0:m0"} {"signature": "def make_hash(self, task):", "body": "t = [serialize_object(task[''])[],serialize_object(task[''])[],serialize_object(task[''])[],serialize_object(task[''])[],serialize_object(task[''])[]]x = b''.join(t)hashedsum = hashlib.md5(x).hexdigest()return hashedsum", "docstring": "Create a hash of the task inputs.\n\n This uses a serialization library borrowed from ipyparallel.\n If this fails here, then all ipp calls are also likely to fail due to failure\n at serialization.\n\n Args:\n - task (dict) : Task dictionary from dfk.tasks\n\n Returns:\n - hash (str) : A unique hash string", "id": "f2767:c0:m1"} {"signature": "def unset_logging(self):", "body": "if self.logger_flag is True:returnroot_logger = logging.getLogger()for hndlr in root_logger.handlers:if hndlr not in self.prior_loghandlers:hndlr.setLevel(logging.ERROR)self.logger_flag = True", "docstring": "Mute handlers newly added to the root logger, right after calling executor.status", "id": "f2768:c0:m3"} {"signature": "def _strategy_noop(self, tasks, *args, kind=None, **kwargs):", "body": "", "docstring": "Do nothing.\n\n Args:\n - tasks (task_ids): Not used here.\n\n KWargs:\n - kind (Not used)", "id": "f2768:c0:m2"} {"signature": "@propertydef config(self):", "body": "return self._config", "docstring": "Returns the fully initialized config that the DFK is actively using.\n\n DO *NOT* update.\n\n Returns:\n - config (dict)", "id": "f2769:c0:m3"} {"signature": "def wait_for_current_tasks(self):", "body": "logger.info(\"\")for task_id in self.tasks:fut = self.tasks[task_id]['']if not fut.done():logger.debug(\"\".format(task_id))fut.exception()logger.info(\"\")", "docstring": "Waits for all tasks in the task list to be completed by waiting for their\n AppFuture to be completed. 
This method will not necessarily wait for any tasks\n added after cleanup has started (such as data stageout?)", "id": "f2769:c0:m15"} {"signature": "def _create_task_log_info(self, task_id, fail_mode=None):", "body": "info_to_monitor = ['', '', '', '', '','', '', '', '', '', '']task_log_info = {\"\" + k: self.tasks[task_id][k] for k in info_to_monitor}task_log_info[''] = self.run_idtask_log_info[''] = datetime.datetime.now()task_log_info[''] = self.tasks[task_id][''].nametask_log_info[''] = self.tasks_failed_counttask_log_info[''] = self.tasks_completed_counttask_log_info[''] = str(self.tasks[task_id][''].get('', None))task_log_info[''] = str(self.tasks[task_id][''].get('', None))task_log_info[''] = self.tasks[task_id][''].get('', None)task_log_info[''] = self.tasks[task_id][''].get('', None)task_log_info[''] = Noneif self.tasks[task_id][''] is not None:task_log_info[''] = \"\".join([str(t._tid) for t in self.tasks[task_id]['']])task_log_info[''] = Noneif self.tasks[task_id][''] is not None:task_log_info[''] = (self.tasks[task_id][''] -self.tasks[task_id]['']).total_seconds()if fail_mode is not None:task_log_info[''] = fail_modereturn task_log_info", "docstring": "Create the dictionary that will be included in the log.", "id": "f2769:c0:m1"} {"signature": "def __init__(self, config=Config()):", "body": "self.cleanup_called = Falseif isinstance(config, dict):raise ConfigurationError('''')self._config = configself.run_dir = make_rundir(config.run_dir)parsl.set_file_logger(\"\".format(self.run_dir), level=logging.DEBUG)logger.debug(\"\".format(config))logger.info(\"\".format(get_version()))self.checkpoint_lock = threading.Lock()self.usage_tracker = UsageTracker(self)self.usage_tracker.send_message()self.tasks_completed_count = self.tasks_failed_count = self.monitoring = config.monitoringif self.monitoring:if self.monitoring.logdir is None:self.monitoring.logdir = self.run_dirself.monitoring.start()self.time_began = datetime.datetime.now()self.time_completed = Noneself.run_id = str(uuid4())logger.info(\"\" + self.run_id)self.workflow_name = Noneif self.monitoring is not None and self.monitoring.workflow_name is not None:self.workflow_name = self.monitoring.workflow_nameelse:for frame in inspect.stack():fname = os.path.basename(str(frame.filename))parsl_file_names = ['']if fname not in parsl_file_names:self.workflow_name = fnamebreakself.workflow_version = str(self.time_began)if self.monitoring is not None and self.monitoring.workflow_version is not None:self.workflow_version = self.monitoring.workflow_versionworkflow_info = {'': \"\".format(sys.version_info.major,sys.version_info.minor,sys.version_info.micro),'': get_version(),\"\": self.time_began,'': None,'': None,'': self.run_id,'': self.workflow_name,'': self.workflow_version,'': self.run_dir,'': self.tasks_completed_count,'': self.tasks_failed_count,'': getuser(),'': gethostname(),}if self.monitoring:self.monitoring.send(MessageType.WORKFLOW_INFO,workflow_info)checkpoints = self.load_checkpoints(config.checkpoint_files)self.memoizer = Memoizer(self, memoize=config.app_cache, checkpoint=checkpoints)self.checkpointed_tasks = self._checkpoint_timer = Noneself.checkpoint_mode = config.checkpoint_modeself.data_manager = DataManager(self, max_threads=config.data_management_max_threads)self.executors = {}self.add_executors(config.executors + [self.data_manager])if self.checkpoint_mode == \"\":try:h, m, s = map(int, config.checkpoint_period.split(''))checkpoint_period = (h * ) + (m * ) + sself._checkpoint_timer = Timer(self.checkpoint, 
interval=checkpoint_period)except Exception:logger.error(\"\".format(config.checkpoint_period))self._checkpoint_timer = Timer(self.checkpoint, interval=( * ))if any([x.managed for x in config.executors]):self.flowcontrol = FlowControl(self)else:self.flowcontrol = FlowNoControl(self)self.task_count = self.tasks = {}self.submitter_lock = threading.Lock()atexit.register(self.atexit_cleanup)", "docstring": "Initialize the DataFlowKernel.\n\n Parameters\n ----------\n config : Config\n A specification of all configuration options. For more details see the\n :class:~`parsl.config.Config` documentation.", "id": "f2769:c0:m0"} {"signature": "def launch_task(self, task_id, executable, *args, **kwargs):", "body": "self.tasks[task_id][''] = datetime.datetime.now()hit, memo_fu = self.memoizer.check_memo(task_id, self.tasks[task_id])if hit:logger.info(\"\".format(task_id))return memo_fuexecutor_label = self.tasks[task_id][\"\"]try:executor = self.executors[executor_label]except Exception:logger.exception(\"\".format(task_id, executor_label, self._config))if self.monitoring is not None and self.monitoring.resource_monitoring_enabled:executable = self.monitoring.monitor_wrapper(executable, task_id,self.monitoring.monitoring_hub_url,self.run_id,self.monitoring.resource_monitoring_interval)with self.submitter_lock:exec_fu = executor.submit(executable, *args, **kwargs)self.tasks[task_id][''] = States.launchedif self.monitoring is not None:task_log_info = self._create_task_log_info(task_id, '')self.monitoring.send(MessageType.TASK_INFO, task_log_info)exec_fu.retries_left = self._config.retries -self.tasks[task_id]['']logger.info(\"\".format(task_id, executor.label))return exec_fu", "docstring": "Handle the actual submission of the task to the executor layer.\n\n If the app task has the executors attributes not set (default=='all')\n the task is launched on a randomly selected executor from the\n list of executors. 
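
The DataFlowKernel __init__ above (f2769:c0:m0) converts config.checkpoint_period into a seconds interval for the checkpoint Timer, but the separator and multipliers are elided in the record. A minimal sketch, assuming the period is an 'HH:MM:SS' string split on ':':

def period_to_seconds(period: str) -> int:
    # h, m, s = map(int, period.split(':')) as in the record above;
    # the 3600 and 60 multipliers are assumptions for the elided constants.
    h, m, s = map(int, period.split(":"))
    return h * 3600 + m * 60 + s

assert period_to_seconds("01:30:00") == 5400  # checkpoint every 90 minutes
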
This behavior could later be updated to support\n binding to executors based on user specified criteria.\n\n If the app task specifies a particular set of executors, it will be\n targeted at those specific executors.\n\n Args:\n task_id (uuid string) : A uuid string that uniquely identifies the task\n executable (callable) : A callable object\n args (list of positional args)\n kwargs (arbitrary keyword arguments)\n\n\n Returns:\n Future that tracks the execution of the submitted executable", "id": "f2769:c0:m7"} {"signature": "def _count_deps(self, depends):", "body": "count = for dep in depends:if isinstance(dep, Future):if not dep.done():count += return count", "docstring": "Internal.\n\n Count the number of unresolved futures in the list depends.", "id": "f2769:c0:m2"} {"signature": "def _load_checkpoints(self, checkpointDirs):", "body": "memo_lookup_table = {}for checkpoint_dir in checkpointDirs:logger.info(\"\".format(checkpoint_dir))checkpoint_file = os.path.join(checkpoint_dir, '')try:with open(checkpoint_file, '') as f:while True:try:data = pickle.load(f)memo_fu = Future()if data['']:memo_fu.set_exception(data[''])else:memo_fu.set_result(data[''])memo_lookup_table[data['']] = memo_fuexcept EOFError:breakexcept FileNotFoundError:reason = \"\".format(checkpoint_file)logger.error(reason)raise BadCheckpoint(reason)except Exception:reason = \"\".format(checkpoint_file)logger.error(reason)raise BadCheckpoint(reason)logger.info(\"\".format(checkpoint_file,len(memo_lookup_table.keys())))return memo_lookup_table", "docstring": "Load a checkpoint file into a lookup table.\n\n The data being loaded from the pickle file mostly contains input\n attributes of the task: func, args, kwargs, env...\n To simplify the check of whether the exact task has been completed\n in the checkpoint, we hash these input params and use it as the key\n for the memoized lookup table.\n\n Args:\n - checkpointDirs (list) : List of filepaths to checkpoints\n Eg. ['runinfo/001', 'runinfo/002']\n\n Returns:\n - memoized_lookup_table (dict)", "id": "f2769:c0:m18"} {"signature": "@classmethod@typeguard.typecheckeddef load(cls, config: Optional[Config] = None):", "body": "if cls._dfk is not None:raise RuntimeError('')if config is None:cls._dfk = DataFlowKernel(Config())else:cls._dfk = DataFlowKernel(config)return cls._dfk", "docstring": "Load a DataFlowKernel.\n\n Args:\n - config (Config) : Configuration to load. 
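
The _load_checkpoints record above (f2769:c0:m18) streams pickled entries out of one open checkpoint file until EOFError and converts each into an already-resolved Future keyed by the task hash. The record keys used below ('hash', 'exception', 'result') are assumptions, since the original string literals are elided:

import pickle
from concurrent.futures import Future

def load_checkpoint_file(path):
    table = {}
    with open(path, "rb") as f:
        while True:
            try:
                data = pickle.load(f)  # repeated loads from one file handle
            except EOFError:
                break
            fu = Future()
            if data.get("exception") is not None:
                fu.set_exception(data["exception"])
            else:
                fu.set_result(data["result"])
            table[data["hash"]] = fu
    return table

# round trip with one hypothetical record
with open("checkpoint.pkl", "wb") as f:
    pickle.dump({"hash": "abc", "exception": None, "result": 42}, f)
assert load_checkpoint_file("checkpoint.pkl")["abc"].result() == 42

Resolving each entry into a Future up front lets the memoizer hand cached results back through the same interface as live tasks.
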
This config will be passed to a\n new DataFlowKernel instantiation which will be set as the active DataFlowKernel.\n Returns:\n - DataFlowKernel : The loaded DataFlowKernel object.", "id": "f2769:c1:m1"} {"signature": "def handle_exec_update(self, task_id, future):", "body": "try:res = future.result()if isinstance(res, RemoteExceptionWrapper):res.reraise()except Exception:logger.exception(\"\".format(task_id))self.tasks[task_id][''].append(future._exception)self.tasks[task_id][''] += if not self._config.lazy_errors:logger.debug(\"\")self.tasks[task_id][''] = States.failedif self.monitoring:task_log_info = self._create_task_log_info(task_id, '')self.monitoring.send(MessageType.TASK_INFO, task_log_info)returnif self.tasks[task_id][''] <= self._config.retries:self.tasks[task_id][''] = States.pendinglogger.debug(\"\".format(task_id))else:logger.info(\"\".format(task_id,self._config.retries))self.tasks[task_id][''] = States.failedself.tasks_failed_count += self.tasks[task_id][''] = datetime.datetime.now()else:self.tasks[task_id][''] = States.doneself.tasks_completed_count += logger.info(\"\".format(task_id))self.tasks[task_id][''] = datetime.datetime.now()if self.monitoring:task_log_info = self._create_task_log_info(task_id, '')self.monitoring.send(MessageType.TASK_INFO, task_log_info)if self.tasks[task_id][''] == States.pending:self.launch_if_ready(task_id)return", "docstring": "This function is called only as a callback from an execution\n attempt reaching a final state (either successfully or failing).\n\n It will launch retries if necessary, and update the task\n structure.\n\n Args:\n task_id (string) : Task id which is a uuid string\n future (Future) : The future object corresponding to the task which\n makes this callback\n\n KWargs:\n memo_cbk(Bool) : Indicates that the call is coming from a memo update,\n that does not require additional memo updates.", "id": "f2769:c0:m4"} {"signature": "def launch_if_ready(self, task_id):", "body": "if self._count_deps(self.tasks[task_id]['']) == :new_args, kwargs, exceptions = self.sanitize_and_wrap(task_id,self.tasks[task_id][''],self.tasks[task_id][''])self.tasks[task_id][''] = new_argsself.tasks[task_id][''] = kwargsif not exceptions:exec_fu = Nonewith self.tasks[task_id]['']:if self.tasks[task_id][''] == States.pending:exec_fu = self.launch_task(task_id, self.tasks[task_id][''], *new_args, **kwargs)if exec_fu:try:exec_fu.add_done_callback(partial(self.handle_exec_update, task_id))except Exception as e:logger.error(\"\".format(e))self.tasks[task_id][''] = exec_futry:self.tasks[task_id][''].update_parent(exec_fu)self.tasks[task_id][''] = exec_fuexcept AttributeError as e:logger.error(\"\".format(task_id))raise eelse:logger.info(\"\".format(task_id))self.tasks[task_id][''] = States.dep_failif self.monitoring is not None:task_log_info = self._create_task_log_info(task_id, '')self.monitoring.send(MessageType.TASK_INFO, task_log_info)try:fu = Future()fu.retries_left = self.tasks[task_id][''] = fuself.tasks[task_id][''].update_parent(fu)fu.set_exception(DependencyError(exceptions,task_id,None))except AttributeError as e:logger.error(\"\".format(task_id))raise e", "docstring": "launch_if_ready will launch the specified task, if it is ready\nto run (for example, without dependencies, and in pending state).\n\nThis should be called by any piece of the DataFlowKernel that\nthinks a task may have become ready to run.\n\nIt is not an error to call launch_if_ready on a task that is not\nready to run - launch_if_ready will not incorrectly launch 
that\ntask.\n\nlaunch_if_ready is thread safe, so may be called from any thread\nor callback.", "id": "f2769:c0:m6"} {"signature": "def sanitize_and_wrap(self, task_id, args, kwargs):", "body": "dep_failures = []new_args = []for dep in args:if isinstance(dep, Future):try:new_args.extend([dep.result()])except Exception as e:if self.tasks[dep.tid][''] in FINAL_FAILURE_STATES:dep_failures.extend([e])else:new_args.extend([dep])for key in kwargs:dep = kwargs[key]if isinstance(dep, Future):try:kwargs[key] = dep.result()except Exception as e:if self.tasks[dep.tid][''] in FINAL_FAILURE_STATES:dep_failures.extend([e])if '' in kwargs:new_inputs = []for dep in kwargs['']:if isinstance(dep, Future):try:new_inputs.extend([dep.result()])except Exception as e:if self.tasks[dep.tid][''] in FINAL_FAILURE_STATES:dep_failures.extend([e])else:new_inputs.extend([dep])kwargs[''] = new_inputsreturn new_args, kwargs, dep_failures", "docstring": "This function should be called **ONLY** when all the futures we track have been resolved.\n\n If the user hid futures a level below, we will not catch\n it, and will (most likely) result in a type error.\n\n Args:\n task_id (uuid str) : Task id\n func (Function) : App function\n args (List) : Positional args to app function\n kwargs (Dict) : Kwargs to app function\n\n Return:\n partial function evaluated with all dependencies in args, kwargs and kwargs['inputs'] evaluated.", "id": "f2769:c0:m10"} {"signature": "def _wake_up_timer(self, kill_event):", "body": "while True:prev = self._wake_up_timetime_to_die = kill_event.wait(float(max(prev - time.time(), )))if time_to_die:returnif prev == self._wake_up_time:self.make_callback(kind='')else:print(\"\")", "docstring": "Internal. This is the function that the thread will execute.\n waits on an event so that the thread can make a quick exit when close() is called\n\n Args:\n - kill_event (threading.Event) : Event to wait on", "id": "f2770:c2:m1"} {"signature": "def close(self):", "body": "self._kill_event.set()self._thread.join()", "docstring": "Merge the threads and terminate.", "id": "f2770:c2:m3"} {"signature": "def make_callback(self, kind=None):", "body": "self._wake_up_time = time.time() + self.intervalself.callback(*self.cb_args)", "docstring": "Makes the callback and resets the timer.", "id": "f2770:c2:m2"} {"signature": "def notify(self, event_id):", "body": "self._event_buffer.extend([event_id])self._event_count += if self._event_count >= self.threshold:logger.debug(\"\")self.make_callback(kind=\"\")", "docstring": "Let the FlowControl system know that there is an event.", "id": "f2770:c1:m2"} {"signature": "def close(self):", "body": "pass", "docstring": "This close fn does nothing.", "id": "f2770:c0:m2"} {"signature": "def __init__(self, dfk, ip='', port=,domain_name=''):", "body": "self.domain_name = domain_nameself.ip = ipself.sock_timeout = self.UDP_PORT = portself.UDP_IP = Noneself.procs = []self.dfk = dfkself.config = self.dfk.configself.uuid = str(uuid.uuid4())self.parsl_version = PARSL_VERSIONself.python_version = \"\".format(sys.version_info.major,sys.version_info.minor,sys.version_info.micro)self.test_mode, self.tracking_enabled = self.check_tracking_enabled()logger.debug(\"\".format(self.tracking_enabled))logger.debug(\"\".format(self.test_mode))self.initialized = False", "docstring": "Initialize usage tracking unless the user has opted-out.\n\n We will try to resolve the hostname specified in kwarg:domain_name\n and if that fails attempt to use the kwarg:ip. 
Determining the\n IP and sending message is threaded to avoid slowing down DFK\n initialization.\n\n Tracks usage stats by inspecting the internal state of the dfk.\n\n Args:\n - dfk (DFK object) : Data Flow Kernel object\n\n KWargs:\n - ip (string) : IP address\n - port (int) : Port number, Default:50077\n - domain_name (string) : Domain name, will override IP\n Default: tracking.parsl-project.org", "id": "f2771:c0:m0"} {"signature": "def check_tracking_enabled(self):", "body": "track = True test = False testvar = str(os.environ.get(\"\", '')).lower()if testvar == '':test = Trueif not self.config.usage_tracking:track = Falseenvvar = str(os.environ.get(\"\", True)).lower()if envvar == \"\":track = Falsereturn test, track", "docstring": "By default tracking is enabled.\n\n If Test mode is set via env variable PARSL_TESTING, a test flag is set\n\n Tracking is disabled if :\n 1. config[\"globals\"][\"usageTracking\"] is set to False (Bool)\n 2. Environment variable PARSL_TRACKING is set to false (case insensitive)", "id": "f2771:c0:m1"} {"signature": "def async_process(fn):", "body": "def run(*args, **kwargs):proc = mp.Process(target=fn, args=args, kwargs=kwargs)proc.start()return procreturn run", "docstring": "Decorator function to launch a function as a separate process", "id": "f2771:m0"} {"signature": "def close(self):", "body": "for proc in self.procs:proc.terminate()", "docstring": "We terminate (SIGTERM) the processes added to the self.procs list", "id": "f2771:c0:m7"} {"signature": "def construct_end_message(self):", "body": "app_count = self.dfk.task_countsite_count = len([x for x in self.dfk.config.executors if x.managed])app_fails = len([t for t in self.dfk.tasks ifself.dfk.tasks[t][''] in FINAL_FAILURE_STATES])message = {'': self.uuid,'': time.time(),'': app_count,'': site_count,'': None,'': app_fails,'': self.test_mode,}return json.dumps(message)", "docstring": "Collect the final run information at the time of DFK cleanup.\n\n Returns:\n - Message dict dumped as json string, ready for UDP", "id": "f2771:c0:m3"} {"signature": "def construct_start_message(self):", "body": "uname = getpass.getuser().encode('')hashed_username = hashlib.sha256(uname).hexdigest()[:]hname = socket.gethostname().encode('')hashed_hostname = hashlib.sha256(hname).hexdigest()[:]message = {'': self.uuid,'': hashed_username,'': hashed_hostname,'': self.test_mode,'': self.parsl_version,'': self.python_version,'': platform.system(),'': platform.release(),'': time.time()}return json.dumps(message)", "docstring": "Collect preliminary run info at the start of the DFK.\n\n Returns :\n - Message dict dumped as json string, ready for UDP", "id": "f2771:c0:m2"} {"signature": "def make_rundir(path):", "body": "try:if not os.path.exists(path):os.makedirs(path)prev_rundirs = glob(os.path.join(path, \"\"))current_rundir = os.path.join(path, '')if prev_rundirs:x = sorted([int(os.path.basename(x)) for x in prev_rundirs])[-]current_rundir = os.path.join(path, ''.format(x + ))os.makedirs(current_rundir)logger.debug(\"\".format(current_rundir))return os.path.abspath(current_rundir)except Exception as e:logger.error(\"\")logger.error(\"\".format(e))raise", "docstring": "When a path has not been specified, make the run directory.\n\n Creates a rundir with the following hierarchy:\n ./runinfo <- Home of all run directories\n |----000\n |----001 <- Directories for each run\n | ....\n |----NNN\n\n Kwargs:\n - path (str): String path to a specific run dir\n Default : None.", "id": "f2774:m0"} {"signature": "def 
remote_side_bash_executor(func, *args, **kwargs):", "body": "import osimport timeimport subprocessimport loggingimport parsl.app.errors as pelogging.basicConfig(filename=''.format(time.time()), level=logging.DEBUG)func_name = func.__name__partial_cmdline = Nonetry:partial_cmdline = func(*args, **kwargs)executable = partial_cmdline.format(*args, **kwargs)except AttributeError as e:if partial_cmdline is not None:raise pe.AppBadFormatting(\"\".format(func_name, e))else:raise pe.BashAppNoReturn(\"\".format(func_name, e), None)except IndexError as e:raise pe.AppBadFormatting(\"\".format(func_name, e))except Exception as e:logging.error(\"\".format(func_name, e))raise elogging.debug(\"\", executable)def open_std_fd(fdname):stdfspec = kwargs.get(fdname) if stdfspec is None:return Noneelif isinstance(stdfspec, str):fname = stdfspecmode = ''elif isinstance(stdfspec, tuple):if len(stdfspec) != :raise pe.BadStdStreamFile(\"\" % (fdname, len(stdfspec)), TypeError(''))fname, mode = stdfspecelse:raise pe.BadStdStreamFile(\"\" % (fdname, str(type(stdfspec))), TypeError(''))try:fd = open(fname, mode)except Exception as e:raise pe.BadStdStreamFile(fname, e)return fdstd_out = open_std_fd('')std_err = open_std_fd('')timeout = kwargs.get('')returncode = Nonetry:proc = subprocess.Popen(executable, stdout=std_out, stderr=std_err, shell=True, executable='')proc.wait(timeout=timeout)returncode = proc.returncodeexcept subprocess.TimeoutExpired:raise pe.AppTimeout(\"\".format(func_name, timeout))except Exception as e:raise pe.AppException(\"\".format(func_name, proc.returncode), e)if returncode != :raise pe.AppFailure(\"\".format(func_name, proc.returncode), proc.returncode)missing = []for outputfile in kwargs.get('', []):fpath = outputfileif type(outputfile) != str:fpath = outputfile.filepathif not os.path.exists(fpath):missing.extend([outputfile])if missing:raise pe.MissingOutputs(\"\".format(func_name), missing)return returncode", "docstring": "Execute the bash app type function and return the command line string.\n\n This string is reformatted with the *args, and **kwargs\n from call time.", "id": "f2777:m0"} {"signature": "@propertydef tid(self):", "body": "return self._tid", "docstring": "Returns the task_id of the task that will resolve this DataFuture.", "id": "f2779:c0:m2"} {"signature": "@propertydef filepath(self):", "body": "return self.file_obj.filepath", "docstring": "Filepath of the File object this datafuture represents.", "id": "f2779:c0:m3"} {"signature": "@propertydef filename(self):", "body": "return self.filepath", "docstring": "Filepath of the File object this datafuture represents.", "id": "f2779:c0:m4"} {"signature": "def __init__(self, func, data_flow_kernel=None, walltime=, executors='', cache=False):", "body": "self.__name__ = func.__name__self.func = funcself.data_flow_kernel = data_flow_kernelself.status = ''self.executors = executorsself.cache = cacheif not (isinstance(executors, list) or isinstance(executors, str)):logger.error(\"\".format(func.__name__))if cache is True:try:self.fn_source = getsource(func)except OSError:logger.debug(\"\")self.fn_source = func.__name__self.func_hash = md5(self.fn_source.encode('')).hexdigest()else:self.func_hash = func.__name__params = signature(func).parametersself.kwargs = {}if '' in params:self.kwargs[''] = params[''].defaultif '' in params:self.kwargs[''] = params[''].defaultself.outputs = params[''].default if '' in params else []self.inputs = params[''].default if '' in params else []", "docstring": "Construct the App object.\n\n Args:\n - func 
(function): Takes the function to be made into an App\n\n Kwargs:\n - data_flow_kernel (DataFlowKernel): The :class:`~parsl.dataflow.dflow.DataFlowKernel` responsible for\n managing this app. This can be omitted only\n after calling :meth:`parsl.dataflow.dflow.DataFlowKernelLoader.load`.\n - walltime (int) : Walltime in seconds for the app execution.\n - executors (str|list) : Labels of the executors that this app can execute over. Default is 'all'.\n - cache (Bool) : Enable caching of this app ?\n\n Returns:\n - App object.", "id": "f2780:c0:m0"} {"signature": "def python_app(function=None, data_flow_kernel=None, walltime=, cache=False, executors=''):", "body": "from parsl.app.python import PythonAppdef decorator(func):def wrapper(f):return PythonApp(f,data_flow_kernel=data_flow_kernel,walltime=walltime,cache=cache,executors=executors)return wrapper(func)if function is not None:return decorator(function)return decorator", "docstring": "Decorator function for making python apps.\n\n Parameters\n ----------\n function : function\n Do not pass this keyword argument directly. This is needed in order to allow for omitted parenthesis,\n for example, `@python_app` if using all defaults or `@python_app(walltime=120)`. If the\n decorator is used alone, function will be the actual function being decorated, whereas if it\n is called with arguments, function will be None. Default is None.\n data_flow_kernel : DataFlowKernel\n The :class:`~parsl.dataflow.dflow.DataFlowKernel` responsible for managing this app. This can\n be omitted only after calling :meth:`parsl.dataflow.dflow.DataFlowKernelLoader.load`. Default is None.\n walltime : int\n Walltime for app in seconds. Default is 60.\n executors : string or list\n Labels of the executors that this app can execute over. Default is 'all'.\n cache : bool\n Enable caching of the app call. 
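
The python_app record above (f2780:m1) documents the omitted-parentheses decorator idiom: when used bare, `function` is the decorated callable; when called with arguments, `function` is None and the factory returns a decorator. A self-contained sketch with a hypothetical my_app stand-in (no DataFlowKernel involved):

import functools

def my_app(function=None, walltime=60):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # a real implementation would submit to a DataFlowKernel here
            return func(*args, **kwargs)
        wrapper.walltime = walltime
        return wrapper
    if function is not None:
        return decorator(function)   # used as bare @my_app
    return decorator                 # used as @my_app(walltime=120)

@my_app
def double(x):
    return 2 * x

@my_app(walltime=120)
def triple(x):
    return 3 * x

assert double(2) == 4 and triple(2) == 6 and triple.walltime == 120
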
Default is False.", "id": "f2780:m1"} {"signature": "@typeguard.typecheckeddef set_file_logger(filename: str, name: str = '', level: int = logging.DEBUG, format_string: Optional[str] = None):", "body": "if format_string is None:format_string = \"\"logger = logging.getLogger(name)logger.setLevel(logging.DEBUG)handler = logging.FileHandler(filename)handler.setLevel(level)formatter = logging.Formatter(format_string, datefmt='')handler.setFormatter(formatter)logger.addHandler(handler)futures_logger = logging.getLogger(\"\")futures_logger.addHandler(handler)", "docstring": "Add a file log handler.\n\n Args:\n - filename (string): Name of the file to write logs to\n - name (string): Logger name\n - level (logging.LEVEL): Set the logging level.\n - format_string (string): Set the format string\n\n Returns:\n - None", "id": "f2781:m1"} {"signature": "def submit(self, command='', blocksize=, tasks_per_node=, job_name=\"\"):", "body": "job_name = \"\".format(time.time())wrapped_cmd = self.launcher(command,tasks_per_node,self.nodes_per_block)[instance, *rest] = self.spin_up_instance(command=wrapped_cmd, job_name=job_name)if not instance:logger.error(\"\")return Nonelogger.debug(\"\".format(instance.instance_id))state = translate_table.get(instance.state[''], \"\")self.resources[instance.instance_id] = {\"\": instance.instance_id,\"\": instance,\"\": state}return instance.instance_id", "docstring": "Submit the command onto a freshly instantiated AWS EC2 instance.\n\n Submit returns an ID that corresponds to the task that was just submitted.\n\n Parameters\n ----------\n command : str\n Command to be invoked on the remote side.\n blocksize : int\n Number of blocks requested.\n tasks_per_node : int (default=1)\n Number of command invocations to be launched per node\n job_name : str\n Prefix for the job name.\n\n Returns\n -------\n None or str\n If at capacity, None will be returned. 
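
The set_file_logger record above (f2781:m1) installs a logging.FileHandler on a named logger and attaches the same handler to a second logger for futures. Its logger names and format string are elided, so the ones below (including 'concurrent.futures') are assumptions; a minimal sketch:

import logging

def set_file_logger(filename, name="parsl", level=logging.DEBUG,
                    format_string="%(asctime)s %(name)s [%(levelname)s] %(message)s"):
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    handler = logging.FileHandler(filename)
    handler.setLevel(level)
    handler.setFormatter(logging.Formatter(format_string, datefmt="%Y-%m-%d %H:%M:%S"))
    logger.addHandler(handler)
    # the record also routes a futures logger to the same file (name assumed)
    logging.getLogger("concurrent.futures").addHandler(handler)
    return logger

set_file_logger("parsl.log").info("file handler attached")
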
Otherwise, the job identifier will be returned.", "id": "f2782:c0:m13"} {"signature": "def write_state_file(self):", "body": "fh = open('', '')state = {}state[''] = self.vpc_idstate[''] = self.sg_idstate[''] = self.sn_idsstate[''] = self.instancesstate[\"\"] = self.instance_statesfh.write(json.dumps(state, indent=))", "docstring": "Save information that must persist to a file.\n\n We do not want to create a new VPC and new identical security groups, so we save\n information about them in a file between runs.", "id": "f2782:c0:m3"} {"signature": "def get_instance_state(self, instances=None):", "body": "if instances:desc = self.client.describe_instances(InstanceIds=instances)else:desc = self.client.describe_instances(InstanceIds=self.instances)for i in range(len(desc[''])):instance = desc[''][i][''][]self.instance_states[instance['']] = instance['']['']return self.instance_states", "docstring": "Get states of all instances on EC2 which were started by this file.", "id": "f2782:c0:m11"} {"signature": "def cancel(self, job_ids):", "body": "if self.linger is True:logger.debug(\"\")return [False for x in job_ids]try:self.client.terminate_instances(InstanceIds=list(job_ids))except Exception as e:logger.error(\"\".format(job_ids))raise eelse:logger.debug(\"\".format(job_ids))for job_id in job_ids:self.resources[job_id][\"\"] = \"\"for job_id in job_ids:self.instances.remove(job_id)return [True for x in job_ids]", "docstring": "Cancel the jobs specified by a list of job ids.\n\n Parameters\n ----------\n job_ids : list of str\n List of of job identifiers\n\n Returns\n -------\n list of bool\n Each entry in the list will contain False if the operation fails. Otherwise, the entry will be True.", "id": "f2782:c0:m14"} {"signature": "def config_route_table(self, vpc, internet_gateway):", "body": "route_table = vpc.create_route_table()route_table.create_route(DestinationCidrBlock='', GatewayId=internet_gateway.internet_gateway_id)return route_table", "docstring": "Configure route table for Virtual Private Cloud (VPC).\n\n Parameters\n ----------\n vpc : dict\n Representation of the VPC (created by create_vpc()).\n internet_gateway : dict\n Representation of the internet gateway (created by create_vpc()).", "id": "f2782:c0:m7"} {"signature": "def submit(self, command, blocksize, tasks_per_node, job_name=\"\"):", "body": "if self.provisioned_blocks >= self.max_blocks:logger.warn(\"\".format(self.label))return Nonejob_name = \"\".format(job_name, time.time())script_path = \"\".format(self.script_dir, job_name)script_path = os.path.abspath(script_path)logger.debug(\"\".format(self.nodes_per_block))job_config = {}job_config[\"\"] = self.channel.script_dirjob_config[\"\"] = self.nodes_per_blockjob_config[\"\"] = tasks_per_nodejob_config[\"\"] = wtime_to_minutes(self.walltime)job_config[\"\"] = self.scheduler_optionsjob_config[\"\"] = self.worker_initjob_config[\"\"] = self.partitionjob_config[\"\"] = commandjob_config[\"\"] = self.launcher(command,tasks_per_node,self.nodes_per_block)logger.debug(\"\")self._write_submit_script(template_string, script_path, job_name, job_config)if self.move_files:logger.debug(\"\")channel_script_path = self.channel.push_file(script_path, self.channel.script_dir)else:logger.debug(\"\")channel_script_path = script_pathretcode, stdout, stderr = super().execute_wait(\"\".format(channel_script_path))job_id = Noneif retcode == :for line in stdout.split(''):if line.startswith(\"\"):job_id = line.split(\"\")[].strip()self.resources[job_id] = {'': job_id, '': '', '': 
blocksize}else:print(\"\")logger.error(\"\", retcode, stdout.strip(), stderr.strip())return job_id", "docstring": "Submit the command as a slurm job of blocksize parallel elements.\n\n Parameters\n ----------\n command : str\n Command to be made on the remote side.\n blocksize : int\n Not implemented.\n tasks_per_node : int\n Command invocations to be launched per node\n job_name : str\n Name for the job (must be unique).\n Returns\n -------\n None or str\n If at capacity, returns None; otherwise, a string identifier for the job", "id": "f2785:c0:m2"} {"signature": "def _status(self):", "body": "cmd = \"\"retcode, stdout, stderr = super().execute_wait(cmd)if retcode != :returnjobs_missing = list(self.resources.keys())for line in stdout.split(''):parts = line.split()if parts and parts[].lower().lower() != ''and not parts[].startswith(''):job_id = parts[]status = translate_table.get(parts[].lower(), '')if job_id in self.resources:self.resources[job_id][''] = statusjobs_missing.remove(job_id)for missing_job in jobs_missing:if self.resources[missing_job][''] in ['', '']:self.resources[missing_job][''] = ''", "docstring": "Get the status of a list of jobs identified by the job identifiers\n returned from the submit request.\n\n Returns:\n - A list of status from ['PENDING', 'RUNNING', 'CANCELLED', 'COMPLETED',\n 'FAILED', 'TIMEOUT'] corresponding to each job_id in the job_ids list.\n\n Raises:\n - ExecutionProviderException or its subclasses", "id": "f2787:c0:m3"} {"signature": "def cancel(self, job_ids):", "body": "job_id_list = ''.join(job_ids)cmd = \"\".format(job_id_list)retcode, stdout, stderr = super().execute_wait(cmd, )rets = Noneif retcode == :for jid in job_ids:self.resources[jid][''] = \"\"rets = [True for i in job_ids]else:rets = [False for i in job_ids]return rets", "docstring": "Cancels the resources identified by the job_ids provided by the user.\n\n Args:\n - job_ids (list): A list of job identifiers\n\n Returns:\n - A list of status from cancelling the job which can be True, False\n\n Raises:\n - ExecutionProviderException or its subclasses", "id": "f2787:c0:m4"} {"signature": "def _create_deployment_object(self, job_name, job_image,deployment_name, port=,replicas=,cmd_string=None,engine_json_file='',engine_dir='',volumes=[]):", "body": "security_context = Noneif self.user_id and self.group_id:security_context = client.V1SecurityContext(run_as_group=self.group_id,run_as_user=self.user_id,run_as_non_root=self.run_as_non_root)environment_vars = client.V1EnvVar(name=\"\", value=\"\")launch_args = [\"\", \"\".format(cmd_string)]volume_mounts = []for volume in volumes:volume_mounts.append(client.V1VolumeMount(mount_path=volume[],name=volume[]))container = Noneif security_context:container = client.V1Container(name=job_name,image=job_image,ports=[client.V1ContainerPort(container_port=port)],volume_mounts=volume_mounts,command=[''],args=launch_args,env=[environment_vars],security_context=security_context)else:container = client.V1Container(name=job_name,image=job_image,ports=[client.V1ContainerPort(container_port=port)],volume_mounts=volume_mounts,command=[''],args=launch_args,env=[environment_vars])secret = Noneif self.secret:secret = client.V1LocalObjectReference(name=self.secret)volume_defs = []for volume in volumes:volume_defs.append(client.V1Volume(name=volume[],persistent_volume_claim=client.V1PersistentVolumeClaimVolumeSource(claim_name=volume[])))template = client.V1PodTemplateSpec(metadata=client.V1ObjectMeta(labels={\"\": 
job_name}),spec=client.V1PodSpec(containers=[container],image_pull_secrets=[secret],volumes=volume_defs))spec = client.ExtensionsV1beta1DeploymentSpec(replicas=replicas,template=template)deployment = client.ExtensionsV1beta1Deployment(api_version=\"\",kind=\"\",metadata=client.V1ObjectMeta(name=deployment_name),spec=spec)return deployment", "docstring": "Create a kubernetes deployment for the job.\n Args:\n - job_name (string) : Name of the job and deployment\n - job_image (string) : Docker image to launch\n KWargs:\n - port (integer) : Container port\n - replicas : Number of replica containers to maintain\n Returns:\n - True: The deployment object to launch", "id": "f2788:c0:m5"} {"signature": "def cancel(self, job_ids):", "body": "for job in job_ids:logger.debug(\"\".format(job))self._delete_deployment(job)self.resources[job][''] = ''rets = [True for i in job_ids]return rets", "docstring": "Cancels the jobs specified by a list of job ids\n Args:\n job_ids : [ ...]\n Returns :\n [True/False...] : If the cancel operation fails the entire list will be False.", "id": "f2788:c0:m3"} {"signature": "def _create_deployment(self, deployment):", "body": "api_response = self.kube_client.create_namespaced_deployment(body=deployment,namespace=self.namespace)logger.debug(\"\".format(str(api_response.status)))", "docstring": "Create the kubernetes deployment", "id": "f2788:c0:m6"} {"signature": "def _status(self):", "body": "jobs_ids = list(self.resources.keys())return jobs_ids", "docstring": "Internal: Do not call. Returns the status list for a list of job_ids\n Args:\n self\n Returns:\n [status...] : Status list of all jobs", "id": "f2788:c0:m4"} {"signature": "def _delete_deployment(self, deployment_name):", "body": "api_response = self.kube_client.delete_namespaced_deployment(name=deployment_name,namespace=self.namespace,body=client.V1DeleteOptions(propagation_policy='',grace_period_seconds=))logger.debug(\"\".format(str(api_response.status)))", "docstring": "Delete deployment", "id": "f2788:c0:m7"} {"signature": "def submit(self, command, blocksize, tasks_per_node, job_name=\"\"):", "body": "job_name = \"\".format(job_name, time.time())script_path = \"\".format(self.script_dir, job_name)script_path = os.path.abspath(script_path)wrap_command = self.worker_init + '' + self.launcher(command, tasks_per_node, self.nodes_per_block)self._write_submit_script(wrap_command, script_path)job_id = Noneproc = Noneremote_pid = Noneif (self.move_files is None and not isinstance(self.channel, LocalChannel)) or (self.move_files):logger.debug(\"\")script_path = self.channel.push_file(script_path, self.channel.script_dir)if not isinstance(self.channel, LocalChannel):logger.debug(\"\")cmd = ''.format(script_path)retcode, stdout, stderr = self.channel.execute_wait(cmd, self.cmd_timeout)for line in stdout.split(''):if line.startswith(\"\"):remote_pid = line.split(\"\")[].strip()job_id = remote_pidif job_id is None:logger.warning(\"\")else:try:job_id, proc = self.channel.execute_no_wait(''.format(script_path), self.cmd_timeout)except Exception as e:logger.debug(\"\".format(self.channel, e))raiseself.resources[job_id] = {'': job_id, '': '','': blocksize,'': remote_pid,'': proc}return job_id", "docstring": "Submits the command onto an Local Resource Manager job of blocksize parallel elements.\n Submit returns an ID that corresponds to the task that was just submitted.\n\n If tasks_per_node < 1:\n 1/tasks_per_node is provisioned\n\n If tasks_per_node == 1:\n A single node is provisioned\n\n If tasks_per_node > 1 :\n 
tasks_per_node * blocksize number of nodes are provisioned.\n\n Args:\n - command :(String) Commandline invocation to be made on the remote side.\n - blocksize :(float) - Not really used for local\n - tasks_per_node (int) : command invocations to be launched per node\n\n Kwargs:\n - job_name (String): Name for job, must be unique\n\n Returns:\n - None: At capacity, cannot provision more\n - job_id: (string) Identifier for the job", "id": "f2792:c0:m3"} {"signature": "def _write_submit_script(self, script_string, script_filename):", "body": "try:with open(script_filename, '') as f:f.write(script_string)except KeyError as e:logger.error(\"\", e)raise (SchedulerMissingArgs(e.args, self.label))except IOError as e:logger.error(\"\", script_filename)raise (ScriptPathError(script_filename, e))return True", "docstring": "Load the template string with config values and write the generated submit script to\na submit script file.\n\nArgs:\n - template_string (string) : The template string to be used for the writing submit script\n - script_filename (string) : Name of the submit script\n\nReturns:\n - True: on success\n\nRaises:\n SchedulerMissingArgs : If template is missing args\n ScriptPathError : Unable to write submit script out", "id": "f2792:c0:m2"} {"signature": "def cancel(self, job_ids):", "body": "for job in job_ids:logger.debug(\"\".format(job))if self.resources[job]['']:proc = self.resources[job]['']os.killpg(os.getpgid(proc.pid), signal.SIGTERM)self.resources[job][''] = ''elif self.resources[job]['']:cmd = \"\".format(self.resources[job][''])retcode, stdout, stderr = self.channel.execute_wait(cmd, self.cmd_timeout)if retcode != :logger.warning(\"\".format(self.resources[job][''],self.label))rets = [True for i in job_ids]return rets", "docstring": "Cancels the jobs specified by a list of job ids\n\n Args:\n job_ids : [ ...]\n\n Returns :\n [True/False...] 
: If the cancel operation fails the entire list will be False.", "id": "f2792:c0:m4"} {"signature": "def submit(self, command, blocksize, tasks_per_node, job_name=\"\"):", "body": "wrapped_cmd = self.launcher(command,tasks_per_node,)instance, name = self.create_instance(command=wrapped_cmd)self.provisioned_blocks += self.resources[name] = {\"\": name, \"\": translate_table[instance['']]}return name", "docstring": "The submit method takes the command string to be executed upon\n instantiation of a resource most often to start a pilot.\n\n Args :\n - command (str) : The bash command string to be executed.\n - blocksize (int) : Blocksize to be requested\n - tasks_per_node (int) : command invocations to be launched per node\n\n KWargs:\n - job_name (str) : Human friendly name to be assigned to the job request\n\n Returns:\n - A job identifier, this could be an integer, string etc\n\n Raises:\n - ExecutionProviderException or its subclasses", "id": "f2793:c0:m1"} {"signature": "@propertydef current_capacity(self):", "body": "return self.provisioned_blocks", "docstring": "Returns the number of currently provisioned blocks.", "id": "f2793:c0:m5"} {"signature": "def submit(self, command, blocksize, tasks_per_node, job_name=\"\"):", "body": "if self.provisioned_blocks >= self.max_blocks:logger.warn(\"\", self.label)return Noneif blocksize < self.nodes_per_block:blocksize = self.nodes_per_blockjob_name = \"\".format(job_name, time.time())script_path = \"\".format(self.script_dir, job_name)script_path = os.path.abspath(script_path)logger.debug(\"\", blocksize, self.nodes_per_block,tasks_per_node)job_config = {}job_config[\"\"] = self.channel.script_dirjob_config[\"\"] = self.nodes_per_blockjob_config[\"\"] = self.nodes_per_block * tasks_per_nodejob_config[\"\"] = self.nodes_per_blockjob_config[\"\"] = tasks_per_nodejob_config[\"\"] = self.walltimejob_config[\"\"] = self.scheduler_optionsjob_config[\"\"] = self.worker_initjob_config[\"\"] = commandjob_config[\"\"] = self.launcher(command,tasks_per_node,self.nodes_per_block)logger.debug(\"\")self._write_submit_script(template_string, script_path, job_name, job_config)channel_script_path = self.channel.push_file(script_path, self.channel.script_dir)submit_options = ''if self.queue is not None:submit_options = ''.format(submit_options, self.queue)if self.account is not None:submit_options = ''.format(submit_options, self.account)launch_cmd = \"\".format(submit_options, channel_script_path)retcode, stdout, stderr = super().execute_wait(launch_cmd)job_id = Noneif retcode == :for line in stdout.split(''):if line.strip():job_id = line.strip()self.resources[job_id] = {'': job_id, '': '', '': blocksize}else:message = \"\".format(launch_cmd, retcode)if (stdout is not None) and (stderr is not None):message += \"\".format(stderr.strip(), stdout.strip())logger.error(message)return job_id", "docstring": "Submits the command onto an Local Resource Manager job of blocksize parallel elements.\n Submit returns an ID that corresponds to the task that was just submitted.\n\n If tasks_per_node < 1 : ! This is illegal. 
tasks_per_node should be integer\n\n If tasks_per_node == 1:\n A single node is provisioned\n\n If tasks_per_node > 1 :\n tasks_per_node * blocksize number of nodes are provisioned.\n\n Args:\n - command :(String) Commandline invocation to be made on the remote side.\n - blocksize :(float)\n - tasks_per_node (int) : command invocations to be launched per node\n\n Kwargs:\n - job_name (String): Name for job, must be unique\n\n Returns:\n - None: At capacity, cannot provision more\n - job_id: (string) Identifier for the job", "id": "f2795:c0:m2"} {"signature": "def _status(self):", "body": "job_id_list = ''.join(self.resources.keys())jobs_missing = list(self.resources.keys())retcode, stdout, stderr = super().execute_wait(\"\".format(job_id_list))for line in stdout.split(''):parts = line.split()if not parts or parts[].upper().startswith('') or parts[].startswith(''):continuejob_id = parts[]status = translate_table.get(parts[], '')self.resources[job_id][''] = statusjobs_missing.remove(job_id)for missing_job in jobs_missing:if self.resources[missing_job][''] in ['', '']:self.resources[missing_job][''] = translate_table['']", "docstring": "Internal: Do not call. Returns the status list for a list of job_ids\n\n Args:\n self\n\n Returns:\n [status...] : Status list of all jobs", "id": "f2795:c0:m1"} {"signature": "def scale_in(self, blocks=, machines=, strategy=None):", "body": "count = instances = self.client.servers.list()for instance in instances[:machines]:print(\"\", instance)instance.delete()count += return count", "docstring": "Scale in resources", "id": "f2798:c0:m2"} {"signature": "def scale_out(self, blocks=, block_size=):", "body": "self.config[''.format(self.pool)]['']count = if blocks == :block_id = len(self.blocks)self.blocks[block_id] = []for instance_id in range(, block_size):instances = self.server_manager.create(''.format(block_id, instance_id), self.client.images.get(''), self.client.flavors.list()[],min_count=,max_count=,userdata=setup_script.format(engine_config=self.engine_config),key_name='',security_groups=[''],nics=[{\"\": '',\"\": '',\"\": ''}])self.blocks[block_id].extend([instances])count += return count", "docstring": "Scale out the existing resources.", "id": "f2798:c0:m1"} {"signature": "@abstractmethoddef cancel(self, job_ids):", "body": "pass", "docstring": "Cancels the resources identified by the job_ids provided by the user.\n\n Args:\n - job_ids (list): A list of job identifiers\n\n Returns:\n - A list of status from cancelling the job which can be True, False\n\n Raises:\n - ExecutionProviderException or its subclasses", "id": "f2799:c0:m2"} {"signature": "def _write_submit_script(self, template, script_filename, job_name, configs):", "body": "try:submit_script = Template(template).substitute(jobname=job_name, **configs)with open(script_filename, '') as f:f.write(submit_script)except KeyError as e:logger.error(\"\", e)raise (SchedulerMissingArgs(e.args, self.sitename))except IOError as e:logger.error(\"\", script_filename)raise (ScriptPathError(script_filename, e))except Exception as e:print(\"\", template)print(\"\", job_name)print(\"\", configs)logger.error(\"\", e)raise (e)return True", "docstring": "Generate submit script and write it to a file.\n\n Args:\n - template (string) : The template string to be used for the writing submit script\n - script_filename (string) : Name of the submit script\n - job_name (string) : job name\n - configs (dict) : configs that get pushed into the template\n\n Returns:\n - True: on success\n\n Raises:\n SchedulerMissingArgs 
: If template is missing args\n ScriptPathError : Unable to write submit script out", "id": "f2800:c0:m2"} {"signature": "@propertydef scaling_enabled(self):", "body": "return self._scaling_enabled", "docstring": "The callers of ParslExecutors need to differentiate between Executors\n and Executors wrapped in a resource provider\n\n Returns:\n - Status (Bool)", "id": "f2800:c0:m7"} {"signature": "def cancel(self, job_ids):", "body": "job_id_list = ''.join(job_ids)retcode, stdout, stderr = super().execute_wait(\"\".format(job_id_list))rets = Noneif retcode == :for jid in job_ids:self.resources[jid][''] = translate_table[''] rets = [True for i in job_ids]else:rets = [False for i in job_ids]return rets", "docstring": "Cancels the jobs specified by a list of job ids\n\n Args:\n job_ids : [ ...]\n\n Returns :\n [True/False...] : If the cancel operation fails the entire list will be False.", "id": "f2802:c0:m3"} {"signature": "def submit(self, command, blocksize, tasks_per_node, job_name=\"\"):", "body": "if self.provisioned_blocks >= self.max_blocks:logger.warn(\"\", self.label)return Noneif blocksize < self.nodes_per_block:blocksize = self.nodes_per_blockaccount_opt = ''.format(self.account) if self.account is not None else ''job_name = \"\".format(job_name, time.time())script_path = \"\".format(self.script_dir, job_name)script_path = os.path.abspath(script_path)job_config = {}job_config[\"\"] = self.scheduler_optionsjob_config[\"\"] = self.worker_initlogger.debug(\"\",blocksize, self.nodes_per_block, tasks_per_node)job_config[\"\"] = self.launcher(command, tasks_per_node, self.nodes_per_block)queue_opt = ''.format(self.queue) if self.queue is not None else ''logger.debug(\"\")self._write_submit_script(template_string, script_path, job_name, job_config)channel_script_path = self.channel.push_file(script_path, self.channel.script_dir)command = ''.format(self.nodes_per_block, queue_opt, wtime_to_minutes(self.walltime), account_opt, channel_script_path)logger.debug(\"\".format(command))retcode, stdout, stderr = super().execute_wait(command)if retcode != :logger.error(\"\".format(command))logger.error(\"\".format(stdout, stderr))logger.debug(\"\", retcode, stdout.strip(), stderr.strip())job_id = Noneif retcode == :job_id = stdout.strip()self.resources[job_id] = {'': job_id, '': '', '': blocksize}else:logger.error(\"\".format(stderr))raise (ScaleOutFailed(self.__class__, \"\"))logger.debug(\"\".format(job_id))return job_id", "docstring": "Submits the command onto an Local Resource Manager job of blocksize parallel elements.\n Submit returns an ID that corresponds to the task that was just submitted.\n\n If tasks_per_node < 1 : ! This is illegal. 
tasks_per_node should be integer\n\n If tasks_per_node == 1:\n A single node is provisioned\n\n If tasks_per_node > 1 :\n tasks_per_node * blocksize number of nodes are provisioned.\n\n Args:\n - command :(String) Commandline invocation to be made on the remote side.\n - blocksize :(float)\n - tasks_per_node (int) : command invocations to be launched per node\n\n Kwargs:\n - job_name (String): Name for job, must be unique\n\n Returns:\n - None: At capacity, cannot provision more\n - job_id: (string) Identifier for the job", "id": "f2802:c0:m2"} {"signature": "def scale_out(self, workers=):", "body": "raise NotImplementedError", "docstring": "Scales out the number of active workers by 1.\n\n This method is notImplemented for threads and will raise the error if called.\n\n Raises:\n NotImplemented exception", "id": "f2803:c0:m4"} {"signature": "def submit(self, *args, **kwargs):", "body": "return self.executor.submit(*args, **kwargs)", "docstring": "Submits work to the thread pool.\n\n This method is simply pass through and behaves like a submit call as described\n here `Python docs: `_", "id": "f2803:c0:m3"} {"signature": "def shutdown(self, block=False):", "body": "x = self.executor.shutdown(wait=block)logger.debug(\"\")return x", "docstring": "Shutdown the ThreadPool.\n\n Kwargs:\n - block (Bool): To block for confirmations or not", "id": "f2803:c0:m6"} {"signature": "def close(self):", "body": "if self.reuse:logger.debug(\"\")returnif self.mode == \"\":logger.debug(\"\")returntry:pgid = os.getpgid(self.proc.pid)os.killpg(pgid, signal.SIGTERM)time.sleep()os.killpg(pgid, signal.SIGKILL)try:self.proc.wait(timeout=)x = self.proc.returncodeif x == :logger.debug(\"\".format(x))else:logger.error(\"\".format(x))except subprocess.TimeoutExpired:logger.warn(\"\".format(self.proc.pid))except Exception as e:logger.warn(\"\".format(self.proc.pid, e))", "docstring": "Terminate the controller process and its child processes.\n\n Args:\n - None", "id": "f2805:c0:m4"} {"signature": "@propertydef engine_file(self):", "body": "return os.path.join(self.ipython_dir,''.format(self.profile),'')", "docstring": "Specify path to the ipcontroller-engine.json file.\n\n This file is stored in in the ipython_dir/profile folders.\n\n Returns :\n - str, File path to engine file", "id": "f2805:c0:m2"} {"signature": "@propertydef client_file(self):", "body": "return os.path.join(self.ipython_dir,''.format(self.profile),'')", "docstring": "Specify path to the ipcontroller-client.json file.\n\n This file is stored in in the ipython_dir/profile folders.\n\n Returns :\n - str, File path to client file", "id": "f2805:c0:m3"} {"signature": "def start(self):", "body": "if self.mode == \"\":returnif self.ipython_dir != '':self.ipython_dir = os.path.abspath(os.path.expanduser(self.ipython_dir))if self.log:stdout = open(os.path.join(self.ipython_dir, \"\".format(self.profile)), '')stderr = open(os.path.join(self.ipython_dir, \"\".format(self.profile)), '')else:stdout = open(os.devnull, '')stderr = open(os.devnull, '')try:opts = ['','' if self.ipython_dir == '' else ''.format(self.ipython_dir),self.interfaces if self.interfaces is not None else '','' if self.profile == '' else ''.format(self.profile),'' if self.reuse else '',''.format(self.public_ip) if self.public_ip else '',''.format(self.port) if self.port is not None else '']if self.port_range is not None:opts += [''.format(self.hb_ping, self.hb_pong),''.format(self.control_client, self.control_engine),''.format(self.mux_client, self.mux_engine),''.format(self.task_client, 
self.task_engine)]logger.debug(\"\".format(''.join([str(x) for x in opts])))self.proc = subprocess.Popen(opts, stdout=stdout, stderr=stderr, preexec_fn=os.setsid)except FileNotFoundError:msg = \"\"logger.error(msg)raise ControllerError(msg)except Exception as e:msg = \"\".format(e)logger.error(msg)raise ControllerError(msg)", "docstring": "Start the controller.", "id": "f2805:c0:m1"} {"signature": "def __init__(self, ip_address, port_range):", "body": "self.context = zmq.Context()self.zmq_socket = self.context.socket(zmq.DEALER)self.zmq_socket.set_hwm()self.port = self.zmq_socket.bind_to_random_port(\"\".format(ip_address),min_port=port_range[],max_port=port_range[])", "docstring": "TODO: docstring", "id": "f2806:c1:m0"} {"signature": "def execute_task(f, args, kwargs, user_ns):", "body": "fname = getattr(f, '', '')prefix = \"\"fname = prefix + \"\"argname = prefix + \"\"kwargname = prefix + \"\"resultname = prefix + \"\"user_ns.update({fname: f,argname: args,kwargname: kwargs,resultname: resultname})code = \"\".format(resultname, fname,argname, kwargname)try:exec(code, user_ns, user_ns)except Exception as e:logger.warning(\"\".format(e))raise eelse:return user_ns.get(resultname)", "docstring": "Deserialize the buffer and execute the task.\n\n# Returns the result or exception.", "id": "f2807:m0"} {"signature": "def start(self):", "body": "logger.info(\"\")while True:socks = dict(self.poller.poll())if socks.get(self.task_incoming) == zmq.POLLIN:message = self.task_incoming.recv_multipart()logger.debug(\"\")self.worker_messages.send_multipart(message)logger.debug(\"\")if socks.get(self.worker_messages) == zmq.POLLIN:message = self.worker_messages.recv_multipart()logger.debug(\"\")self.result_outgoing.send_multipart(message[:])logger.debug(\"\")", "docstring": "TODO: docstring", "id": "f2808:c0:m1"} {"signature": "def starter(comm_q, *args, **kwargs):", "body": "ic = Interchange(*args, **kwargs)comm_q.put(ic.worker_port)ic.start()logger.debug(\"\")", "docstring": "Start the interchange process\n\n The executor is expected to call this function. The args, kwargs match that of the Interchange.__init__", "id": "f2808:m1"} {"signature": "def status(self):", "body": "status = []if self.provider:status = self.provider.status(self.blocks)return status", "docstring": "Return status of all blocks.", "id": "f2809:c0:m9"} {"signature": "def scale_out(self, blocks=):", "body": "r = []for i in range(blocks):if self.provider:block = self.provider.submit(self.launch_cmd, , self.workers_per_node)logger.debug(\"\".format(i, block))if not block:raise(ScalingFailed(self.provider.label,\"\"))self.blocks.extend([block])else:logger.error(\"\")r = Nonereturn r", "docstring": "Scales out the number of active workers by the number of blocks specified.\n\n Parameters\n ----------\n\n blocks : int\n # of blocks to scale out. 
{"signature": "def scale_out(self, blocks=):", "body": "r = []for i in range(blocks):if self.provider:block = self.provider.submit(self.launch_cmd, , self.workers_per_node)logger.debug(\"\".format(i, block))if not block:raise(ScalingFailed(self.provider.label,\"\"))self.blocks.extend([block])else:logger.error(\"\")r = Nonereturn r", "docstring": "Scales out the number of active workers by the number of blocks specified.\n\n Parameters\n ----------\n\n blocks : int\n Number of blocks to scale out. Default=1\n\n Raises:\n ScalingFailed: if the provider does not return a block identifier", "id": "f2809:c0:m7"} {"signature": "def _queue_management_worker(self):", "body": "logger.debug(\"\")while True:task_id, buf = self.incoming_q.get() msg = deserialize_object(buf)[]task_fut = self.tasks[task_id]logger.debug(\"\".format(task_id))if \"\" in msg:task_fut.set_result(msg[\"\"])elif \"\" in msg:passelif '' in msg:logger.warning(\"\")try:s, _ = deserialize_object(msg[''])exception = ValueError(\"\".format(s))task_fut.set_exception(exception)except Exception as e:task_fut.set_exception(DeserializationError(\"\".format(e)))else:raise BadMessage(\"\")if not self.is_alive:breaklogger.info(\"\")", "docstring": "Listen on the incoming queue and resolve task futures with results or exceptions.", "id": "f2809:c0:m4"} {"signature": "def start(self):", "body": "self.outgoing_q = zmq_pipes.TasksOutgoing(\"\", self.interchange_port_range)self.incoming_q = zmq_pipes.ResultsIncoming(\"\", self.interchange_port_range)self.is_alive = Trueself._queue_management_thread = Noneself._start_queue_management_thread()self._start_local_queue_process()logger.debug(\"\".format(self._queue_management_thread))if self.provider:l_cmd = self.launch_cmd.format( task_url=self.worker_task_url,workers_per_node=self.workers_per_node,logdir=\"\".format(self.run_dir, self.label))self.launch_cmd = l_cmdlogger.debug(\"\".format(self.launch_cmd))self._scaling_enabled = self.provider.scaling_enabledlogger.debug(\"\", self.provider)if hasattr(self.provider, ''):try:for i in range(self.provider.init_blocks):block = self.provider.submit(self.launch_cmd, , self.workers_per_node)logger.debug(\"\".format(i, block))if not block:raise(ScalingFailed(self.provider.label,\"\"))self.blocks.extend([block])except Exception as e:logger.error(\"\".format(e))raise eelse:self._scaling_enabled = Falselogger.debug(\"\")", "docstring": "Create the Interchange process and connect to it.", "id": "f2809:c0:m1"}
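The scale_out record above (f2809:c0:m7) loops over the requested block count, submits the launch command to the provider once per block, and treats a falsy return value as a scaling failure. A runnable sketch of that loop with a stand-in provider; ``DummyProvider`` and its return value are invented for illustration:

.. code:: python

    class ScalingFailed(Exception):
        """Raised when the provider fails to hand back a block id."""

    class DummyProvider:
        label = "dummy"

        def submit(self, launch_cmd, tasks_per_node):
            return "block-42"  # a real provider returns a scheduler job id

    def scale_out(provider, launch_cmd, blocks, tracked):
        # Request `blocks` new blocks and record each returned identifier.
        for _ in range(blocks):
            job_id = provider.submit(launch_cmd, 1)
            if not job_id:
                raise ScalingFailed("%s: provider returned no job id" % provider.label)
            tracked.append(job_id)
        return tracked

    blocks = []
    scale_out(DummyProvider(), "run_worker.sh", 2, blocks)
    assert blocks == ["block-42", "block-42"]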
{"signature": "def scale_in(self, blocks):", "body": "to_kill = self.blocks[:blocks]if self.provider:r = self.provider.cancel(to_kill)return r", "docstring": "Scale in the number of active blocks by the specified amount.\n\n The scale-in method here is abrupt: it does not give the workers\n the opportunity to finish current tasks or clean up. This is tracked\n in issue #530", "id": "f2809:c0:m8"} {"signature": "@propertydef run_dir(self):", "body": "return self._run_dir", "docstring": "Path to the run directory.", "id": "f2810:c0:m6"} {"signature": "@abstractmethoddef submit(self, *args, **kwargs):", "body": "pass", "docstring": "Submit.\n\n We haven't yet decided on what the args to this can be,\n whether it should just be func, args, kwargs or be the partially evaluated\n fn", "id": "f2810:c0:m1"} {"signature": "@abstractmethoddef start(self, *args, **kwargs):", "body": "pass", "docstring": "Start the executor.\n\n Any spin-up operations (for example: starting thread pools) should be performed here.", "id": "f2810:c0:m0"} {"signature": "@abstractpropertydef scaling_enabled(self):", "body": "pass", "docstring": "Specify if scaling is enabled.\n\n The callers of ParslExecutors need to differentiate between Executors\n and Executors wrapped in a resource provider", "id": "f2810:c0:m5"} {"signature": "def submit(self, func, *args, **kwargs):", "body": "task_id = uuid.uuid4()logger.debug(\"\".format(func, args))self.tasks[task_id] = Future()fn_buf = pack_apply_message(func, args, kwargs,buffer_threshold= * ,item_threshold=)msg = {\"\": task_id,\"\": fn_buf}self.outgoing_q.put(msg)return self.tasks[task_id]", "docstring": "Submits work to the outgoing_q.\n\n An external process listens on this queue for new work. This method is simply\n a pass-through and behaves like a submit call as described here `Python docs: `_\n\n Args:\n - func (callable) : Callable function\n - *args (list) : List of arbitrary positional arguments.\n\n Kwargs:\n - **kwargs (dict) : A dictionary of arbitrary keyword args for func.\n\n Returns:\n Future", "id": "f2812:c0:m6"}
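The submit record just above (f2812:c0:m6) shows the queue-backed submission pattern: mint a task id, register a Future, serialize the call, and enqueue it for an external consumer such as the runner below. A self-contained sketch using stdlib pickling as a stand-in for ``pack_apply_message`` (that substitution is the only liberty taken here):

.. code:: python

    import pickle
    import queue
    import uuid
    from concurrent.futures import Future

    tasks = {}
    outgoing_q = queue.Queue()

    def submit(func, *args, **kwargs):
        # Register a Future, enqueue the serialized call, return the Future.
        task_id = uuid.uuid4()
        tasks[task_id] = Future()
        buf = pickle.dumps((func, args, kwargs))  # stand-in for pack_apply_message
        outgoing_q.put({"task_id": task_id, "buffer": buf})
        return tasks[task_id]

    fut = submit(sum, [1, 2, 3])
    # A consumer thread would normally resolve the Future; done inline here:
    msg = outgoing_q.get()
    f, a, kw = pickle.loads(msg["buffer"])
    tasks[msg["task_id"]].set_result(f(*a, **kw))
    assert fut.result() == 6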
{"signature": "def runner(incoming_q, outgoing_q):", "body": "logger.debug(\"\")def execute_task(bufs):\"\"\"\"\"\"user_ns = locals()user_ns.update({'': __builtins__})f, args, kwargs = unpack_apply_message(bufs, user_ns, copy=False)fname = getattr(f, '', '')prefix = \"\"fname = prefix + \"\"argname = prefix + \"\"kwargname = prefix + \"\"resultname = prefix + \"\"user_ns.update({fname: f,argname: args,kwargname: kwargs,resultname: resultname})code = \"\".format(resultname, fname,argname, kwargname)try:logger.debug(\"\".format(code))exec(code, user_ns, user_ns)except Exception as e:logger.warning(\"\".format(e))raise eelse:logger.debug(\"\".format(user_ns.get(resultname)))return user_ns.get(resultname)while True:try:msg = incoming_q.get(block=True, timeout=)except queue.Empty:logger.debug(\"\")except IOError as e:logger.debug(\"\".format(e))try:outgoing_q.put(None)except Exception:passbreakexcept Exception as e:logger.debug(\"\".format(e))else:if not msg:logger.debug(\"\")outgoing_q.put(None)breakelse:logger.debug(\"\".format(msg[\"\"]))try:response_obj = execute_task(msg[''])response = {\"\": msg[\"\"],\"\": serialize_object(response_obj)}logger.debug(\"\".format(deserialize_object(response[\"\"])))except Exception as e:logger.debug(\"\".format(e))response = {\"\": msg[\"\"],\"\": serialize_object(e)}outgoing_q.put(response)logger.debug(\"\")", "docstring": "This is a function that mocks the Swift-T side.\n\n It listens on the incoming_q for tasks and posts returns on the outgoing_q.\n\n Args:\n - incoming_q (Queue object) : The queue to listen on\n - outgoing_q (Queue object) : Queue to post results on\n\n The messages posted on the incoming_q will be of the form:\n\n .. code:: python\n\n {\n \"task_id\" : ,\n \"buffer\" : serialized buffer containing the fn, args and kwargs\n }\n\n If ``None`` is received, the runner will exit.\n\n Response messages should be of the form:\n\n .. code:: python\n\n {\n \"task_id\" : ,\n \"result\" : serialized buffer containing result\n \"exception\" : serialized exception object\n }\n\n On exiting the runner will post ``None`` to the outgoing_q", "id": "f2812:m0"} {"signature": "def _start_queue_management_thread(self):", "body": "logging.debug(\"\", \"\" * )if self._queue_management_thread is None:logging.debug(\"\")self._queue_management_thread = threading.Thread(target=self._queue_management_worker)self._queue_management_thread.daemon = Trueself._queue_management_thread.start()else:logging.debug(\"\")", "docstring": "Method to start the management thread as a daemon.\n\n Starts the thread only if one does not already exist.\n Could be used later as a restart if the management thread dies.", "id": "f2812:c0:m4"} {"signature": "def scale_in(self, workers):", "body": "raise NotImplementedError", "docstring": "Scale in the number of active blocks by the specified amount.\n\n This method is not implemented for turbine and will raise an error if called.\n\n Raises:\n NotImplementedError", "id": "f2812:c0:m9"} {"signature": "def __init__(self, label='', storage_access=None, working_dir=None, managed=True):", "body": "logger.debug(\"\")self.label = labelself.storage_access = storage_access if storage_access is not None else []if len(self.storage_access) > :raise ConfigurationError('')self.working_dir = working_dirself.managed = managed", "docstring": "Initialize the executor.\n\n Trying to implement the emews model.", "id": "f2812:c0:m0"} {"signature": "def scale_out(self, blocks=):", "body": "r = []for i in range(blocks):if self.provider:block = self.provider.submit(self.launch_cmd, , self.workers_per_node)logger.debug(\"\".format(i, block))if not block:raise(ScalingFailed(self.provider.label,\"\"))self.engines.extend([block])r.extend([block])else:logger.error(\"\")r = Nonereturn r", "docstring": "Scales out the number of active engines by the specified number of blocks.\n\n Parameters:\n blocks : int\n Number of blocks to be provisioned.", "id": "f2813:c0:m8"} {"signature": "def submit(self, *args, **kwargs):", "body": "return self.lb_view.apply_async(*args, **kwargs)", "docstring": "Submits work to the load-balanced view.\n\n This method is simply a pass-through and behaves like a submit call as described\n here `Python docs: `_\n\n Returns:\n Future", "id": "f2813:c0:m7"} {"signature": "def scale_in(self, blocks):", "body": "status = dict(zip(self.engines, self.provider.status(self.engines)))to_kill = [engine for engine in status if status[engine] == \"\"][:blocks]if self.provider:r = self.provider.cancel(to_kill)else:logger.error(\"\")r = Nonereturn r", "docstring": "Scale in the number of active blocks by the specified number.", "id": "f2813:c0:m9"} {"signature": "def serialize_object(obj, buffer_threshold=MAX_BYTES, item_threshold=MAX_ITEMS):", "body": "buffers = []if istype(obj, sequence_types) and len(obj) < item_threshold:cobj = can_sequence(obj)for c in cobj:buffers.extend(_extract_buffers(c, buffer_threshold))elif istype(obj, dict) and len(obj) < item_threshold:cobj = {}for k in sorted(obj):c = can(obj[k])buffers.extend(_extract_buffers(c, buffer_threshold))cobj[k] = celse:cobj = can(obj)buffers.extend(_extract_buffers(cobj, buffer_threshold))buffers.insert(, 
pickle.dumps(cobj, PICKLE_PROTOCOL))return buffers", "docstring": "Serialize an object into a list of sendable buffers.\n\n Parameters\n ----------\n\n obj : object\n The object to be serialized\n buffer_threshold : int\n The threshold (in bytes) for pulling out data buffers\n to avoid pickling them.\n item_threshold : int\n The maximum number of items over which canning will iterate.\n Containers (lists, dicts) larger than this will be pickled without\n introspection.\n\n Returns\n -------\n [bufs] : list of buffers representing the serialized object.", "id": "f2814:m3"} {"signature": "def _nbytes(buf):", "body": "if isinstance(buf, memoryview):if PY3:return buf.nbyteselse:size = buf.itemsizefor dim in buf.shape:size *= dimreturn sizeelse:return len(buf)", "docstring": "Return byte-size of a memoryview or buffer.", "id": "f2814:m0"} {"signature": "def _extract_buffers(obj, threshold=MAX_BYTES):", "body": "buffers = []if isinstance(obj, CannedObject) and obj.buffers:for i, buf in enumerate(obj.buffers):nbytes = _nbytes(buf)if nbytes > threshold:obj.buffers[i] = Nonebuffers.append(buf)elif isinstance(buf, memoryview):obj.buffers[i] = buf.tobytes()elif isinstance(buf, buffer):obj.buffers[i] = bytes(buf)return buffers", "docstring": "Extract buffers larger than a certain threshold.", "id": "f2814:m1"} {"signature": "def unpack_apply_message(bufs, g=None, copy=True):", "body": "bufs = list(bufs) assert len(bufs) >= , \"\"pf = buffer_to_bytes_py2(bufs.pop())f = uncan(pickle.loads(pf), g)pinfo = buffer_to_bytes_py2(bufs.pop())info = pickle.loads(pinfo)arg_bufs, kwarg_bufs = bufs[:info['']], bufs[info['']:]args = []for i in range(info['']):arg, arg_bufs = deserialize_object(arg_bufs, g)args.append(arg)args = tuple(args)assert not arg_bufs, \"\"kwargs = {}for key in info['']:kwarg, kwarg_bufs = deserialize_object(kwarg_bufs, g)kwargs[key] = kwargassert not kwarg_bufs, \"\"return f, args, kwargs", "docstring": "Unpack f,args,kwargs from buffers packed by pack_apply_message().\n\n Returns: original f,args,kwargs", "id": "f2814:m6"} {"signature": "def _restore_buffers(obj, buffers):", "body": "if isinstance(obj, CannedObject) and obj.buffers:for i, buf in enumerate(obj.buffers):if buf is None:obj.buffers[i] = buffers.pop()", "docstring": "Restore extracted buffers.", "id": "f2814:m2"} {"signature": "def use_pickle():", "body": "from . 
import serializeserialize.pickle = serialize._stdlib_picklecan_map[FunctionType] = _original_can_map[FunctionType]", "docstring": "Revert to using stdlib pickle.\n\n Reverts custom serialization enabled by use_dill|cloudpickle.", "id": "f2817:m4"} {"signature": "def _import_mapping(mapping, original=None):", "body": "for key, value in list(mapping.items()):if isinstance(key, string_types):try:cls = import_item(key)except Exception:if original and key not in original:print(\"\", key, exc_info=True)mapping.pop(key)else:mapping[cls] = mapping.pop(key)", "docstring": "Import any string-keys in a type mapping.", "id": "f2817:m5"} {"signature": "def __init__(self, obj, keys=[], hook=None):", "body": "self.keys = keysself.obj = copy.copy(obj)self.hook = can(hook)for key in keys:setattr(self.obj, key, can(getattr(obj, key)))self.buffers = []", "docstring": "Can an object for safe pickling.\n\n Parameters\n ==========\n\n obj:\n The object to be canned\n keys: list (optional)\n list of attribute names that will be explicitly canned / uncanned\n hook: callable (optional)\n An optional extra callable,\n which can do additional processing of the uncanned object.\n\n Large data may be offloaded into the buffers list,\n used for zero-copy transfers.", "id": "f2817:c0:m0"} {"signature": "def _get_cell_type(a=None):", "body": "def inner():return areturn type(py3compat.get_closure(inner)[])", "docstring": "The type of a closure cell doesn't seem to be importable, so just create one.", "id": "f2817:m0"} {"signature": "def istype(obj, check):", "body": "if isinstance(check, tuple):for cls in check:if type(obj) is cls:return Truereturn Falseelse:return type(obj) is check", "docstring": "Like isinstance(obj, check), but strict.\n\n This won't catch subclasses.", "id": "f2817:m6"} {"signature": "def __init__(self, ip_address, port_range):", "body": "self.context = zmq.Context()self.zmq_socket = self.context.socket(zmq.DEALER)self.zmq_socket.set_hwm()self.port = self.zmq_socket.bind_to_random_port(\"\".format(ip_address),min_port=port_range[],max_port=port_range[])self.poller = zmq.Poller()self.poller.register(self.zmq_socket, zmq.POLLOUT)", "docstring": "Parameters\n----------\n\nip_address: str\n IP address of the client (where Parsl runs)\nport_range: tuple(int, int)\n Port range for the comms between client and interchange", "id": "f2819:c1:m0"} {"signature": "def run(self, message):", "body": "self.zmq_socket.send_pyobj(message, copy=True)reply = self.zmq_socket.recv_pyobj()return reply", "docstring": "This function needs to be fast at the same time aware of the possibility of\n ZMQ pipes overflowing.\n\n The timeout increases slowly if contention is detected on ZMQ pipes.\n We could set copy=False and get slightly better latency but this results\n in ZMQ sockets reaching a broken state once there are ~10k tasks in flight.\n This issue can be magnified if each the serialized buffer itself is larger.", "id": "f2819:c0:m1"} {"signature": "def create_reg_message(self):", "body": "msg = {'': PARSL_VERSION,'': \"\".format(sys.version_info.major,sys.version_info.minor,sys.version_info.micro),'': self.worker_count,'': self.block_id,'': self.prefetch_capacity,'': self.worker_count + self.prefetch_capacity,'': platform.system(),'': platform.node(),'': os.getcwd(),}b_msg = json.dumps(msg).encode('')return b_msg", "docstring": "Creates a registration message to identify the worker to the interchange", "id": "f2820:c0:m1"} {"signature": "def worker(worker_id, pool_id, task_queue, result_queue, worker_queue):", "body": 
"start_file_logger(''.format(args.logdir, pool_id, worker_id),worker_id,name=\"\",level=logging.DEBUG if args.debug else logging.INFO)logger.info(''.format(worker_id))if args.debug:logger.debug(\"\")while True:worker_queue.put(worker_id)req = task_queue.get()tid = req['']logger.info(\"\".format(tid))try:worker_queue.get()except queue.Empty:logger.warning(\"\".format(worker_id))passtry:result = execute_task(req[''])serialized_result = serialize_object(result)except Exception:result_package = {'': tid, '': serialize_object(RemoteExceptionWrapper(*sys.exc_info()))}else:result_package = {'': tid, '': serialized_result}logger.info(\"\".format(tid))pkl_package = pickle.dumps(result_package)result_queue.put(pkl_package)", "docstring": "Put request token into queue\nGet task from task_queue\nPop request from queue\nPut result into result_queue", "id": "f2820:m1"} {"signature": "def push_results(self, kill_event):", "body": "logger.debug(\"\")push_poll_period = max(, self.poll_period) / logger.debug(\"\".format(push_poll_period))last_beat = time.time()items = []while not kill_event.is_set():try:r = self.pending_result_queue.get(block=True, timeout=push_poll_period)items.append(r)except queue.Empty:passexcept Exception as e:logger.exception(\"\".format(e))if len(items) >= self.max_queue_size or time.time() > last_beat + push_poll_period:last_beat = time.time()if items:self.result_outgoing.send_multipart(items)items = []logger.critical(\"\")", "docstring": "Listens on the pending_result_queue and sends out results via 0mq\n\n Parameters:\n -----------\n kill_event : threading.Event\n Event to let the thread know when it is time to die.", "id": "f2820:c0:m4"} {"signature": "def __init__(self,task_q_url=\"\",result_q_url=\"\",cores_per_worker=,max_workers=float(''),prefetch_capacity=,uid=None,block_id=None,heartbeat_threshold=,heartbeat_period=,poll_period=):", "body": "logger.info(\"\")self.context = zmq.Context()self.task_incoming = self.context.socket(zmq.DEALER)self.task_incoming.setsockopt(zmq.IDENTITY, uid.encode(''))self.task_incoming.setsockopt(zmq.LINGER, )self.task_incoming.connect(task_q_url)self.result_outgoing = self.context.socket(zmq.DEALER)self.result_outgoing.setsockopt(zmq.IDENTITY, uid.encode(''))self.result_outgoing.setsockopt(zmq.LINGER, )self.result_outgoing.connect(result_q_url)logger.info(\"\")self.uid = uidself.block_id = block_idcores_on_node = multiprocessing.cpu_count()self.max_workers = max_workersself.prefetch_capacity = prefetch_capacityself.worker_count = min(max_workers,math.floor(cores_on_node / cores_per_worker))logger.info(\"\".format(self.worker_count))self.pending_task_queue = multiprocessing.Queue()self.pending_result_queue = multiprocessing.Queue()self.ready_worker_queue = multiprocessing.Queue()self.max_queue_size = self.prefetch_capacity + self.worker_countself.tasks_per_round = self.heartbeat_period = heartbeat_periodself.heartbeat_threshold = heartbeat_thresholdself.poll_period = poll_period", "docstring": "Parameters\n----------\nworker_url : str\n Worker url on which workers will attempt to connect back\n\nuid : str\n string unique identifier\n\nblock_id : str\n Block identifier that maps managers to the provider blocks they belong to.\n\ncores_per_worker : float\n cores to be assigned to each worker. Oversubscription is possible\n by setting cores_per_worker < 1.0. 
Default=1\n\nmax_workers : int\n caps the maximum number of workers that can be launched.\n default: infinity\n\nprefetch_capacity : int\n Number of tasks that could be prefetched over available worker capacity.\n When there are a few tasks (<100) or when tasks are long running, this option should\n be set to 0 for better load balancing. Default is 0.\n\nheartbeat_threshold : int\n Seconds since the last message from the interchange after which the\n interchange is assumed to be un-available, and the manager initiates shutdown. Default:120s\n\n Number of seconds since the last message from the interchange after which the worker\n assumes that the interchange is lost and the manager shuts down. Default:120\n\nheartbeat_period : int\n Number of seconds after which a heartbeat message is sent to the interchange\n\npoll_period : int\n Timeout period used by the manager in milliseconds. Default: 10ms", "id": "f2820:c0:m0"} {"signature": "def set_stream_logger(name='', level=logging.DEBUG, format_string=None):", "body": "if format_string is None:format_string = \"\"global loggerlogger = logging.getLogger(name)logger.setLevel(logging.DEBUG)handler = logging.StreamHandler()handler.setLevel(level)formatter = logging.Formatter(format_string, datefmt='')handler.setFormatter(formatter)logger.addHandler(handler)", "docstring": "Add a stream log handler.\n\n Args:\n - name (string) : Set the logger name.\n - level (logging.LEVEL) : Set to logging.DEBUG by default.\n - format_string (sting) : Set to None by default.\n\n Returns:\n - None", "id": "f2820:m3"} {"signature": "def start_file_logger(filename, rank, name='', level=logging.DEBUG, format_string=None):", "body": "if format_string is None:format_string = \"\".format(rank)global loggerlogger = logging.getLogger(name)logger.setLevel(logging.DEBUG)handler = logging.FileHandler(filename)handler.setLevel(level)formatter = logging.Formatter(format_string, datefmt='')handler.setFormatter(formatter)logger.addHandler(handler)", "docstring": "Add a stream log handler.\n\n Args:\n - filename (string): Name of the file to write logs to\n - name (string): Logger name\n - level (logging.LEVEL): Set the logging level.\n - format_string (string): Set the format string\n\n Returns:\n - None", "id": "f2820:m2"} {"signature": "def __init__(self,client_address=\"\",interchange_address=\"\",client_ports=(, , ),worker_ports=None,worker_port_range=(, ),heartbeat_threshold=,logdir=\"\",logging_level=logging.INFO,poll_period=,suppress_failure=False,):", "body": "self.logdir = logdirtry:os.makedirs(self.logdir)except FileExistsError:passstart_file_logger(\"\".format(self.logdir), level=logging_level)logger.debug(\"\")self.client_address = client_addressself.interchange_address = interchange_addressself.suppress_failure = suppress_failureself.poll_period = poll_periodlogger.info(\"\".format(client_address, client_ports[], client_ports[], client_ports[]))self.context = zmq.Context()self.task_incoming = self.context.socket(zmq.DEALER)self.task_incoming.set_hwm()self.task_incoming.RCVTIMEO = self.task_incoming.connect(\"\".format(client_address, client_ports[]))self.results_outgoing = self.context.socket(zmq.DEALER)self.results_outgoing.set_hwm()self.results_outgoing.connect(\"\".format(client_address, client_ports[]))self.command_channel = self.context.socket(zmq.REP)self.command_channel.RCVTIMEO = self.command_channel.connect(\"\".format(client_address, client_ports[]))logger.info(\"\")self.pending_task_queue = queue.Queue(maxsize= ** )self.worker_ports = 
worker_portsself.worker_port_range = worker_port_rangeself.task_outgoing = self.context.socket(zmq.ROUTER)self.task_outgoing.set_hwm()self.results_incoming = self.context.socket(zmq.ROUTER)self.results_incoming.set_hwm()if self.worker_ports:self.worker_task_port = self.worker_ports[]self.worker_result_port = self.worker_ports[]self.task_outgoing.bind(\"\".format(self.worker_task_port))self.results_incoming.bind(\"\".format(self.worker_result_port))else:self.worker_task_port = self.task_outgoing.bind_to_random_port('',min_port=worker_port_range[],max_port=worker_port_range[], max_tries=)self.worker_result_port = self.results_incoming.bind_to_random_port('',min_port=worker_port_range[],max_port=worker_port_range[], max_tries=)logger.info(\"\".format(self.worker_task_port, self.worker_result_port))self._ready_manager_queue = {}self.heartbeat_threshold = heartbeat_thresholdself.current_platform = {'': PARSL_VERSION,'': \"\".format(sys.version_info.major,sys.version_info.minor,sys.version_info.micro),'': platform.system(),'': platform.node(),'': os.getcwd()}logger.info(\"\".format(self.current_platform))", "docstring": "Parameters\n----------\nclient_address : str\n The ip address at which the parsl client can be reached. Default: \"127.0.0.1\"\n\ninterchange_address : str\n The ip address at which the workers will be able to reach the Interchange. Default: \"127.0.0.1\"\n\nclient_ports : triple(int, int, int)\n The ports at which the client can be reached\n\nworker_ports : tuple(int, int)\n The specific two ports at which workers will connect to the Interchange. Default: None\n\nworker_port_range : tuple(int, int)\n The interchange picks ports at random from the range which will be used by workers.\n This is overridden when the worker_ports option is set. Defauls: (54000, 55000)\n\nheartbeat_threshold : int\n Number of seconds since the last heartbeat after which worker is considered lost.\n\nlogdir : str\n Parsl log directory paths. Logs and temp files go here. Default: '.'\n\nlogging_level : int\n Logging level as defined in the logging module. Default: logging.INFO (20)\n\npoll_period : int\n The main thread polling period, in milliseconds. Default: 10ms\n\nsuppress_failure : Bool\n When set to True, the interchange will attempt to suppress failures. 
Default: False", "id": "f2821:c3:m0"} {"signature": "def start(self, poll_period=None):", "body": "logger.info(\"\")if poll_period is None:poll_period = self.poll_periodstart = time.time()count = self._kill_event = threading.Event()self._task_puller_thread = threading.Thread(target=self.migrate_tasks_to_internal,args=(self._kill_event,))self._task_puller_thread.start()self._command_thread = threading.Thread(target=self._command_server,args=(self._kill_event,))self._command_thread.start()poller = zmq.Poller()poller.register(self.task_outgoing, zmq.POLLIN)poller.register(self.results_incoming, zmq.POLLIN)interesting_managers = set()while not self._kill_event.is_set():self.socks = dict(poller.poll(timeout=poll_period))if self.task_outgoing in self.socks and self.socks[self.task_outgoing] == zmq.POLLIN:logger.debug(\"\")message = self.task_outgoing.recv_multipart()manager = message[]if manager not in self._ready_manager_queue:reg_flag = Falsetry:msg = json.loads(message[].decode(''))reg_flag = Trueexcept Exception:logger.warning(\"\".format(manager))logger.debug(\"\".format(message[]))self._ready_manager_queue[manager] = {'': time.time(),'': ,'': None,'': ,'': True,'': []}if reg_flag is True:interesting_managers.add(manager)logger.info(\"\".format(manager))self._ready_manager_queue[manager].update(msg)logger.info(\"\".format(manager, msg))if (msg[''].rsplit(\"\", )[] != self.current_platform[''].rsplit(\"\", )[] ormsg[''] != self.current_platform['']):logger.warn(\"\".format(manager))if self.suppress_failure is False:logger.debug(\"\")self._kill_event.set()e = ManagerLost(manager)result_package = {'': -, '': serialize_object(e)}pkl_package = pickle.dumps(result_package)self.results_outgoing.send(pkl_package)logger.warning(\"\")else:logger.debug(\"\")else:logger.info(\"\".format(manager, msg['']))logger.info(\"\".format(manager,msg[''].rsplit(\"\", )[]))else:if self.suppress_failure is False:self._kill_event.set()e = BadRegistration(manager, critical=True)result_package = {'': -, '': serialize_object(e)}pkl_package = pickle.dumps(result_package)self.results_outgoing.send(pkl_package)else:logger.debug(\"\".format(manager))else:tasks_requested = int.from_bytes(message[], \"\")self._ready_manager_queue[manager][''] = time.time()if tasks_requested == HEARTBEAT_CODE:logger.debug(\"\".format(manager))self.task_outgoing.send_multipart([manager, b'', PKL_HEARTBEAT_CODE])else:logger.debug(\"\".format(manager, tasks_requested))self._ready_manager_queue[manager][''] = tasks_requestedinteresting_managers.add(manager)logger.debug(\"\")logger.debug(\"\".format(len(self._ready_manager_queue),len(interesting_managers)))if interesting_managers and not self.pending_task_queue.empty():shuffled_managers = list(interesting_managers)random.shuffle(shuffled_managers)while shuffled_managers and not self.pending_task_queue.empty(): manager = shuffled_managers.pop()tasks_inflight = len(self._ready_manager_queue[manager][''])real_capacity = min(self._ready_manager_queue[manager][''],self._ready_manager_queue[manager][''] - tasks_inflight)if (real_capacity and self._ready_manager_queue[manager]['']):tasks = self.get_tasks(real_capacity)if tasks:self.task_outgoing.send_multipart([manager, b'', pickle.dumps(tasks)])task_count = len(tasks)count += task_counttids = [t[''] for t in tasks]self._ready_manager_queue[manager][''] -= task_countself._ready_manager_queue[manager][''].extend(tids)logger.debug(\"\".format(tids, manager))if self._ready_manager_queue[manager][''] > :logger.debug(\"\".format(manager, 
self._ready_manager_queue[manager]['']))else:logger.debug(\"\".format(manager))interesting_managers.remove(manager)else:interesting_managers.remove(manager)logger.debug(\"\".format(len(interesting_managers)))else:logger.debug(\"\")if self.results_incoming in self.socks and self.socks[self.results_incoming] == zmq.POLLIN:logger.debug(\"\")manager, *b_messages = self.results_incoming.recv_multipart()if manager not in self._ready_manager_queue:logger.warning(\"\".format(manager))else:logger.debug(\"\".format(len(b_messages)))for b_message in b_messages:r = pickle.loads(b_message)self._ready_manager_queue[manager][''].remove(r[''])self.results_outgoing.send_multipart(b_messages)logger.debug(\"\".format(self._ready_manager_queue[manager]['']))logger.debug(\"\")logger.debug(\"\")bad_managers = [manager for manager in self._ready_manager_queue iftime.time() - self._ready_manager_queue[manager][''] > self.heartbeat_threshold]for manager in bad_managers:logger.debug(\"\".format(self._ready_manager_queue[manager][''], time.time()))logger.warning(\"\".format(manager))for tid in self._ready_manager_queue[manager]['']:try:raise ManagerLost(manager)except Exception:result_package = {'': tid, '': serialize_object(RemoteExceptionWrapper(*sys.exc_info()))}pkl_package = pickle.dumps(result_package)self.results_outgoing.send(pkl_package)logger.warning(\"\")self._ready_manager_queue.pop(manager, '')logger.debug(\"\")logger.debug(\"\")delta = time.time() - startlogger.info(\"\".format(count, delta))logger.warning(\"\")", "docstring": "Start the NeedNameQeueu\n\n Parameters:\n ----------\n\n TODO: Move task receiving to a thread", "id": "f2821:c3:m4"} {"signature": "def start_file_logger(filename, name='', level=logging.DEBUG, format_string=None):", "body": "if format_string is None:format_string = \"\"global loggerlogger = logging.getLogger(name)logger.setLevel(level)handler = logging.FileHandler(filename)handler.setLevel(level)formatter = logging.Formatter(format_string, datefmt='')handler.setFormatter(formatter)logger.addHandler(handler)", "docstring": "Add a stream log handler.\n\n Parameters\n ---------\n\n filename: string\n Name of the file to write logs to. Required.\n name: string\n Logger name. Default=\"parsl.executors.interchange\"\n level: logging.LEVEL\n Set the logging level. Default=logging.DEBUG\n - format_string (string): Set the format string\n format_string: string\n Format string to use.\n\n Returns\n -------\n None.", "id": "f2821:m0"} {"signature": "def get_tasks(self, count):", "body": "tasks = []for i in range(, count):try:x = self.pending_task_queue.get(block=False)except queue.Empty:breakelse:tasks.append(x)return tasks", "docstring": "Obtains a batch of tasks from the internal pending_task_queue\n\n Parameters\n ----------\n count: int\n Count of tasks to get from the queue\n\n Returns\n -------\n List of upto count tasks. May return fewer than count down to an empty list\n eg. [{'task_id':, 'buffer':} ... 
]", "id": "f2821:c3:m1"} {"signature": "def _start_local_queue_process(self):", "body": "comm_q = Queue(maxsize=)self.queue_proc = Process(target=interchange.starter,args=(comm_q,),kwargs={\"\": (self.outgoing_q.port,self.incoming_q.port,self.command_client.port),\"\": self.worker_ports,\"\": self.worker_port_range,\"\": \"\".format(self.run_dir, self.label),\"\": self.suppress_failure,\"\": self.heartbeat_threshold,\"\": self.poll_period,\"\": logging.DEBUG if self.worker_debug else logging.INFO},)self.queue_proc.start()try:(worker_task_port, worker_result_port) = comm_q.get(block=True, timeout=)except queue.Empty:logger.error(\"\")raise Exception(\"\")self.worker_task_url = \"\".format(self.address, worker_task_port)self.worker_result_url = \"\".format(self.address, worker_result_port)", "docstring": "Starts the interchange process locally\n\n Starts the interchange process locally and uses an internal command queue to\n get the worker task and result ports that the interchange has bound to.", "id": "f2822:c0:m5"} {"signature": "def _queue_management_worker(self):", "body": "logger.debug(\"\")while not self._executor_bad_state.is_set():try:msgs = self.incoming_q.get(timeout=)except queue.Empty:logger.debug(\"\")passexcept IOError as e:logger.exception(\"\".format(e.errno, e))returnexcept Exception as e:logger.exception(\"\".format(e))returnelse:if msgs is None:logger.debug(\"\")returnelse:for serialized_msg in msgs:try:msg = pickle.loads(serialized_msg)tid = msg['']except pickle.UnpicklingError:raise BadMessage(\"\")except Exception:raise BadMessage(\"\")if tid == - and '' in msg:logger.warning(\"\")self._executor_exception, _ = deserialize_object(msg[''])logger.exception(\"\".format(self._executor_exception))self._executor_bad_state.set()for task in self.tasks:self.tasks[task].set_exception(self._executor_exception)breaktask_fut = self.tasks[tid]if '' in msg:result, _ = deserialize_object(msg[''])task_fut.set_result(result)elif '' in msg:try:s, _ = deserialize_object(msg[''])try:s.reraise()except Exception as e:task_fut.set_exception(e)except Exception as e:task_fut.set_exception(DeserializationError(\"\".format(e)))else:raise BadMessage(\"\")if not self.is_alive:breaklogger.info(\"\")", "docstring": "Listen to the queue for task status messages and handle them.\n\n Depending on the message, tasks will be updated with results, exceptions,\n or updates. It expects the following messages:\n\n .. code:: python\n\n {\n \"task_id\" : \n \"result\" : serialized result object, if task succeeded\n ... more tags could be added later\n }\n\n {\n \"task_id\" : \n \"exception\" : serialized exception object, on failure\n }\n\n We do not support these yet, but they could be added easily.\n\n .. 
code:: python\n\n {\n \"task_id\" : \n \"cpu_stat\" : <>\n \"mem_stat\" : <>\n \"io_stat\" : <>\n \"started\" : tstamp\n }\n\n The `None` message is a die request.", "id": "f2822:c0:m3"} {"signature": "def _hold_block(self, block_id):", "body": "managers = self.connected_managersfor manager in managers:if manager[''] == block_id:logger.debug(\"\".format(manager['']))self.hold_worker(manager[''])", "docstring": "Sends hold command to all managers which are in a specific block\n\n Parameters\n ----------\n block_id : str\n Block identifier of the block to be put on hold", "id": "f2822:c0:m11"} {"signature": "def initialize_scaling(self):", "body": "debug_opts = \"\" if self.worker_debug else \"\"max_workers = \"\" if self.max_workers == float('') else \"\".format(self.max_workers)worker_logdir = \"\".format(self.run_dir, self.label)if self.worker_logdir_root is not None:worker_logdir = \"\".format(self.worker_logdir_root, self.label)l_cmd = self.launch_cmd.format(debug=debug_opts,prefetch_capacity=self.prefetch_capacity,task_url=self.worker_task_url,result_url=self.worker_result_url,cores_per_worker=self.cores_per_worker,max_workers=max_workers,nodes_per_block=self.provider.nodes_per_block,heartbeat_period=self.heartbeat_period,heartbeat_threshold=self.heartbeat_threshold,poll_period=self.poll_period,logdir=worker_logdir)self.launch_cmd = l_cmdlogger.debug(\"\".format(self.launch_cmd))self._scaling_enabled = self.provider.scaling_enabledlogger.debug(\"\", self.provider)if hasattr(self.provider, ''):try:self.scale_out(blocks=self.provider.init_blocks)except Exception as e:logger.error(\"\".format(e))raise e", "docstring": "Compose the launch command and call the scale_out\n\n This should be implemented in the child classes to take care of\n executor specific oddities.", "id": "f2822:c0:m1"} {"signature": "def hold_worker(self, worker_id):", "body": "c = self.command_client.run(\"\".format(worker_id))logger.debug(\"\".format(worker_id))return c", "docstring": "Puts a worker on hold, preventing scheduling of additional tasks to it.\n\n This is called \"hold\" mostly because this only stops scheduling of tasks,\n and does not actually kill the worker.\n\n Parameters\n ----------\n\n worker_id : str\n Worker id to be put on hold", "id": "f2822:c0:m7"} {"signature": "def _start_queue_management_thread(self):", "body": "if self._queue_management_thread is None:logger.debug(\"\")self._queue_management_thread = threading.Thread(target=self._queue_management_worker)self._queue_management_thread.daemon = Trueself._queue_management_thread.start()logger.debug(\"\")else:logger.debug(\"\")", "docstring": "Method to start the management thread as a daemon.\n\n Checks if a thread already exists, then starts it.\n Could be used later as a restart if the management thread dies.", "id": "f2822:c0:m6"} {"signature": "def scale_out(self, blocks=):", "body": "r = []for i in range(blocks):if self.provider:external_block_id = str(len(self.blocks))launch_cmd = self.launch_cmd.format(block_id=external_block_id)internal_block = self.provider.submit(launch_cmd, , )logger.debug(\"\".format(external_block_id, internal_block))if not internal_block:raise(ScalingFailed(self.provider.label,\"\"))r.extend([external_block_id])self.blocks[external_block_id] = internal_blockelse:logger.error(\"\")r = Nonereturn r", "docstring": "Scales out the number of blocks by \"blocks\"\n\n Raises:\n NotImplementedError", "id": "f2822:c0:m14"} {"signature": "def start(self):", "body": "self.outgoing_q = zmq_pipes.TasksOutgoing(\"\", 
self.interchange_port_range)self.incoming_q = zmq_pipes.ResultsIncoming(\"\", self.interchange_port_range)self.command_client = zmq_pipes.CommandClient(\"\", self.interchange_port_range)self.is_alive = Trueself._executor_bad_state = threading.Event()self._executor_exception = Noneself._queue_management_thread = Noneself._start_queue_management_thread()self._start_local_queue_process()logger.debug(\"\".format(self._queue_management_thread))if self.provider:self.initialize_scaling()else:self._scaling_enabled = Falselogger.debug(\"\")", "docstring": "Create the Interchange process and connect to it.", "id": "f2822:c0:m2"} {"signature": "def pull_tasks(self, kill_event):", "body": "logger.info(\"\")poller = zmq.Poller()poller.register(self.task_incoming, zmq.POLLIN)msg = self.create_reg_message()logger.debug(\"\".format(msg))self.task_incoming.send(msg)last_beat = time.time()last_interchange_contact = time.time()task_recv_counter = poll_timer = while not kill_event.is_set():time.sleep(LOOP_SLOWDOWN)ready_worker_count = self.ready_worker_queue.qsize()pending_task_count = self.pending_task_queue.qsize()logger.debug(\"\".format(ready_worker_count,pending_task_count))if time.time() > last_beat + self.heartbeat_period:self.heartbeat()last_beat = time.time()if pending_task_count < self.max_queue_size and ready_worker_count > :logger.debug(\"\".format(ready_worker_count))msg = ((ready_worker_count).to_bytes(, \"\"))self.task_incoming.send(msg)socks = dict(poller.poll(timeout=poll_timer))if self.task_incoming in socks and socks[self.task_incoming] == zmq.POLLIN:_, pkl_msg = self.task_incoming.recv_multipart()tasks = pickle.loads(pkl_msg)last_interchange_contact = time.time()if tasks == '':logger.critical(\"\")kill_event.set()breakelif tasks == HEARTBEAT_CODE:logger.debug(\"\")else:poll_timer = task_recv_counter += len(tasks)logger.debug(\"\".format([t[''] for t in tasks],task_recv_counter))for task in tasks:self.pending_task_queue.put(task)else:logger.debug(\"\")poll_timer = min(self.heartbeat_period * , poll_timer * )if time.time() > last_interchange_contact + self.heartbeat_threshold:logger.critical(\"\")kill_event.set()logger.critical(\"\")break", "docstring": "Pulls tasks from the incoming tasks 0mq pipe onto the internal\n pending task queue\n\n Parameters:\n -----------\n kill_event : threading.Event\n Event to let the thread know when it is time to die.", "id": "f2823:c0:m5"} {"signature": "def set_stream_logger(name='', level=logging.DEBUG, format_string=None):", "body": "if format_string is None:format_string = \"\"global loggerlogger = logging.getLogger(name)logger.setLevel(logging.DEBUG)handler = logging.StreamHandler()handler.setLevel(level)formatter = logging.Formatter(format_string, datefmt='')handler.setFormatter(formatter)logger.addHandler(handler)", "docstring": "Add a stream log handler.\n\n Args:\n - name (string) : Set the logger name.\n - level (logging.LEVEL) : Set to logging.DEBUG by default.\n - format_string (sting) : Set to None by default.\n\n Returns:\n - None", "id": "f2823:m3"} {"signature": "def recv_result_from_workers(self):", "body": "info = MPI.Status()result = self.comm.recv(source=MPI.ANY_SOURCE, tag=RESULT_TAG, status=info)logger.debug(\"\".format(result))return result", "docstring": "Receives a results from the MPI worker pool and send it out via 0mq\n\n Returns:\n --------\n result: task result from the workers", "id": "f2823:c0:m3"} {"signature": "def __call__(self, command, tasks_per_node, nodes_per_block, walltime=None):", "body": "task_blocks = 
tasks_per_node * nodes_per_blockx =", "docstring": "Args:\n- command (string): The command string to be launched\n- tasks_per_node (int) : Command invocations per node\n- nodes_per_block (int) : Nodes per block; task_blocks is computed as tasks_per_node * nodes_per_block\n\nKwargs:\n- walltime (int) : This is not used by this launcher.", "id": "f2826:c5:m1"} {"signature": "def __call__(self, command, tasks_per_node, nodes_per_block, walltime=None):", "body": "task_blocks = tasks_per_node * nodes_per_blockx =", "docstring": "Args:\n- command (string): The command string to be launched\n- tasks_per_node (int) : Command invocations per node\n- nodes_per_block (int) : Nodes per block; task_blocks is computed as tasks_per_node * nodes_per_block\n\nKwargs:\n- walltime (int) : This is not used by this launcher.", "id": "f2826:c6:m1"} {"signature": "@abstractmethoddef __call__(self, command, tasks_per_node, nodes_per_block, walltime=None):", "body": "pass", "docstring": "Wraps the command with the Launcher calls.\n *MUST* be implemented by the concrete child classes", "id": "f2826:c0:m0"} {"signature": "def __call__(self, command, tasks_per_node, nodes_per_block, walltime=None):", "body": "task_blocks = tasks_per_node * nodes_per_blockx =", "docstring": "Args:\n- command (string): The command string to be launched\n- tasks_per_node (int) : Command invocations per node\n- nodes_per_block (int) : Nodes per block; task_blocks is computed as tasks_per_node * nodes_per_block\n\nKwargs:\n- walltime (int) : This is not used by this launcher.", "id": "f2826:c4:m0"} {"signature": "def get_last_checkpoint(rundir=\"\"):", "body": "if not os.path.isdir(rundir):return []dirs = sorted(os.listdir(rundir))if len(dirs) == :return []last_runid = dirs[-]last_checkpoint = os.path.abspath(''.format(rundir, last_runid))if(not(os.path.isdir(last_checkpoint))):return []return [last_checkpoint]", "docstring": "Find the checkpoint from the last run, if one exists.\n\n Note that checkpoints are incremental, and this helper will not find\n previous checkpoints from earlier than the most recent run. It probably\n should be made to do so.\n\n Kwargs:\n - rundir(str) : Path to the runinfo directory\n\n Returns:\n - a list suitable for the checkpointFiles parameter of the DataFlowKernel\n constructor, with 0 or 1 elements", "id": "f2829:m2"}
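get_last_checkpoint (f2829:m2) relies on run directories sorting lexicographically inside the runinfo directory, so the newest run id is simply the last entry of the sorted listing. A minimal rendition of that lookup; the record's path literals are stripped, so the ``runinfo/<runid>/checkpoint`` layout is assumed:

.. code:: python

    import os

    def last_checkpoint(rundir="runinfo"):
        # Return [path-to-most-recent-checkpoint], or [] if none exists.
        if not os.path.isdir(rundir):
            return []
        runs = sorted(os.listdir(rundir))
        if not runs:
            return []
        candidate = os.path.abspath(os.path.join(rundir, runs[-1], "checkpoint"))
        return [candidate] if os.path.isdir(candidate) else []

    print(last_checkpoint())  # [] unless ./runinfo/<runid>/checkpoint exists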
{"signature": "def get_all_checkpoints(rundir=\"\"):", "body": "if(not os.path.isdir(rundir)):return []dirs = sorted(os.listdir(rundir))checkpoints = []for runid in dirs:checkpoint = os.path.abspath(''.format(rundir, runid))if os.path.isdir(checkpoint):checkpoints.append(checkpoint)return checkpoints", "docstring": "Finds the checkpoints from all previous runs.\n\n Note that checkpoints are incremental; this helper returns the checkpoint\n directory of every run found under rundir.\n\n Kwargs:\n - rundir(str) : Path to the runinfo directory\n\n Returns:\n - a list suitable for the checkpointFiles parameter of the DataFlowKernel\n constructor", "id": "f2829:m1"} {"signature": "def teardown_module(module):", "body": "p_this = Path(__file__)to_remove_list = list(p_this.parent.select_by_ext(\"\"))for p in to_remove_list:if p.exists():p.remove()", "docstring": "Remove temp files and dirs created for the test.", "id": "f2875:m0"} {"signature": "def setup_module(module):", "body": "p = Path(__file__).change(new_basename=\"\")try:shutil.copytree(p.abspath, p.change(new_basename=\"\").abspath)except Exception as e:passp = Path(__file__).change(new_basename=\"\")with open(p.abspath, \"\") as f:f.write(\"\".encode(\"\"))p = Path(__file__).change(new_basename=\"\")with open(p.abspath, \"\") as f:f.write(\"\".encode(\"\"))", "docstring": "Create temp files and dirs for the test.\n\n- create a new folder ``wow``\n- create two files ``wow/file_to_move.txt`` and ``wow/file_to_copy.txt``", "id": "f2877:m0"} {"signature": "def get_text_fingerprint(text, hash_meth, encoding=\"\"): ", "body": "m = hash_meth()m.update(text.encode(encoding))return m.hexdigest()", "docstring": "Use the given hash method to return the hash value of a piece of text;\nby default the 'utf-8' encoding is used.", "id": "f2879:m0"} {"signature": "def sha256file(abspath, nbytes=, chunk_size=DEFAULT_CHUNK_SIZE):", "body": "return get_file_fingerprint(abspath, hashlib.sha256, nbytes=nbytes, chunk_size=chunk_size)", "docstring": "Return the sha256 hash value of a piece of a file.\n\n:param abspath: the absolute path to the file\n:param nbytes: only hash the first N bytes of the file; if 0 or None,\n hash the whole file", "id": "f2879:m3"} {"signature": "def get_partial_md5(self, nbytes):", "body": "return md5file(abspath=self.abspath, nbytes=nbytes)", "docstring": "Return the md5 checksum of the first n bytes of this file.", "id": "f2880:c0:m0"} {"signature": "def get_partial_sha256(self, nbytes):", "body": "return sha256file(abspath=self.abspath, nbytes=nbytes)", "docstring": "Return the sha256 checksum of the first n bytes of this file.", "id": "f2880:c0:m2"} {"signature": "def make_zip_archive(self,dst=None,filters=all_true,compress=True,overwrite=False,makedirs=False,verbose=False): ", "body": "self.assert_exists()if dst is None:dst = self._auto_zip_archive_dst()else:dst = self.change(new_abspath=dst)if not dst.basename.lower().endswith(\"\"):raise ValueError(\"\")if dst.exists():if not overwrite:raise IOError(\"\" % dst)if compress:compression = ZIP_DEFLATEDelse:compression = ZIP_STOREDif not dst.parent.exists():if makedirs:os.makedirs(dst.parent.abspath)if verbose:msg = \"\" % selfprint(msg)current_dir = os.getcwd()if self.is_dir():total_size = selected = list()for p in self.glob(\"\"):if filters(p):selected.append(p)total_size += p.sizeif verbose:msg = \"\".format(len(selected), repr_data_size(total_size),)print(msg)with ZipFile(dst.abspath, \"\", compression) as f:os.chdir(self.abspath)for p in selected:relpath = p.relative_to(self).__str__()f.write(relpath)elif self.is_file():with ZipFile(dst.abspath, \"\", compression) as f:os.chdir(self.parent.abspath)f.write(self.basename)os.chdir(current_dir)if verbose:msg = \"\".format(dst.size_in_text)print(msg)", "docstring": "Make a zip archive.\n\n:param dst: output file path. If not given, it will be automatically assigned.\n:param filters: custom path filter. 
By default it allows any file.\n:param compress: compress or not.\n:param overwrite: overwrite exists or not.\n:param verbose: display log or not.\n:return:", "id": "f2881:c0:m1"} {"signature": "def backup(self,dst=None,ignore=None,ignore_ext=None,ignore_pattern=None,ignore_size_smaller_than=None,ignore_size_larger_than=None,case_sensitive=False): ", "body": "def preprocess_arg(arg): if arg is None:return []if isinstance(arg, (tuple, list)):return list(arg)else:return [arg, ]self.assert_is_dir_and_exists()ignore = preprocess_arg(ignore)for i in ignore:if i.startswith(\"\") or i.startswith(\"\"):raise ValueErrorignore_ext = preprocess_arg(ignore_ext)for ext in ignore_ext:if not ext.startswith(\"\"):raise ValueErrorignore_pattern = preprocess_arg(ignore_pattern)if case_sensitive:passelse:ignore = [i.lower() for i in ignore]ignore_ext = [i.lower() for i in ignore_ext]ignore_pattern = [i.lower() for i in ignore_pattern]def filters(p):relpath = p.relative_to(self).abspathif not case_sensitive:relpath = relpath.lower()for i in ignore:if relpath.startswith(i):return Falseif case_sensitive:ext = p.extelse:ext = p.ext.lower()if ext in ignore_ext:return Falsefor pattern in ignore_pattern:if pattern in relpath:return Falseif ignore_size_smaller_than:if p.size < ignore_size_smaller_than:return Falseif ignore_size_larger_than:if p.size > ignore_size_larger_than:return Falsereturn Trueself.make_zip_archive(dst=dst, filters=filters, compress=True, overwrite=False, verbose=True,)", "docstring": "Create a compressed zip archive backup for a directory.\n\n:param dst: the output file path.\n:param ignore: file or directory defined in this list will be ignored.\n:param ignore_ext: file with extensions defined in this list will be ignored.\n:param ignore_pattern: any file or directory that contains this pattern\n will be ignored.\n:param ignore_size_smaller_than: any file size smaller than this\n will be ignored.\n:param ignore_size_larger_than: any file size larger than this\n will be ignored.\n\n**\u4e2d\u6587\u6587\u6863**\n\n\u4e3a\u4e00\u4e2a\u76ee\u5f55\u521b\u5efa\u4e00\u4e2a\u5907\u4efd\u538b\u7f29\u5305\u3002\u53ef\u4ee5\u901a\u8fc7\u8fc7\u6ee4\u5668\u9009\u62e9\u4f60\u8981\u5907\u4efd\u7684\u6587\u4ef6\u3002", "id": "f2881:c0:m2"} {"signature": "@propertydef fname_hexstr(self):", "body": "return encode_hexstr(self.fname)", "docstring": "File name encoded in hex string.", "id": "f2882:c0:m9"} {"signature": "@propertydef mtime(self):", "body": "try:return self._stat.st_mtimeexcept: self._stat = self.stat()return self.mtime", "docstring": "Get most recent modify time in timestamp.", "id": "f2882:c0:m13"} {"signature": "@propertydef basename_hexstr(self):", "body": "return encode_hexstr(self.basename)", "docstring": "File name with extension encoded in hex string.", "id": "f2882:c0:m7"} {"signature": "@propertydef ctime(self):", "body": "try:return self._stat.st_ctimeexcept: self._stat = self.stat()return self.ctime", "docstring": "Get most recent create time in timestamp.", "id": "f2882:c0:m15"} {"signature": "@propertydef create_datetime(self):", "body": "return datetime.fromtimestamp(self.ctime)", "docstring": "Get most recent create time in datetime.", "id": "f2882:c0:m18"} {"signature": "@propertydef abspath(self):", "body": "return self.absolute().__str__()", "docstring": "r\"\"\"\n Absolute path.\n\n Example: ``C:\\User\\admin\\readme.txt`` for ``C:\\User\\admin\\readme.txt``", "id": "f2882:c0:m0"} {"signature": "@propertydef dirpath_hexstr(self):", "body": "return encode_hexstr(self.dirpath)", 
"docstring": "Return dir full absolute path encoded in hex string.", "id": "f2882:c0:m3"} {"signature": "@propertydef fname(self):", "body": "return self.stem", "docstring": "r\"\"\"\n File name without extension.\n\n Example: ``readme`` for ``C:\\User\\admin\\readme.txt``", "id": "f2882:c0:m8"} {"signature": "@propertydef ext(self):", "body": "return self.suffix", "docstring": "r\"\"\"\n File extension. If it's a dir, then return empty str.\n\n Example: ``.txt`` for ``C:\\User\\admin\\readme.txt``", "id": "f2882:c0:m10"} {"signature": "def touch(self, mode=, exist_ok=True):", "body": "if self._closed:self._raise_closed()if exist_ok:try:self._accessor.utime(self, None)except OSError:passelse:returnflags = os.O_CREAT | os.O_WRONLYif not exist_ok:flags |= os.O_EXCLfd = self._raw_open(flags, mode)os.close(fd)", "docstring": "Create this file with the given access mode, if it doesn't exist.", "id": "f2883:c14:m24"} {"signature": "def rmdir(self):", "body": "if self._closed:self._raise_closed()self._accessor.rmdir(self)", "docstring": "Remove this directory. The directory must be empty.", "id": "f2883:c14:m29"} {"signature": "def joinpath(self, *args):", "body": "return self._make_child(args)", "docstring": "Combine this path with one or several arguments, and return a\n new path representing either a subpath (if all arguments are relative\n paths) or a totally different path (if one of the arguments is\n anchored).", "id": "f2883:c11:m31"} {"signature": "def with_suffix(self, suffix):", "body": "f = self._flavourif f.sep in suffix or f.altsep and f.altsep in suffix:raise ValueError(\"\" % (suffix))if suffix and not suffix.startswith('') or suffix == '':raise ValueError(\"\" % (suffix))name = self.nameif not name:raise ValueError(\"\" % (self,))old_suffix = self.suffixif not old_suffix:name = name + suffixelse:name = name[:-len(old_suffix)] + suffixreturn self._from_parsed_parts(self._drv, self._root,self._parts[:-] + [name])", "docstring": "Return a new path with the file suffix changed (or added, if\n none).", "id": "f2883:c11:m28"} {"signature": "def chmod(self, mode):", "body": "if self._closed:self._raise_closed()self._accessor.chmod(self, mode)", "docstring": "Change the permissions of the path, like os.chmod().", "id": "f2883:c14:m26"} {"signature": "def is_fifo(self):", "body": "try:return S_ISFIFO(self.stat().st_mode)except OSError as e:if e.errno not in (ENOENT, ENOTDIR):raisereturn False", "docstring": "Whether this path is a FIFO.", "id": "f2883:c14:m40"} {"signature": "def stat(self):", "body": "return self._accessor.stat(self)", "docstring": "Return the result of the stat() system call on this path, like\nos.stat() does.", "id": "f2883:c14:m16"} {"signature": "def match(self, path_pattern):", "body": "cf = self._flavour.casefoldpath_pattern = cf(path_pattern)drv, root, pat_parts = self._flavour.parse_parts((path_pattern,))if not pat_parts:raise ValueError(\"\")if drv and drv != cf(self._drv):return Falseif root and root != cf(self._root):return Falseparts = self._cpartsif drv or root:if len(pat_parts) != len(parts):return Falsepat_parts = pat_parts[:]elif len(pat_parts) > len(parts):return Falsefor part, pat in zip(reversed(parts), reversed(pat_parts)):if not fnmatch.fnmatchcase(part, pat):return Falsereturn True", "docstring": "Return True if this path matches the given pattern.", "id": "f2883:c11:m38"} {"signature": "def rename(self, target):", "body": "if self._closed:self._raise_closed()self._accessor.rename(self, target)", "docstring": "Rename this path to the given path.", 
"id": "f2883:c14:m31"} {"signature": "def exists(self):", "body": "try:self.stat()except OSError as e:if e.errno not in (ENOENT, ENOTDIR):raisereturn Falsereturn True", "docstring": "Whether this path exists.", "id": "f2883:c14:m34"} {"signature": "def __str__(self):", "body": "try:return self._strexcept AttributeError:self._str = self._format_parsed_parts(self._drv, self._root,self._parts) or ''return self._str", "docstring": "Return the string representation of the path, suitable for\n passing to system calls.", "id": "f2883:c11:m8"} {"signature": "@propertydef parent(self):", "body": "drv = self._drvroot = self._rootparts = self._partsif len(parts) == and (drv or root):return selfreturn self._from_parsed_parts(drv, root, parts[:-])", "docstring": "The logical parent of the path.", "id": "f2883:c11:m34"} {"signature": "def write_bytes(self, data):", "body": "if not isinstance(data, six.binary_type):raise TypeError('' %(six.binary_type.__name__, data.__class__.__name__))with self.open(mode='') as f:return f.write(data)", "docstring": "Open the file in bytes mode, write to it, and close the file.", "id": "f2883:c14:m22"} {"signature": "def replace(self, target):", "body": "if sys.version_info < (, ):raise NotImplementedError(\"\"\"\")if self._closed:self._raise_closed()self._accessor.replace(self, target)", "docstring": "Rename this path to the given path, clobbering the existing\ndestination if it exists.", "id": "f2883:c14:m32"} {"signature": "@propertydef suffixes(self):", "body": "name = self.nameif name.endswith(''):return []name = name.lstrip('')return ['' + suffix for suffix in name.split('')[:]]", "docstring": "A list of the final component's suffixes, if any.", "id": "f2883:c11:m25"} {"signature": "@propertydef name(self):", "body": "parts = self._partsif len(parts) == ( if (self._drv or self._root) else ):return ''return parts[-]", "docstring": "The final path component, if any.", "id": "f2883:c11:m23"} {"signature": "def lchmod(self, mode):", "body": "if self._closed:self._raise_closed()self._accessor.lchmod(self, mode)", "docstring": "Like chmod(), except if the path points to a symlink, the symlink's\npermissions are changed, rather than its target's.", "id": "f2883:c14:m27"} {"signature": "def is_symlink(self):", "body": "try:return S_ISLNK(self.lstat().st_mode)except OSError as e:if e.errno not in (ENOENT, ENOTDIR):raisereturn False", "docstring": "Whether this path is a symbolic link.", "id": "f2883:c14:m37"} {"signature": "def is_reserved(self):", "body": "return self._flavour.is_reserved(self._parts)", "docstring": "Return True if the path contains one of the special names reserved\n by the system, if any.", "id": "f2883:c11:m37"} {"signature": "def symlink_to(self, target, target_is_directory=False):", "body": "if self._closed:self._raise_closed()self._accessor.symlink(target, self, target_is_directory)", "docstring": "Make this path a symlink pointing to the given path.\nNote the order of arguments (self, target) is the reverse of\nos.symlink's.", "id": "f2883:c14:m33"} {"signature": "def as_posix(self):", "body": "f = self._flavourreturn str(self).replace(f.sep, '')", "docstring": "Return the string representation of the path with forward (/)\n slashes.", "id": "f2883:c11:m10"} {"signature": "def absolute(self):", "body": "if self._closed:self._raise_closed()if self.is_absolute():return selfobj = self._from_parts([os.getcwd()] + self._parts, init=False)obj._init(template=self)return obj", "docstring": "Return an absolute version of this path. 
This function works\n even if the path doesn't point to anything.\n\n No normalization is done, i.e. all '.' and '..' will be kept along.\n Use resolve() to get the canonical path to a file.", "id": "f2883:c14:m14"} {"signature": "@classmethoddef cwd(cls):", "body": "return cls(os.getcwd())", "docstring": "Return a new path pointing to the current working directory\n (as returned by os.getcwd()).", "id": "f2883:c14:m8"} {"signature": "def is_block_device(self):", "body": "try:return S_ISBLK(self.stat().st_mode)except OSError as e:if e.errno not in (ENOENT, ENOTDIR):raisereturn False", "docstring": "Whether this path is a block device.", "id": "f2883:c14:m38"} {"signature": "def glob(self, pattern):", "body": "if not pattern:raise ValueError(\"Unacceptable pattern: {!r}\".format(pattern))pattern = self._flavour.casefold(pattern)drv, root, pattern_parts = self._flavour.parse_parts((pattern,))if drv or root:raise NotImplementedError(\"Non-relative patterns are unsupported\")selector = _make_selector(tuple(pattern_parts))for p in selector.select_from(self):yield p", "docstring": "Iterate over this subtree and yield all existing files (of any\n kind, including directories) matching the given pattern.", "id": "f2883:c14:m12"} {"signature": "def expanduser(self):", "body": "if (not (self._drv or self._root)and self._parts and self._parts[0][:1] == '~'):homedir = self._flavour.gethomedir(self._parts[0][1:])return self._from_parts([homedir] + self._parts[1:])return self", "docstring": "Return a new path with expanded ~ and ~user constructs\n (as returned by os.path.expanduser)", "id": "f2883:c14:m42"} {"signature": "def lstat(self):", "body": "if self._closed:self._raise_closed()return self._accessor.lstat(self)", "docstring": "Like stat(), except if the path points to a symlink, the symlink's\nstatus information is returned, rather than its target's.", "id": "f2883:c14:m30"} {"signature": "def unlink(self):", "body": "if self._closed:self._raise_closed()self._accessor.unlink(self)", "docstring": "Remove this file or link.\nIf the path is a directory, use rmdir() instead.", "id": "f2883:c14:m28"} {"signature": "@propertydef n_subdir(self):", "body": "self.assert_is_dir_and_exists()n = 0for _ in self.select_dir(recursive=False):n += 1return n", "docstring": "Count how many folders are in this directory (not including folders in\nsub folders).", "id": "f2884:c0:m9"} {"signature": "def select_video(self, recursive=True): ", "body": "return self.select_by_ext(self._video_ext, recursive)", "docstring": "Select video file.", "id": "f2884:c0:m19"} {"signature": "def _sort_by(key):", "body": "@staticmethoddef sort_by(p_list, reverse=False):return sorted(p_list,key=lambda p: getattr(p, key),reverse=reverse,)return sort_by", "docstring": "Higher-order function for building sort methods.", "id": "f2884:m1"} {"signature": "def select_by_ctime(self, min_time=0, max_time=ts_2100,recursive=True):", "body": "def filters(p): return min_time <= p.ctime <= max_timereturn self.select_file(filters, recursive)", "docstring": "Select file path by create time.\n\n:param min_time: lower bound timestamp\n:param max_time: upper bound timestamp\n\n**Chinese docs**\n\nSelect all files whose :attr:`pathlib_mate.pathlib2.Path.ctime` falls within the given range.", "id": "f2884:c0:m16"} {"signature": "@propertydef n_file(self):", "body": "self.assert_is_dir_and_exists()n = 0for _ in self.select_file(recursive=True):n += 1return n", "docstring": "Count how many files are in this directory. 
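Aside: the _sort_by record above (f2884:m1) is a higher-order function that manufactures staticmethod sorters from an attribute name. A minimal, self-contained sketch of the same pattern; the class and attribute names here are illustrative assumptions, not taken from the source:

from operator import attrgetter

def _sort_by(key):
    # Build a staticmethod that sorts Path-like objects by the given attribute.
    @staticmethod
    def sort_by(p_list, reverse=False):
        return sorted(p_list, key=attrgetter(key), reverse=reverse)
    return sort_by

class PathList(object):
    sort_by_size = _sort_by("size")    # assumed attribute name
    sort_by_ctime = _sort_by("ctime")  # assumed attribute name

Defining the sorters once this way keeps the class body declarative: each sort method is one line, and the sorting logic lives in a single factory.
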
Including files in sub folders.", "id": "f2884:c0:m6"} {"signature": "def assert_exists(self):", "body": "if not self.exists():msg = \"\" % selfraise EnvironmentError(msg)", "docstring": "Assert it exists.", "id": "f2884:c0:m2"} {"signature": "def select_audio(self, recursive=True): ", "body": "return self.select_by_ext(self._audio_ext, recursive)", "docstring": "Select audio file.", "id": "f2884:c0:m18"} {"signature": "def select_file(self, filters=all_true, recursive=True):", "body": "for p in self.select(filters, recursive):if p.is_file():yield p", "docstring": "Select file path by criterion.\n\n **Chinese docs**\n\n Select files according to the criteria defined in ``filters``.", "id": "f2884:c0:m4"} {"signature": "def assert_is_dir_and_exists(self):", "body": "if not self.is_dir():msg = \"\" % selfraise EnvironmentError(msg)", "docstring": "Assert it is a directory and exists in the file system.", "id": "f2884:c0:m1"} {"signature": "def select_by_pattern_in_abspath(self,pattern,recursive=True,case_sensitive=False):", "body": "if case_sensitive:def filters(p):return pattern in p.abspathelse:pattern = pattern.lower()def filters(p):return pattern in p.abspath.lower()return self.select_file(filters, recursive)", "docstring": "Select file path by text pattern in absolute path.\n\n**Chinese docs**\n\nSelect files whose absolute path contains the given substring.", "id": "f2884:c0:m12"} {"signature": "def assert_is_file_and_exists(self):", "body": "if not self.is_file():msg = \"\" % selfraise EnvironmentError(msg)", "docstring": "Assert it is a file and exists in the file system.", "id": "f2884:c0:m0"} {"signature": "def select_image(self, recursive=True):", "body": "return self.select_by_ext(self._image_ext, recursive)", "docstring": "Select image file.", "id": "f2884:c0:m17"} {"signature": "def ensure_list(path_or_path_list):", "body": "if isinstance(path_or_path_list, (tuple, list, set)):return [ensure_str(path) for path in path_or_path_list]else:return [ensure_str(path_or_path_list), ]", "docstring": "Pre-process the input argument, whether it is:\n\n1. an abspath\n2. a Path instance\n3. a string\n4. a list or set of any of them\n\nIt returns a list of paths.\n\n:return path_or_path_list: always returns a list of paths as strings\n\n**Chinese docs**\n\nPre-process the input argument.", "id": "f2885:m1"} {"signature": "def repr_data_size(size_in_bytes, precision=):", "body": "if size_in_bytes < :return \"\" % size_in_bytesindex = while :index += size_in_bytes, mod = divmod(size_in_bytes, )if size_in_bytes < :breaktemplate = \"\" % precisions = template.format(size_in_bytes + mod / , MAGNITUDE_OF_DATA[index])return s", "docstring": "Return a human-readable string representation of a file size. 
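The repr_data_size record that begins above has its numeric literals stripped, and its docstring is ambiguous (the magnitude table lists powers of 1000, while the worked examples match a 1024 divisor). A hedged reconstruction of the behavior the examples imply; the function name and loop structure are assumptions, not the source's:

MAGNITUDE_OF_DATA = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]

def human_readable_size(size_in_bytes, precision=2):
    # 100000 -> '97.66 KB', 100000000 -> '95.37 MB', matching the examples below.
    size = float(size_in_bytes)
    index = 0
    while size >= 1024 and index < len(MAGNITUDE_OF_DATA) - 1:
        size /= 1024.0
        index += 1
    return "%.*f %s" % (precision, size, MAGNITUDE_OF_DATA[index])
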
Doesn't support\nsize greater than 1EB.\n\nFor example:\n\n- 100 bytes => 100 B\n- 100,000 bytes => 97.66 KB\n- 100,000,000 bytes => 95.37 MB\n- 100,000,000,000 bytes => 93.13 GB\n- 100,000,000,000,000 bytes => 90.95 TB\n- 100,000,000,000,000,000 bytes => 88.82 PB\n...\n\nMagnitude of data::\n\n 1000 kB kilobyte\n 1000 ** 2 MB megabyte\n 1000 ** 3 GB gigabyte\n 1000 ** 4 TB terabyte\n 1000 ** 5 PB petabyte\n 1000 ** 6 EB exabyte\n 1000 ** 7 ZB zettabyte\n 1000 ** 8 YB yottabyte", "id": "f2885:m2"} {"signature": "def zip_many_files(list_of_abspath, dst):", "body": "if os.path.exists(dst):print(\"\" % dst)returnbase_dir = os.getcwd()with ZipFile(dst, \"w\") as f:for abspath in list_of_abspath:dirname, basename = os.path.split(abspath)os.chdir(dirname)f.write(basename)os.chdir(base_dir)", "docstring": "Add many files to a zip archive.\n\n**Chinese docs**\n\nCompress a list of files into one archive; if there are duplicate file names, keep every copy in the zip.", "id": "f2887:m2"} {"signature": "def zip_a_folder(src, dst):", "body": "if os.path.exists(dst):print(\"\" % dst)returnsrc, dst = os.path.abspath(src), os.path.abspath(dst)cwd = os.getcwd()todo = list()dirname, basename = os.path.split(src)os.chdir(dirname)for dirname, _, fnamelist in os.walk(basename):for fname in fnamelist:newname = os.path.join(dirname, fname)todo.append(newname)with ZipFile(dst, \"w\") as f:for newname in todo:f.write(newname)os.chdir(cwd)", "docstring": "Add a folder and everything inside to zip archive.\n\nExample::\n\n |---paper\n |--- algorithm.pdf\n |--- images\n |--- 1.jpg\n\n zip_a_folder(\"paper\", \"paper.zip\")\n\n paper.zip\n |---paper\n |--- algorithm.pdf\n |--- images\n |--- 1.jpg\n\n**Chinese docs**\n\nAdd the whole folder to the archive, including the root directory itself.", "id": "f2887:m0"} {"signature": "def zip_everything_in_a_folder(src, dst):", "body": "if os.path.exists(dst):print(\"\" % dst)returnsrc, dst = os.path.abspath(src), os.path.abspath(dst)cwd = os.getcwd()todo = list()os.chdir(src)for dirname, _, fnamelist in os.walk(os.getcwd()):for fname in fnamelist:newname = os.path.relpath(os.path.join(dirname, fname), src)todo.append(newname)with ZipFile(dst, \"w\") as f:for newname in todo:f.write(newname)os.chdir(cwd)", "docstring": "Add everything in a folder except the root folder itself to a zip archive.\n\nExample::\n\n |---paper\n |--- algorithm.pdf\n |--- images\n |--- 1.jpg\n\n zip_everything_in_folder(\"paper\", \"paper.zip\")\n\n paper.zip\n |--- algorithm.pdf\n |--- images\n |--- 1.jpg\n\n**Chinese docs**\n\nAdd every file inside the directory to the archive, excluding the root directory itself.", "id": "f2887:m1"} {"signature": "def is_empty(self, strict=True):", "body": "if self.exists():if self.is_file():return self.size == 0elif self.is_dir():if strict:return len(list(self.select(recursive=True))) == 0else: return len(list(self.select_file(recursive=True))) == 0else: msg = \"\" % selfraise EnvironmentError(msg)else:raise EnvironmentError(\"\" % self)", "docstring": "- If it's a file, check if it is an empty file. (0 bytes content)\n- If it's a directory, check if there's no file or dir in it.\n But if ``strict = False``, then only check if there's no file in it.\n\n:param strict: only useful when it is a directory. 
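The zip helpers above obtain relative archive names by os.chdir()-ing into the tree, which mutates process-global state and is not thread-safe. The same result can be had by passing arcname explicitly; a sketch under that design (the function name is mine, not the source's):

import os
from zipfile import ZipFile

def zip_tree(src, dst, include_root=True):
    src = os.path.abspath(src)
    # Anchor arcnames at the parent to keep the root folder, or at src to drop it.
    base = os.path.dirname(src) if include_root else src
    with ZipFile(dst, "w") as zf:
        for dirname, _, fnames in os.walk(src):
            for fname in fnames:
                abspath = os.path.join(dirname, fname)
                zf.write(abspath, arcname=os.path.relpath(abspath, base))

# zip_tree("paper", "paper.zip")                      -> behaves like zip_a_folder
# zip_tree("paper", "paper.zip", include_root=False)  -> like zip_everything_in_a_folder
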
if True, only\n return True if this dir has no dirs and no files; if False, return True\n if it doesn't have any file.", "id": "f2888:c0:m4"} {"signature": "def file_stat(self, filters=all_true):", "body": "self.assert_is_dir_and_exists()stat = {\"file\": 0, \"dir\": 0, \"size\": 0}for p in self.select(filters=filters, recursive=True):if p.is_file():stat[\"file\"] += 1stat[\"size\"] += p.sizeelif p.is_dir():stat[\"dir\"] += 1return stat", "docstring": "Find out how many files and directories there are, and the total size (including files\n in sub-folders).\n\n :returns: stat, a dict like ``{\"file\": number of files,\n \"dir\": number of directories, \"size\": total size in bytes}``\n\n **Chinese docs**\n\n Return statistics about the files, folders, and total size in a directory.", "id": "f2888:c0:m10"} {"signature": "def print_big_dir_and_big_file(self, top_n=):", "body": "self.assert_is_dir_and_exists()size_table1 = sorted([(p, p.dirsize) for p in self.select_dir(recursive=False)],key=lambda x: x[1],reverse=True,)for p1, size1 in size_table1[:top_n]:print(\"\".format(repr_data_size(size1), p1.abspath))size_table2 = sorted([(p, p.size) for p in p1.select_file(recursive=True)],key=lambda x: x[1],reverse=True,)for p2, size2 in size_table2[:top_n]:print(\"\".format(repr_data_size(size2), p2.abspath))", "docstring": "Print ``top_n`` big dir and ``top_n`` big file in each dir.", "id": "f2888:c0:m8"} {"signature": "def print_big_file(self, top_n=):", "body": "self.assert_is_dir_and_exists()size_table = sorted([(p, p.size) for p in self.select_file(recursive=True)],key=lambda x: x[1],reverse=True,)for p, size in size_table[:top_n]:print(\"\".format(repr_data_size(size), p.abspath))", "docstring": "Print ``top_n`` big file in this dir.", "id": "f2888:c0:m7"} {"signature": "def trail_space(self, filters=lambda p: p.ext == \".py\"): ", "body": "self.assert_is_dir_and_exists()for p in self.select_file(filters):try:with open(p.abspath, \"rb\") as f:lines = list()for line in f:lines.append(line.decode(\"utf-8\").rstrip())with open(p.abspath, \"wb\") as f:f.write(\"\\n\".join(lines).encode(\"utf-8\"))except Exception as e: raise e", "docstring": "Trim trailing whitespace at the end of each line for every ``.py`` file.\n\n**Chinese docs**\n\nRemove the trailing spaces at the end of every line in all selected files under the directory.", "id": "f2888:c0:m13"} {"signature": "def decode_hexstr(text):", "body": "return binascii.a2b_hex(text.encode(\"\")).decode(\"\")", "docstring": "Reverse operation of :func:`encode_hexstr`.\n\n**Chinese docs**\n\nDecode a hex string back into the original string.", "id": "f2889:m1"} {"signature": "def copyto(self,new_abspath=None,new_dirpath=None,new_dirname=None,new_basename=None,new_fname=None,new_ext=None,overwrite=False,makedirs=False):", "body": "self.assert_exists()p = self.change(new_abspath=new_abspath,new_dirpath=new_dirpath,new_dirname=new_dirname,new_basename=new_basename,new_fname=new_fname,new_ext=new_ext,)if p.is_not_exist_or_allow_overwrite(overwrite=overwrite):if self.abspath != p.abspath:try:shutil.copy(self.abspath, p.abspath)except IOError as e:if makedirs:os.makedirs(p.parent.abspath)shutil.copy(self.abspath, p.abspath)else:raise ereturn p", "docstring": "Copy this file to another place.", "id": "f2890:c0:m5"} {"signature": "def moveto(self,new_abspath=None,new_dirpath=None,new_dirname=None,new_basename=None,new_fname=None,new_ext=None,overwrite=False,makedirs=False):", 
"body": "self.assert_exists()p = self.change(new_abspath=new_abspath,new_dirpath=new_dirpath,new_dirname=new_dirname,new_basename=new_basename,new_fname=new_fname,new_ext=new_ext,)if p.is_not_exist_or_allow_overwrite(overwrite=overwrite):if self.abspath != p.abspath:if makedirs:parent = p.parentif not parent.exists():os.makedirs(parent.abspath)self.rename(p)return p", "docstring": "An advanced :meth:`pathlib_mate.pathlib2.Path.rename` method provide ability to rename by\neach components of a path. A new ``Path`` instance will returns.\n\n**\u4e2d\u6587\u6587\u6863**\n\n\u9ad8\u7ea7\u91cd\u547d\u540d\u51fd\u6570, \u5141\u8bb8\u7528\u4e8e\u6839\u636e\u8def\u5f84\u7684\u5404\u4e2a\u7ec4\u6210\u90e8\u5206\u8fdb\u884c\u91cd\u547d\u540d\u3002\u4f46\u548cos.rename\n\u65b9\u6cd5\u4e00\u6837, \u9700\u8981\u4fdd\u8bc1\u6bcd\u6587\u4ef6\u5939\u5b58\u5728\u3002", "id": "f2890:c0:m4"} {"signature": "def append_parts(self, *parts):", "body": "return self.__class__(self, *parts)", "docstring": "Append some parts to the end of this path.\n\n:returns: a new Path object.\n\nExample::\n\n >>> self.__class__(\"/usr/bin/python\").append_parts(\"lib\")\n \"/user/bin/python/lib\"\n\n >>> self.__class__(\"/usr/bin/python\").append_parts(\"lib\", \"core.py\")\n \"/user/bin/python/lib/core.py\"", "id": "f2890:c0:m1"} {"signature": "def is_not_exist_or_allow_overwrite(self, overwrite=False):", "body": "if self.exists() and overwrite is False:return Falseelse: return True", "docstring": "Test whether a file target is not exists or it exists but allow\noverwrite.", "id": "f2890:c0:m3"} {"signature": "@propertydef format(self):", "body": "formats = {}for subpod in self.root.findall(''):for elem in list(subpod):if elem.tag == '':continuecontent = elem.textif elem.tag == '':content = {'': elem.get(''),'': elem.get(''),'': elem.get(''),'': int(elem.get('', )),'': int(elem.get('', ))}if elem.tag not in formats:formats[elem.tag] = [content]else:formats[elem.tag].append(content)return formats", "docstring": "Dictionary of available formats, corresponding to a list of the values\nExample: pod.format['plaintext'] will return a list of every plaintext\n content in the pod's subpods", "id": "f2901:c2:m4"} {"signature": "def kill_connections(self, name):", "body": "log.info('' % name)self._run_stmt(\"\"\"\"\"\" % name)", "docstring": "Drops all connections to the specified database.", "id": "f2908:c0:m11"} {"signature": "def connections(self, name):", "body": "stmt = \"\"\"\"\"\".format(fields=''.join(CONNECTION_FIELDS), datname=name)return list(Connection(**x) for x in self._iter_results(stmt))", "docstring": "Returns a list of existing connections to the named database.", "id": "f2908:c0:m10"} {"signature": "def rename(self, from_name, to_name):", "body": "log.info('' % (from_name, to_name))self._run_stmt('' % (from_name, to_name))", "docstring": "Renames an existing database.", "id": "f2908:c0:m9"} {"signature": "def settings(self):", "body": "stmt = \"\".format(fields=''.join(SETTINGS_FIELDS))settings = []for row in self._iter_results(stmt):row[''] = self._vartype_map[row['']](row[''])settings.append(Settings(**row))return settings", "docstring": "Returns settings from the server.", "id": "f2908:c0:m19"} {"signature": "def connection_url(self, name=None):", "body": "return ''.format(**{k: v for k, v in self._connect_options(name)})", "docstring": "Provides a connection string for database as a sqlalchemy compatible URL.\n\nNB - this doesn't include special arguments related to SSL connectivity (which are outside the scope\nof 
the connection URL format).\n\nParameters\n----------\nname: str, optional\n an override database name for the connection string.\n\nReturns\n-------\nstr: the connection URL (e.g. postgresql://user1@localhost:5432/db1)", "id": "f2908:c0:m17"} {"signature": "def map_attr(self, mapping, attr, obj):", "body": "if attr not in mapping and hasattr(self, attr):if not callable(getattr(self, attr)):mapping[attr] = getattr(self, attr)else:mapping[attr] = getattr(self, attr)(obj)", "docstring": "A kind of cheesy method that allows for callables or attributes to\nbe used interchangeably", "id": "f2914:c4:m10"} {"signature": "def preprocess(self, obj, mapping, **kwargs):", "body": "pass", "docstring": "Pre-processing hook. Called by map_to_dictionary()", "id": "f2914:c4:m15"} {"signature": "def request_resource(self, url, **kwargs):", "body": "obj = self.get_object(url)mapping = self.map_to_dictionary(url, obj, **kwargs)resource = OEmbedResource.create(mapping)resource.content_object = objreturn resource", "docstring": "Request an OEmbedResource for a given url. Some valid keyword args:\n- format\n- maxwidth\n- maxheight", "id": "f2914:c4:m18"} {"signature": "def provider_from_url(self, url):", "body": "domain = get_domain(url)site_tuples = self.get_cleaned_sites().values()for domain_re, name, normalized_domain in site_tuples:if re.match(domain_re, domain):return normalized_domain, namesite = Site.objects.get_current()return site.domain, site.name", "docstring": "Given a URL for any of our sites, try to match it to one, returning\nthe domain & name of the match. If no match is found, return current.\n\nReturns a tuple of domain, site name -- used to determine 'provider'", "id": "f2914:c4:m5"} {"signature": "def map_to_dictionary(self, url, obj, **kwargs):", "body": "maxwidth = kwargs.get('', None)maxheight = kwargs.get('', None)provider_url, provider_name = self.provider_from_url(url)mapping = {'': '','': url,'': provider_name,'': provider_url,'': self.resource_type}self.preprocess(obj, mapping, **kwargs)if self.resource_type == '' and self.get_image(obj):self.resize_photo(obj, mapping, maxwidth, maxheight)elif self.resource_type in ('', '', ''):width, height = size_to_nearest(maxwidth,maxheight,self._meta.valid_sizes,self._meta.force_fit)mapping.update(width=width, height=height)if self.get_image(obj):self.thumbnail(obj, mapping)for attr in ('', '', '', ''):self.map_attr(mapping, attr, obj)if '' in mapping:mapping[''] = relative_to_full(mapping[''], url)if '' in mapping:mapping[''] = relative_to_full(mapping[''], url)if '' not in mapping and mapping[''] in ('', ''):mapping[''] = self.render_html(obj, context=Context(mapping))self.postprocess(obj, mapping, **kwargs)return mapping", "docstring": "Build a dictionary of metadata for the requested object.", "id": "f2914:c4:m17"} {"signature": "def get_image(self, obj):", "body": "if self._meta.image_field:return getattr(obj, self._meta.image_field)", "docstring": "Return an ImageFileField instance", "id": "f2914:c4:m11"} {"signature": "def get_sites(self):", "body": "return Site.objects.all()", "docstring": "Return sites whose domains should be checked against", "id": "f2914:c4:m3"} {"signature": "def _fetch(self, url):", "body": "return fetch_url(url)", "docstring": "Fetches from a URL, respecting GZip encoding, etc.\n\nReturns an OEmbedResource instance", "id": "f2914:c1:m2"} {"signature": "def _image_field(self):", "body": "for field in self.model._meta.fields:if isinstance(field, ImageField):return field.name", "docstring": "Try to automatically 
detect an image field", "id": "f2914:c2:m2"} {"signature": "def postprocess(self, obj, mapping, **kwargs):", "body": "pass", "docstring": "Post-processing hook. Called by map_to_dictionary()", "id": "f2914:c4:m16"} {"signature": "def _build_regex(self):", "body": "url_patterns = resolver.reverse_dict.get(self._meta.named_view)try:regex = url_patterns[]except TypeError:raise OEmbedException('' % self._meta.named_view)cleaned_sites = self.get_cleaned_sites()site_regexes = []for site in self.get_sites():site_regexes.append(cleaned_sites[site.pk][])sites = ''.join(site_regexes)regex = re.compile('' % (sites, regex))return regex", "docstring": "Performs a reverse lookup on a named view and generates\na list of regexes that match that object. It generates\nregexes with the domain name included, using sites provided\nby the get_sites() method.\n\n>>> regex = provider.regex\n>>> regex.pattern\n'http://(www2.kusports.com|www2.ljworld.com|www.lawrence.com)/photos/(?P\\\\d{4})/(?P\\\\w{3})/(?P\\\\d{1,2})/(?P\\\\d+)/$'", "id": "f2914:c4:m2"} {"signature": "def render_html(self, obj, context=None):", "body": "provided_context = context or Context()context = RequestContext(mock_request())context.update(provided_context)context.push()context[self._meta.context_varname] = objrendered = render_to_string(self._meta.template_name, context)context.pop()return rendered", "docstring": "Generate the 'html' attribute of an oembed resource using a template.\nSort of a corollary to the parser's render_oembed method. By default,\nthe current mapping will be passed in as the context.\n\nOEmbed templates are stored in:\n\noembed/provider/[app_label]_[model].html\n\n-- or --\n\noembed/provider/media_video.html", "id": "f2914:c4:m9"} {"signature": "def setUp(self):", "body": "super(ConsumerTestCase, self).setUp()self.oembed_client = OEmbedConsumer()", "docstring": "Set up test environment", "id": "f2923:c0:m0"} {"signature": "def setUp(self):", "body": "oembed.autodiscover()oembed.site._db_updated = Noneself.storage = DummyMemoryStorage()self.orig_default_storage = storage.default_storagestorage.default_storage = self.storageself.media_root, self.media_url = settings.MEDIA_ROOT, settings.MEDIA_URLsettings.MEDIA_ROOT = MEDIA_ROOTsettings.MEDIA_URL = MEDIA_URLself.template_dirs = settings.TEMPLATE_DIRScur_dir = os.path.dirname(__file__)settings.TEMPLATE_DIRS = [os.path.join(os.path.dirname(cur_dir), '')]self.orig_file_storage = settings.DEFAULT_FILE_STORAGEsettings.DEFAULT_FILE_STORAGE = DEFAULT_FILE_STORAGEtest_image = Image.new('', (, ), (, , , ))self.test_img_buffer = StringIO()test_image.save(self.test_img_buffer, '')self.test_img_file = ContentFile(self.test_img_buffer.getvalue())self.test_img_location = ''storage.default_storage.save(self.test_img_location, self.test_img_file)", "docstring": "Set up test environment", "id": "f2925:c0:m0"} {"signature": "def do_url_scheme(parser, token):", "body": "args = token.split_contents()if len(args) != :raise template.TemplateSyntaxError('' % args[])return OEmbedURLSchemeNode()", "docstring": "Generates a <link> tag with oembed autodiscovery bits.\n\n{% oembed_url_scheme %}", "id": "f2937:m5"} {"signature": "@register.filterdef strip_oembeds(text, args=None):", "body": "resource_type = width = height = Noneif args:dimensions = args.lower().split('')if len(dimensions) in (, ):resource_type = dimensions.pop()if len(dimensions) == :width, height = map(lambda x: int(x), dimensions)client = OEmbedConsumer()return mark_safe(client.strip(text, width, height, resource_type))", 
"docstring": "Take a block of text and strip all the embeds from it, optionally taking\na maxwidth, maxheight / resource_type\n\nUsage:\n{{ post.content|strip_embeds }}\n\n{{ post.content|strip_embeds:\"600x600xphoto\" }}\n\n{{ post.content|strip_embeds:\"video\" }}", "id": "f2937:m2"} {"signature": "def json(request, *args, **kwargs):", "body": "params = dict(list(request.GET.items()))callback = params.pop('', None)url = params.pop('', None)if not url:return HttpResponseBadRequest('')try:provider = oembed.site.provider_for_url(url)if not provider.provides:raise OEmbedMissingEndpoint()except OEmbedMissingEndpoint:raise Http404('' % url)query = dict([(smart_str(k), smart_str(v)) for k, v in list(params.items()) if v])try:resource = oembed.site.embed(url, **query)except OEmbedException as e:raise Http404('' % (url, str(e)))response = HttpResponse(mimetype='')json = resource.jsonif callback:response.write('' % (defaultfilters.force_escape(callback), json))else:response.write(json)return response", "docstring": "The oembed endpoint, or the url to which requests for metadata are passed.\nThird parties will want to access this view with URLs for your site's\ncontent and be returned OEmbed metadata.", "id": "f2940:m0"} {"signature": "def store_providers(self, provider_data):", "body": "if not hasattr(provider_data, ''):raise OEmbedException('')provider_pks = []for provider in provider_data:if '' not in provider or'' not in provider:continueresource_type = provider.get('')if resource_type not in RESOURCE_TYPES:continuestored_provider, created = StoredProvider.objects.get_or_create(wildcard_regex=provider[''])if created:stored_provider.endpoint_url = relative_to_full( provider[''],provider[''])stored_provider.resource_type = resource_typestored_provider.save()provider_pks.append(stored_provider.pk)return StoredProvider.objects.filter(pk__in=provider_pks)", "docstring": "Iterate over the returned json and try to sort out any new providers", "id": "f2942:c0:m13"} {"signature": "def invalidate_stored_oembeds(self, sender, instance, created, **kwargs):", "body": "ctype = ContentType.objects.get_for_model(instance)StoredOEmbed.objects.filter(object_id=instance.pk,content_type=ctype).delete()", "docstring": "A hook for django-based oembed providers to delete any stored oembeds", "id": "f2942:c0:m10"} {"signature": "def get_providers(self):", "body": "return self.get_registry().keys()", "docstring": "Provide a list of all oembed providers that are being used.", "id": "f2942:c0:m8"} {"signature": "def ensure_populated(self):", "body": "if not self._populated:self.populate()", "docstring": "Ensure not only that the internal registry of Python-class providers is\npopulated, but also make sure the cached queryset of database-providers\nis up-to-date", "id": "f2942:c0:m6"} {"signature": "def autodiscover(self, url):", "body": "headers, response = fetch_url(url)if headers[''].split('')[] in ('', ''):provider_data = json.loads(response)return self.store_providers(provider_data)", "docstring": "Load up StoredProviders from url if it is an oembed scheme", "id": "f2942:c0:m12"} {"signature": "def register_field(cls, field):", "body": "FieldRegistry.add_field(cls, field)signals.post_save.connect(handle_save_embeds, sender=cls,dispatch_uid='' %(cls._meta.app_label, cls._meta.module_name, field.name))", "docstring": "Handles registering the fields with the FieldRegistry and creating a \npost-save signal for the model.", "id": "f2943:m0"} {"signature": "def contribute_to_class(self, cls, name):", "body": 
"super(EmbeddedMediaField, self).contribute_to_class(cls, name)register_field(cls, self)cls._meta.add_virtual_field(EmbeddedSignalCreator(self))", "docstring": "I need a way to ensure that this signal gets created for all child\nmodels, and since model inheritance doesn't have a 'contrubite_to_class'\nstyle hook, I am creating a fake virtual field which will be added to\nall subclasses and handles creating the signal", "id": "f2943:c2:m1"} {"signature": "def autodiscover():", "body": "import impfrom django.conf import settingsfor app in settings.INSTALLED_APPS:try:app_path = __import__(app, {}, {}, [app.split('')[-]]).__path__except AttributeError:continuetry:imp.find_module('', app_path)except ImportError:continue__import__(\"\" % app)", "docstring": "Automatically build the provider index.", "id": "f2944:m0"} {"signature": "def render_oembed(self, oembed_resource, original_url, template_dir=None,context=None):", "body": "provided_context = context or Context()context = RequestContext(context.get(\"\") or mock_request())context.update(provided_context)template_name = '' % oembed_resource.typetemplates = [os.path.join('', template_name), '']if template_dir:templates.insert(, os.path.join('', template_dir, template_name))template = select_template(templates)context.push()context[''] = oembed_resourcecontext[''] = original_urlrendered = template.render(context)context.pop()return rendered.strip()", "docstring": "Render the oembed resource and return as a string.\n\nTemplate directory will always fall back to 'oembed/[type].html', but\na custom template dir can be passed in using the kwargs.\n\nTemplates are given two context variables:\n- response: an OEmbedResource\n- original_url: the url that was passed to the consumer", "id": "f2946:c0:m0"} {"signature": "def parse(self, text, maxwidth=None, maxheight=None, template_dir=None,context=None, urlize_all_links=CONSUMER_URLIZE_ALL):", "body": "context = context or Context()context[''] = maxwidthcontext[''] = maxheighttry:text = unicode(text)except UnicodeDecodeError:text = unicode(text.decode(''))return self.parse_data(text, maxwidth, maxheight, template_dir,context, urlize_all_links)", "docstring": "Scans a block of text, replacing anything matching a provider pattern\nwith an OEmbed html snippet, if possible.\n\nTemplates should be stored at oembed/{format}.html, so for example:\n\n oembed/video.html\n\nAn optional template_dir can be provided, allowing for\n\n oembed/[template_dir]/video.html\n\nThese templates are passed a context variable, ``response``, which is\nan OEmbedResource, as well as the ``original_url``", "id": "f2946:c0:m1"} {"signature": "def parse_data(self, text, maxwidth, maxheight, template_dir, context,urlize_all_links):", "body": "replacements = {}user_urls = set(re.findall(URL_RE, text))for user_url in user_urls:try:resource = oembed.site.embed(user_url, maxwidth=maxwidth, maxheight=maxheight)except OEmbedException:if urlize_all_links:replacements[user_url] = '' % {'': user_url}else:context[''] = min(maxwidth, resource.width)context[''] = min(maxheight, resource.height)replacement = self.render_oembed(resource, user_url, template_dir=template_dir, context=context)replacements[user_url] = replacement.strip()user_urls = re.finditer(URL_RE, text)matches = []for match in user_urls:if match.group() in replacements:matches.append([match.start(), match.end(), match.group()])for indx, (start, end, user_url) in enumerate(matches):replacement = replacements[user_url]difference = len(replacement) - len(user_url)text = text[:start] 
+ replacement + text[end:]for j in xrange(indx + , len(matches)):matches[j][] += differencematches[j][] += differencereturn mark_safe(text)", "docstring": "Parses a block of text indiscriminately", "id": "f2947:c0:m0"} {"signature": "def fetch_url(url, method='', user_agent='', timeout=SOCKET_TIMEOUT):", "body": "sock = httplib2.Http(timeout=timeout)request_headers = {'': user_agent,'': ''}try:headers, raw = sock.request(url, headers=request_headers, method=method)except:raise OEmbedHTTPException('' % url)return headers, raw", "docstring": "Fetch response headers and data from a URL, raising a generic exception\nfor any kind of failure.", "id": "f2950:m2"} {"signature": "def load_class(path):", "body": "package, klass = path.rsplit('', )module = import_module(package)return getattr(module, klass)", "docstring": "dynamically load a class given a string of the format\n\npackage.Class", "id": "f2950:m6"} {"signature": "def getDevicesReadableNames():", "body": "return [{'': s,'': config.get(s).get('')}for s in getDevicesCodenames()]", "docstring": "Returns codename and readable name for each device", "id": "f2959:m1"} {"signature": "def encryptPassword(self, login, passwd):", "body": "binaryKey = b64decode(config.GOOGLE_PUBKEY)i = utils.readInt(binaryKey, )modulus = utils.toBigInt(binaryKey[:][:i])j = utils.readInt(binaryKey, i + )exponent = utils.toBigInt(binaryKey[i + :][:j])digest = hashes.Hash(hashes.SHA1(), backend=default_backend())digest.update(binaryKey)h = b'' + digest.finalize()[:]der_data = encode_dss_signature(modulus, exponent)publicKey = load_der_public_key(der_data, backend=default_backend())to_be_encrypted = login.encode() + b'' + passwd.encode()ciphertext = publicKey.encrypt(to_be_encrypted,padding.OAEP(mgf=padding.MGF1(algorithm=hashes.SHA1()),algorithm=hashes.SHA1(),label=None))return urlsafe_b64encode(h + ciphertext)", "docstring": "Encrypt credentials using the google publickey, with the\n RSA algorithm", "id": "f2960:c3:m3"} {"signature": "def bulkDetails(self, packageNames):", "body": "params = {'': ''}req = googleplay_pb2.BulkDetailsRequest()req.docid.extend(packageNames)data = req.SerializeToString()message = self.executeRequestApi2(BULK_URL,post_data=data.decode(\"\"),content_type=CONTENT_TYPE_PROTO,params=params)response = message.payload.bulkDetailsResponsereturn [None if not utils.hasDoc(entry) elseutils.parseProtobufObj(entry.doc)for entry in response.entry]", "docstring": "Get several apps details from a list of package names.\n\n This is much more efficient than calling N times details() since it\n requires only one request. 
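The load_class record above (f2950:m6) resolves a dotted "package.Class" string; its separator literals are stripped in the body, but the standard importlib equivalent of that pattern is short enough to show whole:

from importlib import import_module

def load_class(path):
    # Split "package.module.Class" into the module path and the class name.
    package, klass = path.rsplit(".", 1)
    return getattr(import_module(package), klass)

# e.g. load_class("collections.OrderedDict") returns the OrderedDict class.
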
If an item is not found it returns an empty object\n instead of throwing a RequestError('Item not found') like the details() function\n\n Args:\n packageNames (list): a list of app IDs (usually starting with 'com.').\n\n Returns:\n a list of dictionaries containing docv2 data, or None\n if the app doesn't exist", "id": "f2960:c3:m15"} {"signature": "def login(self, email=None, password=None, gsfId=None, authSubToken=None):", "body": "if email is not None and password is not None:encryptedPass = self.encryptPassword(email, password).decode('')params = self.deviceBuilder.getLoginParams(email, encryptedPass)params[''] = ''params[''] = ''params[''] = ''headers = self.deviceBuilder.getAuthHeaders(self.gsfId)headers[''] = ''response = requests.post(AUTH_URL, data=params, verify=ssl_verify,proxies=self.proxies_config)data = response.text.split()params = {}for d in data:if \"\" not in d:continuek, v = d.split(\"\", )params[k.strip().lower()] = v.strip()if \"\" in params:ac2dmToken = params[\"\"]elif \"\" in params:if \"\" in params[\"\"]:raise SecurityCheckError(\"\"\"\"\"\")raise LoginError(\"\" + params[\"\"])else:raise LoginError(\"\")self.gsfId = self.checkin(email, ac2dmToken)self.getAuthSubToken(email, encryptedPass)self.uploadDeviceConfig()elif gsfId is not None and authSubToken is not None:self.gsfId = gsfIdself.setAuthSubToken(authSubToken)self.search('')else:raise LoginError('')", "docstring": "Login to your Google Account.\n For first time login you should provide:\n * email\n * password\n For the following logins you need to provide:\n * gsfId\n * authSubToken", "id": "f2960:c3:m8"} {"signature": "def details(self, packageName):", "body": "path = DETAILS_URL + \"\".format(requests.utils.quote(packageName))data = self.executeRequestApi2(path)return utils.parseProtobufObj(data.payload.detailsResponse.docV2)", "docstring": "Get app details from a package name.\n\n packageName is the app unique ID (usually starting with 'com.').", "id": "f2960:c3:m14"} {"signature": "def getHeaders(self, upload_fields=False):", "body": "if upload_fields:headers = self.deviceBuilder.getDeviceUploadHeaders()else:headers = self.deviceBuilder.getBaseHeaders()if self.gsfId is not None:headers[\"\"] = \"\".format(self.gsfId)if self.authSubToken is not None:headers[\"\"] = \"\" % self.authSubTokenif self.device_config_token is not None:headers[\"\"] = self.device_config_tokenif self.deviceCheckinConsistencyToken is not None:headers[\"\"] = self.deviceCheckinConsistencyTokenif self.dfeCookie is not None:headers[\"\"] = self.dfeCookiereturn headers", "docstring": "Return the default set of request headers, which\n can later be expanded, based on the request type", "id": "f2960:c3:m5"} {"signature": "def text_rank (path):", "body": "graph = build_graph(json_iter(path))ranks = nx.pagerank(graph)return graph, ranks", "docstring": "run the TextRank algorithm", "id": "f2966:m12"} {"signature": "def json_iter (path):", "body": "with open(path, '') as f:for line in f.readlines():yield json.loads(line)", "docstring": "iterator for JSON-per-line in a file pattern", "id": "f2966:m29"} {"signature": "def render_ranks (graph, ranks, dot_file=\"\"):", "body": "if dot_file:write_dot(graph, ranks, path=dot_file)", "docstring": "render the TextRank graph for visual formats", "id": "f2966:m11"} {"signature": "def normalize_key_phrases (path, ranks, stopwords=None, spacy_nlp=None, skip_ner=True):", "body": "global STOPWORDS, SPACY_NLPif (type(stopwords) is list) or (type(stopwords) is set):stopwords = set(stopwords)else:if not 
STOPWORDS:STOPWORDS = load_stopwords(stopwords)stopwords = STOPWORDSif not spacy_nlp:if not SPACY_NLP:SPACY_NLP = spacy.load(\"\")spacy_nlp = SPACY_NLPsingle_lex = {}phrase_lex = {}if isinstance(path, str):path = json_iter(path)for meta in path:sent = [w for w in map(WordNode._make, meta[\"\"])]for rl in collect_keyword(sent, ranks, stopwords):id = str(rl.ids)if id not in single_lex:single_lex[id] = rlelse:prev_lex = single_lex[id]single_lex[id] = rl._replace(count = prev_lex.count + )if not skip_ner:for rl in collect_entities(sent, ranks, stopwords, spacy_nlp):id = str(rl.ids)if id not in phrase_lex:phrase_lex[id] = rlelse:prev_lex = phrase_lex[id]phrase_lex[id] = rl._replace(count = prev_lex.count + )for rl in collect_phrases(sent, ranks, spacy_nlp):id = str(rl.ids)if id not in phrase_lex:phrase_lex[id] = rlelse:prev_lex = phrase_lex[id]phrase_lex[id] = rl._replace(count = prev_lex.count + )rank_list = [rl.rank for rl in single_lex.values()]if len(rank_list) < :max_single_rank = else:max_single_rank = max(rank_list)repeated_roots = {}for rl in sorted(phrase_lex.values(), key=lambda rl: len(rl), reverse=True):rank_list = []for i in iter(range(, len(rl.ids))):id = rl.ids[i]if not id in repeated_roots:repeated_roots[id] = rank_list.append(rl.rank[i])else:repeated_roots[id] += rank_list.append(rl.rank[i] / repeated_roots[id])phrase_rank = calc_rms(rank_list)single_lex[str(rl.ids)] = rl._replace(rank = phrase_rank)sum_ranks = sum([rl.rank for rl in single_lex.values()])for rl in sorted(single_lex.values(), key=lambda rl: rl.rank, reverse=True):if sum_ranks > :rl = rl._replace(rank=rl.rank / sum_ranks)elif rl.rank == :rl = rl._replace(rank=)rl = rl._replace(text=re.sub(r\"\", r\"\", rl.text))yield rl", "docstring": "collect keyphrases, named entities, etc., while removing stop words", "id": "f2966:m22"} {"signature": "def collect_keyword (sent, ranks, stopwords):", "body": "for w in sent:if (w.word_id > ) and (w.root in ranks) and (w.pos[] in \"\") and (w.root not in stopwords):rl = RankedLexeme(text=w.raw.lower(), rank=ranks[w.root]/, ids=[w.word_id], pos=w.pos.lower(), count=)if DEBUG:print(rl)yield rl", "docstring": "iterator for collecting the single-word keyphrases", "id": "f2966:m17"} {"signature": "def build_graph (json_iter):", "body": "global DEBUG, WordNodegraph = nx.DiGraph()for meta in json_iter:if DEBUG:print(meta[\"\"])for pair in get_tiles(map(WordNode._make, meta[\"\"])):if DEBUG:print(pair)for word_id in pair:if not graph.has_node(word_id):graph.add_node(word_id)try:graph.edge[pair[]][pair[]][\"\"] += except KeyError:graph.add_edge(pair[], pair[], weight=)return graph", "docstring": "construct the TextRank graph from parsed paragraphs", "id": "f2966:m9"} {"signature": "def write_dot (graph, ranks, path=\"\"):", "body": "dot = Digraph()for node in graph.nodes():dot.node(node, \"\" % (node, ranks[node]))for edge in graph.edges():dot.edge(edge[], edge[], constraint=\"\")with open(path, '') as f:f.write(dot.source)", "docstring": "output the graph in Dot file format", "id": "f2966:m10"} {"signature": "def parse_doc (json_iter):", "body": "global DEBUGfor meta in json_iter:base_idx = for graf_text in filter_quotes(meta[\"\"], is_email=False):if DEBUG:print(\"\", graf_text)grafs, new_base_idx = parse_graf(meta[\"\"], graf_text, base_idx)base_idx = new_base_idxfor graf in grafs:yield graf", "docstring": "parse one document to prep for TextRank", "id": "f2966:m7"} {"signature": "def top_sentences (kernel, path):", "body": "key_sent = {}i = if isinstance(path, str):path = 
json_iter(path)for meta in path:graf = meta[\"\"]tagged_sent = [WordNode._make(x) for x in graf]text = \"\".join([w.raw for w in tagged_sent])m_sent = mh_digest([str(w.word_id) for w in tagged_sent])dist = sum([m_sent.jaccard(m) * rl.rank for rl, m in kernel])key_sent[text] = (dist, i)i += for text, (dist, i) in sorted(key_sent.items(), key=lambda x: x[][], reverse=True):yield SummarySent(dist=dist, idx=i, text=text)", "docstring": "determine distance for each sentence", "id": "f2966:m25"} {"signature": "def enumerate_chunks (phrase, spacy_nlp):", "body": "if (len(phrase) > ):found = Falsetext = \"\".join([rl.text for rl in phrase])doc = spacy_nlp(text.strip(), parse=True)for np in doc.noun_chunks:if np.text != text:found = Trueyield np.text, find_chunk(phrase, np.text.split(\"\"))if not found and all([rl.pos[] != \"\" for rl in phrase]):yield text, phrase", "docstring": "iterate through the noun phrases", "id": "f2966:m16"} {"signature": "def limit_sentences (path, word_limit=):", "body": "word_count = if isinstance(path, str):path = json_iter(path)for meta in path:if not isinstance(meta, SummarySent):p = SummarySent(**meta)else:p = metasent_text = p.text.strip().split(\"\")sent_len = len(sent_text)if (word_count + sent_len) > word_limit:breakelse:word_count += sent_lenyield sent_text, p.idx", "docstring": "iterator for the most significant sentences, up to a specified limit", "id": "f2966:m27"} {"signature": "def fix_microsoft (foo):", "body": "i = bar = []while i < len(foo):text, lemma, pos, tag = foo[i]if (text == \"\") and (i > ):prev_tok = bar[-]prev_tok[] += \"\"prev_tok[] += \"\"bar[-] = prev_tokelse:bar.append(foo[i])i += return bar", "docstring": "fix special case for `c#`, `f#`, etc.; thanks Microsoft", "id": "f2966:m4"} {"signature": "def make_sentence (sent_text):", "body": "lex = []idx = for word in sent_text:if len(word) > :if (idx > ) and not (word[] in \"\"):lex.append(\"\")lex.append(word)idx += return \"\".join(lex)", "docstring": "construct a sentence text, with proper spacing", "id": "f2966:m28"} {"signature": "def cleanup_text (text):", "body": "x = \"\".join(map(lambda s: s.strip(), text.split(\"\"))).strip()x = x.replace('', '').replace('', '')x = x.replace(\"\", \"\").replace(\"\", \"\").replace(\"\", \"\")x = x.replace('', '').replace('', '')x = str(unicodedata.normalize('', x).encode('', '').decode(''))try:assert type(x).__name__ == ''except AssertionError:print(\"\", type(line), line)return x", "docstring": "It scrubs the garbled from its stream...\nOr it gets the debugger again.", "id": "f2970:m0"} {"signature": "@contextmanagerdef visit(self, key):", "body": "self[key] = keytry:yield keyfinally:del self[key]", "docstring": "Visits key and marks as visited.\n Support context manager interface.\n\n :param key: key being visited.", "id": "f2981:c0:m0"} {"signature": "def construct_mapping(self, node, deep=False):", "body": "mapping = super(ExtendedSafeConstructor, self).construct_mapping(node, deep)return {(str(key) if isinstance(key, int) else key): mapping[key]for key in mapping}", "docstring": "While yaml supports integer keys, these are not valid in\n json, and will break jsonschema. 
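The ExtendedSafeConstructor record that begins above coerces integer YAML keys to strings for JSON/jsonschema compatibility; stripped of the yaml plumbing, the core transformation is just a dict comprehension (function name mine):

def coerce_int_keys(mapping):
    # JSON object keys must be strings; YAML happily allows integers.
    return {(str(k) if isinstance(k, int) else k): v
            for k, v in mapping.items()}

# coerce_int_keys({1: "a", "b": 2}) -> {'1': 'a', 'b': 2}
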
This method coerces all keys\n to strings.", "id": "f2986:c0:m0"} {"signature": "def __add__(self, other):", "body": "other = Position(other)return Position((self.line + other.line,self.column + other.column))", "docstring": "(1, 1) + (1, 1) -> (2, 2)", "id": "f3011:c0:m5"} {"signature": "def __lt__(self, other):", "body": "other = Position(other)return (self.line, self.column) < (other.line, other.column)", "docstring": "Compares Position with Position or indexable object", "id": "f3011:c0:m11"} {"signature": "def advance_line(self):", "body": "self.line += self.column = ", "docstring": "(3, 10) -> (4, 1)", "id": "f3011:c0:m2"} {"signature": "def __eq__(self, other):", "body": "if not (hasattr(other, '') and hasattr(other, '')) and len(other) < :return Falseother = Position(other)return self.line == other.line and self.column == other.column", "docstring": "Compares Positions or Position and tuple\n\n Will not fail if other is an unsupported type", "id": "f3011:c0:m10"} {"signature": "def node_to_bounding_box(node):", "body": "return BoundingBoxFinder().compute(node)", "docstring": "Bounding box of the given node\n\n The bounding box of a node represents its left most and right most\n position in the rendered source code. Its left position is here\n always (1, 1).", "id": "f3011:m3"} {"signature": "def __eq__(self, other):", "body": "if not (hasattr(other, '') and hasattr(other, '')) and len(other) < :return Falseother = BoundingBox(other)return self.top_left == other.top_left and self.bottom_right == other.bottom_right", "docstring": "Compares BoundingBox with BoundingBox or indexable object", "id": "f3011:c1:m1"} {"signature": "@propertydef right(self):", "body": "return Position((self.line, self.column + ))", "docstring": "(3, 10) -> (3, 11)", "id": "f3011:c0:m4"} {"signature": "def position_to_path(tree, position):", "body": "return PositionFinder().find(tree, position)", "docstring": "Path to the node located at the given line and column\n\n This function locates a node in the rendered source code", "id": "f3011:m0"} {"signature": "def position_to_node(tree, position):", "body": "return path_to_node(tree, position_to_path(tree, position))", "docstring": "FST node located at the given line and column", "id": "f3011:m2"} {"signature": "def path_to_node(tree, path):", "body": "if path is None:return Nonenode = treefor key in path:node = child_by_key(node, key)return node", "docstring": "FST node located at the given path", "id": "f3011:m1"} {"signature": "def get_space(node):", "body": "if len(node) < or len(node[]) == :return Nonereturn transform_tabs_to_spaces(node[][][])", "docstring": "Return space formatting information of node.\n\n If the node does not have a third formatting item - like in\n a ('ENDL', '\\n') node - then we return None as a flag value. 
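The Position records above (__add__, __lt__, __eq__) implement ordering by comparing (line, column) tuples, so an earlier line always wins and column only breaks ties. A compact standalone illustration of that convention; the class name is mine, not the source's:

class Pos(object):
    def __init__(self, line, column):
        self.line, self.column = line, column

    def __eq__(self, other):
        return (self.line, self.column) == (other.line, other.column)

    def __lt__(self, other):
        # Tuple comparison: line first, then column as the tie-breaker.
        return (self.line, self.column) < (other.line, other.column)

assert Pos(3, 10) < Pos(4, 1) and Pos(1, 1) == Pos(1, 1)
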
This is\n maybe not the best behavior but it seems to work for now.", "id": "f3016:m2"} {"signature": "def gettokentype(self):", "body": "return self.name", "docstring": "Returns the type or name of the token.", "id": "f3029:c0:m5"} {"signature": "def getstr(self):", "body": "return self.value", "docstring": "Returns the string represented by this token.", "id": "f3029:c0:m6"} {"signature": "@classmethoddef regular_polygon(cls, center, radius, n_vertices, start_angle=, **kwargs):", "body": "angles = (np.arange(n_vertices) * * np.pi / n_vertices) + start_anglereturn cls(center + radius * np.array([np.cos(angles), np.sin(angles)]).T, **kwargs)", "docstring": "Construct a regular polygon.\n\n Parameters\n ----------\n center : array-like\n radius : float\n n_vertices : int\n start_angle : float, optional\n Where to put the first point, relative to `center`,\n in radians counter-clockwise starting from the horizontal axis.\n kwargs\n Other keyword arguments are passed to the |Shape| constructor.", "id": "f3037:c0:m1"} {"signature": "@classmethoddef circle(cls, center, radius, n_vertices=, **kwargs):", "body": "return cls.regular_polygon(center, radius, n_vertices, **kwargs)", "docstring": "Construct a circle.\n\n Parameters\n ----------\n center : array-like\n radius : float\n n_vertices : int, optional\n Number of points to draw.\n Decrease for performance, increase for appearance.\n kwargs\n Other keyword arguments are passed to the |Shape| constructor.", "id": "f3037:c0:m2"} {"signature": "def update(self, dt):", "body": "self.translate(dt * self.velocity)self.rotate(dt * self.angular_velocity)", "docstring": "Update the shape's position by moving it forward according to its velocity.\n\n Parameters\n ----------\n dt : float", "id": "f3037:c0:m24"} {"signature": "def covers(self, other):", "body": "return bool(self.poly.covers(other.poly))", "docstring": "Check if the shape completely covers another shape.\n\n Parameters\n ----------\n other : |Shape|\n\n Returns\n -------\n bool", "id": "f3037:c0:m27"} {"signature": "@propertydef _kwargs(self):", "body": "return dict(color=self.color, velocity=self.velocity, colors=self.colors)", "docstring": "Keyword arguments for recreating the Shape from the vertices.", "id": "f3037:c0:m8"} {"signature": "def enable(self, enabled):", "body": "self.enabled = enabledreturn self", "docstring": "Set whether the shape should be drawn.\n\n Parameters\n ----------\n\n enabled : bool", "id": "f3037:c0:m25"} {"signature": "def flip(self, angle, center=None):", "body": "return self.rotate(-angle, center=center).flip_y(center=center).rotate(angle, center=center)", "docstring": "Flip the shape in an arbitrary direction.\n\n Parameters\n ----------\n angle : array-like\n The angle, in radians counter-clockwise from the horizontal axis,\n defining the angle about which to flip the shape (of a line through `center`).\n center : array-like, optional\n The point about which to flip.\n If not passed, the center of the shape will be used.", "id": "f3037:c0:m21"} {"signature": "def flip_x(self, center=None):", "body": "if center is None:self.poly.flip()else:self.poly.flip(center[])", "docstring": "Flip the shape in the x direction, in-place.\n\n Parameters\n ----------\n center : array-like, optional\n Point about which to flip.\n If not passed, the center of the shape will be used.", "id": "f3037:c0:m19"} {"signature": "def overlaps(self, other):", "body": "return bool(self.poly.overlaps(other.poly))", "docstring": "Check if two shapes overlap.\n\n Parameters\n 
----------\n other : |Shape|\n\n Returns\n -------\n bool", "id": "f3037:c0:m26"} {"signature": "def enqueue_task(self, task):", "body": "data = dumps(task)if self._async:self.publisher_client.publish(self.topic_path, data=data)logger.info(''.format(task.id))else:unpickled_task = unpickle(data)logger.info(''.format(unpickled_task.id))with measure_time() as summary, self.queue_context():unpickled_task.execute(queue=self)summary(unpickled_task.summary())return TaskResult(task.id, self)", "docstring": "Enqueues a task directly. This is used when a task is retried or if\n a task was manually created.\n\n Note that this does not store the task.", "id": "f3044:c0:m5"} {"signature": "def task_context(self):", "body": "return task_context(self)", "docstring": "Returns a context manager that sets this task as the current_task\nglobal. Similar to flask's app.request_context. This is used by the\nworkers to make the global available inside of task functions.", "id": "f3045:c2:m9"} {"signature": "def cleanup(self):", "body": "if self.subscription:logger.info(\"\")self.subscriber_client.delete_subscription(self.subscription)", "docstring": "Deletes this worker's subscription.", "id": "f3046:c0:m2"} {"signature": "def _get_or_create_subscription(self):", "body": "topic_path = self._get_topic_path()subscription_name = ''.format(queue.PUBSUB_OBJECT_PREFIX, self.name, uuid4().hex)subscription_path = self.subscriber_client.subscription_path(self.project, subscription_name)try:self.subscriber_client.get_subscription(subscription_path)except google.cloud.exceptions.NotFound:logger.info(\"\".format(subscription_name))self.subscriber_client.create_subscription(subscription_path, topic_path)return subscription_path", "docstring": "In a broadcast queue, workers have a unique subscription ensuring\n that every worker recieves a copy of every task.", "id": "f3046:c0:m1"} {"signature": "def unpickle(pickled_string):", "body": "try:obj = loads(pickled_string)except Exception as e:raise UnpickleError('', pickled_string, e)return obj", "docstring": "Unpickles a string, but raises a unified UnpickleError in case anything\n fails.\n This is a helper method to not have to deal with the fact that `loads()`\n potentially raises many types of exceptions (e.g. AttributeError,\n IndexError, TypeError, KeyError, etc.)", "id": "f3055:m0"} {"signature": "def busybox_single_app_bundle_fixture(num_bundles=, command=[''], app_name_transformer=None):", "body": "if app_name_transformer is None:app_name_transformer = lambda x: xapp_dict = {'': '','': '','': '','': {'': command},'': {'': '','': [''],'': [{'': '','': [''],'': ''},{'': '','': [''],'': ''},{'': '','': [''],'': ''}]}}for bundle in range(num_bundles):app_name = app_name_transformer(''.format(_num_to_alpha(bundle)))bundle_name = ''.format(_num_to_alpha(bundle))_write('', bundle_name, {'': '', '': [app_name]})_write('', app_name, app_dict)", "docstring": "Fixture for use in integration tests. The local repo at\n /tmp/fake-repo should be set up before using this fixture. 
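The unpickle record above (f3055:m0) funnels the many unrelated exception types loads() can raise into a single error type. The pattern, sketched with an assumed UnpickleError definition (its real message strings are elided in the record):

from pickle import loads

class UnpickleError(Exception):
    pass

def unpickle(pickled_string):
    try:
        return loads(pickled_string)
    except Exception as e:  # loads() may raise AttributeError, IndexError, TypeError, KeyError, ...
        raise UnpickleError("Could not unpickle", pickled_string, e)

Callers then only need one except clause rather than enumerating every failure mode of deserialization.
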
Optionally takes in\n a name transformer function which is applied to the default names of the apps.", "id": "f3059:m9"} {"signature": "@patch('')def run_command(self, args, fake_exit, raise_on_error=True):", "body": "with patch('', wraps=self.exec_docker_patch) as fake_exec_docker:fake_exit.side_effect = SysExit('')self.fake_exec_docker = fake_exec_dockersys.argv = [''] + args.split('')try:client_entrypoint()except SysExit:passfor call in fake_exit.mock_calls:name, args, kwargs = callif len(args) == and args[] > and raise_on_error:self._clear_stdout()raise CommandError(''.format(''.join(sys.argv), args[]))result = self.stdoutself._clear_stdout()return result", "docstring": "Run a command through the Dusty client entrypoint, e.g. simulating\n the Dusty CLI as close as possible without having to call a subprocess.\n This command raises if the command fails, otherwise it returns the\n stdout generated by the command.", "id": "f3121:c4:m7"} {"signature": "def _load_ssh_auth_post_yosemite(mac_username):", "body": "user_id = subprocess.check_output(['', '', mac_username])ssh_auth_sock = subprocess.check_output(['', '', user_id, '', '', '']).rstrip()_set_ssh_auth_sock(ssh_auth_sock)", "docstring": "Starting with Yosemite, launchd was rearchitected and now only one\n launchd process runs for all users. This allows us to much more easily\n impersonate a user through launchd and extract the environment\n variables from their running processes.", "id": "f3124:m13"} {"signature": "def check_and_load_ssh_auth():", "body": "mac_username = get_config_value(constants.CONFIG_MAC_USERNAME_KEY)if not mac_username:logging.info(\"\")returnif not _running_on_mac(): logging.info(\"\")returnif _mac_version_is_post_yosemite():_load_ssh_auth_post_yosemite(mac_username)else:_load_ssh_auth_pre_yosemite()", "docstring": "Will check the mac_username config value; if it is present, will load that user's\nSSH_AUTH_SOCK environment variable to the current environment. This allows git clones\nto behave the same for the daemon as they do for the user", "id": "f3124:m16"} {"signature": "def _lib_install_commands_for_lib(app_name, assembled_specs):", "body": "libs = assembled_specs[''][app_name]['']['']return _lib_install_commands_for_libs(assembled_specs, libs)", "docstring": "This returns a list of all the commands that will install libraries for a\n given lib", "id": "f3127:m12"} {"signature": "def _compile_docker_commands(app_name, assembled_specs, port_spec):", "body": "app_spec = assembled_specs[''][app_name]commands = ['']commands += _lib_install_commands_for_app(app_name, assembled_specs)if app_spec['']:commands.append(\"\".format(container_code_path(app_spec)))commands.append(\"\".format(container_code_path(app_spec)))commands += _copy_assets_commands_for_app(app_spec, assembled_specs)commands += _get_once_commands(app_spec, port_spec)commands += _get_always_commands(app_spec)return commands", "docstring": "This is used to compile the command that will be run when the docker container starts\n up. 
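The _compile_docker_commands record continuing below concatenates per-library install commands with the app's once/always commands into one startup command list. Schematically, with every spec key and the shell prelude assumed rather than taken from the source:

def compile_startup_commands(app_spec, lib_specs):
    commands = ["set -e"]  # assumption: fail fast, like a shell prelude
    for lib_spec in lib_specs:
        commands.append("cd {}".format(lib_spec["mount"]))  # assumed key
        commands.extend(lib_spec.get("install", []))        # assumed key
    commands.extend(app_spec.get("once", []))               # assumed key
    commands.extend(app_spec.get("always", []))             # assumed key
    return commands
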
This command has to install any libs that the app uses, run the `always` command, and\n run the `once` command if the container is being launched for the first time", "id": "f3127:m8"} {"signature": "def _lib_install_commands(lib_spec):", "body": "if not lib_spec['']:return []return [\"\".format(lib_spec[''])] + lib_spec['']", "docstring": "This returns a single command that will install a library in a docker container", "id": "f3127:m14"} {"signature": "def repo_mount_validator():", "body": "def validator(document):if '' in document and '' in document:returnelif '' not in document and '' not in document:returnreturn ''return validator", "docstring": "If either repo or mount are provided, they must both be provided.", "id": "f3133:m1"} {"signature": "def _ensure_managed_repos_dir_exists():", "body": "if not os.path.exists(constants.REPOS_DIR):os.makedirs(constants.REPOS_DIR)", "docstring": "Our exports file will be invalid if this folder doesn't exist, and the NFS server\nwill not run correctly.", "id": "f3137:m2"} {"signature": "def configure_nfs_server():", "body": "repos_for_export = get_all_repos(active_only=True, include_specs_repo=False)current_exports = _get_current_exports()needed_exports = _get_exports_for_repos(repos_for_export)_ensure_managed_repos_dir_exists()if not needed_exports.difference(current_exports):if not _server_is_running():_restart_server()return_write_exports_config(needed_exports)_restart_server()", "docstring": "This function is used with `dusty up`. It will check all active repos to see if\nthey are exported. If any are missing, it will replace current dusty exports with\nexports that are needed for currently active repos, and restart\nthe NFS server", "id": "f3137:m0"} {"signature": "def vm_path_is_directory(remote_path):", "body": "try:check_call_on_vm(''.format(remote_path))except CalledProcessError:return Falsereturn True", "docstring": "A weak check of whether a path in the Dusty VM is a directory.\n This function returns False on any process error, so False may indicate\n other failures such as the path not actually existing.", "id": "f3139:m2"} {"signature": "@memoizeddef get_authed_registries():", "body": "result = set()if not os.path.exists(constants.DOCKER_CONFIG_PATH):return resultconfig = json.load(open(constants.DOCKER_CONFIG_PATH, ''))for registry in config.get('', {}).iterkeys():try:parsed = urlparse(registry)except Exception:log_to_client(''.format(registry))result.add(parsed.netloc) if parsed.netloc else result.add(parsed.path)return result", "docstring": "Reads the local Docker client config for the current user\n and returns all registries to which the user may be logged in.\n This is intended to be run client-side, not by the daemon.", "id": "f3140:m1"} {"signature": "def registry_from_image(image_name):", "body": "if '' not in image_name: return constants.PUBLIC_DOCKER_REGISTRYprefix = image_name.split('')[]if '' not in prefix: return constants.PUBLIC_DOCKER_REGISTRYreturn prefix", "docstring": "Returns the Docker registry host associated with\n a given image name.", "id": "f3140:m0"} {"signature": "def get_dusty_containers(services, include_exited=False):", "body": "client = get_docker_client()if services:containers = [get_container_for_app_or_service(service, include_exited=include_exited) for service in services]return [container for container in containers if container]else:return [containerfor container in client.containers(all=include_exited)if any(name.startswith('') for name in container.get('', []))]", "docstring": "Get a list of 
containers associated with the list\n of services. If no services are provided, attempts to\n return all containers associated with Dusty.", "id": "f3144:m5"} {"signature": "def _compose_restart(services):", "body": "def _restart_container(client, container):log_to_client(''.format(get_canonical_container_name(container)))client.restart(container[''], timeout=)assembled_specs = get_assembled_specs()if services == []:services = [spec.name for spec in assembled_specs.get_apps_and_services()]logging.info(''.format(services))client = get_docker_client()for service in services:container = get_container_for_app_or_service(service, include_exited=True)if container is None:log_to_client(''.format(service))continuestopped_linked_containers = _check_stopped_linked_containers(container, assembled_specs)if stopped_linked_containers:log_to_client(''.format(stopped_linked_containers, service))else:_restart_container(client, container)", "docstring": "Well, this is annoying. Compose 1.2 shipped with the\n restart functionality fucking broken, so we can't set a faster\n timeout than 10 seconds (which is way too long) using Compose.\n We are therefore resigned to trying to hack this together\n ourselves. Lame.\n\n Relevant fix which will make it into the next release:\n https://github.com/docker/compose/pull/1318", "id": "f3145:m6"} {"signature": "def update_running_containers_from_spec(compose_config, recreate_containers=True):", "body": "write_composefile(compose_config, constants.COMPOSEFILE_PATH)compose_up(constants.COMPOSEFILE_PATH, '', recreate_containers=recreate_containers)", "docstring": "Takes in a Compose spec from the Dusty Compose compiler,\n writes it to the Compose spec folder so Compose can pick it\n up, then does everything needed to make sure the Docker VM is\n up and running containers with the updated config.", "id": "f3145:m7"} {"signature": "def stop_running_services(services=None):", "body": "if services is None:services = []_compose_stop(constants.COMPOSEFILE_PATH, '', services)", "docstring": "Stop running containers owned by Dusty, or a specific\n list of Compose services if provided.\n\n Here, \"services\" refers to the Compose version of the term,\n so any existing running container, by name. This includes Dusty\n apps and services.", "id": "f3145:m8"} {"signature": "def remove_exited_dusty_containers():", "body": "client = get_docker_client()exited_containers = get_exited_dusty_containers()removed_containers = []for container in exited_containers:log_to_client(\"\".format(container[''][]))try:client.remove_container(container[''], v=True)removed_containers.append(container)except Exception as e:log_to_client(e.message or str(e))return removed_containers", "docstring": "Removes all dusty containers with 'Exited' in their status", "id": "f3146:m1"} {"signature": "def remove_current_dusty_config(config):", "body": "return constants.DUSTY_CONFIG_REGEX.sub(\"\", config)", "docstring": "Given a string representing the contents of a\n file, this function strips out the Dusty config section\n delimited by the Dusty header and footer. 
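The `remove_current_dusty_config` record above reduces to a single `re.sub` with a precompiled `DUSTY_CONFIG_REGEX`. A minimal sketch of that pattern follows; the header/footer marker strings are hypothetical stand-ins, since this corpus elides the real literals from dusty's constants module:

```python
import re

# Hypothetical marker strings; the real header/footer literals live in
# dusty's constants module and are elided in this corpus.
DUSTY_HEADER = "# BEGIN section managed by Dusty"
DUSTY_FOOTER = "# END section managed by Dusty"

# re.DOTALL lets '.*?' span newlines, so the whole managed block matches.
DUSTY_CONFIG_REGEX = re.compile(
    re.escape(DUSTY_HEADER) + r".*?" + re.escape(DUSTY_FOOTER) + r"\n?",
    re.DOTALL,
)

def remove_current_dusty_config(config):
    """Strip the Dusty-managed section from a config file's contents."""
    return DUSTY_CONFIG_REGEX.sub("", config)

contents = "keep me\n{}\nmanaged\n{}\nkeep me too\n".format(DUSTY_HEADER, DUSTY_FOOTER)
assert remove_current_dusty_config(contents) == "keep me\nkeep me too\n"
```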
Returns\n the stripped string.", "id": "f3148:m2"} {"signature": "def _ip_for_mac_from_ip_addr_show(ip_addr_show, target_mac):", "body": "return_next_ip = Falsefor line in ip_addr_show.splitlines():line = line.strip()if line.startswith(''):line_mac = line.split('')[].replace('', '')if line_mac == target_mac:return_next_ip = Trueelif return_next_ip and line.startswith('') and not line.startswith(''):ip = line.split('')[].split('')[]return ip", "docstring": "Given the rather-complex output from an 'ip addr show' command\n on the VM, parse the output to determine the IP address\n assigned to the interface with the given MAC.", "id": "f3150:m28"} {"signature": "def _apply_nic_fix():", "body": "log_to_client('')check_call_demoted(['', '', constants.VM_MACHINE_NAME, '', constants.VM_NIC_TYPE])", "docstring": "Set NIC 1 to use PCnet-FAST III. The host-only NIC type is\n set during docker-machine create (and Machine will change it\n back if it is changed manually), which is why we only change\n NIC 1 here.", "id": "f3150:m20"} {"signature": "def _get_localhost_ssh_port():", "body": "for line in _get_vm_config():if line.startswith(''):spec = line.split('')[].strip('')name, protocol, host, host_port, target, target_port = spec.split('')if name == '' and protocol == '' and target_port == '':return host_portraise ValueError('')", "docstring": "Something in the VM chain, either VirtualBox or Machine, helpfully\n sets up localhost-to-VM forwarding on port 22. We can inspect this\n rule to determine the port on localhost which gets forwarded to\n 22 in the VM.", "id": "f3150:m26"} {"signature": "def delete_docker_vm_host_only_interface():", "body": "adapter_name = get_vm_hostonly_adapter()log_to_client(''.format(adapter_name))check_call_demoted(['', '', '', adapter_name])", "docstring": "Attempt to delete the host-only interface attached\n to the current Dusty VM. VM should be stopped\n before calling this.", "id": "f3150:m21"} {"signature": "def _stop_docker_vm():", "body": "check_call_demoted(['', '', constants.VM_MACHINE_NAME], redirect_stderr=True)", "docstring": "Stop the Dusty VM if it is not already stopped.", "id": "f3150:m16"} {"signature": "def regenerate_docker_vm_certificates():", "body": "log_to_client('')check_call_demoted(['', '', '', constants.VM_MACHINE_NAME])", "docstring": "Regenerate certificates for a running VM through Docker Machine.\n This may be necessary following a restart if there were previously\n networking issues preventing Machine from doing this as part\n of normal startup.", "id": "f3150:m22"} {"signature": "def _apply_nat_net_less_greedy_subnet():", "body": "check_and_log_output_and_error_demoted(['', '', constants.VM_MACHINE_NAME, '', ''],quiet_on_success=True)", "docstring": "By default, VirtualBox claims 10.0.2.x for itself as part of its NAT routing\n scheme. This subnet is commonly used on internal networks, making this a pretty\n damn greedy choice. 
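The `_ip_for_mac_from_ip_addr_show` helper above is a small state machine over `ip addr show` output: flag the interface whose `link/ether` line carries the target MAC, then return the address from the next `inet` line. A rough, runnable reconstruction; the exact tokens are assumptions, since the corpus strips string literals:

```python
def ip_for_mac(ip_addr_show, target_mac):
    """Return the IPv4 address of the interface whose MAC matches target_mac."""
    return_next_ip = False
    for line in ip_addr_show.splitlines():
        line = line.strip()
        if line.startswith('link/ether'):
            # e.g. "link/ether 08:00:27:aa:bb:cc brd ff:ff:ff:ff:ff:ff"
            return_next_ip = line.split(' ')[1] == target_mac
        elif return_next_ip and line.startswith('inet') and not line.startswith('inet6'):
            # e.g. "inet 192.168.99.100/24 brd 192.168.99.255 scope global eth1"
            return line.split(' ')[1].split('/')[0]

sample = """2: eth1: <BROADCAST,MULTICAST,UP>
    link/ether 08:00:27:aa:bb:cc brd ff:ff:ff:ff:ff:ff
    inet 192.168.99.100/24 brd 192.168.99.255 scope global eth1
"""
assert ip_for_mac(sample, '08:00:27:aa:bb:cc') == '192.168.99.100'
```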
We instead alter the VM to use the less greedy subnet of\n 10.174.249.x which is less likely to conflict.", "id": "f3150:m13"} {"signature": "def docker_vm_is_running():", "body": "running_vms = check_output_demoted(['', '', ''])for line in running_vms.splitlines():if ''.format(constants.VM_MACHINE_NAME) in line:return Truereturn False", "docstring": "Using VBoxManage is 0.5 seconds or so faster than Machine.", "id": "f3150:m18"} {"signature": "def _apply_nat_dns_host_resolver():", "body": "check_and_log_output_and_error_demoted(['', '', constants.VM_MACHINE_NAME, '', ''],quiet_on_success=True)", "docstring": "This will make the Dusty VM always use the host's DNS resolver for lookups.\nIt solves an issue we were seeing where the VM's resolving settings would get\nout of date when a laptop was moved between routers with different settings,\nresulting in DNS lookup failures on the VM.", "id": "f3150:m12"} {"signature": "def _get_host_only_ip():", "body": "mac = _get_host_only_mac_address()ip_addr_show = check_output_demoted(['', '', '','', '','', _vm_key_path(), '', _get_localhost_ssh_port(),'', ''])return _ip_for_mac_from_ip_addr_show(ip_addr_show, mac)", "docstring": "Determine the host-only IP of the Dusty VM through Virtualbox and SSH\n directly, bypassing Docker Machine. We do this because Docker Machine is\n much slower, taking about 600ms total. We are basically doing the same\n flow Docker Machine does in its own code.", "id": "f3150:m29"} {"signature": "def _dusty_vm_exists():", "body": "existing_vms = check_output_demoted(['', '', ''])for line in existing_vms.splitlines():if ''.format(constants.VM_MACHINE_NAME) in line:return Truereturn False", "docstring": "We use VBox directly instead of Docker Machine because it\n shaves about 0.5 seconds off the runtime of this check.", "id": "f3150:m11"} {"signature": "def get_nginx_configuration_spec(port_spec_dict, docker_bridge_ip):", "body": "nginx_http_config, nginx_stream_config = \"\", \"\"for port_spec in port_spec_dict['']:if port_spec[''] == '':nginx_http_config += _nginx_http_spec(port_spec, docker_bridge_ip)elif port_spec[''] == '':nginx_stream_config += _nginx_stream_spec(port_spec, docker_bridge_ip)return {'': nginx_http_config, '': nginx_stream_config}", "docstring": "This function will take in a port spec as specified by the port_spec compiler and\n will output an nginx web proxy config string. 
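The `get_nginx_configuration_spec` record above dispatches each port spec to either an HTTP or a stream block builder and concatenates the results per protocol. A toy version of that dispatch, with assumed spec keys (`type`, `host_address`, `host_port`) and a deliberately simplified upstream mapping (the real compiler proxies a host port to a separate container port via the bridge IP):

```python
def nginx_config_for_specs(port_specs, bridge_ip):
    """Concatenate one nginx block per port spec, keyed by protocol type."""
    http_config, stream_config = "", ""
    for spec in port_specs:
        if spec['type'] == 'http':
            http_config += (
                "server {{\n"
                "    listen {port};\n"
                "    server_name {host};\n"
                "    location / {{ proxy_pass http://{ip}:{port}; }}\n"
                "}}\n"
            ).format(port=spec['host_port'], host=spec['host_address'], ip=bridge_ip)
        elif spec['type'] == 'stream':
            stream_config += (
                "server {{\n"
                "    listen {port};\n"
                "    proxy_pass {ip}:{port};\n"
                "}}\n"
            ).format(port=spec['host_port'], ip=bridge_ip)
    return {'http': http_config, 'stream': stream_config}

specs = [{'type': 'http', 'host_address': 'app.local', 'host_port': 80}]
print(nginx_config_for_specs(specs, '172.17.0.1')['http'])
```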
This string can then be written to a file\n and used to run nginx", "id": "f3153:m8"} {"signature": "def _nginx_http_spec(port_spec, bridge_ip):", "body": "server_string_spec = \"\"server_string_spec += \"\".format(_nginx_max_file_size_string())server_string_spec += \"\".format(_nginx_listen_string(port_spec))server_string_spec += \"\".format(_nginx_server_name_string(port_spec))server_string_spec += _nginx_location_spec(port_spec, bridge_ip)server_string_spec += _custom_502_page()server_string_spec += \"\"return server_string_spec", "docstring": "This will output the nginx HTTP config string for a specific port spec", "id": "f3153:m6"} {"signature": "def get_lib_volume_mounts(base_lib_name, assembled_specs):", "body": "volumes = [_get_lib_repo_volume_mount(assembled_specs[''][base_lib_name])]volumes.append(get_command_files_volume_mount(base_lib_name, test=True))for lib_name in assembled_specs[''][base_lib_name]['']['']:lib_spec = assembled_specs[''][lib_name]volumes.append(_get_lib_repo_volume_mount(lib_spec))return volumes", "docstring": "Returns a list of the formatted volume specs for a lib", "id": "f3155:m4"} {"signature": "def _get_app_repo_volume_mount(app_spec):", "body": "if app_spec['']:return \"\".format(Repo(app_spec['']).vm_path, container_code_path(app_spec))", "docstring": "This returns the formatted volume mount spec to mount the local code for an app in the\n container", "id": "f3155:m5"} {"signature": "def _get_lib_repo_volume_mount(lib_spec):", "body": "return \"\".format(Repo(lib_spec['']).vm_path, container_code_path(lib_spec))", "docstring": "This returns the formatted volume mount spec to mount the local code for a lib in the\n container", "id": "f3155:m6"} {"signature": "def _get_app_libs_volume_mounts(app_name, assembled_specs):", "body": "volumes = []for lib_name in assembled_specs[''][app_name]['']['']:lib_spec = assembled_specs[''][lib_name]volumes.append(\"\".format(Repo(lib_spec['']).vm_path, container_code_path(lib_spec)))return volumes", "docstring": "Returns a list of the formatted volume mounts for all libs that an app uses", "id": "f3155:m8"} {"signature": "def _composed_service_dict(service_spec):", "body": "compose_dict = service_spec.plain_dict()_apply_env_overrides(env_overrides_for_app_or_service(service_spec.name), compose_dict)compose_dict.setdefault('', []).append(_get_cp_volume_mount(service_spec.name))compose_dict[''] = \"\".format(service_spec.name)return compose_dict", "docstring": "This function returns a dictionary of the docker_compose specifications\n for one service. 
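The volume-mount helpers above all boil down to formatting a `host_path:container_path` pair from a repo's VM path and the container code path. A sketch under an assumed flat spec shape (the real specs nest these fields, and the keys here are hypothetical):

```python
def volume_mount(vm_repo_path, container_code_path):
    """Format a Docker volume mount spec, e.g. '/persist/repos/liba:/libs/liba'."""
    return '{}:{}'.format(vm_repo_path, container_code_path)

def lib_volume_mounts(base_lib, specs):
    """Collect mounts for a lib and for every lib it depends on."""
    mounts = [volume_mount(specs[base_lib]['vm_path'], specs[base_lib]['mount'])]
    for dep in specs[base_lib].get('depends', []):
        mounts.append(volume_mount(specs[dep]['vm_path'], specs[dep]['mount']))
    return mounts

specs = {
    'liba': {'vm_path': '/persist/repos/liba', 'mount': '/libs/liba', 'depends': ['libb']},
    'libb': {'vm_path': '/persist/repos/libb', 'mount': '/libs/libb'},
}
assert lib_volume_mounts('liba', specs) == [
    '/persist/repos/liba:/libs/liba',
    '/persist/repos/libb:/libs/libb',
]
```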
Currently, this is just the Dusty service spec with\n an additional volume mount to support Dusty's cp functionality.", "id": "f3156:m11"} {"signature": "def get_compose_dict(assembled_specs, port_specs):", "body": "compose_dict = _compose_dict_for_nginx(port_specs)for app_name in assembled_specs[''].keys():compose_dict[app_name] = _composed_app_dict(app_name, assembled_specs, port_specs)for service_spec in assembled_specs[''].values():compose_dict[service_spec.name] = _composed_service_dict(service_spec)return compose_dict", "docstring": "This function returns a dictionary representation of a docker-compose.yml file, based on assembled_specs from\n the spec_assembler, and port_specs from the port_spec compiler", "id": "f3156:m2"} {"signature": "def _get_expanded_active_specs(specs):", "body": "_filter_active(constants.CONFIG_BUNDLES_KEY, specs)_filter_active('', specs)_expand_libs_in_apps(specs)_filter_active('', specs)_filter_active('', specs)_add_active_assets(specs)", "docstring": "This function removes any unnecessary bundles, apps, libs, and services that aren't needed by\nthe activated_bundles. It also expands inside specs.apps.depends.libs all libs that are needed\nindirectly by each app", "id": "f3157:m9"} {"signature": "def _get_referenced_services(specs):", "body": "active_services = set()for app_spec in specs[''].values():for service in app_spec['']['']:active_services.add(service)for bundle_spec in specs[''].values():for service in bundle_spec['']:active_services.add(service)return active_services", "docstring": "Returns all services that are referenced in specs.apps.depends.services,\nor in specs.bundles.services", "id": "f3157:m6"} {"signature": "def get_same_container_repos_from_spec(app_or_library_spec):", "body": "repos = set()app_or_lib_repo = get_repo_of_app_or_library(app_or_library_spec.name)if app_or_lib_repo is not None:repos.add(app_or_lib_repo)for dependent_name in app_or_library_spec['']['']:repos.add(get_repo_of_app_or_library(dependent_name))return repos", "docstring": "Given the spec of an app or library, returns all repos that are guaranteed\n to live in the same container", "id": "f3157:m19"} {"signature": "@contextmanagerdef parallel_task_queue(pool_size=multiprocessing.cpu_count()):", "body": "task_queue = TaskQueue(pool_size)yield task_queuetask_queue.execute()", "docstring": "Context manager for setting up a TaskQueue. Upon leaving the\n context manager, all tasks that were enqueued will be executed\n in parallel subject to `pool_size` concurrency constraints.", "id": "f3158:m0"} {"signature": "@contextlib.contextmanagerdef streaming_to_client():", "body": "for handler in client_logger.handlers:if hasattr(handler, ''):breakelse:handler = Noneold_propagate = client_logger.propagateclient_logger.propagate = Falseif handler is not None:old_append = handler.append_newlineshandler.append_newlines = Falseyieldclient_logger.propagate = old_propagateif handler is not None:handler.append_newlines = old_append", "docstring": "Puts the client logger into streaming mode, which sends\n unbuffered input through to the socket one character at a time.\n We also disable propagation so the root logger does not\n receive many one-byte emissions. 
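`parallel_task_queue` above hands callers a queue inside a `with` block and only runs the enqueued work once the block exits. A self-contained stand-in for the `TaskQueue` class (which this corpus does not include), built on `multiprocessing.pool.ThreadPool`:

```python
import multiprocessing
from contextlib import contextmanager
from multiprocessing.pool import ThreadPool

class TaskQueue(object):
    """Collects (fn, args, kwargs) tuples and runs them all at once."""
    def __init__(self, pool_size):
        self.pool_size = pool_size
        self.tasks = []

    def enqueue_task(self, fn, *args, **kwargs):
        self.tasks.append((fn, args, kwargs))

    def execute(self):
        pool = ThreadPool(self.pool_size)
        results = [pool.apply_async(fn, args, kwargs) for fn, args, kwargs in self.tasks]
        pool.close()
        pool.join()
        return [r.get() for r in results]  # re-raises any task exception

@contextmanager
def parallel_task_queue(pool_size=multiprocessing.cpu_count()):
    task_queue = TaskQueue(pool_size)
    yield task_queue      # caller enqueues work inside the with-block...
    task_queue.execute()  # ...and it all runs, pool_size at a time, on exit

with parallel_task_queue(pool_size=2) as queue:
    for n in range(4):
        queue.enqueue_task(print, 'task', n)
```

Deferring execution to the context exit is what lets `update_managed_repos` (below) enqueue one update per repo and have them fetched in parallel with a single `with` statement.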
This context handler\n was originally created for streaming Compose up's\n terminal output through to the client and should only be\n used for similarly complex circumstances.", "id": "f3160:m5"} {"signature": "@daemon_commanddef update_managed_repos(force=False):", "body": "log_to_client('')update_specs_repo_and_known_hosts()repos_to_update = get_all_repos(active_only=True, include_specs_repo=False)with parallel_task_queue() as queue:log_to_client('')for repo in repos_to_update:if not repo.is_overridden:repo.update_local_repo_async(queue, force=force)", "docstring": "For any active, managed repos, update the Dusty-managed\n copy to bring it up to date with the latest master.", "id": "f3169:m9"} {"signature": "@daemon_commanddef start_local_env(recreate_containers):", "body": "assembled_spec = spec_assembler.get_assembled_specs()required_absent_assets = virtualbox.required_absent_assets(assembled_spec)if required_absent_assets:raise RuntimeError(''.format(required_absent_assets))docker_ip = virtualbox.get_docker_vm_ip()if os.path.exists(constants.COMPOSEFILE_PATH):try:stop_apps_or_services(rm_containers=recreate_containers)except CalledProcessError as e:log_to_client(\"\")log_to_client(str(e))daemon_warnings.clear_namespace('')df_info = virtualbox.get_docker_vm_disk_info(as_dict=True)if '' in df_info[''] or '' in df_info['']:warning_msg = ''.format(df_info[''])daemon_warnings.warn('', warning_msg)log_to_client(warning_msg)log_to_client(\"\")active_repos = spec_assembler.get_all_repos(active_only=True, include_specs_repo=False)log_to_client(\"\")port_spec = port_spec_compiler.get_port_spec_document(assembled_spec, docker_ip)log_to_client(\"\")docker_bridge_ip = virtualbox.get_docker_bridge_ip()nginx_config = nginx_compiler.get_nginx_configuration_spec(port_spec, docker_bridge_ip)log_to_client(\"\")make_up_command_files(assembled_spec, port_spec)log_to_client(\"\")compose_config = compose_compiler.get_compose_dict(assembled_spec, port_spec)log_to_client(\"\")hosts.update_hosts_file_from_port_spec(port_spec)log_to_client(\"\")nfs.configure_nfs()log_to_client(\"\")nginx.update_nginx_from_config(nginx_config)log_to_client(\"\")compose.update_running_containers_from_spec(compose_config, recreate_containers=recreate_containers)log_to_client(\"\")", "docstring": "This command will use the compilers to get compose specs and\n will pass those specs to the systems that need them. 
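`streaming_to_client` above temporarily rewires logger state (propagation off, handler newline behavior off) and restores it on exit. The propagation half of that pattern in miniature, with a hypothetical logger name:

```python
import contextlib
import logging

client_logger = logging.getLogger('client')          # name is illustrative
client_logger.addHandler(logging.StreamHandler())    # handle records locally

@contextlib.contextmanager
def no_propagation(logger):
    """Temporarily stop records from bubbling up to ancestor loggers."""
    old_propagate = logger.propagate
    logger.propagate = False
    try:
        yield
    finally:
        # Restore even if the body raises; the record above uses the
        # simpler save/yield/restore flow without a try/finally.
        logger.propagate = old_propagate

with no_propagation(client_logger):
    client_logger.warning('handled locally, not forwarded to the root logger')
```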
Those\n systems will in turn launch the services needed to make the\n local environment go.", "id": "f3172:m2"} {"signature": "@daemon_commanddef restart_apps_or_services(app_or_service_names=None):", "body": "if app_or_service_names:log_to_client(\"\".format(''.join(app_or_service_names)))else:log_to_client(\"\")if app_or_service_names:specs = spec_assembler.get_assembled_specs()specs_list = [specs[''][app_name] for app_name in app_or_service_names if app_name in specs['']]repos = set()for spec in specs_list:if spec['']:repos = repos.union(spec_assembler.get_same_container_repos_from_spec(spec))nfs.update_nfs_with_repos(repos)else:nfs.update_nfs_with_repos(spec_assembler.get_all_repos(active_only=True, include_specs_repo=False))compose.restart_running_services(app_or_service_names)", "docstring": "Restart any containers associated with Dusty, or associated with\n the provided app_or_service_names.", "id": "f3172:m4"} {"signature": "@daemon_commanddef prep_for_start_local_env(pull_repos):", "body": "if pull_repos:update_managed_repos(force=True)assembled_spec = spec_assembler.get_assembled_specs()if not assembled_spec[constants.CONFIG_BUNDLES_KEY]:raise RuntimeError('')virtualbox.initialize_docker_vm()", "docstring": "Daemon-side command to ensure we're running the latest\n versions of any managed repos, including the\n specs repo, before we do anything else in the up flow.", "id": "f3172:m0"} {"signature": "def _env_vars_from_file(filename):", "body": "def split_env(env):if '' in env:return env.split('', )else:return env, Noneenv = {}for line in open(filename, ''):line = line.strip()if line and not line.startswith(''):k, v = split_env(line)env[k] = vreturn env", "docstring": "This code is copied from Docker Compose, so that we're exactly compatible\nwith their `env_file` option", "id": "f3174:m4"} {"signature": "def validate_specs_from_path(specs_path):", "body": "log_to_client(\"\".format(specs_path))if not os.path.exists(specs_path):raise RuntimeError(\"\".format(specs_path))specs = get_specs_from_path(specs_path)_check_bare_minimum(specs)_validate_spec_names(specs)_validate_cycle_free(specs)log_to_client(\"\")", "docstring": "Validates Dusty specs at the given path. The following checks are performed:\n -That the given path exists\n -That there are bundles in the given path\n -That the fields in the specs match those allowed in our schemas\n -That references to apps, libs, and services point at defined specs\n -That there are no cycles in app and lib dependencies", "id": "f3176:m9"} {"signature": "@daemon_commanddef copy_between_containers(source_name, source_path, dest_name, dest_path):", "body": "if not container_path_exists(source_name, source_path):raise RuntimeError(''.format(source_path, source_name))temp_path = os.path.join(tempfile.mkdtemp(), str(uuid.uuid1()))with _cleanup_path(temp_path):copy_to_local(temp_path, source_name, source_path, demote=False)copy_from_local(temp_path, dest_name, dest_path, demote=False)", "docstring": "Copy a file from the source container to an intermediate staging\n area on the local filesystem, then from that staging area to the\n destination container.\n\n These moves take place without demotion for two reasons:\n 1. There should be no permissions vulnerabilities with copying\n between containers because it is assumed the non-privileged\n user has full access to all Dusty containers.\n 2. 
The temp dir created by mkdtemp is owned by the owner of the\n Dusty daemon process, so if we demoted our moves to/from that location\n they would encounter permission errors.", "id": "f3179:m1"} {"signature": "def update_local_repo_async(self, task_queue, force=False):", "body": "self.ensure_local_repo()task_queue.enqueue_task(self.update_local_repo, force=force)", "docstring": "Local repo updating suitable for asynchronous, parallel execution.\n We still need to run `ensure_local_repo` synchronously because it\n does a bunch of non-threadsafe filesystem operations.", "id": "f3182:c0:m19"} {"signature": "def init_yaml_constructor():", "body": "def utf_encoding_string_constructor(loader, node):return loader.construct_scalar(node).encode('')yaml.SafeLoader.add_constructor(u'', utf_encoding_string_constructor)", "docstring": "This dark magic is used to make yaml.safe_load encode all strings as utf-8,\nwhere otherwise python unicode strings would be returned for non-ascii chars", "id": "f3207:m3"} {"signature": "def _increase_file_handle_limit():", "body": "logging.info(''.format(constants.FILE_HANDLE_LIMIT))resource.setrlimit(resource.RLIMIT_NOFILE,(constants.FILE_HANDLE_LIMIT, resource.RLIM_INFINITY))", "docstring": "Raise the open file handles permitted by the Dusty daemon process\n and its child processes. The number we choose here needs to be within\n the OS X default kernel hard limit, which is 10240.", "id": "f3208:m7"} {"signature": "def _start_http_server():", "body": "logging.info(''.format(constants.DAEMON_HTTP_BIND_IP,constants.DAEMON_HTTP_BIND_PORT))thread = threading.Thread(target=http_server.app.run, args=(constants.DAEMON_HTTP_BIND_IP,constants.DAEMON_HTTP_BIND_PORT))thread.daemon = Truethread.start()", "docstring": "Start the daemon's HTTP server on a separate thread.\n This server is only used for servicing container status\n requests from Dusty's custom 502 page.", "id": "f3208:m9"} {"signature": "@app.route('', methods=[''])def consume(consumer_id):", "body": "global _consumersconsumer = _consumers[consumer_id]client = get_docker_client()try:status = client.inspect_container(consumer.container_id)['']['']except Exception as e:status = ''new_logs = client.logs(consumer.container_id,stdout=True,stderr=True,stream=False,timestamps=False,since=calendar.timegm(consumer.offset.timetuple()))updated_consumer = Consumer(consumer.container_id, datetime.utcnow())_consumers[str(consumer_id)] = updated_consumerresponse = jsonify({'': new_logs, '': status})response.headers[''] = ''response.headers[''] = ''return response", "docstring": "Given an existing consumer ID, return any new lines from the\n log since the last time the consumer was consumed.", "id": "f3209:m3"} {"signature": "@app.route('', methods=[''])def register_consumer():", "body": "global _consumershostname, port = request.form[''], request.form['']app_name = _app_name_from_forwarding_info(hostname, port)containers = get_dusty_containers([app_name], include_exited=True)if not containers:raise ValueError(''.format(app_name))container = containers[]new_id = uuid1()new_consumer = Consumer(container[''], datetime.utcnow())_consumers[str(new_id)] = new_consumerresponse = jsonify({'': app_name, '': new_id})response.headers[''] = ''response.headers[''] = ''return response", "docstring": "Given a hostname and port attempting to be accessed,\n return a unique consumer ID for accessing logs from\n the referenced container.", "id": "f3209:m2"} {"signature": "def __getitem__(self, index):", "body": "return self._row[index]", 
"docstring": "Retrieve the row at index.", "id": "f3227:c0:m5"} {"signature": "def __setitem__(self, index, value):", "body": "if not isinstance(value, dict):raise TypeError('')for val in value.values():self._detect_or_validate(val)self._row[index] = value", "docstring": "Replace the row at index.", "id": "f3227:c0:m7"} {"signature": "def __repr__(self): ", "body": "parts = [u'' % self.ver_str]if bool(self.metadata):parts.append(u'' % self.metadata)column_meta = []for col, col_meta in self.column.items():if bool(col_meta):column_meta.append(u'' % (col, col_meta))else:column_meta.append(u'' % col)if bool(column_meta):parts.append(u'' % ''.join(column_meta))elif len(self.column):parts.append(u'' % ''.join(self.column.keys()))else:parts.append(u'')if bool(self):parts.extend([u'' % (row, u''.join([((u'' % (col, data[col]))if col in data else(u'' % col)) for colin self.column.keys()]))for (row, data) in enumerate(self)])else:parts.append(u'')class_name = self.__class__.__name__return u'' % (class_name, u''.join(parts), class_name)", "docstring": "Return a representation of this grid.", "id": "f3227:c0:m4"} {"signature": "def pop_at(self, index):", "body": "return self.pop(self.at(index))", "docstring": "Remove the key at the given index and return its value.", "id": "f3230:c0:m13"} {"signature": "def at(self, index):", "body": "return self._order[index]", "docstring": "Return the key at the given index.", "id": "f3230:c0:m8"} {"signature": "def add_item(self, key, value, after=False, index=None, pos_key=None,replace=True):", "body": "if self._validate_fn:self._validate_fn(value)if (index is not None) and (pos_key is not None):raise ValueError('')elif pos_key is not None:try:index = self.index(pos_key)except ValueError:raise KeyError('' % pos_key)if after and (index is not None):index += if key in self._values:if not replace:raise KeyError('' % key)if index is not None:del self[key]else:self._values[key] = valuereturnif index is not None:self._order.insert(index, key)else:self._order.append(key)self._values[key] = value", "docstring": "Add an item at a specific location, possibly replacing the\nexisting item.\n\nIf after is True, we insert *after* the given index, otherwise we\ninsert before.\n\nThe position is specified using either index or pos_key, the former\nspecifies the position from the start of the array (base 0). 
pos_key\nspecifies the name of another key, and positions the new key relative\nto that key.\n\nWhen replacing, the position will be left un-changed unless a location\nis specified explicitly.", "id": "f3230:c0:m7"} {"signature": "def define_haystack_units():", "body": "ureg = UnitRegistry()ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')ureg.define('')return ureg", "docstring": "Missing units found in project-haystack\nAdded to the registry", "id": "f3234:m2"} {"signature": "def dump_grid(grid):", "body": "header = '' % dump_str(str(grid._version), version=grid._version)if bool(grid.metadata):header += '' + dump_meta(grid.metadata, version=grid._version)columns = dump_columns(grid.column, version=grid._version)rows = dump_rows(grid)return ''.join([header, columns] + rows + [''])", "docstring": "Dump a single grid to its ZINC representation.", "id": "f3236:m2"} {"signature": "def parse_scalar(scalar_data, version):", "body": "try:return hs_scalar[version].parseString(scalar_data, parseAll=True)[]except pp.ParseException as pe:raise ZincParseException('' % reformat_exception(pe),scalar_data, , pe.col)except:LOG.debug('',scalar_data, version)", "docstring": "Parse a Project Haystack scalar in ZINC format.", "id": "f3237:m7"} {"signature": "def _unescape(s, uri=False):", "body": "out = ''while len(s) > :c = s[]if c == '':esc_c = s[]if esc_c in ('', ''):out += six.unichr(int(s[:], base=))s = s[:]continueelse:if esc_c == '':out += ''elif esc_c == '':out += ''elif esc_c == '':out += ''elif esc_c == '':out += ''elif esc_c == '':out += ''else:if uri and (esc_c == ''):out += ''out += esc_cs = s[:]continueelse:out += cs = s[:]return out", "docstring": "Iterative parser for string escapes.", "id": "f3237:m2"} {"signature": "def parse_grid(grid_data):", "body": "try:grid_parts = NEWLINE_RE.split(grid_data)if len(grid_parts) < :raise ZincParseException('',grid_data, , )grid_meta_str = grid_parts.pop()col_meta_str = grid_parts.pop()ver_match = VERSION_RE.match(grid_meta_str)if ver_match is None:raise ZincParseException('' % grid_meta_str,grid_data, , )version = Version(ver_match.group())try:grid_meta = hs_gridMeta[version].parseString(grid_meta_str, parseAll=True)[]except pp.ParseException as pe:raise ZincParseException('' % pe,grid_data, , pe.col)except: LOG.debug('', grid_meta_str)raisetry:col_meta = hs_cols[version].parseString(col_meta_str, parseAll=True)[]except pp.ParseException as pe:raise ZincParseException(''% reformat_exception(pe, ),grid_data, , pe.col)except: LOG.debug('', col_meta_str)raiserow_grammar = hs_row[version]def _parse_row(row_num_and_data):(row_num, row) = row_num_and_dataline_num = row_num + try:return dict(zip(col_meta.keys(),row_grammar.parseString(row, parseAll=True)[].asList()))except pp.ParseException as pe:raise ZincParseException(''% reformat_exception(pe, line_num),grid_data, 
line_num, pe.col)except: LOG.debug('', row)raiseg = Grid(version=grid_meta.pop(''),metadata=grid_meta,columns=list(col_meta.items()))g.extend(map(_parse_row, filter(lambda gp : bool(gp[]), enumerate(grid_parts))))return gexcept:LOG.debug('', grid_data)raise", "docstring": "Parse the incoming grid.", "id": "f3237:m6"} {"signature": "def extend(self, items, replace=True):", "body": "if isinstance(items, dict) or isinstance(items, SortableDict):items = list(items.items())for (key, value) in items:self.append(key, value, replace=replace)", "docstring": "Append the items to the metadata.", "id": "f3238:c0:m1"} {"signature": "def is_valid_ip(ip_address):", "body": "try:ip = ipaddress.ip_address(u'' + ip_address)return Trueexcept ValueError as e:return False", "docstring": "Check Validity of an IP address", "id": "f3244:m0"} {"signature": "def _lstat(self, path):", "body": "if path not in self.entries:return OverlayStat(*self.originals[''](path)[:], st_overlay=)return self.entries[path].stat", "docstring": "IMPORTANT: expects `path`'s parent to already be deref()'erenced.", "id": "f3252:c4:m15"} {"signature": "def __init__(self, install=False, passthru=None):", "body": "self.entries = {}self._installed = Falseself.impostors = dict()self.originals = dict()self.vaporized = Noneself.fds = dict()self.passthru = passthru or []if self.passthru:if not morph.isseq(self.passthru):self.passthru = [self.passthru]self.passthru = [re.compile(expr) if morph.isstr(expr) else exprfor expr in self.passthru]self._makeImpostors()if install:self.install()", "docstring": ":Parameters:\n\ninstall : bool, optional, default: false\n\n Flag indicating whether or not this overlay should be\n installed upon instantiation.\n\npassthru : list({str, regex}), optional, default: none\n\n A regular expression (or list thereof) that will be matched\n against any file that is operated on; if it matches, no overlay\n will be applied, i.e. this list excludes a set of files. The\n specified regexes can be either strings or re.RegexObject\n instances. 
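`is_valid_ip` above delegates the actual parsing to the `ipaddress` module; the `u''` prefix exists because the Python 2 backport of `ipaddress` rejects byte strings. The same check, runnable on its own:

```python
import ipaddress

def is_valid_ip(ip_address):
    """Check validity of an IPv4 or IPv6 address."""
    try:
        # u'' + ... coerces a Python 2 str to unicode, which the
        # ipaddress backport requires; it is a no-op on Python 3.
        ipaddress.ip_address(u'' + ip_address)
        return True
    except ValueError:
        return False

assert is_valid_ip('192.168.0.1')
assert is_valid_ip('::1')
assert not is_valid_ip('999.1.1.1')
```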
Note that these regexes will be given only the\n fully-dereferenced paths to be tested.", "id": "f3252:c4:m0"} {"signature": "def _lexists(self, path):", "body": "try:return bool(self._lstat(path))except os.error:return False", "docstring": "IMPORTANT: expects `path` to already be deref()'erenced.", "id": "f3252:c4:m20"} {"signature": "def fso_readlink(self, path):", "body": "path = self.deref(path, to_parent=True)st = self.fso_lstat(path)if not stat.S_ISLNK(st.st_mode):raise OSError(, '', path)if st.st_overlay:return self.entries[path].contentreturn self.originals[''](path)", "docstring": "overlays os.readlink()", "id": "f3252:c4:m28"} {"signature": "def fso_stat(self, path):", "body": "return self.fso_anystat(path, link=False)", "docstring": "overlays os.stat()", "id": "f3252:c4:m18"} {"signature": "def fso_remove(self, path):", "body": "return self.fso_unlink(path)", "docstring": "overlays os.remove()", "id": "f3252:c4:m31"} {"signature": "def _stat(self, path):", "body": "if path not in self.entries:return OverlayStat(*self.originals[''](path)[:], st_overlay=)st = self.entries[path].statif stat.S_ISLNK(st.st_mode):return self._stat(self.deref(path))return st", "docstring": "IMPORTANT: expects `path`'s parent to already be deref()'erenced.", "id": "f3252:c4:m14"} {"signature": "def fso_mkdir(self, path, mode=None):", "body": "path = self.deref(path, to_parent=True)if self._lexists(path):raise OSError(, '', path)self._addentry(OverlayEntry(self, path, stat.S_IFDIR))", "docstring": "overlays os.mkdir()", "id": "f3252:c4:m25"} {"signature": "def no_ansi(text):", "body": "return re.sub(r\"\", \"\", text)", "docstring": "Kill any ANSI escape sequences.", "id": "f3261:m0"} {"signature": "def assert_sets_equal(s1, s2):", "body": "assert list(sorted(s1)) == list(sorted(s2))", "docstring": "Helper to compare sets.", "id": "f3270:m1"} {"signature": "def load():", "body": "cfg = Bunch(DEFAULTS)cfg.project_root = get_project_root()if not cfg.project_root:raise RuntimeError(\"\")cfg.rootjoin = lambda *names: os.path.join(cfg.project_root, *names)cfg.srcjoin = lambda *names: cfg.rootjoin(cfg.srcdir, *names)cfg.testjoin = lambda *names: cfg.rootjoin(cfg.testdir, *names)cfg.cwd = os.getcwd()os.chdir(cfg.project_root)if cfg.project_root not in sys.path:sys.path.append(cfg.project_root)try:from setup import project except ImportError:from setup import setup_args as project cfg.project = Bunch(project)return cfg", "docstring": "Load and return configuration as a ``Bunch``.\n\n Values are based on ``DEFAULTS``, and metadata from ``setup.py``.", "id": "f3273:m1"} {"signature": "def set_flat_layout():", "body": "DEFAULTS.update(srcdir = '',testdir = '',)", "docstring": "Switch default project layout to everything top-level.", "id": "f3273:m3"} {"signature": "def get_project_root():", "body": "try:tasks_py = sys.modules['']except KeyError:return Noneelse:return os.path.abspath(os.path.dirname(tasks_py.__file__))", "docstring": "Determine location of `tasks.py`.", "id": "f3273:m0"} {"signature": "@task(help={'': \"\",'': \"\",'': \"\",})def confluence(ctx, no_publish=False, clean=False, opts=''):", "body": "cfg = config.load()if clean:ctx.run(\"\")cmd = ['', '', '']cmd.extend(['', '']) if opts:cmd.append(opts)cmd.extend(['', ctx.rituals.docs.build + ''])if no_publish:cmd.extend([''])notify.info(\"\")with pushd(ctx.rituals.docs.sources):ctx.run(''.join(cmd), pty=True)", "docstring": "Build Sphinx docs and publish to Confluence.", "id": "f3275:m3"} {"signature": "def watchdogctl(ctx, kill=False, verbose=True):", 
"body": "tries = if kill else cmd = ''.format(ctx.rituals.docs.watchdog.port)pidno = pidinfo = capture(cmd, ignore_failures=True)while pidinfo:pidline = next(filter(None, [re.match(r'', x) for x in pidinfo.splitlines()]))if not pidline:raise ValueError(\"\".format(pidinfo))pidno = int(pidline.group(), )if verbose:ctx.run(\"\".format(pidno), echo=False)verbose = Falsetries -= if tries <= :breakelse:try:os.kill(pidno, )except OSError as exc: if exc.errno == :breakraiseelse:notify.info(\"\".format(pidno))ctx.run(\"\".format(pidno), echo=False)time.sleep()pid = capture(cmd, ignore_failures=True)return pidno", "docstring": "Control / check a running Sphinx autobuild process.", "id": "f3275:m1"} {"signature": "def get_pypi_auth(configfile=''):", "body": "pypi_cfg = ConfigParser()if pypi_cfg.read(os.path.expanduser(configfile)):try:user = pypi_cfg.get('', '')pwd = pypi_cfg.get('', '')return user, pwdexcept ConfigError:notify.warning(\"\"\"\".format(configfile))return None", "docstring": "Read auth from pip config.", "id": "f3275:m0"} {"signature": "@task(help=dict(docs=\"\",))def build(ctx, docs=False):", "body": "cfg = config.load()ctx.run(\"\")if docs:for doc_path in ('', ''):if os.path.exists(cfg.rootjoin(doc_path, '')):breakelse:doc_path = Noneif doc_path:ctx.run(\"\")else:notify.warning(\"\")", "docstring": "Build the project.", "id": "f3277:m2"} {"signature": "@task(help=dict(docs=\"\",backups=\"\",bytecode=\"\",dist=\"\",all=\"\",venv=\"\",tox=\"\",extra=\"\",))def clean(_dummy_ctx, docs=False, backups=False, bytecode=False, dist=False, all=False, venv=False, tox=False, extra=''): ", "body": "cfg = config.load()notify.banner(\"\")venv_dirs = ['', '', '', '', '', '']patterns = ['', '']excludes = ['', '', '', '']if docs or all:patterns.extend(['', ''])if dist or all:patterns.append('')if backups or all:patterns.extend([''])if bytecode or all:patterns.extend(['', '', '',cfg.srcjoin('')[len(cfg.project_root)+:],])if venv:patterns.extend([i + '' for i in venv_dirs])if tox:patterns.append('')else:excludes.append('')if extra:patterns.extend(shlex.split(extra))patterns = [antglob.includes(i) for i in patterns] + [antglob.excludes(i) for i in excludes]if not venv:patterns.extend([antglob.excludes(i + '') for i in venv_dirs])fileset = antglob.FileSet(cfg.project_root, patterns)for name in fileset:notify.info(''.format(name))if name.endswith(''):shutil.rmtree(os.path.join(cfg.project_root, name))else:os.unlink(os.path.join(cfg.project_root, name))", "docstring": "Perform house-keeping.", "id": "f3277:m1"} {"signature": "@task(help=dict(local=\"\",))def freeze(ctx, local=False):", "body": "cmd = ''.format('' if local else '')frozen = ctx.run(cmd, hide='').stdout.replace('', '')with io.open('', '', encoding='') as out:out.write(\"\".format(isodate()))out.write(frozen)notify.info(\"\".format(len(frozen.splitlines()),))", "docstring": "Freeze currently installed requirements.", "id": "f3277:m3"} {"signature": "@task(help={'': \"\",'': \"\",'': \"\",'': \"\",})def tox(ctx, verbose=False, clean=False, env_list='', opts=''):", "body": "cfg = config.load()add_dir2pypath(cfg.project_root)snakepits = ctx.rituals.snakepits.split(os.pathsep)cmd = []snakepits = [i for i in snakepits if os.path.isdir(i)]if snakepits:cmd += [''.format(os.pathsep.join(snakepits),)]if clean and os.path.exists(cfg.rootjoin('')):shutil.rmtree(cfg.rootjoin(''))cmd += ['']if verbose:cmd += ['']if env_list:cmd += ['', env_list]cmd += optsctx.run(''.join(cmd))", "docstring": "Perform multi-environment tests.", "id": "f3278:m1"} 
{"signature": "@task(help=dict(verbose=\"\",pypi=\"\",))def bump(ctx, verbose=False, pypi=False):", "body": "cfg = config.load()scm = scm_provider(cfg.project_root, commit=False, ctx=ctx)if not scm.workdir_is_clean():notify.warning(\"\")pep440 = scm.pep440_dev_version(verbose=verbose, non_local=pypi)setup_cfg = cfg.rootjoin('')if not pep440:notify.info(\"\")elif os.path.exists(setup_cfg):with io.open(setup_cfg, encoding='') as handle:data = handle.readlines()changed = Falsefor i, line in enumerate(data):if re.match(r\"\", line):verb, _ = data[i].split('', )data[i] = ''.format(verb, pep440)changed = Trueif changed:notify.info(\"\")with io.open(setup_cfg, '', encoding='') as handle:handle.write(''.join(data))else:notify.warning(\"\")else:notify.warning(\"\")if os.path.exists(setup_cfg):egg_info = shell.capture(\"\", echo=True if verbose else None)for line in egg_info.splitlines():if line.endswith(''):pkg_info_file = line.split(None, )[]with io.open(pkg_info_file, encoding='') as handle:notify.info(''.join(i for i in handle.readlines() if i.startswith('')).strip())ctx.run(\"\", echo=True if verbose else None)", "docstring": "Bump a development version.", "id": "f3283:m1"} {"signature": "@task(help=dict(pyrun=\"\",upload=\"\",opts=\"\",))def pex(ctx, pyrun='', upload=False, opts=''):", "body": "cfg = config.load()ctx.run(\"\")pkg_info = get_egg_info(cfg)version = pkg_info.version if pkg_info else cfg.project.versionpex_files = []for script in cfg.project.entry_points['']:script, entry_point = script.split('', )script, entry_point = script.strip(), entry_point.strip()pex_file = cfg.rootjoin('', ''.format(script, version))cmd = ['', '', cfg.rootjoin(''), cfg.project_root, '', script, '', pex_file]if opts:cmd.append(opts)ctx.run(''.join(cmd))non_universal = set()with closing(zipfile.ZipFile(pex_file, mode=\"\")) as pex_contents:for pex_name in pex_contents.namelist(): if pex_name.endswith('') and '' not in pex_name:non_universal.add(pex_name.split('')[].split('')[-])if non_universal:notify.warning(\"\".format(pex_file.replace(os.getcwd(), ''), ''.join(sorted(non_universal))))envs = [i.split('')[-:] for i in non_universal]envs = {i[]: i[:] for i in envs}if len(envs) > :envs = {k: v for k, v in envs.items() if not k.startswith('')}env_id = []for k, v in sorted(envs.items()):env_id.append(k)env_id.extend(v)env_id = ''.join(env_id)else:env_id = ''new_pex_file = pex_file.replace('', ''.format(env_id))notify.info(\"\".format(os.path.basename(new_pex_file)))os.rename(pex_file, new_pex_file)pex_file = new_pex_filepex_files.append(pex_file)if not pex_files:notify.warning(\"\")else:if pyrun:if any(pyrun.startswith(i) for i in ('', '', '')):pyrun_url = pyrunelse:pyrun_cfg = dict(ctx.rituals.pyrun)pyrun_cfg.update(parse_qsl(pyrun.replace(os.pathsep, '')))pyrun_url = (pyrun_cfg[''] + '' +pyrun_cfg['']).format(**pyrun_cfg)notify.info(\"\".format(pyrun_url))with url_as_file(pyrun_url, ext='') as pyrun_tarball:pyrun_tar = tarfile.TarFile.gzopen(pyrun_tarball)for pex_file in pex_files[:]:pyrun_exe = pyrun_tar.extractfile('')with open(pex_file, '') as pex_handle:pyrun_pex_file = ''.format(pex_file[:-], pyrun_url.rsplit('')[-][:-])with open(pyrun_pex_file, '') as pyrun_pex:pyrun_pex.write(INSTALLER_BASH.replace('', ''.format(len(INSTALLER_BASH) + )))shutil.copyfileobj(pyrun_exe, pyrun_pex)shutil.copyfileobj(pex_handle, pyrun_pex)shutil.copystat(pex_file, pyrun_pex_file)notify.info(\"\".format(pretty_path(pyrun_pex_file)))pex_files.append(pyrun_pex_file)if upload:base_url = 
ctx.rituals.release.upload.base_url.rstrip('')if not base_url:notify.failure(\"\")for pex_file in pex_files:url = base_url + '' + ctx.rituals.release.upload.path.lstrip('').format(name=cfg.project.name, version=cfg.project.version, filename=os.path.basename(pex_file))notify.info(\"\".format(url))with io.open(pex_file, '') as handle:reply = requests.put(url, data=handle.read())if reply.status_code in range(, ):notify.info(\"\".format(**vars(reply)))else:notify.warning(\"\".format(**vars(reply)))", "docstring": "Package the project with PEX.", "id": "f3283:m3"} {"signature": "def get_egg_info(cfg, verbose=False):", "body": "result = Bunch()setup_py = cfg.rootjoin('')if not os.path.exists(setup_py):return resultegg_info = shell.capture(\"\".format(setup_py), echo=True if verbose else None)for info_line in egg_info.splitlines():if info_line.endswith(''):pkg_info_file = info_line.split(None, )[]result[''] = pkg_info_filewith io.open(pkg_info_file, encoding='') as handle:lastkey = Nonefor line in handle:if line.lstrip() != line:assert lastkey, \"\".format(pkg_info_file, line)result[lastkey] += '' + lineelse:lastkey, value = line.split('', )lastkey = lastkey.strip().lower().replace('', '')value = value.strip()if lastkey in result:try:result[lastkey].append(value)except AttributeError:result[lastkey] = [result[lastkey], value]else:result[lastkey] = valuefor multikey in PKG_INFO_MULTIKEYS:if not isinstance(result.get(multikey, []), list):result[multikey] = [result[multikey]]return result", "docstring": "Call 'setup egg_info' and return the parsed meta-data.", "id": "f3283:m0"} {"signature": "@task(help=dict(devpi=\"\",egg=\"\",wheel=\"\",auto=\"\",))def dist(ctx, devpi=False, egg=False, wheel=False, auto=True):", "body": "config.load()cmd = [\"\", \"\", \"\"]if auto:egg = sys.version_info.major == try:import wheel as _wheel = Trueexcept ImportError:wheel = Falseif egg:cmd.append(\"\")if wheel:cmd.append(\"\")ctx.run(\"\")ctx.run(''.join(cmd))if devpi:ctx.run(\"\")", "docstring": "Distribute the project.", "id": "f3283:m2"} {"signature": "def _samefile(fname1, fname2):", "body": "if sys.platform.startswith(''):return os.path.normpath(os.path.normcase(fname1)) == os.path.normpath(os.path.normcase(fname2))else:return os.path.samefile(fname1, fname2)", "docstring": "OS independent `samefile` implementation.", "id": "f3285:m1"} {"signature": "def whichgen(command, path=None, verbose=, exts=None): ", "body": "matches = []if path is None:using_given_path = path = os.environ.get(\"\", \"\").split(os.pathsep)if sys.platform.startswith(\"\"):path.insert(, os.curdir) else:using_given_path = if sys.platform.startswith(\"\"):if exts is None:exts = os.environ.get(\"\", \"\").split(os.pathsep)for ext in exts:if ext.lower() == \"\":breakelse:exts = ['', '', '']elif not isinstance(exts, list):raise TypeError(\"\")else:if exts is not None:raise WhichError(\"\" % sys.platform)exts = []if os.sep in command or os.altsep and os.altsep in command:passelse:for i, dir_name in enumerate(path):if sys.platform.startswith(\"\") and len(dir_name) >= and dir_name[] == '' and dir_name[-] == '':dir_name = dir_name[:-]for ext in ['']+exts:abs_name = os.path.abspath(os.path.normpath(os.path.join(dir_name, command+ext)))if os.path.isfile(abs_name):if using_given_path:from_where = \"\" % ielif not sys.platform.startswith(\"\"):from_where = \"\" % ielif i == :from_where = \"\"else:from_where = \"\" % (i-)match = _cull((abs_name, from_where), matches, verbose)if match:if verbose:yield matchelse:yield match[]match = 
_get_registered_executable(command)if match is not None:match = _cull(match, matches, verbose)if match:if verbose:yield matchelse:yield match[]", "docstring": "Return a generator of full paths to the given command.\n\n \"command\" is the name of the executable to search for.\n \"path\" is an optional alternate path list to search. The default is\n to use the PATH environment variable.\n \"verbose\", if true, will cause a 2-tuple to be returned for each\n match. The second element is a textual description of where the\n match was found.\n \"exts\" optionally allows one to specify a list of extensions to use\n instead of the standard list for this system. This can\n effectively be used as an optimization to, for example, avoid\n stat's of \"foo.vbs\" when searching for \"foo\" and you know it is\n not a VisualBasic script but \".vbs\" is on PATHEXT. This option\n is only supported on Windows.\n\n This method returns a generator which yields either full paths to\n the given command or, if verbose, tuples of the form (, ).", "id": "f3285:m3"} {"signature": "def _get_registered_executable(exe_name):", "body": "registered = Noneif sys.platform.startswith(''):if os.path.splitext(exe_name)[].lower() != '':exe_name += ''import _winreg try:key = \"\" + exe_namevalue = _winreg.QueryValue(_winreg.HKEY_LOCAL_MACHINE, key)registered = (value, \"\"+key)except _winreg.error:passif registered and not os.path.exists(registered[]):registered = Nonereturn registered", "docstring": "Windows allows application paths to be registered in the registry.", "id": "f3285:m0"} {"signature": "def _cull(potential, matches, verbose=):", "body": "for match in matches: if _samefile(potential[], match[]):if verbose:sys.stderr.write(\"\" % potential)return Noneif not stat.S_ISREG(os.stat(potential[]).st_mode):if verbose:sys.stderr.write(\"\" % potential)elif not os.access(potential[], os.X_OK):if verbose:sys.stderr.write(\"\" % potential)else:matches.append(potential)return potentialreturn None", "docstring": "Cull inappropriate matches. Possible reasons:\n - a duplicate of a previous match\n - not a disk file\n - not executable (non-Windows)\n If 'potential' is approved it is returned and added to 'matches'.\n Otherwise, None is returned.", "id": "f3285:m2"} {"signature": "def whichall(command, path=None, verbose=, exts=None):", "body": "return list(whichgen(command, path, verbose, exts))", "docstring": "Return a list of full paths to all matches of the given command on the path.\n\n \"command\" is the name of the executable to search for.\n \"path\" is an optional alternate path list to search. The default is\n to use the PATH environment variable.\n \"verbose\", if true, will cause a 2-tuple to be returned for each\n match. The second element is a textual description of where the\n match was found.\n \"exts\" optionally allows one to specify a list of extensions to use\n instead of the standard list for this system. This can\n effectively be used as an optimization to, for example, avoid\n stat's of \"foo.vbs\" when searching for \"foo\" and you know it is\n not a VisualBasic script but \".vbs\" is on PATHEXT. 
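The `whichgen` record above lazily yields PATH matches, with `_cull` rejecting duplicates and non-executables. A compact POSIX-only rendition of the same search, skipping the Windows PATHEXT and registry handling:

```python
import os

def which(command, path=None):
    """Yield absolute paths to executables named `command` on the search path."""
    search = path if path is not None else os.environ.get('PATH', '')
    seen = set()
    for dir_name in search.split(os.pathsep):
        candidate = os.path.abspath(os.path.join(dir_name, command))
        if candidate in seen:
            continue  # cull duplicate PATH entries, as _cull does above
        seen.add(candidate)
        if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
            yield candidate

print(next(which('ls'), 'not found'))
```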
This option\n is only supported on Windows.", "id": "f3285:m5"} {"signature": "def workdir_is_clean(self, quiet=False):", "body": "self.run('', **RUN_KWARGS)unchanged = Truetry:self.run('', report_error=False, **RUN_KWARGS)except exceptions.Failure:unchanged = Falseif not quiet:notify.warning('')self.run('', **RUN_KWARGS)try:self.run('', report_error=False, **RUN_KWARGS)except exceptions.Failure:unchanged = Falseif not quiet:notify.warning('')self.run('', **RUN_KWARGS)return unchanged", "docstring": "Check for uncommitted changes, return `True` if everything is clean.\n\n Inspired by http://stackoverflow.com/questions/3878624/.", "id": "f3286:c0:m0"} {"signature": "def commit(self, message):", "body": "self.run_elective(''.format(message))", "docstring": "Commit pending changes.", "id": "f3286:c0:m2"} {"signature": "def add_file(self, filename):", "body": "notify.warning(''.format(filename))", "docstring": "Stage a file for committing, or commit it directly (depending on the SCM).", "id": "f3287:c0:m1"} {"signature": "def tag(self, label, message=None):", "body": "notify.warning(''.format(label, ''.format(message) if message else '',))", "docstring": "Tag the current workdir state.", "id": "f3287:c0:m3"} {"signature": "def run_elective(self, cmd, *args, **kwargs):", "body": "if self._commit:return self.run(cmd, *args, **kwargs)else:notify.warning(\"\".format(cmd))kwargs = kwargs.copy()kwargs[''] = Falsereturn self.run('', *args, **kwargs)", "docstring": "Run a command, or just echo it, depending on `commit`.", "id": "f3288:c0:m2"} {"signature": "def matches(self, path):", "body": "return bool(self.compiled.match(path))", "docstring": "Check this pattern against given `path`.", "id": "f3290:c0:m2"} {"signature": "def excludes(pattern):", "body": "return Pattern(pattern, inclusive=False)", "docstring": "A single exclusive glob pattern.", "id": "f3290:m4"} {"signature": "def parse_glob(pattern):", "body": "if not pattern:returnbits = pattern.split(\"\")dirs, filename = bits[:-], bits[-]for dirname in dirs:if dirname == \"\":yield \"\"else:yield glob2re(dirname) + \"\"yield glob2re(filename)", "docstring": "Generate parts of regex transformed from glob pattern.", "id": "f3290:m1"} {"signature": "def run(cmd, **kw):", "body": "kw = kw.copy()kw.setdefault('', False) report_error = kw.pop('', True)runner = kw.pop('', invoke_run)try:return runner(cmd, **kw)except exceptions.Failure as exc:sys.stdout.flush()sys.stderr.flush()if report_error:notify.error(\"\".format(cmd, exc.result.return_code,))raisefinally:sys.stdout.flush()sys.stderr.flush()", "docstring": "Run a command and flush its output.", "id": "f3292:m1"} {"signature": "def search_file_upwards(name, base=None):", "body": "base = base or os.getcwd()while base != os.path.dirname(base):if os.path.exists(os.path.join(base, name)):return basebase = os.path.dirname(base)return None", "docstring": "Search for a file named `name` from cwd or given directory to root.\n Return None if nothing's found.", "id": "f3293:m0"} {"signature": "def failure(msg):", "body": "error(msg)sys.exit()", "docstring": "Emit a fatal message and exit.", "id": "f3294:m5"} {"signature": "def warning(msg):", "body": "_flush()sys.stderr.write(\"\".format(msg))sys.stderr.flush()", "docstring": "Emit a warning message.", "id": "f3294:m3"} {"signature": "def _flush():", "body": "sys.stdout.flush()sys.stderr.flush()", "docstring": "Flush all console output.", "id": "f3294:m0"} {"signature": "@contextmanagerdef pushd(path):", "body": "saved = os.getcwd()os.chdir(path)try:yield 
savedfinally:os.chdir(saved)", "docstring": "A context that enters a given directory and restores the old state on exit.\n\n The original directory is returned as the context variable.", "id": "f3295:m1"} {"signature": "def pretty_path(path, _home_re=re.compile('' + re.escape(os.path.expanduser('') + os.sep))):", "body": "path = decode_filename(path)path = _home_re.sub('' + os.sep, path)return path", "docstring": "Prettify path for humans, and make it Unicode.", "id": "f3295:m0"} {"signature": "def try_until_even(req):", "body": "response = yield reqwhile response % :try:response = yield ''except GeneratorExit:returnexcept ValueError:yield ''return response", "docstring": "an example relay", "id": "f3299:m3"} {"signature": "@py2_compatibledef mymax(val):", "body": "while val < :try:sent = yield valexcept GeneratorExit:returnexcept ValueError:sent = yield ''except TypeError:return_('')if sent > val:val = sentreturn_(val * )", "docstring": "an example generator function", "id": "f3300:m4"} {"signature": "@py2_compatibledef try_until_positive(req):", "body": "response = yield reqwhile response < :try:response = yield ''except GeneratorExit:returnexcept ValueError:yield ''return_(response)", "docstring": "an example relay", "id": "f3300:m2"} {"signature": "@py2_compatibledef try_until_even(req):", "body": "response = yield reqwhile response % :try:response = yield ''except GeneratorExit:returnexcept ValueError:yield ''return_(response)", "docstring": "an example relay", "id": "f3300:m3"} {"signature": "def with_generator(name):", "body": "gens = [getattr(common, name)]if not PY2:from . import py3gens.append(getattr(py3, name))return pytest.mark.parametrize(name, gens)", "docstring": "use a python 2/3 parametrized generator", "id": "f3302:m0"} {"signature": "@abc.abstractmethoddef __iter__(self):", "body": "raise NotImplementedError()", "docstring": "Returns\n-------\n~typing.Generator[T_yield, T_send, T_return]\n the generator iterator", "id": "f3303:c0:m0"} {"signature": "def __call__(self, *args, **kwargs):", "body": "raise NotImplementedError()", "docstring": "Returns\n-------\n~typing.Generator[T_yield, T_send, T_return]\n the resulting generator", "id": "f3303:c1:m0"} {"signature": "@py2_compatibledef imap_yield(func, gen):", "body": "gen = iter(gen)assert _is_just_started(gen)yielder = yield_from(gen)for item in yielder:with yielder:yielder.send((yield func(item)))return_(yielder.result)", "docstring": "Apply a function to all ``yield`` values of a generator\n\n Parameters\n ----------\n func: ~typing.Callable[[T_yield], T_mapped]\n the function to apply\n gen: Generable[T_yield, T_send, T_return]\n the generator iterable.\n\n Returns\n -------\n ~typing.Generator[T_mapped, T_send, T_return]\n the mapped generator", "id": "f3305:m6"} {"signature": "@py2_compatibledef irelay(gen, thru):", "body": "gen = iter(gen)assert _is_just_started(gen)yielder = yield_from(gen)for item in yielder:with yielder:subgen = thru(item)subyielder = yield_from(subgen)for subitem in subyielder:with subyielder:subyielder.send((yield subitem))yielder.send(subyielder.result)return_(yielder.result)", "docstring": "Create a new generator by relaying yield/send interactions\n through another generator\n\n Parameters\n ----------\n gen: Generable[T_yield, T_send, T_return]\n the original generator\n thru: ~typing.Callable[[T_yield], ~typing.Generator]\n the generator callable through which each interaction is relayed\n\n Returns\n -------\n ~typing.Generator\n the relayed generator", "id": "f3305:m9"} {"signature": 
"def reusable(func):", "body": "sig = signature(func)origin = funcwhile hasattr(origin, ''):origin = origin.__wrapped__return type(origin.__name__,(ReusableGenerator, ),dict([('', origin.__doc__),('', origin.__module__),('', sig),('', staticmethod(func)),] + [(name, property(compose(itemgetter(name),attrgetter(''))))for name in sig.parameters] + ([('', origin.__qualname__),] if sys.version_info > (, ) else [])))", "docstring": "Create a reusable class from a generator function\n\n Parameters\n ----------\n func: GeneratorCallable[T_yield, T_send, T_return]\n the function to wrap\n\n Note\n ----\n * the callable must have an inspectable signature\n * If bound to a class, the new reusable generator is callable as a method.\n To opt out of this, add a :func:`staticmethod` decorator above\n this decorator.", "id": "f3305:m3"} {"signature": "@py2_compatibledef imap_send(func, gen):", "body": "gen = iter(gen)assert _is_just_started(gen)yielder = yield_from(gen)for item in yielder:with yielder:yielder.send(func((yield item)))return_(yielder.result)", "docstring": "Apply a function to all ``send`` values of a generator\n\n Parameters\n ----------\n func: ~typing.Callable[[T_send], T_mapped]\n the function to apply\n gen: Generable[T_yield, T_mapped, T_return]\n the generator iterable.\n\n Returns\n -------\n ~typing.Generator[T_yield, T_send, T_return]\n the mapped generator", "id": "f3305:m7"} {"signature": "def sendreturn(gen, value):", "body": "try:gen.send(value)except StopIteration as e:return stopiter_value(e)else:raise RuntimeError('')", "docstring": "Send an item into a generator expecting a final return value\n\n Parameters\n ----------\n gen: ~typing.Generator[T_yield, T_send, T_return]\n the generator to send the value to\n value: T_send\n the value to send\n\n Raises\n ------\n RuntimeError\n if the generator did not return as expected\n\n Returns\n -------\n T_return\n the generator's return value", "id": "f3305:m5"} {"signature": "def __call__(self, product_name, **properties):", "body": "if not product_name in self:return self.default(tag=product_name, **properties)return self[product_name](**properties)", "docstring": "Builds and returns a Blok object", "id": "f3315:c0:m2"} {"signature": "def __new__(metaclass, name, parents, class_dict, *kargs, **kwargs):", "body": "attributes = {name: attribute for name, attribute in class_dict.items() if isinstance(attribute,AbstractAttribute)}if attributes:if hasattr(parents[], ''):full_attributes = parents[].attribute_descriptors.copy()full_attributes.update(attributes)attributes = full_attributesblok_attributes = {}render_attributes = []direct_attributes = []init_attributes = []accessor_attributes = []attribute_map = {}for attribute_name, attribute in attributes.items():if not hasattr(attribute, ''):attribute.name = attribute_nameif isinstance(attribute, DirectAttribute):direct_attributes.append(attribute)if hasattr(attribute, ''):render_attributes.append(attribute)if not hasattr(attribute, ''):attribute.object_attribute = ''.format(attribute_name)if getattr(attribute, '', False):init_attributes.append(attribute_name)if isinstance(attribute, (BlokAttribute, NestedBlokAttribute)) and hasattr(attribute.type, ''):blok_attributes[attribute.type.tag] = attributeif isinstance(attribute, AccessorAttribute):accessor_attributes.append(attribute)if not hasattr(attribute, ''):attribute.parent_attribute = ''.format(attribute_name)attribute_map[attribute.name] = attribute_nameif direct_attributes and not name == '' and '' in class_dict:class_dict[''] += 
tuple(attribute.object_attribute for attribute in direct_attributes)class_dict[''] += tuple(attribute.parent_attribute for attribute in accessor_attributes)if render_attributes:if hasattr(parents[], ''):render_attributes = list(parents[].render_attributes) + render_attributesclass_dict[''] = set(render_attributes)if init_attributes:if hasattr(parents[], ''):init_attributes = list(parents[].init_attributes) + init_attributesclass_dict[''] = init_attributesif blok_attributes:if hasattr(parents[], ''):full_blok_attributes = dict(parents[].blok_attributes)full_blok_attributes.update(blok_attributes)blok_attributes = full_blok_attributesclass_dict[''] = blok_attributesif attribute_map:if hasattr(parents[], ''):full_attribute_map = dict(parents[].attribute_map)full_attribute_map.update(attribute_map)attribute_map = full_attribute_mapclass_dict[''] = attribute_mapclass_dict[''] = attributesattribute_signals = (attribute.signal for attribute in attributes.values() if getattr(attribute, ''))if attribute_signals:class_dict[''] = class_dict.get('', ()) + tuple(attribute_signals)return super(TagAttributes, metaclass).__new__(metaclass, name, parents, class_dict, *kargs, **kwargs)", "docstring": "Updates a tag class to automatically register all signals", "id": "f3317:c1:m0"} {"signature": "def render(self, *args, **kwargs):", "body": "render_to = StringIO()self.output(render_to, *args, **kwargs)return render_to.getvalue()", "docstring": "Renders as a str", "id": "f3317:c2:m1"} {"signature": "@propertydef attributes(self):", "body": "if not hasattr(self, ''):self._attributes = {}return self._attributes", "docstring": "Lazily creates and returns a tags attributes", "id": "f3317:c5:m1"} {"signature": "def render(self, *args, **kwargs):", "body": "render_to = StringIO()self.output(render_to, *args, **kwargs)return render_to.getvalue()", "docstring": "Renders as a str", "id": "f3317:c0:m4"} {"signature": "def output(self, to=None, formatted=False, indent=, indentation='', *args, **kwargs):", "body": "if formatted:to.write(self.start_tag)to.write('')if not self.tag_self_closes:for blok in self.blox:to.write(indentation * (indent + ))blok.output(to=to, indent=indent + , formatted=True, indentation=indentation, *args, **kwargs)to.write('')to.write(indentation * indent)to.write(self.end_tag)if not indentation:to.write('')else:to.write(self.start_tag)if not self.tag_self_closes:for blok in self.blox:blok.output(to=to, *args, **kwargs)to.write(self.end_tag)", "docstring": "Outputs to a stream (like a file or request)", "id": "f3317:c8:m1"} {"signature": "def __call__(self, *blox, position=None):", "body": "if position is not None:for blok in blox:self.blox_container.blox.insert(position, blok)else:for blok in blox:self.blox_container.blox.append(blok)return blok", "docstring": "Adds a nested blok to this blok", "id": "f3317:c4:m3"} {"signature": "def output(self, to=None, *args, **kwargs):", "body": "to.write(cgi.escape(str(self._value)))", "docstring": "Outputs the set text", "id": "f3318:c1:m0"} {"signature": "def file(file_object, start_on=None, ignore=(), use_short=True, **queries):", "body": "return string(file_object.read(), start_on=start_on, ignore=ignore, use_short=use_short, **queries)", "docstring": "Returns a blox template from a file stream object", "id": "f3320:m1"} {"signature": "def string(html, start_on=None, ignore=(), use_short=True, **queries):", "body": "if use_short:html = grow_short(html)return _to_template(fromstring(html), start_on=start_on,ignore=ignore, **queries)", "docstring": 
"Returns a blox template from an html string", "id": "f3320:m0"} {"signature": "def _pop_none(self, kwargs):", "body": "for key, value in copy(kwargs).items():if value is None or value == ():kwargs.pop(key)if hasattr(value, ''):kwargs[key] = value.read()", "docstring": "Remove default values (anything where the value is None). click is unfortunately bad at the way it\n sends through unspecified defaults.", "id": "f3329:c1:m0"} {"signature": "@resources.command(ignore_defaults=True, no_args_is_help=False)@click.option('', '', '', is_flag=True, default=False, show_default=True,help='')@click.option('', default=, type=int, show_default=True,help='')@click.option('', type=int, show_default=True, required=False,help='')@click.option('', '', required=False, nargs=, multiple=True,help='''''')def list(self, all_pages=False, **kwargs):", "body": "if kwargs.get('', None) and '' in kwargs['']:all_status = kwargs.pop('').strip('').split('')queries = list(kwargs.pop('', ()))for status in all_status:if status in STATUS_CHOICES:queries.append(('', status))else:raise exc.TowerCLIError(''.format(status))kwargs[''] = tuple(queries)if all_pages:kwargs.pop('', None)kwargs.pop('', None)debug.log('', header='')response = self.read(**kwargs)self._convert_pagenum(response)if all_pages and response['']:cursor = copy(response)while cursor['']:cursor = self.read(**dict(kwargs, page=cursor['']))self._convert_pagenum(cursor)response[''] += cursor['']response[''] += cursor['']response[''] = Nonereturn response", "docstring": "Return a list of objects.\n\n If one or more filters are provided through keyword arguments, filter the results accordingly.\n\n If no filters are provided, return all results.\n\n =====API DOCS=====\n Retrieve a list of objects.\n\n :param all_pages: Flag that if set, collect all pages of content from the API when returning results.\n :type all_pages: bool\n :param page: The page to show. 
Ignored if all_pages is set.\n :type page: int\n :param query: Contains 2-tuples used as query parameters to filter resulting resource objects.\n :type query: list\n :param `**kwargs`: Keyword arguments list of available fields used for searching resource objects.\n :returns: A JSON object containing details of all resource objects returned by Tower backend.\n :rtype: dict\n\n =====API DOCS=====", "id": "f3329:c1:m8"} {"signature": "def _disassoc(self, url_fragment, me, other):", "body": "url = self.endpoint + '' % (me, url_fragment)r = client.get(url, params={'': other}).json()if r[''] == :return {'': False}r = client.post(url, data={'': True, '': other})return {'': True}", "docstring": "Disassociate the `other` record from the `me` record.", "id": "f3329:c1:m10"} {"signature": "def _lookup(self, fail_on_missing=False, fail_on_found=False, include_debug_header=True, **kwargs):", "body": "read_params = {}for field_name in self.identity:if field_name in kwargs:read_params[field_name] = kwargs[field_name]if '' in self.identity and len(self.identity) == :return {}if not read_params:raise exc.BadRequest('''')try:existing_data = self.get(include_debug_header=include_debug_header, **read_params)if fail_on_found:raise exc.Found('' %read_params)return existing_dataexcept exc.NotFound:if fail_on_missing:raise exc.NotFound('' %read_params)return {}", "docstring": "=====API DOCS=====\nAttempt to perform a lookup that is expected to return a single result, and return the record.\n\nThis method is a wrapper around `get` that strips out non-unique keys, and is used internally by\n`write` and `delete`.\n\n:param fail_on_missing: Flag that raise exception if no resource is found.\n:type fail_on_missing: bool\n:param fail_on_found: Flag that raise exception if a resource is found.\n:type fail_on_found: bool\n:param include_debug_header: Flag determining whether to print debug messages when querying\n Tower backend.\n:type include_debug_header: bool\n:param `**kwargs`: Keyword arguments list of available fields used for searching resource.\n:returns: A JSON object containing details of the resource returned by Tower backend.\n:rtype: dict\n\n:raises tower_cli.exceptions.BadRequest: When no field are provided in kwargs.\n:raises tower_cli.exceptions.Found: When a resource is found and fail_on_found flag is on.\n:raises tower_cli.exceptions.NotFound: When no resource is found and fail_on_missing flag\n is on.\n=====API DOCS=====", "id": "f3329:c1:m1"} {"signature": "@resources.command(ignore_defaults=True)def get(self, pk=None, **kwargs):", "body": "if kwargs.pop('', True):debug.log('', header='')response = self.read(pk=pk, fail_on_no_results=True, fail_on_multiple_results=True, **kwargs)return response[''][]", "docstring": "Return one and exactly one object.\n\n Lookups may be through a primary key, specified as a positional argument, and/or through filters specified\n through keyword arguments.\n\n If the number of results does not equal one, raise an exception.\n\n =====API DOCS=====\n Retrieve one and exactly one object.\n\n :param pk: Primary key of the resource to be read. 
Tower CLI will only attempt to read *that* object\n if ``pk`` is provided (not ``None``).\n :type pk: int\n :param `**kwargs`: Keyword arguments used to look up resource object to retrieve if ``pk`` is not provided.\n :returns: loaded JSON of the retrieved resource object.\n :rtype: dict\n\n =====API DOCS=====", "id": "f3329:c1:m7"} {"signature": "def _get_patch_url(self, url, pk):", "body": "return url + '' % pk", "docstring": "Override this method to handle specific corner cases for the URL passed to the PATCH method.", "id": "f3329:c1:m4"} {"signature": "@resources.command@click.option('', default=False, show_default=True, type=bool, is_flag=True,help='')@click.option('', default=False, show_default=True, type=bool, is_flag=True,help='''')def create(self, **kwargs):", "body": "return self.write(create_on_missing=True, **kwargs)", "docstring": "Create an object.\n\n Fields in the resource's `identity` tuple are used for a lookup; if a match is found, then no-op\n (unless `force_on_exists` is set) but do not fail (unless `fail_on_found` is set).\n\n =====API DOCS=====\n Create an object.\n\n :param fail_on_found: Flag that if set, the operation fails if an object matching the unique criteria\n already exists.\n :type fail_on_found: bool\n :param force_on_exists: Flag that if set, then if a match is found on unique fields, other fields will\n be updated to the provided values; if unset, a match causes the request to be\n a no-op.\n :type force_on_exists: bool\n :param `**kwargs`: Keyword arguments which, all together, will be used as POST body to create the\n resource object.\n :returns: A dictionary combining the JSON output of the created resource, as well as two extra fields:\n \"changed\", a flag indicating if the resource is created successfully; \"id\", an integer which\n is the primary key of the created object.\n :rtype: dict\n\n =====API DOCS=====", "id": "f3329:c2:m0"} {"signature": "@propertydef help(self):", "body": "if self.help_text:return self.help_textreturn '' % self.name", "docstring": "Return the help text that was passed to the constructor, or a\n sensible default if none was provided.", "id": "f3330:c1:m3"} {"signature": "def configure_model(self, attrs, field_name):", "body": "self.relationship = field_nameself._set_method_names(relationship=field_name)if self.res_name is None:self.res_name = grammar.singularize(attrs.get('', '').strip(''))", "docstring": "Hook for the ResourceMeta class to call when initializing the model class.\nSaves fields obtained from resource class backlinks.", "id": "f3330:c2:m2"} {"signature": "def ordered_dump(data, Dumper=yaml.Dumper, **kws):", "body": "class OrderedDumper(Dumper):passdef _dict_representer(dumper, data):return dumper.represent_mapping(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,data.items())OrderedDumper.add_representer(OrderedDict,_dict_representer)return yaml.dump(data, None, OrderedDumper, **kws)", "docstring": "Expand PyYAML's built-in dumper to support serializing OrderedDict. 
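The body of ordered_dump above survives the extraction nearly intact; only its indentation was lost. A re-indented version with a usage line (only whitespace is reconstructed here):

```python
from collections import OrderedDict
import yaml

def ordered_dump(data, Dumper=yaml.Dumper, **kws):
    # Subclass the chosen Dumper so the representer stays local to this call.
    class OrderedDumper(Dumper):
        pass

    def _dict_representer(dumper, data):
        return dumper.represent_mapping(
            yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, data.items())

    OrderedDumper.add_representer(OrderedDict, _dict_representer)
    return yaml.dump(data, None, OrderedDumper, **kws)

print(ordered_dump(OrderedDict([('b', 1), ('a', 2)]), default_flow_style=False))
# b: 1
# a: 2    -- key order is preserved
```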
Return\n a string as the serialized result of the original data structure, which includes\n OrderedDict.\n\n Args:\n data: the data structure to be dumped (serialized), which is supposed to\n contain OrderedDict.\n Dumper: the yaml serializer to be expanded and used.\n kws: extra key-value arguments to be passed to yaml.dump.", "id": "f3333:m3"} {"signature": "def string_to_dict(var_string, allow_kv=True, require_dict=True):", "body": "try:return_dict = yaml.load(var_string, Loader=yaml.SafeLoader)if require_dict:assert type(return_dict) is dictexcept (AttributeError, yaml.YAMLError, AssertionError):try:assert allow_kvreturn_dict = parse_kv(var_string)except Exception:raise exc.TowerCLIError('''' % var_string)return return_dict", "docstring": "Returns a dictionary given a string with yaml or json syntax.\n If data is not present in a key: value format, then it returns\n an empty dictionary.\n\n Attempts processing string by 3 different methods in order:\n 1. as JSON 2. as YAML 3. as custom key=value syntax\n Throws an error if all of these fail in the standard ways.", "id": "f3333:m1"} {"signature": "def log(s, header='', file=sys.stderr, nl=, **kwargs):", "body": "if not settings.verbose:returnif header:word_arr = s.split('')multi = []word_arr.insert(, '' % header.upper())i = while i < len(word_arr):to_add = ['']count = while count <= :count += len(word_arr[i]) + if count <= :to_add.append(word_arr[i])i += if i == len(word_arr):breakif len(to_add) == :to_add.append(word_arr[i])i += if i != len(word_arr):count -= len(word_arr[i]) + to_add.append('' * ( - count))multi.append(''.join(to_add))s = ''.join(multi)lines = len(multi)else:lines = if isinstance(nl, int) and nl > lines:s += '' * (nl - lines)return secho(s, file=file, **kwargs)", "docstring": "Log the given output to stderr if and only if we are in\n verbose mode.\n\n If we are not in verbose mode, this is a no-op.", "id": "f3336:m0"} {"signature": "def unified_job_template_options(method):", "body": "jt_dec = click.option('', type=types.Related(''),help='')prj_dec = click.option('', type=types.Related(''),help='')inv_src_dec = click.option('', type=types.Related(''),help='')def ujt_translation(_method):def _ujt_translation(*args, **kwargs):for fd in ['', '', '']:if fd in kwargs and kwargs[fd] is not None:kwargs[''] = kwargs.pop(fd)return _method(*args, **kwargs)return functools.wraps(_method)(_ujt_translation)return ujt_translation(inv_src_dec(prj_dec(jt_dec(method))))", "docstring": "Adds the decorators for all types of unified job templates,\nand if the non-unified type is specified, converts it into the\nunified_job_template kwarg.", "id": "f3338:m0"} {"signature": "def get_resource(name):", "body": "module = importlib.import_module('' % name)return module.Resource()", "docstring": "Return an instance of the requested Resource class.\n\n Since all of the resource classes are named `Resource`, this provides\n a slightly cleaner interface for using these classes via 
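A hedged sketch of the string_to_dict fallback chain above; parse_kv is tower-cli's key=value parser, replaced here by a naive split, and the error message stands in for the elided original:

```python
import yaml

def string_to_dict(var_string, allow_kv=True, require_dict=True):
    # YAML is a superset of JSON, so one SafeLoader pass covers both formats.
    try:
        return_dict = yaml.load(var_string, Loader=yaml.SafeLoader)
        if require_dict:
            assert type(return_dict) is dict
        return return_dict
    except (AttributeError, yaml.YAMLError, AssertionError):
        if not allow_kv:
            raise ValueError('failed to parse %r as a dictionary' % var_string)
        # naive stand-in for tower-cli's parse_kv key=value parser
        return dict(item.split('=', 1) for item in var_string.split())

print(string_to_dict('{"a": 1}'))  # {'a': 1}
print(string_to_dict('a=1 b=2'))   # {'a': '1', 'b': '2'}
```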
importing rather\n than through the CLI.", "id": "f3339:m0"} {"signature": "@click.command()@with_global_options@click.option('', required=False, multiple=True)@click.option('', required=False, multiple=True)@click.option('', required=False, multiple=True)@click.option('', required=False, multiple=True)@click.option('', required=False, multiple=True)@click.option('', required=False, multiple=True)@click.option('', required=False, multiple=True)@click.option('', required=False, multiple=True)@click.option('', required=False, multiple=True)@click.option('', required=False, multiple=True)@click.option('', required=False, multiple=True)@click.option('', is_flag=True)def receive(organization=None, user=None, team=None, credential_type=None, credential=None,notification_template=None, inventory_script=None, inventory=None, project=None, job_template=None,workflow=None, all=None):", "body": "from tower_cli.cli.transfer.receive import Receiverreceiver = Receiver()assets_to_export = {}for asset_type in SEND_ORDER:assets_to_export[asset_type] = locals()[asset_type]receiver.receive(all=all, asset_input=assets_to_export)", "docstring": "Export assets from Tower.\n\n 'tower receive' exports one or more assets from a Tower instance\n\n For all of the possible assets types the TEXT can either be the assets name\n (or username for the case of a user) or the keyword all. Specifying all\n will export all of the assets of that type.", "id": "f3340:m5"} {"signature": "@click.command()def logout():", "body": "if not supports_oauth():raise exc.TowerCLIError('')config.main(['', '', ''])", "docstring": "Removes an OAuth2 personal auth token from config.", "id": "f3340:m4"} {"signature": "def _echo_setting(key):", "body": "value = getattr(settings, key)secho('' % key, fg='', bold=True, nl=False)secho(six.text_type(value),bold=True,fg='' if isinstance(value, six.text_type) else '',)", "docstring": "Echo a setting to the CLI.", "id": "f3340:m1"} {"signature": "def _format_id(self, payload):", "body": "if '' in payload:return str(payload[''])if '' in payload:return ''.join([six.text_type(item['']) for item in payload['']])raise MultipleRelatedError('')", "docstring": "Echos only the id", "id": "f3341:c0:m6"} {"signature": "def get_command(self, ctx, name):", "body": "if not hasattr(self.resource, name):return Nonemethod = getattr(self.resource, name)attrs = getattr(method, '', {})help_text = inspect.getdoc(method)attrs[''] = self._auto_help_text(help_text or '')ignore_defaults = attrs.pop('', False)new_method = self._echo_method(method)click_params = getattr(method, '', [])new_method.__click_params__ = copy(click_params)new_method = with_global_options(new_method)fao = attrs.pop('', True)if fao:for field in reversed(self.resource.fields):if not field.is_option:continueif not isinstance(fao, bool) and field.name not in fao:continueargs = [field.option]if field.key:args.insert(, field.key)short_fields = {'': '','': '','': '','': ''}if field.name in short_fields:args.append(''+short_fields[field.name])option_help = field.helpif isinstance(field.type, StructuredInput):option_help += ''if field.required:option_help = '' + option_helpelif field.read_only:option_help = '' + option_helpoption_help = '' + option_helpclick.option(*args,default=field.default if not ignore_defaults else None,help=option_help,type=field.type,show_default=field.show_default,multiple=field.multiple,is_eager=False)(new_method)cmd = click.command(name=name, cls=ActionSubcommand, **attrs)(new_method)code = six.get_function_code(method)if '' in 
code.co_varnames:click.argument('', nargs=, required=False, type=str, metavar='')(cmd)return cmd", "docstring": "Retrieve the appropriate method from the Resource,\n decorate it as a click command, and return that method.", "id": "f3341:c0:m9"} {"signature": "def list_commands(self, ctx):", "body": "return self.resource.commands", "docstring": "Return a list of all methods decorated with the\n @resources.command decorator.", "id": "f3341:c0:m1"} {"signature": "def _auto_help_text(self, help_text):", "body": "api_doc_delimiter = ''begin_api_doc = help_text.find(api_doc_delimiter)if begin_api_doc >= :end_api_doc = help_text.rfind(api_doc_delimiter) + len(api_doc_delimiter)help_text = help_text[:begin_api_doc] + help_text[end_api_doc:]an_prefix = ('', '', '', '')if not self.resource_name.lower().startswith(an_prefix):help_text = help_text.replace('','' % self.resource_name)if self.resource_name.lower().endswith(''):help_text = help_text.replace('','' % self.resource_name[:-],)help_text = help_text.replace('', self.resource_name)help_text = help_text.replace('', '')help_text = help_text.replace('','')for match in re.findall(r'', help_text):option = '' % match.replace('', '')help_text = help_text.replace('' % match, option)return help_text", "docstring": "Given a method with a docstring, convert the docstring\n to more CLI appropriate wording, and also disambiguate the\n word \"object\" on the base class docstrings.", "id": "f3341:c0:m2"} {"signature": "def _format_json(self, payload):", "body": "return json.dumps(payload, indent=)", "docstring": "Convert the payload into a JSON string with proper\n indentation and return it.", "id": "f3341:c0:m4"} {"signature": "def list_commands(self, ctx):", "body": "commands = set(self.list_resource_commands())commands.union(set(self.list_misc_commands()))return sorted(commands)", "docstring": "Return a list of commands present in the commands and resources\n folders, but not subcommands.", "id": "f3342:c0:m4"} {"signature": "def list_resource_commands(self):", "body": "resource_path = os.path.abspath(os.path.join(os.path.dirname(__file__),os.pardir,''))answer = set([])for _, name, _ in pkgutil.iter_modules([resource_path]):res = tower_cli.get_resource(name)if not getattr(res, '', False):answer.add(name)return sorted(answer)", "docstring": "Returns a list of multi-commands for each resource type.", "id": "f3342:c0:m5"} {"signature": "def convert(self, value, param, ctx):", "body": "resource = tower_cli.get_resource(self.resource_name)if value is None:return Noneif isinstance(value, int):return valueif re.match(r'', value):return int(value)if value == '':return valuetry:debug.log('''' % param.name, header='')lookup_data = {resource.identity[-]: value}rel = resource.get(**lookup_data)except exc.MultipleResults:raise exc.MultipleRelatedError(''''''''''.format(self.resource_name,value),)except exc.TowerCLIError as ex:raise exc.RelatedError('' %(self.resource_name, str(ex)))return rel['']", "docstring": "Return the appropriate integer value. 
If a non-integer is\n provided, attempt a name-based lookup and return the primary key.", "id": "f3343:c4:m1"} {"signature": "def parse_args(self, ctx, args):", "body": "if not args and self.no_args_is_help and not ctx.resilient_parsing:click.echo(ctx.get_help())ctx.exit()return super(ActionSubcommand, self).parse_args(ctx, args)", "docstring": "Parse arguments sent to this command.\n\n The code for this method is taken from MultiCommand:\n https://github.com/mitsuhiko/click/blob/master/click/core.py\n\n It is Copyright (c) 2014 by Armin Ronacher.\n See the license:\n https://github.com/mitsuhiko/click/blob/master/LICENSE", "id": "f3345:c0:m1"} {"signature": "def format_options(self, ctx, formatter):", "body": "field_opts = []global_opts = []local_opts = []other_opts = []for param in self.params:if param.name in SETTINGS_PARMS:opts = global_optselif getattr(param, '', None) and param.help.startswith(''):opts = field_optsparam.help = param.help[len(''):]else:opts = local_optsrv = param.get_help_record(ctx)if rv is None:continueelse:opts.append(rv)if self.add_help_option:help_options = self.get_help_option_names(ctx)if help_options:other_opts.append([join_options(help_options)[], ''])if field_opts:with formatter.section(''):formatter.write_dl(field_opts)if local_opts:with formatter.section(''):formatter.write_dl(local_opts)if global_opts:with formatter.section(''):formatter.write_dl(global_opts)if other_opts:with formatter.section(''):formatter.write_dl(other_opts)", "docstring": "Monkey-patch click's format_options method to support option categorization.", "id": "f3345:c0:m2"} {"signature": "@propertydef _parsers(self):", "body": "return tuple([getattr(self, '' % i) for i in self._parser_names])", "docstring": "Return a tuple of all parsers, in order.\n\n This is referenced at runtime, to avoid gleefully ignoring the\n `runtime_values` context manager.", "id": "f3351:c1:m3"} {"signature": "def set_or_reset_runtime_param(self, key, value):", "body": "if self._runtime.has_option('', key):self._runtime = self._new_parser()if value is None:returnsettings._runtime.set('', key.replace('', ''),six.text_type(value))", "docstring": "Maintains the context of the runtime settings for invoking\n a command.\n\n This should be called by a click.option callback, and only\n called once for each setting for each command invocation.\n\n If the setting exists, it follows that the runtime settings are\n stale, so the entire runtime settings are reset.", "id": "f3351:c1:m4"} {"signature": "def _read(self, fp, fpname):", "body": "if os.path.isfile(fpname):file_permission = os.stat(fpname)if fpname != os.path.join(tower_dir, '') and ((file_permission.st_mode & stat.S_IRGRP) or(file_permission.st_mode & stat.S_IROTH)):warnings.warn(''.format(fpname), RuntimeWarning)try:return configparser.ConfigParser._read(self, fp, fpname)except configparser.MissingSectionHeaderError:fp.seek()string = '' % fp.read()flo = StringIO(string) return configparser.ConfigParser._read(self, flo, fpname)", "docstring": "Read the configuration from the given file.\n\n If the file lacks any section header, add a [general] section\n header that encompasses the whole thing.", "id": "f3351:c0:m0"} {"signature": "def with_global_options(method):", "body": "method = click.option('', '',help='''''''',required=False, callback=_apply_runtime_setting,is_eager=True,expose_value=False)(method)method = click.option('', '',help='''''',required=False, callback=_apply_runtime_setting,is_eager=True,expose_value=False)(method)method = click.option('', 
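convert above lets an option accept either a primary key or a name. A hedged sketch of that resolution, with a hypothetical FakeResource standing in for a real tower-cli resource:

```python
import re

def related_to_pk(resource, value):
    # Integers and digit-strings pass through as primary keys; anything else
    # is looked up by the resource's last identity field (usually 'name').
    if value is None or isinstance(value, int):
        return value
    if re.match(r'^[0-9]+$', value):
        return int(value)
    lookup = {resource.identity[-1]: value}
    return resource.get(**lookup)['id']

class FakeResource:  # hypothetical, for illustration only
    identity = ('name',)
    def get(self, **kw):
        return {'id': 42, 'name': kw['name']}

print(related_to_pk(FakeResource(), 'demo'))  # 42
print(related_to_pk(FakeResource(), '7'))     # 7
```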
'',help='''''',required=False, callback=_apply_runtime_setting,is_eager=True,expose_value=False)(method)method = click.option('', '',help='''''''',required=False, callback=_apply_runtime_setting,is_eager=True,expose_value=False)(method)method = click.option('', '',help='''''',type=click.Choice(['', '', '', '']),required=False, callback=_apply_runtime_setting,is_eager=True,expose_value=False)(method)method = click.option('', '',default=None,help='',is_flag=True,required=False, callback=_apply_runtime_setting,is_eager=True,expose_value=False)(method)method = click.option('',default=None,help='',is_flag=True,required=False, callback=_apply_runtime_setting,is_eager=True,expose_value=False)(method)method = click.option('',default=None,help='''',is_flag=True,required=False, callback=_apply_runtime_setting,is_eager=True,expose_value=False)(method)method = click.option('',default=None,help='''',required=False, callback=_apply_runtime_setting,is_eager=True,expose_value=False)(method)method = click.option('',default=None,help='''',is_flag=True,required=False, callback=_apply_runtime_setting,is_eager=True,expose_value=False)(method)method = runtime_context_manager(method)return method", "docstring": "Apply the global options that we desire on every method within\n tower-cli to the given click command.", "id": "f3351:m3"} {"signature": "def __init__(self):", "body": "self._cache = {}defaults = {}for key in CONFIG_OPTIONS:defaults[key] = ''defaults.update({'': '','': '','': '','': '','': '','': '','': '',})self._defaults = self._new_parser(defaults=defaults)self._environment = self._new_parser(defaults=config_from_environment())self._global = self._new_parser()if os.path.isdir(tower_dir):try:os.listdir(tower_dir)except OSError:warnings.warn('''''',RuntimeWarning)self._global.read(os.path.join(tower_dir, ''))self._user = self._new_parser()user_filename = os.path.join(user_dir, CONFIG_FILENAME)self._user.read(user_filename)self._local = self._new_parser()local_dir = os.getcwd()local_dirs = [local_dir] if local_dir not in (user_dir, tower_dir) else []while os.path.split(local_dir)[]:local_dir, _ = os.path.split(local_dir)if local_dir not in (user_dir, tower_dir):local_dirs = [local_dir] + local_dirsfor local_dir in local_dirs:local_filename = os.path.join(local_dir, CONFIG_FILENAME)self._local.read(local_filename)self._runtime = self._new_parser()", "docstring": "Create the settings object, and read from appropriate files as\n well as from `sys.argv`.", "id": "f3351:c1:m1"} {"signature": "@resources.command(ignore_defaults=True, no_args_is_help=False)@click.option('', is_flag=True, default=False,help='')@click.option('', help='')def list(self, root=False, **kwargs):", "body": "if kwargs.get('', None):self.set_child_endpoint(parent=kwargs[''], inventory=kwargs.get('', None))kwargs.pop('')if root and not kwargs.get('', None):raise exc.UsageError('')if root:inventory_id = kwargs['']r = client.get('' % inventory_id)return r.json()return super(Resource, self).list(**kwargs)", "docstring": "Return a list of groups.\n\n =====API DOCS=====\n Retrieve a list of groups.\n\n :param root: Flag that if set, only root groups of a specific inventory will be listed.\n :type root: bool\n :param parent: Primary key or name of the group whose child groups will be listed.\n :type parent: str\n :param all_pages: Flag that if set, collect all pages of content from the API when returning results.\n :type all_pages: bool\n :param page: The page to show. 
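with_global_options above wraps a command in a stack of eager click options that never reach the command's signature. A minimal sketch of one such option; the option name, help text, and the module-level RUNTIME dict are illustrative only (the real callback writes into tower-cli's settings object):

```python
import click

RUNTIME = {}  # illustrative stand-in for tower-cli's settings

def _apply_runtime_setting(ctx, param, value):
    # is_eager + expose_value=False: consumed before the command body runs,
    # and never passed into the command function itself.
    if value is not None:
        RUNTIME[param.name] = value

@click.command()
@click.option('--insecure', is_flag=True, default=None, is_eager=True,
              expose_value=False, callback=_apply_runtime_setting,
              help='Skip SSL verification (illustrative global option).')
def version():
    click.echo('runtime overrides: %r' % RUNTIME)

if __name__ == '__main__':
    version()
```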
Ignored if all_pages is set.\n :type page: int\n :param query: Contains 2-tuples used as query parameters to filter resulting resource objects.\n :type query: list\n :param `**kwargs`: Keyword arguments list of available fields used for searching resource objects.\n :returns: A JSON object containing details of all resource objects returned by Tower backend.\n :rtype: dict\n :raises tower_cli.exceptions.UsageError: When ``root`` flag is on and ``inventory`` is not present in\n ``**kwargs``.\n\n =====API DOCS=====", "id": "f3355:c0:m3"} {"signature": "def __getattribute__(self, name):", "body": "if name in ['', '', '']:raise AttributeErrorelse:return object.__getattribute__(self, name)", "docstring": "Disable inherited methods that cannot be applied to this\n particular resource.", "id": "f3356:c0:m0"} {"signature": "def configure_display(self, data, kwargs=None, write=False):", "body": "if settings.format != '':return if write:obj, obj_type, res, res_type = self.obj_res(kwargs)data[''] = kwargs['']data[obj_type] = objdata[res_type] = resself.set_display_columns(set_false=['' if obj_type == '' else ''],set_true=['' if res_type == '' else res_type])else:self.set_display_columns(set_false=['', ''],set_true=['', ''])if '' in data:for i in range(len(data[''])):self.populate_resource_columns(data[''][i])else:self.populate_resource_columns(data)", "docstring": "Populates columns and sets display attribute as needed.\n Operates on data.", "id": "f3356:c0:m5"} {"signature": "@resources.command(use_fields_as_options=ACTOR_FIELDS+RESOURCE_FIELDS+[''])def list(self, **kwargs):", "body": "data, self.endpoint = self.data_endpoint(kwargs)r = super(Resource, self).list(**data)self.configure_display(r)return r", "docstring": "Return a list of roles.\n\n =====API DOCS=====\n Retrieve a list of objects.\n\n :param all_pages: Flag that if set, collect all pages of content from the API when returning results.\n :type all_pages: bool\n :param page: The page to show. 
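The __getattribute__ override above hides inherited methods that make no sense on a particular resource. A self-contained toy version of the trick:

```python
class Base(object):
    def create(self): return 'created'
    def delete(self): return 'deleted'
    def list(self): return 'listed'

class ReadOnly(Base):
    """Disable inherited methods that cannot be applied to this resource."""
    def __getattribute__(self, name):
        if name in ('create', 'delete'):
            raise AttributeError(name)
        return object.__getattribute__(self, name)

r = ReadOnly()
print(r.list())              # 'listed'
print(hasattr(r, 'create'))  # False -- hidden from getattr/hasattr too
```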
Ignored if all_pages is set.\n :type page: int\n :param query: Contains 2-tuples used as query parameters to filter resulting resource objects.\n :type query: list\n :param `**kwargs`: Keyword arguments list of available fields used for searching resource objects.\n :returns: A JSON object containing details of all resource objects returned by Tower backend.\n :rtype: dict\n\n =====API DOCS=====", "id": "f3356:c0:m7"} {"signature": "@classmethoddef data_endpoint(cls, in_data, ignore=[]):", "body": "obj, obj_type, res, res_type = cls.obj_res(in_data, fail_on=[])data = {}if '' in ignore:obj = Noneif '' in ignore:res = Noneif obj and obj_type == '':data[''] = objif obj and obj_type == '':endpoint = '' % (grammar.pluralize(obj_type), obj)if res is not None:data[''] = reselif res:endpoint = '' % (grammar.pluralize(res_type), res)else:endpoint = ''if in_data.get('', False):data[''] = '' % in_data[''].lower()for key, value in in_data.items():if key not in RESOURCE_FIELDS and key not in ['', '', '']:data[key] = valuereturn data, endpoint", "docstring": "Converts a set of CLI input arguments, `in_data`, into\nrequest data and an endpoint that can be used to look\nup a role or list of roles.\n\nAlso changes the format of `type` in data to what the server\nexpects for the role model, as it exists in the database.", "id": "f3356:c0:m2"} {"signature": "def role_write(self, fail_on_found=False, disassociate=False, **kwargs):", "body": "data, self.endpoint = self.data_endpoint(kwargs, ignore=[''])debug.log('', header='')response = self.read(pk=None, fail_on_no_results=True,fail_on_multiple_results=True, **data)role_data = response[''][]role_id = role_data['']self.configure_display(role_data, kwargs, write=True)obj, obj_type, res, res_type = self.obj_res(kwargs)debug.log('' % obj_type,header='')data, self.endpoint = self.data_endpoint(kwargs)data[''] = res_type.replace('', '')response = self.read(pk=None, fail_on_no_results=False,fail_on_multiple_results=False, **data)msg = ''if response[''] > and not disassociate:msg = '' % obj_typeelif response[''] == and disassociate:msg = '' % obj_typeif msg:role_data[''] = Falseif fail_on_found:raise exc.NotFound(msg)else:debug.log(msg, header='')return role_datadebug.log('' % ('' if disassociate else '', obj_type), header='')post_data = {'': role_id}if disassociate:post_data[''] = Trueclient.post('' % (grammar.pluralize(obj_type), obj),data=post_data)role_data[''] = Truereturn role_data", "docstring": "Re-implementation of the parent `write` method specific to roles.\n Adds a grantee (user or team) to the resource's role.", "id": "f3356:c0:m6"} {"signature": "@resources.command(use_fields_as_options=ACTOR_FIELDS+RESOURCE_FIELDS+[''])@click.option('', default=False,show_default=True, type=bool, is_flag=True,help='''')def revoke(self, fail_on_found=False, **kwargs):", "body": "return self.role_write(fail_on_found=fail_on_found,disassociate=True, **kwargs)", "docstring": "Remove a user or a team from a role. Required information:\n 1) Type of the role\n 2) Resource of the role, inventory, credential, or any other\n 3) A user or a team to add to the role\n\n =====API DOCS=====\n Remove a user or a team from a role. 
Required information:\n * Type of the role.\n * Resource of the role, inventory, credential, or any other.\n * A user or a team to add to the role.\n\n :param fail_on_found: Flag that if set, the operation fails if a user/team does not have the role.\n :type fail_on_found: bool\n :param `**kwargs`: The user to be disassociated and the role to disassociate.\n :returns: parsed JSON of role revoke.\n :rtype: dict\n\n =====API DOCS=====", "id": "f3356:c0:m10"} {"signature": "def _configuration(self, kwargs, config_item):", "body": "if '' not in config_item:if '' not in kwargs:returnnc = kwargs[''] = {}for field in Resource.configuration[kwargs['']]:if field not in config_item:raise exc.TowerCLIError('''' % field)else:nc[field] = config_item[field]else:kwargs[''] =config_item['']", "docstring": "Combine configuration-related keyword arguments into\n notification_configuration.", "id": "f3357:c0:m1"} {"signature": "@resources.commanddef list(self, all_pages=False, **kwargs):", "body": "self._separate(kwargs)return super(Resource, self).list(all_pages=all_pages, **kwargs)", "docstring": "Return a list of notification templates.\n\n Note here configuration-related fields like\n 'notification_configuration' and 'channels' will not be\n used even if provided.\n\n If one or more filters are provided through keyword arguments,\n filter the results accordingly.\n\n If no filters are provided, return all results.\n\n =====API DOCS=====\n Retrieve a list of objects.\n\n :param all_pages: Flag that if set, collect all pages of content from the API when returning results.\n :type all_pages: bool\n :param page: The page to show. Ignored if all_pages is set.\n :type page: int\n :param query: Contains 2-tuples used as query parameters to filter resulting resource objects.\n :type query: list\n :param `**kwargs`: Keyword arguments list of available fields used for searching resource objects.\n :returns: A JSON object containing details of all resource objects returned by Tower backend.\n :rtype: dict\n\n =====API DOCS=====", "id": "f3357:c0:m5"} {"signature": "@resources.command@click.option('', type=types.Related(''),required=False, help='')@click.option('', type=click.Choice(['', '']),required=False, help='''')def create(self, fail_on_found=False, force_on_exists=False, **kwargs):", "body": "config_item = self._separate(kwargs)jt_id = kwargs.pop('', None)status = kwargs.pop('', '')old_endpoint = self.endpointif jt_id is not None:jt = get_resource('')jt.get(pk=jt_id)try:nt_id = self.get(**copy.deepcopy(kwargs))['']except exc.NotFound:passelse:if fail_on_found:raise exc.TowerCLIError('''''''''')else:debug.log('''',header='')return jt.associate_notification_template(jt_id, nt_id, status=status)self.endpoint = '' %(jt_id, status)self._configuration(kwargs, config_item)result = super(Resource, self).create(**kwargs)self.endpoint = old_endpointreturn result", "docstring": "Create a notification template.\n\n All required configuration-related fields (required according to\n notification_type) must be provided.\n\n There are two types of notification template creation: creating a new\n notification template on its own, and creating a new notification\n template under a job template. Here the two types are discriminated by\n whether the --job-template option is provided. The 
--status option controls\n more specific, job-run-status-related association.\n\n Fields in the resource's `identity` tuple are used for a lookup;\n if a match is found, then no-op (unless `force_on_exists` is set) but\n do not fail (unless `fail_on_found` is set).\n\n =====API DOCS=====\n Create an object.\n\n :param fail_on_found: Flag that if set, the operation fails if an object matching the unique criteria\n already exists.\n :type fail_on_found: bool\n :param force_on_exists: Flag that if set, then if a match is found on unique fields, other fields will\n be updated to the provided values.; If unset, a match causes the request to be\n a no-op.\n :type force_on_exists: bool\n :param `**kwargs`: Keyword arguments which, all together, will be used as POST body to create the\n resource object.\n :returns: A dictionary combining the JSON output of the created resource, as well as two extra fields:\n \"changed\", a flag indicating if the resource is created successfully; \"id\", an integer which\n is the primary key of the created object.\n :rtype: dict\n\n =====API DOCS=====", "id": "f3357:c0:m2"} {"signature": "def _separate(self, kwargs):", "body": "self._pop_none(kwargs)result = {}for field in Resource.config_fields:if field in kwargs:result[field] = kwargs.pop(field)if field in Resource.json_fields:if not isinstance(result[field], six.string_types):continuetry:data = json.loads(result[field])result[field] = dataexcept ValueError:raise exc.TowerCLIError('''')return result", "docstring": "Remove None-valued and configuration-related keyworded arguments", "id": "f3357:c0:m0"} {"signature": "@resources.commanddef get(self, pk=None, **kwargs):", "body": "self._separate(kwargs)return super(Resource, self).get(pk=pk, **kwargs)", "docstring": "Return one and exactly one notification template.\n\n Note here configuration-related fields like\n 'notification_configuration' and 'channels' will not be\n used even provided.\n\n Lookups may be through a primary key, specified as a positional\n argument, and/or through filters specified through keyword arguments.\n\n If the number of results does not equal one, raise an exception.\n\n =====API DOCS=====\n Retrieve one and exactly one object.\n\n :param pk: Primary key of the resource to be read. 
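_separate above peels configuration-related keys off the kwargs, JSON-decoding any that arrive as strings. A hedged sketch with assumed field sets; the real lists live on Resource.config_fields and Resource.json_fields:

```python
import json

CONFIG_FIELDS = {'notification_configuration', 'channels'}  # assumed
JSON_FIELDS = {'notification_configuration'}                # assumed

def separate(kwargs):
    result = {}
    for field in CONFIG_FIELDS:
        if field in kwargs:
            result[field] = kwargs.pop(field)
            if field in JSON_FIELDS and isinstance(result[field], str):
                result[field] = json.loads(result[field])
    return result

kwargs = {'name': 'nt', 'notification_configuration': '{"url": "http://x"}'}
print(separate(kwargs))  # {'notification_configuration': {'url': 'http://x'}}
print(kwargs)            # {'name': 'nt'}
```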
Tower CLI will only attempt to read *that* object\n if ``pk`` is provided (not ``None``).\n :type pk: int\n :param `**kwargs`: Keyword arguments used to look up resource object to retrieve if ``pk`` is not provided.\n :returns: loaded JSON of the retrieved resource object.\n :rtype: dict\n\n =====API DOCS=====", "id": "f3357:c0:m6"} {"signature": "@resources.commanddef delete(self, pk=None, fail_on_missing=False, **kwargs):", "body": "self._separate(kwargs)return super(Resource, self).delete(pk=pk, fail_on_missing=fail_on_missing, **kwargs)", "docstring": "Remove the given notification template.\n\n Note here configuration-related fields like\n 'notification_configuration' and 'channels' will not be\n used even if provided.\n\n If `fail_on_missing` is True, then the object's not being found is\n considered a failure; otherwise, a success with no change is reported.\n\n =====API DOCS=====\n Remove the given object.\n\n :param pk: Primary key of the resource to be deleted.\n :type pk: int\n :param fail_on_missing: Flag that if set, the object's not being found is considered a failure; otherwise,\n a success with no change is reported.\n :type fail_on_missing: bool\n :param `**kwargs`: Keyword arguments used to look up resource object to delete if ``pk`` is not provided.\n :returns: dictionary of only one field "changed", which is a flag indicating whether the specified resource\n is successfully deleted.\n :rtype: dict\n\n =====API DOCS=====", "id": "f3357:c0:m4"} {"signature": "@click.argument('', type=types.Related(''))@click.option('', is_flag=True, default=False,help='''')@click.option('', is_flag=True, default=False,help='')@click.option('', required=False, type=int,help='''''')@resources.command(use_fields_as_options=False, no_args_is_help=True)def update(self, inventory_source, monitor=False, wait=False,timeout=None, **kwargs):", "body": "debug.log('', header='')r = client.get('' % (self.endpoint, inventory_source))if not r.json()['']:raise exc.BadRequest('')debug.log('', header='')r = client.post('' % (self.endpoint, inventory_source), data={})inventory_update_id = r.json()['']if monitor or wait:if monitor:result = self.monitor(inventory_update_id, parent_pk=inventory_source, timeout=timeout)elif wait:result = self.wait(inventory_update_id, parent_pk=inventory_source, timeout=timeout)inventory = client.get('' % result['']).json()['']result[''] = int(inventory)return resultreturn {'': inventory_update_id,'': ''}", "docstring": "Update the given inventory source.\n\n =====API DOCS=====\n Update the given inventory source.\n\n :param inventory_source: Primary key or name of the inventory source to be updated.\n :type inventory_source: str\n :param monitor: Flag that if set, immediately calls ``monitor`` on the newly launched inventory update\n rather than exiting with a success.\n :type monitor: bool\n :param wait: Flag that if set, monitor the status of the inventory update, but do not print while it is\n in progress.\n :type wait: bool\n :param timeout: If provided with ``monitor`` flag set, this attempt will time out after the given number\n of seconds.\n :type timeout: int\n :param `**kwargs`: Fields used to override underlying inventory source fields when creating and launching\n an inventory update.\n :returns: Result of subsequent ``monitor`` call if ``monitor`` flag is on; Result of subsequent ``wait``\n call if ``wait`` flag is on; dictionary of "status" if none of the two flags are on.\n :rtype: dict\n :raises tower_cli.exceptions.BadRequest: When the inventory source cannot 
be updated.\n\n =====API DOCS=====", "id": "f3358:c0:m0"} {"signature": "@resources.command@click.option('', is_flag=True, default=False,help='')def status(self, pk, detail=False, **kwargs):", "body": "job = self.last_job_data(pk, **kwargs)if detail:return jobreturn {'': job[''],'': job[''],'': job[''],}", "docstring": "Print the status of the most recent sync.\n\n =====API DOCS=====\n Retrieve the current inventory update status.\n\n :param pk: Primary key of the resource to retrieve status from.\n :type pk: int\n :param detail: Flag that if set, return the full JSON of the job resource rather than a status summary.\n :type detail: bool\n :param `**kwargs`: Keyword arguments used to look up resource object to retrieve status from if ``pk``\n is not provided.\n :returns: full loaded JSON of the specified unified job if ``detail`` flag is on; trimed JSON containing\n only \"elapsed\", \"failed\" and \"status\" fields of the unified job if ``detail`` flag is off.\n :rtype: dict\n\n =====API DOCS=====", "id": "f3358:c0:m1"} {"signature": "@resources.command@click.option('', type=types.Related(''),required=False, help='')def create(self, fail_on_found=False, force_on_exists=False, **kwargs):", "body": "jt_id = kwargs.pop('', None)old_endpoint = self.endpointif jt_id is not None:jt = get_resource('')jt.get(pk=jt_id)try:label_id = self.get(name=kwargs.get('', None), organization=kwargs.get('', None))['']except exc.NotFound:passelse:if fail_on_found:raise exc.TowerCLIError('''')else:debug.log('', header='')return jt.associate_label(job_template=jt_id, label=label_id)self.endpoint = '' % jt_idresult = super(Resource, self).create(fail_on_found=fail_on_found, force_on_exists=force_on_exists, **kwargs)self.endpoint = old_endpointreturn result", "docstring": "Create a new label.\n\n There are two types of label creation: isolatedly creating a new label and creating a new label under\n a job template. 
Here the two types are discriminated by whether to provide --job-template option.\n\n Fields in the resource's `identity` tuple are used for a lookup; if a match is found, then no-op (unless\n `force_on_exists` is set) but do not fail (unless `fail_on_found` is set).\n\n =====API DOCS=====\n Create a label.\n\n :param job_template: Primary key or name of the job template for the created label to associate to.\n :type job_template: str\n :param fail_on_found: Flag that if set, the operation fails if an object matching the unique criteria\n already exists.\n :type fail_on_found: bool\n :param force_on_exists: Flag that if set, then if a match is found on unique fields, other fields will\n be updated to the provided values.; If unset, a match causes the request to be\n a no-op.\n :type force_on_exists: bool\n :param `**kwargs`: Keyword arguments which, all together, will be used as POST body to create the\n resource object.\n :returns: A dictionary combining the JSON output of the created resource, as well as two extra fields:\n \"changed\", a flag indicating if the resource is created successfully; \"id\", an integer which\n is the primary key of the created object.\n :rtype: dict\n :raises tower_cli.exceptions.TowerCLIError: When the label already exists and ``fail_on_found`` flag is on.\n\n =====API DOCS=====", "id": "f3359:c0:m1"} {"signature": "def _get_schema(self, wfjt_id):", "body": "node_res = get_resource('')node_results = node_res.list(workflow_job_template=wfjt_id,all_pages=True)['']return self._workflow_node_structure(node_results)", "docstring": "Returns a dictionary that represents the node network of the\nworkflow job template", "id": "f3361:c1:m1"} {"signature": "@resources.command(use_fields_as_options=False)@click.argument('', type=types.Related(''))@click.argument('', type=types.Variables(), required=False)def schema(self, wfjt, node_network=None):", "body": "existing_network = self._get_schema(wfjt)if not isinstance(existing_network, list):existing_network = []if node_network is None:if settings.format == '':settings.format = ''return existing_networkif hasattr(node_network, ''):node_network = node_network.read()node_network = string_to_dict(node_network, allow_kv=False, require_dict=False)if not isinstance(node_network, list):node_network = []_update_workflow([TreeNode(x, wfjt, include_id=True) for x in existing_network],[TreeNode(x, wfjt) for x in node_network])if settings.format == '':settings.format = ''return self._get_schema(wfjt)", "docstring": "Convert YAML/JSON content into workflow node objects if\nnode_network param is given.\nIf not, print a YAML representation of the node network.\n\n=====API DOCS=====\nConvert YAML/JSON content into workflow node objects if ``node_network`` param is given. 
If not,\nprint a YAML representation of the node network.\n\n:param wfjt: Primary key or name of the workflow job template to run schema against.\n:type wfjt: str\n:param node_network: JSON- or YAML-formatted string representing the topology that the workflow job\n template will be updated to.\n:type node_network: str\n:returns: The latest topology (possibly after modification) of the workflow job template.\n:rtype: dict\n\n=====API DOCS=====", "id": "f3361:c1:m2"} {"signature": "@staticmethoddef _workflow_node_structure(node_results):", "body": "node_list_pos = {}for i, node_result in enumerate(node_results):for rel in ['', '', '']:node_result[''.format(rel)] = []node_list_pos[node_result['']] = ifor node_result in node_results:for rel in ['', '', '']:for sub_node_id in node_result[''.format(rel)]:j = node_list_pos[sub_node_id]node_results[j][''.format(rel)].append(node_result[''])root_nodes = []for node_result in node_results:is_root = Truefor rel in ['', '', '']:if node_result[''.format(rel)] != []:is_root = Falsebreakif is_root:root_nodes.append(node_result[''])def branch_schema(node_id):i = node_list_pos[node_id]node_dict = node_results[i]ret_dict = {\"\": node_id}for fd in NODE_STANDARD_FIELDS:val = node_dict.get(fd, None)if val is not None:if fd == '':job_type = node_dict['']['']['']ujt_key = JOB_TYPES[job_type]ret_dict[ujt_key] = valelse:ret_dict[fd] = valfor rel in ['', '', '']:sub_node_id_list = node_dict[''.format(rel)]if len(sub_node_id_list) == :continuerelationship_name = ''.format(rel)ret_dict[relationship_name] = []for sub_node_id in sub_node_id_list:ret_dict[relationship_name].append(branch_schema(sub_node_id))return ret_dictschema_dict = []for root_node_id in root_nodes:schema_dict.append(branch_schema(root_node_id))return schema_dict", "docstring": "Takes the list results from the API in `node_results` and\ntranslates this data into a dictionary organized in a\nhuman-readable hierarchical structure", "id": "f3361:c1:m0"} {"signature": "@resources.command(use_fields_as_options=False)@click.option('', type=types.Related(''))@click.option('',type=types.Related(''))@click.option('', type=click.Choice(['', '', '']),required=False, default='', help='''')def associate_notification_template(self, workflow,notification_template, status):", "body": "return self._assoc('' % status,workflow, notification_template)", "docstring": "Associate a notification template with this workflow.\n\n =====API DOCS=====\n Associate a notification template with this workflow job template.\n\n :param workflow: The workflow job template to associate to.\n :type workflow: str\n :param notification_template: The notification template to be associated.\n :type notification_template: str\n :param status: type of notification this notification template should be associated to.\n :type status: str\n :returns: Dictionary of only one key "changed", which indicates whether the association succeeded.\n :rtype: dict\n\n =====API DOCS=====", "id": "f3361:c1:m3"} {"signature": "@resources.command(use_fields_as_options=False)@click.option('', type=types.Related(''))@click.option('',type=types.Related(''))@click.option('', type=click.Choice(['', '', '']),required=False, default='', help='''')def disassociate_notification_template(self, workflow,notification_template, status):", "body": "return self._disassoc('' % status,workflow, notification_template)", "docstring": "Disassociate a notification template from this workflow.\n\n =====API DOCS=====\n Disassociate a notification template from this workflow job template.\n\n 
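_workflow_node_structure above inverts a flat node list into a nested success/failure/always tree. A runnable toy version of the recursive branch step, over invented node data:

```python
# Toy data: node 1 runs 'ping'; on success, node 2 runs 'deploy'.
nodes = {
    1: {'job_template': 'ping', 'success_nodes': [2],
        'failure_nodes': [], 'always_nodes': []},
    2: {'job_template': 'deploy', 'success_nodes': [],
        'failure_nodes': [], 'always_nodes': []},
}

def branch(node_id):
    node = nodes[node_id]
    out = {'id': node_id, 'job_template': node['job_template']}
    for rel in ('success', 'failure', 'always'):
        children = node['%s_nodes' % rel]
        if children:  # omit empty relations, as the real code does
            out['%s_nodes' % rel] = [branch(c) for c in children]
    return out

print(branch(1))
# {'id': 1, 'job_template': 'ping',
#  'success_nodes': [{'id': 2, 'job_template': 'deploy'}]}
```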
:param job_template: The workflow job template to disassociate from.\n :type job_template: str\n :param notification_template: The notification template to be disassociated.\n :type notification_template: str\n :param status: type of notification this notification template should be disassociated from.\n :type status: str\n :returns: Dictionary of only one key \"changed\", which indicates whether the disassociation succeeded.\n :rtype: dict\n\n =====API DOCS=====", "id": "f3361:c1:m4"} {"signature": "def read(self, *args, **kwargs):", "body": "if '' in kwargs:kwargs[''] = kwargs.pop('')r = super(Resource, self).read(*args, **kwargs)if '' in r:for d in r['']:self._promote_actor(d)else:self._promote_actor(d)return r", "docstring": "Do extra processing so we can display the actor field as\na top-level field", "id": "f3363:c0:m3"} {"signature": "@resources.command(use_fields_as_options=('', '', '', '', '', '', '','', '', '', '',))@click.option('', is_flag=True, default=False,help='''')@click.option('', is_flag=True, default=False,help='''')@click.option('', required=False, type=int,help='''''')def launch(self, monitor=False, wait=False, timeout=None, **kwargs):", "body": "r = client.get('')if '' not in r.json():raise exc.TowerCLIError('''''')self._pop_none(kwargs)debug.log('', header='')result = client.post(self.endpoint, data=kwargs)command = result.json()command_id = command['']if monitor:return self.monitor(command_id, timeout=timeout)elif wait:return self.wait(command_id, timeout=timeout)answer = OrderedDict((('', True),('', command_id),))answer.update(result.json())return answer", "docstring": "Launch a new ad-hoc command.\n\n Runs a user-defined command from Ansible Tower, immediately starts it,\n and returns back an ID in order for its status to be monitored.\n\n =====API DOCS=====\n Launch a new ad-hoc command.\n\n :param monitor: Flag that if set, immediately calls ``monitor`` on the newly launched command rather\n than exiting with a success.\n :type monitor: bool\n :param wait: Flag that if set, monitor the status of the job, but do not print while job is in progress.\n :type wait: bool\n :param timeout: If provided with ``monitor`` flag set, this attempt will time out after the given number\n of seconds.\n :type timeout: int\n :param `**kwargs`: Fields needed to create and launch an ad hoc command.\n :returns: Result of subsequent ``monitor`` call if ``monitor`` flag is on; Result of subsequent ``wait``\n call if ``wait`` flag is on; dictionary of \"id\" and \"changed\" if none of the two flags are on.\n :rtype: dict\n :raises tower_cli.exceptions.TowerCLIError: When ad hoc commands are not available in Tower backend.\n\n =====API DOCS=====", "id": "f3365:c0:m0"} {"signature": "@resources.command(use_fields_as_options=False)@click.option('', type=types.Related(''))@click.option('',type=types.Related(''))@click.option('', type=click.Choice(['', '', '']),required=False, default='', help='''')def disassociate_notification_template(self, job_template,notification_template, status):", "body": "return self._disassoc('' % status,job_template, notification_template)", "docstring": "Disassociate a notification template from this job template.\n\n =====API DOCS=====\n Disassociate a notification template from this job template.\n\n :param job_template: The job template to disassociate from.\n :type job_template: str\n :param notification_template: The notification template to be disassociated.\n :type notification_template: str\n :param status: type of notification this notification template 
should be disassociated from.\n :type status: str\n :returns: Dictionary of only one key \"changed\", which indicates whether the disassociation succeeded.\n :rtype: dict\n\n =====API DOCS=====", "id": "f3368:c0:m4"} {"signature": "@resources.command(use_fields_as_options=(''))@click.option('', help='''')def callback(self, pk=None, host_config_key='', extra_vars=None):", "body": "url = self.endpoint + '' % pkif not host_config_key:host_config_key = client.get(url).json()['']post_data = {'': host_config_key}if extra_vars:post_data[''] = parser.process_extra_vars(list(extra_vars), force_json=True)r = client.post(url, data=post_data, auth=None)if r.status_code == :return {'': True}", "docstring": "Contact Tower and request a configuration update using this job template.\n\n =====API DOCS=====\n Contact Tower and request a provisioning callback using this job template.\n\n :param pk: Primary key of the job template to run provisioning callback against.\n :type pk: int\n :param host_config_key: Key string used to authenticate the callback host.\n :type host_config_key: str\n :param extra_vars: Extra variables that are passed to provisioning callback.\n :type extra_vars: array of str\n :returns: A dictionary of a single key \"changed\", which indicates whether the provisioning callback\n is successful.\n :rtype: dict\n\n =====API DOCS=====", "id": "f3368:c0:m5"} {"signature": "def command(method=None, **kwargs):", "body": "def actual_decorator(method):method._cli_command = Truemethod._cli_command_attrs = kwargsreturn methodif method and isinstance(method, types.FunctionType):return actual_decorator(method)else:return actual_decorator", "docstring": "Mark this method as a CLI command.\n\n This will only have any meaningful effect in methods that are members of a\n Resource subclass.", "id": "f3374:m0"} {"signature": "@resources.command(use_fields_as_options=False)def get(self, pk):", "body": "try:return next(s for s in self.list()[''] if s[''] == pk)except StopIteration:raise exc.NotFound('')", "docstring": "Return one and exactly one object\n\n =====API DOCS=====\n Return one and exactly one Tower setting.\n\n :param pk: Primary key of the Tower setting to retrieve\n :type pk: int\n :returns: loaded JSON of the retrieved Tower setting object.\n :rtype: dict\n :raises tower_cli.exceptions.NotFound: When no specified Tower setting exists.\n\n =====API DOCS=====", "id": "f3376:c0:m1"} {"signature": "def __getattribute__(self, name):", "body": "if name in ['', '']:raise AttributeErrorelse:return object.__getattribute__(self, name)", "docstring": "Disable inherited methods that cannot be applied to this\n particular resource.", "id": "f3376:c0:m5"} {"signature": "@resources.command(use_fields_as_options=False)@click.argument('')@click.argument('', default=None, required=False,type=types.Variables())def modify(self, setting, value):", "body": "prev_value = new_value = self.get(setting)['']answer = OrderedDict()encrypted = '' in six.text_type(prev_value)if encrypted or six.text_type(prev_value) != six.text_type(value):if setting == '':r = client.post('',data=self.coerce_type(setting, value))new_value = r.json()else:r = client.patch(self.endpoint,data={setting: self.coerce_type(setting, value)})new_value = r.json()[setting]answer.update(r.json())changed = encrypted or (prev_value != new_value)answer.update({'': changed,'': setting,'': new_value,})return answer", "docstring": "Modify an already existing object.\n\n Positional argument SETTING is the setting name and VALUE is its value,\n which can be 
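callback above POSTs the host config key to a job template's callback endpoint. A hedged sketch using requests directly; the /api/v2/... path and the 201 success code are assumptions about the Tower API, and extra_vars is passed pre-serialized rather than through parser.process_extra_vars:

```python
import requests  # stand-in for tower-cli's authenticated client

def provisioning_callback(host, job_template_id, host_config_key,
                          extra_vars=None):
    url = ('https://%s/api/v2/job_templates/%d/callback/'
           % (host, job_template_id))       # assumed endpoint layout
    data = {'host_config_key': host_config_key}
    if extra_vars:
        data['extra_vars'] = extra_vars     # assumed JSON-serializable
    r = requests.post(url, json=data)
    return {'changed': r.status_code == 201}  # assumed success code
```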
provided directly or obtained from a file name if prefixed with '@'.\n\n =====API DOCS=====\n Modify an already existing Tower setting.\n\n :param setting: The name of the Tower setting to be modified.\n :type setting: str\n :param value: The new value of the Tower setting.\n :type value: str\n :returns: A dictionary combining the JSON output of the modified resource, as well as two extra fields:\n \"changed\", a flag indicating if the resource is successfully updated; \"id\", an integer which\n is the primary key of the updated object.\n :rtype: dict\n\n =====API DOCS=====", "id": "f3376:c0:m2"} {"signature": "def jt_aggregate(func, is_create=False, has_pk=False):", "body": "def helper(kwargs, obj):\"\"\"\"\"\"unified_job_template = Nonefor item in UNIFIED_JT:if kwargs.get(item, None) is not None:jt_id = kwargs.pop(item)if unified_job_template is None:unified_job_template = (item, jt_id)else:raise exc.UsageError('''')if unified_job_template is not None:kwargs[''] = unified_job_template[]obj.identity = tuple(list(obj.identity) + [''])return ''.join([UNIFIED_JT[unified_job_template[]],str(unified_job_template[]), ''])elif is_create:raise exc.UsageError('''')def decorator_without_pk(obj, *args, **kwargs):old_endpoint = obj.endpointnew_endpoint = helper(kwargs, obj)if is_create:obj.endpoint = new_endpointresult = func(obj, *args, **kwargs)obj.endpoint = old_endpointreturn resultdef decorator_with_pk(obj, pk=None, *args, **kwargs):old_endpoint = obj.endpointnew_endpoint = helper(kwargs, obj)if is_create:obj.endpoint = new_endpointresult = func(obj, pk=pk, *args, **kwargs)obj.endpoint = old_endpointreturn resultdecorator = decorator_with_pk if has_pk else decorator_without_pkfor item in CLICK_ATTRS:setattr(decorator, item, getattr(func, item, []))decorator.__doc__ = func.__doc__return decorator", "docstring": "Decorator to aggregate unified_jt-related fields.\n\n Args:\n func: The CRUD method to be decorated.\n is_create: Boolean flag showing whether this method is create.\n has_pk: Boolean flag showing whether this method uses pk as argument.\n\n Returns:\n A function with necessary click-related attributes whose keyword\n arguments are aggregated.\n\n Raises:\n exc.UsageError: Either more than one unified jt field is\n provided, or none is provided when is_create flag is set.", "id": "f3379:m0"} {"signature": "@resources.commanddef summary(self):", "body": "pass", "docstring": "Placeholder to get swapped out for `stdout`.\n\n =====API DOCS=====\n foobar\n =====API DOCS=====", "id": "f3380:c0:m2"} {"signature": "@resources.command(use_fields_as_options=('', '', '', '', '','', '', '', '','', '', ''))def modify(self, pk=None, create_on_missing=False, **kwargs):", "body": "if '' in kwargs and '' not in kwargs:kwargs[''] = kwargs.pop('')return super(Resource, self).write(pk, create_on_missing=create_on_missing,force_on_exists=True, **kwargs)", "docstring": "Modify an already existing project.\n\n To edit the project's organizations, see help for organizations.\n\n Fields in the resource's `identity` tuple can be used in lieu of a\n primary key for a lookup; in such a case, only other fields are\n written.\n\n To modify unique fields, you must use the primary key for the lookup.\n\n =====API DOCS=====\n Modify an already existing project.\n\n :param pk: Primary key of the resource to be modified.\n :type pk: int\n :param create_on_missing: Flag that if set, a new object is created if ``pk`` is not set and objects\n matching the appropriate unique criteria are not found.\n :type create_on_missing: bool\n 
:param `**kwargs`: Keyword arguments which, all together, will be used as PATCH body to modify the\n resource object. If ``pk`` is not set, key-value pairs of ``**kwargs`` which are\n also in resource's identity will be used to look up the existing resource.\n :returns: A dictionary combining the JSON output of the modified resource, as well as two extra fields:\n \"changed\", a flag indicating if the resource is successfully updated; \"id\", an integer which\n is the primary key of the updated object.\n :rtype: dict\n\n =====API DOCS=====", "id": "f3383:c0:m1"} {"signature": "@resources.command(use_fields_as_options=('', ''))@click.option('', is_flag=True, default=False,help='''')@click.option('', is_flag=True, default=False,help='')@click.option('', required=False, type=int,help='''''')def update(self, pk=None, create_on_missing=False, monitor=False,wait=False, timeout=None, name=None, organization=None):", "body": "project = self.get(pk, name=name, organization=organization)pk = project['']debug.log('',header='')result = client.get('' % pk)if not result.json()['']:raise exc.CannotStartJob('')debug.log('', header='')result = client.post('' % pk)project_update_id = result.json()['']if monitor:return self.monitor(project_update_id, parent_pk=pk,timeout=timeout)elif wait:return self.wait(project_update_id, parent_pk=pk, timeout=timeout)return {'': project_update_id,'': True,}", "docstring": "Trigger a project update job within Ansible Tower.\n Only meaningful on non-manual projects.\n\n =====API DOCS=====\n Update the given project.\n\n :param pk: Primary key of the project to be updated.\n :type pk: int\n :param monitor: Flag that if set, immediately calls ``monitor`` on the newly launched project update\n rather than exiting with a success.\n :type monitor: bool\n :param wait: Flag that if set, monitor the status of the project update, but do not print while it is\n in progress.\n :type wait: bool\n :param timeout: If provided with ``monitor`` flag set, this attempt will time out after the given number\n of seconds.\n :type timeout: int\n :param name: Name of the project to be updated if ``pk`` is not set.\n :type name: str\n :param organization: Primary key or name of the organization the project to be updated belongs to if\n ``pk`` is not set.\n :type organization: str\n :returns: Result of subsequent ``monitor`` call if ``monitor`` flag is on; Result of subsequent ``wait``\n call if ``wait`` flag is on; dictionary of \"status\" if none of the two flags are on.\n :rtype: dict\n :raises tower_cli.exceptions.CannotStartJob: When the project cannot be updated.\n\n =====API DOCS=====", "id": "f3383:c0:m2"} {"signature": "@resources.command@click.option('', is_flag=True, default=False,help='')def status(self, pk=None, detail=False, **kwargs):", "body": "job = self.last_job_data(pk, **kwargs)if detail:return jobreturn {'': job[''],'': job[''],'': job[''],}", "docstring": "Print the status of the most recent update.\n\n =====API DOCS=====\n Print the status of the most recent update.\n\n :param pk: Primary key of the resource to retrieve status from.\n :type pk: int\n :param detail: Flag that if set, return the full JSON of the job resource rather than a status summary.\n :type detail: bool\n :param `**kwargs`: Keyword arguments used to look up resource object to retrieve status from if ``pk``\n is not provided.\n :returns: full JSON of the specified unified job if ``detail`` flag is on; trimmed JSON containing\n only \"elapsed\", \"failed\" and \"status\" fields of the unified job if 
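
`launch`, `update`, and the other job-starting commands in this section share the same monitor/wait/timeout contract: start the job, then either stream progress (`monitor`) or poll quietly (`wait`) until a terminal state or the timeout. A minimal polling sketch of that contract; the `get_status` callable and the terminal state names are assumptions, not tower-cli API:

    import time

    def wait_for_job(get_status, timeout=None, interval=2.0):
        # Poll get_status() until a terminal state, honoring an optional timeout.
        deadline = time.time() + timeout if timeout else None
        while True:
            status = get_status()
            if status in ('successful', 'failed', 'error', 'canceled'):
                return status
            if deadline and time.time() > deadline:
                raise RuntimeError('job did not finish within %s seconds' % timeout)
            time.sleep(interval)
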
``detail`` flag is off.\n :rtype: dict\n =====API DOCS=====", "id": "f3383:c0:m3"} {"signature": "@resources.command@unified_job_template_options@click.argument('', type=types.Related(''))@click.argument('', type=types.Related(''), required=False)def associate_always_node(self, parent, child=None, **kwargs):", "body": "return self._assoc_or_create('', parent, child, **kwargs)", "docstring": "Add a node to always run after the parent is finished.\n\n =====API DOCS=====\n Add a node to always run after the parent is finished.\n\n :param parent: Primary key of parent node to associate always node to.\n :type parent: int\n :param child: Primary key of child node to be associated.\n :type child: int\n :param `**kwargs`: Fields used to create child node if ``child`` is not provided.\n :returns: Dictionary of only one key \"changed\", which indicates whether the association succeeded.\n :rtype: dict\n\n =====API DOCS=====", "id": "f3384:c0:m10"} {"signature": "@resources.command(use_fields_as_options=False)@click.argument('', type=types.Related(''))@click.argument('', type=types.Related(''))def disassociate_failure_node(self, parent, child):", "body": "return self._disassoc(self._forward_rel_name(''), parent, child)", "docstring": "Remove a failure node link.\n The resultant 2 nodes will both become root nodes.\n\n =====API DOCS=====\n Remove a failure node link.\n\n :param parent: Primary key of parent node to disassociate failure node from.\n :type parent: int\n :param child: Primary key of child node to be disassociated.\n :type child: int\n :returns: Dictionary of only one key \"changed\", which indicates whether the disassociation succeeded.\n :rtype: dict\n\n =====API DOCS=====", "id": "f3384:c0:m9"} {"signature": "def _parent_filter(self, parent, relationship, **kwargs):", "body": "if parent is None or relationship is None:return {}parent_filter_kwargs = {}query_params = ((self._reverse_rel_name(relationship), parent),)parent_filter_kwargs[''] = query_paramsif kwargs.get('', None) is None:parent_data = self.read(pk=parent)[''][]parent_filter_kwargs[''] = parent_data['']return parent_filter_kwargs", "docstring": "Returns filtering parameters to limit a search to the children\nof a particular node by a particular relationship.", "id": "f3384:c0:m3"} {"signature": "@resources.command(use_fields_as_options=False)@click.argument('', type=types.Related(''))@click.argument('', type=types.Related(''))def disassociate_always_node(self, parent, child):", "body": "return self._disassoc(self._forward_rel_name(''), parent, child)", "docstring": "Remove an always node link.\n The resultant 2 nodes will both become root nodes.\n\n =====API DOCS=====\n Remove an always node link.\n\n :param parent: Primary key of parent node to disassociate always node from.\n :type parent: int\n :param child: Primary key of child node to be disassociated.\n :type child: int\n :returns: Dictionary of only one key \"changed\", which indicates whether the disassociation succeeded.\n :rtype: dict\n\n =====API DOCS=====", "id": "f3384:c0:m11"} {"signature": "@resources.command(ignore_defaults=True)def list_facts(self, pk=None, **kwargs):", "body": "res = self.get(pk=pk, **kwargs)url = self.endpoint + '' % (res[''], '')return client.get(url, params={}).json()", "docstring": "Return a JSON object of all available facts of the given host.\n\n Note global option --format is not available here, as the output would always be JSON-formatted.\n\n =====API DOCS=====\n List all available facts of the given host.\n\n :param pk: Primary 
key of the target host.\n :type pk: int\n :param `**kwargs`: Keyword arguments list of available fields used for searching resource objects.\n :returns: A JSON object of all available facts of the given host.\n :rtype: dict\n =====API DOCS=====", "id": "f3385:c0:m1"} {"signature": "def register_get(t):", "body": "t.register_json('',{'': , '': , '': '','': , '': ,}, method='')", "docstring": "After starting job, the launch method may grab info about\n the job just launched from this endpoint", "id": "f3399:m0"} {"signature": "def jt_vars_registration(t, extra_vars):", "body": "t.register_json('', {'': True,'': extra_vars,'': ,'': '','': {'': ''},})register_get(t)t.register_json('', {'': ''}, method='')t.register_json('', {}, method='')t.register_json('', {'': },method='')", "docstring": "Endpoints that are needed to get information from job template.\n This particular combination also entails\n 1) version of Tower - 2.2.0\n 2) successful job launch, id=42\n 3) prompts user for variables on launch", "id": "f3399:m2"} {"signature": "def setUp(self):", "body": "class BasicResource(models.Resource):endpoint = ''name = models.Field(unique=True)self.resource = BasicResource()self.command = ResSubcommand(self.resource)", "docstring": "Install a resource instance sufficient for testing common\n things with subcommands.", "id": "f3414:c0:m0"} {"signature": "def parse_requirements(filename):", "body": "reqs = []version_spec_in_play = Nonefor line in open(filename, '').read().strip().split(''):if not line.strip():continueif not line.startswith(''):reqs.append(line)continuematch = re.search(r''r'', line)if match:version_spec_in_play = match.groupdict()for key in ('', ''):version_spec_in_play[key] = int(version_spec_in_play[key])continueif '' not in line[:].strip() and version_spec_in_play:package = line[:].strip()op = version_spec_in_play['']vspec = (version_spec_in_play[''],version_spec_in_play[''])if '' in op and sys.version_info[:] == vspec:reqs.append(package)elif '>' in op and sys.version_info[:] > vspec:reqs.append(package)elif '' in op and sys.version_info[:] < vspec:reqs.append(package)return reqs", "docstring": "Parse out a list of requirements from the given\n requirements file.", "id": "f3422:m0"} {"signature": "@classmethoddef keys(cls):", "body": "return cls._item_dict.keys()", "docstring": "Returns all of the Enum keys", "id": "f3426:c2:m5"} {"signature": "@classmethoddef lookup(cls, key, get=False):", "body": "if get:item = cls._item_dict.get(key)return item.name if item else keyreturn cls._item_dict[key].name", "docstring": "Returns the label for a given Enum key", "id": "f3426:c2:m4"} {"signature": "@classmethoddef verbose(cls, key=False, default=''):", "body": "if key is False:items = cls._item_dict.values()return [(x.key, x.value) for x in sorted(items, key=lambda x:x.sort or x.key)]item = cls._item_dict.get(key)return item.value if item else default", "docstring": "Returns the verbose name for a given enum value", "id": "f3426:c2:m8"} {"signature": "@classmethoddef values(cls):", "body": "return [x.name for x in cls._item_dict.values()]", "docstring": "Returns all of the Enum values", "id": "f3426:c2:m6"} {"signature": "def balls(timeout=timeout):", "body": "rc = requests.get(messages_url, timeout=timeout)rc.encoding = '' rc = rc.textdata = re.findall('', rc)balls = {}for i in data:balls[int(i[])] = i[]return balls", "docstring": "Return all balls in dict {id0: ball0, id1: ball1}.", "id": "f3437:m6"} {"signature": "@propertydef stadiums(self):", "body": "if not 
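
The enum helpers above (`keys`, `lookup`, `verbose`, `values`) all read from a class-level `_item_dict` registry. A compact, runnable sketch of that registry shape; the `Item` fields mirror the attributes the record bodies touch, while the concrete entries are invented for illustration:

    from collections import namedtuple

    Item = namedtuple('Item', 'key name value sort')

    class Status:
        _item_dict = {
            0: Item(0, 'PENDING', 'Pending review', 2),
            1: Item(1, 'ACTIVE', 'Currently active', 1),
        }

        @classmethod
        def lookup(cls, key, get=False):
            if get:  # forgiving lookup: fall back to the raw key
                item = cls._item_dict.get(key)
                return item.name if item else key
            return cls._item_dict[key].name

        @classmethod
        def verbose(cls, key=False, default=''):
            if key is False:  # no key given: list (key, value) pairs in sort order
                items = cls._item_dict.values()
                return [(x.key, x.value) for x in sorted(items, key=lambda x: x.sort or x.key)]
            item = cls._item_dict.get(key)
            return item.value if item else default

    assert Status.lookup(0) == 'PENDING'
    assert Status.verbose() == [(1, 'Currently active'), (0, 'Pending review')]
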
self._stadiums:self._stadiums = stadiums()return self._stadiums", "docstring": "Return all stadiums in dict {id0: stadium0, id1: stadium1}.\n\n :params year: Year.", "id": "f3437:c0:m11"} {"signature": "def leagues(year=, timeout=timeout):", "body": "rc = requests.get(messages_url, timeout=timeout)rc.encoding = '' rc = rc.textdata = re.findall('' % year, rc)leagues = {}for i in data:leagues[int(i[])] = i[]return leagues", "docstring": "Return all leagues in dict {id0: league0, id1: league1}.\n\n :params year: Year.", "id": "f3437:m3"} {"signature": "def bid(self, trade_id, bid, fast=False):", "body": "method = ''url = '' % trade_idif not fast:rc = self.tradeStatus(trade_id)[]if rc[''] >= bid or self.credits < bid:return False data = {'': bid}try:rc = self.__request__(method, url, data=json.dumps(data), params={'': self.sku_b}, fast=fast)[''][]except PermissionDenied: return Falseif rc[''] == '' or (rc[''] == '' and rc[''] == ''): return Trueelse:return False", "docstring": "Make a bid.\n\n :params trade_id: Trade id.\n :params bid: Amount of credits you want to spend.\n :params fast: True for fastest bidding (skips trade status & credits check).", "id": "f3437:c0:m18"} {"signature": "def clubStaff(self):", "body": "method = ''url = ''rc = self.__request__(method, url)return rc", "docstring": "Return staff in your club.", "id": "f3437:c0:m20"} {"signature": "def search(self, ctype, level=None, category=None, assetId=None, defId=None,min_price=None, max_price=None, min_buy=None, max_buy=None,league=None, club=None, position=None, zone=None, nationality=None,rare=False, playStyle=None, start=, page_size=itemsPerPage[''],fast=False):", "body": "method = ''url = ''if start == :events = [self.pin.event('', ''), self.pin.event('', '')]self.pin.send(events, fast=fast)params = {'': start,'': page_size,'': ctype, }if level:params[''] = levelif category:params[''] = categoryif assetId:params[''] = assetIdif defId:params[''] = defIdif min_price:params[''] = min_priceif max_price:params[''] = max_priceif min_buy:params[''] = min_buyif max_buy:params[''] = max_buyif league:params[''] = leagueif club:params[''] = clubif position:params[''] = positionif zone:params[''] = zoneif nationality:params[''] = nationalityif rare:params[''] = ''if playStyle:params[''] = playStylerc = self.__request__(method, url, params=params, fast=fast)if start == :events = [self.pin.event('', ''), self.pin.event('', '')]self.pin.send(events, fast=fast)return [itemParse(i) for i in rc.get('', ())]", "docstring": "Prepare search request, send and return parsed data as a dict.\n\n :param ctype: [development / ? / ?] Card type.\n :param level: (optional) [?/?/gold] Card level.\n :param category: (optional) [fitness/?/?] Card category.\n :param assetId: (optional) Asset id.\n :param defId: (optional) Definition id.\n :param min_price: (optional) Minimal price.\n :param max_price: (optional) Maximum price.\n :param min_buy: (optional) Minimal buy now price.\n :param max_buy: (optional) Maximum buy now price.\n :param league: (optional) League id.\n :param club: (optional) Club id.\n :param position: (optional) Position.\n :param nationality: (optional) Nation id.\n :param rare: (optional) [boolean] True for searching special cards.\n :param playStyle: (optional) Play style.\n :param start: (optional) Start page sent to server so it is supposed to be 12/15, 24/30 etc. 
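
`bid` above refuses to POST when the re-read trade status shows the offer is already beaten or the account cannot afford it, unless `fast` skips the check. The guard in isolation; the `currentBid` field name and the client shape are assumptions, since the record's string literals are stripped:

    def safe_bid(client, trade_id, offer, fast=False):
        # Re-check the current bid and our balance before actually bidding.
        if not fast:
            trade = client.tradeStatus(trade_id)[0]
            if trade['currentBid'] >= offer or client.credits < offer:
                return False  # already outbid, or cannot afford the offer
        return client.bid(trade_id, offer, fast=True)
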
(default platform page_size*n)\n :param page_size: (optional) Page size (items per page).", "id": "f3437:c0:m16"} {"signature": "def stadiums(year=, timeout=timeout):", "body": "rc = requests.get(messages_url, timeout=timeout)rc.encoding = '' rc = rc.textdata = re.findall('' % year, rc)stadiums = {}for i in data:stadiums[int(i[])] = i[]return stadiums", "docstring": "Return all stadiums in dict {id0: stadium0, id1: stadium1}.\n\n :params year: Year.", "id": "f3437:m5"} {"signature": "def __sendToPile__(self, pile, trade_id=None, item_id=None):", "body": "method = ''url = ''if not isinstance(item_id, (list, tuple)):item_id = (item_id,)data = {\"\": [{'': pile, '': str(i)} for i in item_id]}rc = self.__request__(method, url, data=json.dumps(data))if rc[''][]['']:self.logger.info(\"\".format(trade_id, item_id, pile))else:self.logger.error(\"\".format(trade_id, item_id, pile,rc[''][]['']))return rc[''][]['']", "docstring": "Send to pile.\n\n :params trade_id: (optional?) Trade id.\n :params item_id: Item id.", "id": "f3437:c0:m4"} {"signature": "def sendToSbs(self, challenge_id, item_id):", "body": "method = ''url = '' % challenge_idsquad = self.sbsSquad(challenge_id)players = []moved = Falsen = for i in squad['']['']:if i[''][''] == item_id: return Falseif i[''][''] == and not moved:i[''][''] = item_idmoved = Trueplayers.append({\"\": n,\"\": {\"\": i[''][''],\"\": False}})n += data = {'': players}if not moved:return Falseelse:self.__request__(method, url, data=json.dumps(data))return True", "docstring": "Send card FROM CLUB to first free slot in sbs squad.", "id": "f3437:c0:m35"} {"signature": "def tradepileClear(self):", "body": "method = ''url = ''self.__request__(method, url)", "docstring": "Removes all sold items from tradepile.", "id": "f3437:c0:m31"} {"signature": "def baseId(self, *args, **kwargs):", "body": "return baseId(*args, **kwargs)", "docstring": "Calculate base id and version from a resource id.", "id": "f3437:c0:m13"} {"signature": "def clubConsumables(self, fast=False):", "body": "method = ''url = ''rc = self.__request__(method, url)events = [self.pin.event('', '')]self.pin.send(events, fast=fast)events = [self.pin.event('', '')]self.pin.send(events, fast=fast)events = [self.pin.event('', '')]self.pin.send(events, fast=fast)return [itemParse(i) for i in rc.get('', ())]", "docstring": "Return all consumables from club.", "id": "f3437:c0:m21"} {"signature": "@propertydef playstyles(self, year=):", "body": "if not self._playstyles:self._playstyles = playstyles()return self._playstyles", "docstring": "Return all playstyles in dict {id0: playstyle0, id1: playstyle1}.\n\n :params year: Year.", "id": "f3437:c0:m7"} {"signature": "@propertydef teams(self, year=):", "body": "if year not in self._teams:self._teams[year] = teams(year)return self._teams[year]", "docstring": "Return all teams in dict {id0: team0, id1: team1}.\n\n :params year: Year.", "id": "f3437:c0:m10"} {"signature": "def tradepile(self):", "body": "method = ''url = ''rc = self.__request__(method, url)events = [self.pin.event('', ''), self.pin.event('', '')]if rc.get(''):events.append(self.pin.event('', ''))self.pin.send(events)return [itemParse(i) for i in rc.get('', ())]", "docstring": "Return items in tradepile.", "id": "f3437:c0:m24"} {"signature": "def saveSession(self):", "body": "if self.cookies_file:self.r.cookies.save(ignore_discard=True)with open(self.token_file, '') as f:f.write('' % (self.token_type, self.access_token))", "docstring": "Save cookies/session.", "id": "f3437:c0:m12"} {"signature": "def 
messages(self):", "body": "method = ''url = ''rc = self.__request__(method, url)return rc['']", "docstring": "Return active messages.", "id": "f3437:c0:m40"} {"signature": "def cardInfo(self, resource_id):", "body": "base_id = baseId(resource_id)if base_id in self.players:return self.players[base_id]else: url = ''.format(card_info_url, base_id)return requests.get(url, timeout=self.timeout).json()", "docstring": "Return card info.\n\n :params resource_id: Resource id.", "id": "f3437:c0:m14"} {"signature": "def watchlistDelete(self, trade_id):", "body": "method = ''url = ''if not isinstance(trade_id, (list, tuple)):trade_id = (trade_id,)trade_id = (str(i) for i in trade_id)params = {'': ''.join(trade_id)}self.__request__(method, url, params=params) return True", "docstring": "Remove cards from watchlist.\n\n :params trade_id: Trade id.", "id": "f3437:c0:m29"} {"signature": "def playstyles(year=, timeout=timeout):", "body": "rc = requests.get(messages_url, timeout=timeout)rc.encoding = '' rc = rc.textdata = re.findall('' % year, rc)playstyles = {}for i in data:playstyles[int(i[])] = i[]return playstyles", "docstring": "Return all playstyles in dict {id0: playstyle0, id1: playstyle1}.\n\n :params year: Year.", "id": "f3437:m8"} {"signature": "def build_options(self):", "body": "return None", "docstring": "Default value for optional column configuration. Only child\nclasses will have non-None values for this field. Value\ndepends on child class implementation.\n\nNote: Some child classes do not require any extra options\n(e.g. OrganicChemicalFormula), in that case, this implementation\nwill be invoked and no options will be present in the dictionary.\n\n:return: Options dictionary, or None if not implemented in child\n:rtype: dict or None", "id": "f3455:c0:m2"} {"signature": "def to_dict(self):", "body": "return {\"\": self.type,\"\": self.name,\"\": self.group_by_key,\"\": self.role,\"\": self.units,\"\": self.build_options()}", "docstring": "Converts the column to a dictionary representation accepted\nby the Citrination server.\n\n:return: Dictionary with basic options, plus any column type specific\n options held under the \"options\" key\n:rtype: dict", "id": "f3455:c0:m1"} {"signature": "def __init__(self, name, role, group_by_key=False, units=None, length=None):", "body": "super(VectorColumn, self).__init__(name=name,role=role,group_by_key=group_by_key,units=units)self.length = length", "docstring": "Constructor.\n\n:param name: The name of the column\n:type name: str\n:param role: The role the column will play in machine learning:\n \"Input\"\n \"Output\"\n \"Latent Variable\"\n \"Ignore\"\n:type role: str\n:param group_by_key: Whether or not this column should be used for\n grouping during cross validation\n:type group_by_key: bool\n:param units: Optionally, the units for this column\n:type units: str\n:param length: The length of vectors in this column\n:type length: int", "id": "f3457:c0:m0"} {"signature": "def __init__(self, key, value, loss=None):", "body": "self._key = keyself._value = valueself._loss = loss", "docstring": "Constructor.\n\n:param key: The descriptor key for the prediction\n:type key: str\n:param value: The predicted value\n:type value: str or float\n:param loss: The loss for the prediction\n:type loss: float", "id": "f3467:c0:m0"} {"signature": "def get_projection(self, key):", "body": "return self._projections.get(key)", "docstring": "Retrieves the projection registered under a particular\ndescriptor key.\n\n:param key: A descriptor key\n:return: A 
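
`__sendToPile__` and `watchlistDelete` above both normalize a scalar id into a tuple before building the request payload. That idiom extracted into a runnable helper:

    def as_id_tuple(item_id):
        # Accept one id or a sequence of ids; always return a tuple of strings.
        if not isinstance(item_id, (list, tuple)):
            item_id = (item_id,)
        return tuple(str(i) for i in item_id)

    assert as_id_tuple(42) == ('42',)
    assert as_id_tuple([1, 2]) == ('1', '2')
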
:class:`Projection`", "id": "f3468:c0:m3"} {"signature": "def projections(self):", "body": "return self._projections.keys()", "docstring": "List the descriptor keys with registered projections.\n\n:return: List of descriptor keys", "id": "f3468:c0:m2"} {"signature": "def add_projection(self, key, projection):", "body": "self._projections[key] = projection", "docstring": "Register a projection under a descriptor key.\n\n:param key: The descriptor key for the projection\n:type key: str\n:param projection: The projection for the provided descriptor key\n:type projection: :class:`Projection`", "id": "f3468:c0:m1"} {"signature": "def check_predict_status(self, view_id, predict_request_id):", "body": "failure_message = \"\"bare_response = self._get_success_json(self._get('' + str(view_id) + '' + str(predict_request_id) + '',None, failure_message=failure_message))result = bare_response[\"\"]return result", "docstring": "Returns a string indicating the status of the prediction job\n\n:param view_id: The data view id returned from data view create\n:param predict_request_id: The id returned from predict\n:return: Status data, also includes results if state is finished", "id": "f3469:c0:m9"} {"signature": "def predict(self, data_view_id, candidates, method=\"\", use_prior=True):", "body": "uid = self.submit_predict_request(data_view_id, candidates, method, use_prior)while self.check_predict_status(data_view_id, uid)[''] not in [\"\", \"\", \"\"]:time.sleep()result = self.check_predict_status(data_view_id, uid)if result[\"\"] == \"\":paired = zip(result[\"\"][\"\"], result[\"\"][\"\"])prediction_result_format = [{k: (p[][k], p[][k]) for k in p[].keys()} for p in paired]return list(map(lambda c: _get_prediction_result_from_candidate(c), prediction_result_format))else:raise RuntimeError(\"\".format(uid, result[\"\"]))", "docstring": "Predict endpoint. This simply wraps the async methods (submit and poll for status/results).\n\n:param data_view_id: The ID of the data view to use for prediction\n:type data_view_id: str\n:param candidates: A list of candidates to make predictions on\n:type candidates: list of dicts\n:param method: Method for propagating predictions through model graphs. \"scalar\" uses linearized uncertainty\npropagation, whereas \"scalar_from_distribution\" still returns scalar predictions but uses sampling to\npropagate uncertainty without a linear approximation.\n:type method: str (\"scalar\" or \"scalar_from_distribution\")\n:param use_prior: Whether to apply prior values implied by the property descriptors\n:type use_prior: bool\n:return: The results of the prediction\n:rtype: list of :class:`PredictionResult`", "id": "f3469:c0:m2"} {"signature": "def _data_analysis(self, data_view_id):", "body": "failure_message = \"\".format(data_view_id)return self._get_success_json(self._get(routes.data_analysis(data_view_id), failure_message=failure_message))", "docstring": "Data analysis endpoint.\n\n:param data_view_id: The model identifier (id number for data views)\n:type data_view_id: str\n:return: dictionary containing information about the data, e.g. 
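
`predict` above wraps the async endpoints: submit, poll `check_predict_status` until a terminal state, then zip the parallel value and loss arrays into per-key pairs. The pairing step in isolation, with the candidate shape inferred from the record body rather than a documented contract:

    def pair_values_and_losses(values_list, losses_list):
        # Merge parallel lists of {key: value} and {key: loss} dicts
        # into one list of {key: (value, loss)} dicts.
        return [{k: (values[k], losses[k]) for k in values}
                for values, losses in zip(values_list, losses_list)]

    out = pair_values_and_losses([{'density': 5.1}], [{'density': 0.2}])
    assert out == [{'density': (5.1, 0.2)}]
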
dCorr and tsne", "id": "f3469:c0:m6"} {"signature": "def get_data_view(self, data_view_id):", "body": "url = routes.get_data_view(data_view_id)response = self._get(url).json()result = response[\"\"][\"\"]datasets_list = []for dataset in result[\"\"]:datasets_list.append(Dataset(name=dataset[\"\"],id=dataset[\"\"],description=dataset[\"\"]))columns_list = []for column in result[\"\"]:columns_list.append(ColumnFactory.from_dict(column))return DataView(view_id=data_view_id,name=result[\"\"],description=result[\"\"],datasets=datasets_list,columns=columns_list,)", "docstring": "Retrieves a summary of information for a given data view\n - view id\n - name\n - description\n - columns\n\n:param data_view_id: The ID number of the data view to which the\n run belongs, as a string\n:type data_view_id: str", "id": "f3469:c0:m13"} {"signature": "def get_design_run_status(self, data_view_id, run_uuid):", "body": "url = routes.get_data_view_design_status(data_view_id, run_uuid)response = self._get(url).json()status = response[\"\"]return ProcessStatus(result=status.get(\"\"),progress=status.get(\"\"),status=status.get(\"\"),messages=status.get(\"\"))", "docstring": "Retrieves the status of an in progress or completed design run\n\n:param data_view_id: The ID number of the data view to which the\n run belongs, as a string\n:type data_view_id: str\n:param run_uuid: The UUID of the design run to retrieve status for\n:type run_uuid: str\n:return: A :class:`ProcessStatus` object", "id": "f3469:c0:m11"} {"signature": "def is_ready(self):", "body": "return self.ready == True", "docstring": "Indicates whether or not the service is ready to be used.\n\n:return: A boolean\n:rtype: bool", "id": "f3473:c0:m14"} {"signature": "def __init__(self, xs, ys, responses, tags, uids):", "body": "self._xs = xsself._ys = ysself._responses = responsesself._tags = tagsself._uids = uids", "docstring": "Constructor.\n\n:param xs: A list of x values of the projection.\n:type xs: list of floats\n:param ys: A list of y values of the projection.\n:type ys: list of floats\n:param responses: A list of z values of the projection.\n:type responses: list of floats\n:param tags: A list of tags for the projected points\n:type tags: list of strings\n:param uids: A list of record UIDs for the projected points\n:type uids: list of strings", "id": "f3474:c0:m0"} {"signature": "def get_data_view_status(data_view_id):", "body": "return \"\".format(data_view_id)", "docstring": "URL for retrieving the statuses of all services\nassociated with a data view.\n\n:param data_view_id: The ID of the desired data views\n:type data_view_id: str", "id": "f3475:m7"} {"signature": "def __init__(self, title, normalized_progress, subtitle=None, subevent=None):", "body": "self._title = titleself._subtitle = subtitleself._subevent = subeventself._normalized_progress = normalized_progress", "docstring": "Constructor.\n\n:param title: The title of the event\n:type title: str\n:param subtitle: More detail about the event\n:type subtitle: str\n:param subevent: An event object describing the current state of the service's\n progress toward readiness\n:type subevent: Event\n:param normalized_progress: The fractional representation of the status of the event\n:type normalized_progress: float", "id": "f3476:c0:m0"} {"signature": "def get_value(self, key):", "body": "try:return self._values[key]except KeyError:return None", "docstring": "Retrieves a predicted value.\n\n:param key: A descriptor key for a registered predicted value.\n:type key: str\n:return: The value stored 
at the provided descriptor key, or None if the key is not found.\n:rtype: :class:`PredictedValue`", "id": "f3477:c0:m2"} {"signature": "def __init__(self, result, progress, status, messages=None):", "body": "self._status = statusself._result = resultself._progress = progressself._messages = messages", "docstring": "Constructor.\n\n:param result: The result of the process\n:type result: any\n:param progress: The progress of the process as a percentage\n:type progress: int\n:param status: The status string for the process\n:type status: str\n:param messages: A list of messages representing the steps the process\n has already progressed through\n:type messages: list of str", "id": "f3478:c0:m0"} {"signature": "def __init__(self, uuid):", "body": "self._uuid = uuid", "docstring": "Constructor.\n\n:param uuid: The UUID of an in progress design run.\n:type uuid: str", "id": "f3480:c0:m0"} {"signature": "def __init__(self, best_materials, next_experiments):", "body": "self._best_materials = best_materialsself._next_experiments = next_experiments", "docstring": "Constructor.\n\n:param best_materials: An array of candidate dictionaries\n:type best_materials: list of dictionaries\n:param next_experiments: An array of candidate dictionaries\n:type next_experiments: list of dictionaries", "id": "f3481:c0:m0"} {"signature": "def __init__(self, name, accepted_categories):", "body": "self._type = \"\"self._name = nameself._categories = accepted_categories", "docstring": "Constructor.\n\n:param name: The name of the column in the data\n view to which this constraint should be applied\n:type name: str\n:param accepted_categories: An array of categories to constrain the name to\n:type accepted_categories: list of str", "id": "f3486:c0:m0"} {"signature": "def __init__(self, name, elements, minimum, maximum):", "body": "if not <= minimum <= :raise CitrinationClientError(\"\")if not <= maximum <= :raise CitrinationClientError(\"\")if not maximum >= minimum:raise CitrinationClientError(\"\")self._type = \"\"self._elements = elementsself._name = nameself._min = minimumself._max = maximum", "docstring": "Constructor.\n\n:param name: The name of the column in the data\n view to which this constraint should be applied\n:type name: str\n:param elements: An array of element abbreviations as\n strings, e.g. 
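
`ElementalCompositionConstraint` above validates its percentage window in the constructor and raises `CitrinationClientError` on bad input. A minimal sketch of that guard, with the bounds taken from the docstring and `ValueError` standing in for the client's exception type:

    def validate_percentage_window(minimum, maximum):
        # Enforce 0 <= minimum <= maximum <= 100 for composition constraints.
        if not 0 <= minimum <= 100:
            raise ValueError('minimum must be a percentage in [0, 100]')
        if not 0 <= maximum <= 100:
            raise ValueError('maximum must be a percentage in [0, 100]')
        if maximum < minimum:
            raise ValueError('maximum must be >= minimum')
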
[\"Mg\", \"C\"]\n:type elements: list of str\n:param minimum: The minimum value (<= 100) as a percentage\n at which the specified elements should appear in\n candidate compositions\n:type minimum: float\n:param maximum: The maximum value (<= 100) as a percentage\n at which the specified elements should appear in\n candidate compositions\n:type maximum: float", "id": "f3490:c0:m0"} {"signature": "def __init__(self, name, value=None):", "body": "self._type = \"\" self._name = nameself._value = value", "docstring": "Constructor.\n\n:param name: The name of the column in the data\n view to which this constraint should be applied\n:type name: str\n:param value: The value the column should be constrained to\n:type value: float", "id": "f3491:c0:m0"} {"signature": "def _get_qualified_route(self, route):", "body": "return \"\".format(self.api_url, route)", "docstring": "Get a fully qualified api route.\n:param route: the route (e.g., /model)\n:return: the fully qualified route (e.g., https://citrination.com/model)", "id": "f3496:c0:m3"} {"signature": "def _delete(self, route, headers=None, failure_message=None):", "body": "headers = self._get_headers(headers)response_lambda = (lambda: requests.delete(self._get_qualified_route(route), headers=headers, verify=False, proxies=self.proxies))response = check_for_rate_limiting(response_lambda(), response_lambda)return self._handle_response(response, failure_message)", "docstring": "Execute a delete request and return the result\n:param headers:\n:return:", "id": "f3496:c0:m13"} {"signature": "def _post(self, route, data, headers=None, failure_message=None):", "body": "headers = self._get_headers(headers)response_lambda = (lambda: requests.post(self._get_qualified_route(route), headers=headers, data=data, verify=False, proxies=self.proxies))response = check_for_rate_limiting(response_lambda(), response_lambda)return self._handle_response(response, failure_message)", "docstring": "Execute a post request and return the result\n:param data:\n:param headers:\n:return:", "id": "f3496:c0:m8"} {"signature": "def _get(self, route, headers=None, failure_message=None):", "body": "headers = self._get_headers(headers)response_lambda = (lambda: requests.get(self._get_qualified_route(route), headers=headers, verify=False, proxies=self.proxies))response = check_for_rate_limiting(response_lambda(), response_lambda)return self._handle_response(response, failure_message)", "docstring": "Execute a post request and return the result\n:param headers:\n:return:", "id": "f3496:c0:m6"} {"signature": "def _put(self, route, data, headers=None, failure_message=None):", "body": "headers = self._get_headers(headers)response_lambda = (lambda: requests.put(self._get_qualified_route(route), headers=headers, data=data, verify=False, proxies=self.proxies))response = check_for_rate_limiting(response_lambda(), response_lambda)return self._handle_response(response, failure_message)", "docstring": "Execute a put request and return the result\n:param data:\n:param headers:\n:return:", "id": "f3496:c0:m10"} {"signature": "def create_dataset(self, name=None, description=None, public=False):", "body": "data = {\"\": _convert_bool_to_public_value(public)}if name:data[\"\"] = nameif description:data[\"\"] = descriptiondataset = {\"\": data}failure_message = \"\"result = self._get_success_json(self._post_json(routes.create_dataset(), dataset, failure_message=failure_message))return _dataset_from_response_dict(result)", "docstring": "Create a new data set.\n\n:param name: name of the dataset\n:type 
name: str\n:param description: description for the dataset\n:type description: str\n:param public: A boolean indicating whether or not the dataset should be public.\n:type public: bool\n:return: The newly created dataset.\n:rtype: :class:`Dataset`", "id": "f3511:c0:m9"} {"signature": "def download_files(self, dataset_files, destination=''):", "body": "if not isinstance(dataset_files, list):dataset_files = [dataset_files]for f in dataset_files:filename = f.path.lstrip('')local_path = os.path.join(destination, filename)if not os.path.isdir(os.path.dirname(local_path)):os.makedirs(os.path.dirname(local_path))r = requests.get(f.url, stream=True)with open(local_path, '') as output_file:shutil.copyfileobj(r.raw, output_file)", "docstring": "Downloads file(s) to a local destination.\n\n:param dataset_files:\n:type dataset_files: list of :class: `DatasetFile`\n:param destination: The path to the desired local download destination\n:type destination: str\n:param chunk: Whether or not to chunk the file. Default True\n:type chunk: bool", "id": "f3511:c0:m7"} {"signature": "def list_files(self, dataset_id, glob=\"\", is_dir=False):", "body": "data = {\"\": {\"\": glob,\"\": is_dir}}return self._get_success_json(self._post_json(routes.list_files(dataset_id), data, failure_message=\"\".format(dataset_id)))['']", "docstring": "List matched filenames in a dataset on Citrination.\n\n:param dataset_id: The ID of the dataset to search for files.\n:type dataset_id: int\n:param glob: A pattern which will be matched against files in the dataset.\n:type glob: str\n:param is_dir: A boolean indicating whether or not the pattern should match against the beginning of paths in the dataset.\n:type is_dir: bool\n:return: A list of filepaths in the dataset matching the provided glob.\n:rtype: list of strings", "id": "f3511:c0:m2"} {"signature": "def upload(self, dataset_id, source_path, dest_path=None):", "body": "upload_result = UploadResult()source_path = str(source_path)if not dest_path:dest_path = source_pathelse:dest_path = str(dest_path)if os.path.isdir(source_path):for path, subdirs, files in os.walk(source_path):relative_path = os.path.relpath(path, source_path)current_dest_prefix = dest_pathif relative_path is not \"\":current_dest_prefix = os.path.join(current_dest_prefix, relative_path)for name in files:current_dest_path = os.path.join(current_dest_prefix, name)current_source_path = os.path.join(path, name)try:if self.upload(dataset_id, current_source_path, current_dest_path).successful():upload_result.add_success(current_source_path)else:upload_result.add_failure(current_source_path,\"\")except (CitrinationClientError, ValueError) as e:upload_result.add_failure(current_source_path, str(e))return upload_resultelif os.path.isfile(source_path):file_data = { \"\": str(dest_path), \"\": str(source_path)}j = self._get_success_json(self._post_json(routes.upload_to_dataset(dataset_id), data=file_data))s3url = _get_s3_presigned_url(j)with open(source_path, '') as f:if os.stat(source_path).st_size == :data = \"\"else:data = fr = requests.put(s3url, data=data, headers=j[\"\"])if r.status_code == :data = {'': j[''][''], '': j['']}self._post_json(routes.update_file(j['']), data=data)upload_result.add_success(source_path)return upload_resultelse:raise CitrinationClientError(\"\".format(source_path))else:raise ValueError(\"\".format(source_path))", "docstring": "Upload a file, specifying source and dest paths for a file (acts as the scp command).\n\n:param source_path: The path to the file on the source host.\n:type source_path: str\n:param dest_path: The path to the file where the contents of the upload will be written (on the dest host)\n:type dest_path: str\n:return: The result of the upload process\n:rtype: :class:`UploadResult`", "id": "f3511:c0:m1"} {"signature": "def successful(self):", "body": "return len(self._failures) == ", "docstring": "Indicates whether or not the entire upload was successful.\n\n:return: Whether or not the upload was successful\n:rtype: bool", "id": "f3514:c0:m3"} {"signature": "def __init__(self, logic=None, weight=None, simple=None, simple_weight=None, id=None, is_featured=None,name=None, description=None, owner=None, email=None, updated_at=None, query=None, **kwargs):", "body": "self._logic = Noneself.logic = logicself._weight = Noneself.weight = weightself._simple = Noneself.simple = simpleself._simple_weight = Noneself.simple_weight = simple_weightself._id = Noneself.id = idself._is_featured = Noneself.is_featured = is_featuredself._name = Noneself.name = nameself._description = Noneself.description = descriptionself._owner = Noneself.owner = ownerself._email = Noneself.email = emailself._updated_at = Noneself.updated_at = updated_atself._query = Noneself.query = query", "docstring": "Constructor.\n\n:param logic: The logic to apply to the query ('SHOULD', 'MUST', 'MUST_NOT', or 'OPTIONAL').\n:param weight: Weight for the query.\n:param simple: String with the simple search to run against all fields.\n:param simple_weight: Dictionary of relative paths to their weights for simple queries.\n:param id: One or more :class:`Filter` objects with filters against the id field.\n:param is_featured: One or more :class:`BooleanFilter` objects with filters against the isFeatured field.\n:param name: One or more :class:`Filter` objects with filters against the name field.\n:param description: One or more :class:`Filter` objects with filters against the description field.\n:param owner: One or more :class:`Filter` objects with filters against the owner field.\n:param email: One or more :class:`Filter` objects with filters against the email field.\n:param updated_at: One or more :class:`Filter` objects with filters against the time that the dataset was last updated.\n:param query: One or more :class:`DatasetQuery` objects with nested queries.", "id": "f3518:c0:m0"} {"signature": "def __init__(self, query=None, from_index=None, size=None, random_results=None, random_seed=None,score_relevance=None, return_max_score=None, timeout=None, count_pifs=None, **kwargs):", "body": "super(DatasetReturningQuery, self).__init__(query=query, from_index=from_index, size=size, random_results=random_results, random_seed=random_seed,score_relevance=score_relevance, return_max_score=return_max_score, timeout=timeout, **kwargs)self._count_pifs = Noneself.count_pifs = count_pifs", "docstring": "Constructor.\n\n:param query: One or more :class:`DataQuery` objects with the queries to run.\n:param from_index: Index of the first hit that should be returned.\n:param size: Total number of hits that should be returned.\n:param random_results: Whether to return a random set of records.\n:param random_seed: The random seed to use.\n:param score_relevance: Whether to use relevance scoring.\n:param return_max_score: Whether to return the maximum score.\n:param timeout: The number of milliseconds to wait for the query to execute.\n:param count_pifs: Whether to return counts of PIFs for each dataset.", "id": "f3519:c0:m0"} {"signature": "def __init__(self, took=None, total_num_hits=None, max_score=None, 
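
`upload` above recurses over a source directory, rebuilding each file's destination path from its location relative to the source root (note that `os.path.relpath` returns '.' for the root itself, which is what the record's stripped comparison is guarding against). The path arithmetic on its own, as an illustrative generator:

    import os

    def iter_upload_pairs(source_root, dest_root):
        # Yield (local_file, destination_path) pairs mirroring a directory tree.
        for path, _subdirs, files in os.walk(source_root):
            rel = os.path.relpath(path, source_root)
            prefix = dest_root if rel == '.' else os.path.join(dest_root, rel)
            for name in files:
                yield os.path.join(path, name), os.path.join(prefix, name)
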
hits=None, **kwargs):", "body": "super(DatasetSearchResult, self).__init__(took=took, total_num_hits=total_num_hits, max_score=max_score,hits=self._get_object(DatasetSearchHit, hits), **kwargs)", "docstring": "Constructor.\n\n:param took: Number of milliseconds that the query took to execute.\n:param total_num_hits: Total number of hits.\n:param max_score: The maximum score.\n:param hits: List of :class:`DatasetSearchHit` objects.", "id": "f3520:c0:m0"} {"signature": "def __init__(self, id=None, score=None, is_featured=None, name=None, description=None, owner=None, email=None, num_pifs=None, updated_at=None, **kwargs):", "body": "self._id = Noneself.id = idself._score = Noneself.score = scoreself._is_featured = Noneself.is_featured = is_featuredself._name = Noneself.name = nameself._description = Noneself.description = descriptionself._owner = Noneself.owner = ownerself._email = Noneself.email = emailself._num_pifs = Noneself.num_pifs = num_pifsself._updated_at = Noneself.updated_at = updated_at", "docstring": "Constructor.\n\n:param id: String with the ID of the record.\n:param score: Score with the relevancy of the result.\n:param is_featured: Whether the dataset is a featured one.\n:param name: Name of the dataset.\n:param description: Description of the dataset.\n:param owner: Name of the owner of the dataset.\n:param email: Email address of the owner of the dataset.\n:param num_pifs: Number of PIFs in the dataset.\n:param updated_at: String with the last time that the dataset was updated.", "id": "f3521:c0:m0"} {"signature": "def __init__(self, took=None, results=None, **kwargs):", "body": "self._took = Noneself.took = tookself._results = Noneself.results = results", "docstring": "Constructor.\n\n:param took: Number of milliseconds that the query took to execute.\n:param results: List of :class:`DatasetMultiSearchResultElement` objects.", "id": "f3523:c0:m0"} {"signature": "def __init__(self, logic=None, weight=None, simple=None, simple_weight=None, id=None, name=None, content=None,updated_at=None, query=None, **kwargs):", "body": "self._logic = Noneself.logic = logicself._weight = Noneself.weight = weightself._simple = Noneself.simple = simpleself._simple_weight = Noneself.simple_weight = simple_weightself._id = Noneself.id = idself._name = Noneself.name = nameself._content = Noneself.content = contentself._updated_at = Noneself.updated_at = updated_atself._query = Noneself.query = query", "docstring": "Constructor.\n\n:param logic: The logic to apply to the query ('SHOULD', 'MUST', 'MUST_NOT', or 'OPTIONAL').\n:param weight: Weight for the query.\n:param simple: String with the simple search to run against all fields.\n:param simple_weight: Dictionary of relative paths to their weights for simple queries.\n:param id: One or more :class:`Filter` objects with filters against the id field.\n:param name: One or more :class:`Filter` objects with filters against the name field.\n:param content: One or more :class:`Filter` objects with filters against the content field.\n:param updated_at: One or more :class:`Filter` objects with filters against the time that the dataset was last updated.\n:param query: One or more :class:`DatasetQuery` objects with nested queries.", "id": "f3525:c0:m0"} {"signature": "def __init__(self, took=None, results=None, **kwargs):", "body": "self._took = Noneself.took = tookself._results = Noneself.results = results", "docstring": "Constructor.\n\n:param took: Number of milliseconds that the query took to execute.\n:param results: List of 
:class:`FileMultiSearchResultElement` objects.", "id": "f3527:c0:m0"} {"signature": "def __init__(self, result=None, status=None, **kwargs):", "body": "self._result = Noneself.result = resultself._status = Noneself.status = status", "docstring": "Constructor.\n\n:param result: A single :class:`FileSearchResult` object with the query results.\n:param status: 'SUCCESS', 'ERROR', or 'NOT_EXECUTED'.", "id": "f3529:c0:m0"} {"signature": "@staticmethoddef _get_list(values):", "body": "if values is None:return []elif isinstance(values, list):return valueselse:return [values]", "docstring": "Helper method that wraps values in a list. If the input is a list then it is returned. If the input is None then an empty list is returned. For anything else, the input value is wrapped as a single-element list.\n\n:param values: Value to make sure exists in a list.\n:return: List with the input values.", "id": "f3532:c0:m9"} {"signature": "def dataset_search(self, dataset_returning_query):", "body": "self._validate_search_query(dataset_returning_query)return self._execute_search_query(dataset_returning_query,DatasetSearchResult)", "docstring": "Run a dataset query against Citrination.\n\n:param dataset_returning_query: :class:`DatasetReturningQuery` to execute.\n:type dataset_returning_query: :class:`DatasetReturningQuery`\n:return: Dataset search result object with the results of the query.\n:rtype: :class:`DatasetSearchResult`", "id": "f3532:c0:m4"} {"signature": "def pif_multi_search(self, multi_query):", "body": "failure_message = \"\"response_dict = self._get_success_json(self._post(routes.pif_multi_search, data=json.dumps(multi_query, cls=QueryEncoder),failure_message=failure_message))return PifMultiSearchResult(**keys_to_snake_case(response_dict['']))", "docstring": "Run each in a list of PIF queries against Citrination.\n\n:param multi_query: :class:`MultiQuery` object to execute.\n:return: :class:`PifMultiSearchResult` object with the results of the query.", "id": "f3532:c0:m7"} {"signature": "def __init__(self, logic=None, weight=None, exists=None, equal=None, filter=None, **kwargs):", "body": "self._logic = Noneself.logic = logicself._weight = Noneself.weight = weightself._exists = Noneself.exists = existsself._equal = Noneself.equal = equalself._filter = Noneself.filter = filter", "docstring": "Constructor.\n\n:param logic: Logic for this filter. 
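
`_get_list` above makes every query field uniformly list-valued, which is what lets the search-query constructors accept a scalar, a list, or nothing at all. The same normalization as a free function, with its three cases spelled out:

    def get_list(values):
        # None -> [], list -> unchanged, anything else -> single-element list.
        if values is None:
            return []
        if isinstance(values, list):
            return values
        return [values]

    assert get_list(None) == []
    assert get_list('Fe') == ['Fe']
    assert get_list(['Fe', 'Ni']) == ['Fe', 'Ni']
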
Must be equal to one of \"MUST\", \"MUST_NOT\", \"SHOULD\", or \"OPTIONAL\".\n:param weight: Weight for the filter.\n:param exists: True/False to simply test whether the field exists and has a non-null value.\n:param equal: String with the phrase to match against.\n:param filter: List of :class:`BooleanFilter` objects with sub-filters.", "id": "f3533:c0:m0"} {"signature": "def __init__(self, query=None, extraction_sort=None, from_index=None, size=None, random_results=None,random_seed=None, score_relevance=None, return_max_score=None, timeout=None, **kwargs):", "body": "super(BaseReturningQuery, self).__init__(query=query, extraction_sort=extraction_sort, **kwargs)if '' in '':self.from_index = kwargs['']self._from = Noneself.from_index = from_indexself._size = Noneself.size = sizeself._random_results = Noneself.random_results = random_resultsself._random_seed = Noneself.random_seed = random_seedself._score_relevance = Noneself.score_relevance = score_relevanceself._return_max_score = Noneself.return_max_score = return_max_scoreself._timeout = Noneself.timeout = timeout", "docstring": "Base class for all queries against datasets and the items that they contain on Citrination.\n\n:param query: One or more :class:`DataQuery` objects with the queries to run.\n:param extraction_sort: A single :class:`ExtractionSort` object for sorting.\n:param from_index: Index of the first hit that should be returned.\n:param size: Total number of hits that should be returned.\n:param random_results: Whether to return a random set of records.\n:param random_seed: The random seed to use.\n:param score_relevance: Whether to use relevance scoring.\n:param return_max_score: Whether to return the maximum score.\n:param timeout: The number of milliseconds to wait for the query to execute.", "id": "f3534:c0:m0"} {"signature": "def __init__(self, logic=None, weight=None, simple=None, simple_weight=None, dataset=None, system=None, file=None,query=None, **kwargs):", "body": "self._logic = Noneself.logic = logicself._weight = Noneself.weight = weightself._simple = Noneself.simple = simpleself._simple_weight = Noneself.simple_weight = simple_weightself._dataset = Noneself.dataset = datasetself._system = Noneself.system = systemself._file = Noneself.file = fileself._query = Noneself.query = query", "docstring": "Constructor.\n\n:param logic: The logic to apply to the query ('SHOULD', 'MUST', 'MUST_NOT', or 'OPTIONAL').\n:param weight: Weight for the query.\n:param simple: String with the simple search to run against all fields.\n:param simple_weight: Dictionary of relative paths to their weights for simple queries.\n:param dataset: One or more :class:`DatasetQuery` objects with queries against dataset metadata.\n:param system: One or more :class:`PifSystemQuery` objects with queries against PIF systems\n:param file: One or more :class:`FileQuery` objects with queries against file content or metadata.\n:type file: :class:`FileQuery`\n:param query: Nested list of :class:`DataQuery` objects.", "id": "f3537:c0:m0"} {"signature": "def __init__(self, queries=None, **kwargs):", "body": "self._queries = Noneself.queries = queries", "docstring": "Constructor.\n\n:param queries: One or more queries to run.", "id": "f3538:c0:m0"} {"signature": "def __init__(self, took=None, total_num_hits=None, max_score=None, hits=None, **kwargs):", "body": "self._took = Noneself.took = tookself._total_num_hits = Noneself.total_num_hits = total_num_hitsself._max_score = Noneself.max_score = max_scoreself._hits = Noneself.hits = hits", "docstring": 
"Constructor.\n\n:param took: Number of milliseconds that the query took to execute.\n:param total_num_hits: Total number of hits.\n:param max_score: The maximum score.\n:param hits: List of hits.", "id": "f3539:c0:m0"} {"signature": "def __init__(self, sort=None, logic=None, weight=None, simple=None, simple_weight=None, extract_as=None,extract_all=None, extract_when_missing=None, length=None, offset=None, filter=None, **kwargs):", "body": "super(ChemicalFieldQuery, self).__init__(sort=sort, weight=weight, logic=logic, simple=simple, simple_weight=simple_weight, extract_as=extract_as,extract_all=extract_all, extract_when_missing=extract_when_missing, length=length, offset=offset, **kwargs)self._filter = Noneself.filter = filter", "docstring": "Constructor.\n\n:param sort: ASCENDING or DESCENDING to set the sort order on this field.\n:param logic: Logic for this query. Must be equal to one of \"MUST\", \"MUST_NOT\", \"SHOULD\", or \"OPTIONAL\".\n:param weight: Weight for the query.\n:param simple: String with the simple search to run against all fields.\n:param simple_weight: Dictionary of relative paths to their weights for simple queries.\n:param extract_as: String with the alias to save this field under.\n:param extract_all: Boolean setting whether all values in an array should be extracted.\n:param extract_when_missing: Any valid JSON-supported object or PIF object. This value is returned when a value is missing that should be extracted (and the overall query is still satisfied).\n:param length: One or more :class:`FieldOperation` objects against the length field.\n:param offset: One or more :class:`FieldOperation` objects against the offset field.\n:param filter: One or more :class:`ChemicalFilter` objects against this field.", "id": "f3542:c0:m0"} {"signature": "def __init__(self, logic=None, weight=None, simple=None, simple_weight=None, extract_as=None,extract_all=None, extract_when_missing=None, tags=None, length=None, offset=None, element=None,actual_weight_percent=None, actual_atomic_percent=None, ideal_weight_percent=None,ideal_atomic_percent=None, query=None, **kwargs):", "body": "super(CompositionQuery, self).__init__(logic=logic, weight=weight, simple=simple, simple_weight=simple_weight, extract_as=extract_as,extract_all=extract_all, extract_when_missing=extract_when_missing, tags=tags, length=length,offset=offset, **kwargs)self._element = Noneself.element = elementself._actual_weight_percent = Noneself.actual_weight_percent = actual_weight_percentself._actual_atomic_percent = Noneself.actual_atomic_percent = actual_atomic_percentself._ideal_weight_percent = Noneself.ideal_weight_percent = ideal_weight_percentself._ideal_atomic_percent = Noneself.ideal_atomic_percent = ideal_atomic_percentself._query = Noneself.query = query", "docstring": "Constructor.\n\n:param logic: Logic for this filter. Must be equal to one of \"MUST\", \"MUST_NOT\", \"SHOULD\", or \"OPTIONAL\".\n:param weight: Weight of the query.\n:param simple: String with the simple query to run against all fields.\n:param simple_weight: Dictionary of relative paths to their weights for simple queries.\n:param extract_as: String with the alias to save this field under.\n:param extract_all: Boolean setting whether all values in an array should be extracted.\n:param extract_when_missing: Any valid JSON-supported object or PIF object. 
This value is returned when a value is missing that should be extracted (and the overall query is still satisfied).\n:param tags: One or more :class:`FieldQuery` operations against the tags field.\n:param length: One or more :class:`FieldQuery` operations against the length field.\n:param offset: One or more :class:`FieldQuery` operations against the offset field.\n:param element: One or more :class:`ChemicalFieldQuery` operations against the element field.\n:param actual_weight_percent: One or more :class:`FieldQuery` operations against the actual weight percent field.\n:param actual_atomic_percent: One or more :class:`FieldQuery` operations against the actual atomic percent field.\n:param ideal_weight_percent: One or more :class:`FieldQuery` operations against the ideal weight percent field.\n:param ideal_atomic_percent: One or more :class:`FieldQuery` operations against the ideal atomic percent field.\n:param query: One or more :class:`CompositionQuery` objects with the nested queries.", "id": "f3543:c0:m0"} {"signature": "def __init__(self, key=None, order=None, **kwargs):", "body": "self._key = Noneself.key = keyself._order = Noneself.order = order", "docstring": "Constructor.\n\n:param key: String with the key that will be sorted on.\n:param order: The order to use. Either ASCENDING or DESCENDING.", "id": "f3545:c0:m0"} {"signature": "def __init__(self, logic=None, weight=None, simple=None, simple_weight=None, extract_as=None, extract_all=None,extract_when_missing=None, tags=None, length=None, offset=None, name=None, value=None,query=None, **kwargs):", "body": "super(IdQuery, self).__init__(logic=logic, weight=weight, simple=simple, simple_weight=simple_weight, extract_as=extract_as,extract_all=extract_all, extract_when_missing=extract_when_missing, tags=tags, length=length,offset=offset, **kwargs)self._name = Noneself.name = nameself._value = Noneself.value = valueself._query = Noneself.query = query", "docstring": "Constructor.\n\n:param logic: Logic for this filter. Must be equal to one of \"MUST\", \"MUST_NOT\", \"SHOULD\", or \"OPTIONAL\".\n:param weight: Weight of the query.\n:param simple: String with the query to run over all fields.\n:param simple_weight: Dictionary of relative paths to their weights for simple queries.\n:param extract_as: String with the alias to save this field under.\n:param extract_all: Boolean setting whether all values in an array should be extracted.\n:param extract_when_missing: Any valid JSON-supported object or PIF object. 
This value is returned when a value is missing that should be extracted (and the overall query is still satisfied).\n:param tags: One or more :class:`FieldQuery` operations against the tags field.\n:param length: One or more :class:`FieldQuery` operations against the length field.\n:param offset: One or more :class:`FieldQuery` operations against the offset field.\n:param name: One or more :class:`FieldQuery` operations against the name field.\n:param value: One or more :class:`FieldQuery` operations against the value field.\n:param query: One or more :class:`IdQuery` objects with nested queries.", "id": "f3550:c0:m0"} {"signature": "def __init__(self, sort=None, logic=None, weight=None, simple=None, simple_weight=None, extract_as=None,extract_all=None, extract_when_missing=None, length=None, offset=None, **kwargs):", "body": "self._sort = Noneself.sort = sortself._logic = Noneself.logic = logicself._weight = Noneself.weight = weightself._simple = Noneself.simple = simpleself._simple_weight = Noneself.simple_weight = simple_weightself._extract_as = Noneself.extract_as = extract_asself._extract_all = Noneself.extract_all = extract_allself._extract_when_missing = Noneself.extract_when_missing = extract_when_missingself._length = Noneself.length = lengthself._offset = Noneself.offset = offset", "docstring": "Constructor.\n\n:param sort: ASCENDING or DESCENDING to set the sort order on this field.\n:param logic: Logic for this query. Must be equal to one of \"MUST\", \"MUST_NOT\", \"SHOULD\", or \"OPTIONAL\".\n:param weight: Weight for the query.\n:param simple: String with the simple search to run against all fields.\n:param simple_weight: Dictionary of relative paths to their weights for simple queries.\n:param extract_as: String with the alias to save this field under.\n:param extract_all: Boolean setting whether all values in an array should be extracted.\n:param extract_when_missing: Any valid JSON-supported object or PIF object. This value is returned when a value is missing that should be extracted (and the overall query is still satisfied).\n:param length: One or more :class:`FieldQuery` operations against the length field.\n:param offset: One or more :class:`FieldQuery` operations against the offset field.", "id": "f3551:c0:m0"} {"signature": "def __init__(self, logic=None, weight=None, simple=None, simple_weight=None, extract_as=None, extract_all=None,extract_when_missing=None, tags=None, length=None, offset=None, number=None, title=None,caption=None, query=None, **kwargs):", "body": "super(DisplayItemQuery, self).__init__(logic=logic, weight=weight, simple=simple, simple_weight=simple_weight, extract_as=extract_as,extract_all=extract_all, extract_when_missing=extract_when_missing, tags=tags, length=length,offset=offset, **kwargs)self._title = Noneself.title = titleself._number = Noneself.number = numberself._caption = Noneself.caption = captionself._query = Noneself.query = query", "docstring": "Constructor.\n\n:param logic: Logic for this filter. Must be equal to one of \"MUST\", \"MUST_NOT\", \"SHOULD\", or \"OPTIONAL\".\n:param weight: Weight of the query.\n:param simple: String with the simple query to run against all fields.\n:param simple_weight: Dictionary of relative paths to their weights for simple queries.\n:param extract_as: String with the alias to save this field under.\n:param extract_all: Boolean setting whether all values in an array should be extracted.\n:param extract_when_missing: Any valid JSON-supported object or PIF object. 
This value is returned when a value is missing that should be extracted (and the overall query is still satisfied).\n:param tags: One or more :class:`FieldQuery` operations against the tags field.\n:param length: One or more :class:`FieldQuery` operations against the length field.\n:param offset: One or more :class:`FieldQuery` operations against the offset field.\n:param number: One or more :class:`FieldQuery` operations against the number field.\n:param title: One or more :class:`FieldQuery` operations against the title field.\n:param caption: One or more :class:`FieldQuery` operations against the caption field.\n:param query: One or more :class:`DisplayItemQuery` objects as nested queries.", "id": "f3554:c0:m0"} {"signature": "def __init__(self, logic=None, weight=None, simple=None, simple_weight=None, extract_as=None, extract_all=None,extract_when_missing=None, tags=None, length=None, offset=None, producer=None, url=None,query=None, **kwargs):", "body": "super(SourceQuery, self).__init__(logic=logic, weight=weight, simple=simple, simple_weight=simple_weight, extract_as=extract_as,extract_all=extract_all, extract_when_missing=extract_when_missing, tags=tags, length=length,offset=offset, **kwargs)self._producer = Noneself.producer = producerself._url = Noneself.url = urlself._query = Noneself.query = query", "docstring": "Constructor.\n\n:param logic: Logic for this filter. Must be equal to one of \"MUST\", \"MUST_NOT\", \"SHOULD\", or \"OPTIONAL\".\n:param weight: Weight of the query.\n:param simple: String with the simple query to run against all fields.\n:param simple_weight: Dictionary of relative paths to their weights for simple queries.\n:param extract_as: String with the alias to save this field under.\n:param extract_all: Boolean setting whether all values in an array should be extracted.\n:param extract_when_missing: Any valid JSON-supported object or PIF object. 
This value is returned when a value is missing that should be extracted (and the overall query is still satisfied).\n:param tags: One or more :class:`FieldQuery` operations against the tags field.\n:param length: One or more :class:`FieldQuery` operations against the length field.\n:param offset: One or more :class:`FieldQuery` operations against the offset field.\n:param producer: One or more :class:`FieldQuery` operations against the producer field.\n:param url: One or more :class:`FieldQuery` operations against the url field.\n:param query: One or more :class:`SourceQuery` objects with nested queries.", "id": "f3556:c0:m0"} {"signature": "def __init__(self, logic=None, weight=None, simple=None, simple_weight=None, extract_as=None, extract_all=None,extract_when_missing=None, tags=None, length=None, offset=None, doi=None, isbn=None, issn=None,url=None, title=None, publisher=None, journal=None, volume=None, issue=None, year=None,figure=None, table=None, pages=None, authors=None, editors=None, affiliations=None,acknowledgements=None, references=None, query=None, **kwargs):", "body": "super(ReferenceQuery, self).__init__(logic=logic, weight=weight, simple=simple, simple_weight=simple_weight, extract_as=extract_as,extract_all=extract_all, extract_when_missing=extract_when_missing, tags=tags, length=length,offset=offset, **kwargs)self._doi = Noneself.doi = doiself._isbn = Noneself.isbn = isbnself._issn = Noneself.issn = issnself._url = Noneself.url = urlself._title = Noneself.title = titleself._publisher = Noneself.publisher = publisherself._journal = Noneself.journal = journalself._volume = Noneself.volume = volumeself._issue = Noneself.issue = issueself._year = Noneself.year = yearself._figure = Noneself.figure = figureself._table = Noneself.table = tableself._pages = Noneself.pages = pagesself._authors = Noneself.authors = authorsself._editors = Noneself.editors = editorsself._affiliations = Noneself.affiliations = affiliationsself._acknowledgements = Noneself.acknowledgements = acknowledgementsself._references = Noneself.references = referencesself._query = Noneself.query = query", "docstring": "Constructor.\n\n:param logic: Logic for this filter. Must be equal to one of \"MUST\", \"MUST_NOT\", \"SHOULD\", or \"OPTIONAL\".\n:param weight: Weight of the query.\n:param simple: String with the simple query to run against all fields.\n:param simple_weight: Dictionary of relative paths to their weights for simple queries.\n:param extract_as: String with the alias to save this field under.\n:param extract_all: Boolean setting whether all values in an array should be extracted.\n:param extract_when_missing: Any valid JSON-supported object or PIF object. 
This value is returned when a value is missing that should be extracted (and the overall query is still satisfied).\n:param tags: One or more :class:`FieldQuery` operations against the tags field.\n:param length: One or more :class:`FieldQuery` operations against the length field.\n:param offset: One or more :class:`FieldQuery` operations against the offset field.\n:param doi: One or more :class:`FieldQuery` operations against the doi field.\n:param isbn: One or more :class:`FieldQuery` operations against the isbn field.\n:param issn: One or more :class:`FieldQuery` operations against the issn field.\n:param url: One or more :class:`FieldQuery` operations against the url field.\n:param title: One or more :class:`FieldQuery` operations against the title field.\n:param publisher: One or more :class:`FieldQuery` operations against the publisher field.\n:param journal: One or more :class:`FieldQuery` operations against the journal field.\n:param volume: One or more :class:`FieldQuery` operations against the volume field.\n:param issue: One or more :class:`FieldQuery` operations against the issue field.\n:param year: One or more :class:`FieldQuery` operations against the year field.\n:param figure: One or more :class:`DisplayItemQuery` operations against the figure field.\n:param table: One or more :class:`DisplayItemQuery` operations against the table field.\n:param pages: One or more :class:`PagesQuery` operations against the pages field.\n:param authors: One or more :class:`NameQuery` operations against the authors field.\n:param editors: One or more :class:`NameQuery` operations against the editors field.\n:param affiliations: One or more :class:`FieldQuery` operations against the affiliations field.\n:param acknowledgements: One or more :class:`FieldQuery` operations against the acknowledgements field.\n:param references: One or more :class:`ReferenceQuery` operations against the references field.\n:param query: One or more :class:`ReferenceQuery` objects with nested queries.", "id": "f3557:c0:m0"} {"signature": "def __init__(self, sort=None, weight=None, logic=None, simple=None, simple_weight=None, extract_as=None,extract_all=None, extract_when_missing=None, length=None, offset=None, filter=None, **kwargs):", "body": "super(FieldQuery, self).__init__(sort=sort, weight=weight, logic=logic, simple=simple, simple_weight=simple_weight, extract_as=extract_as,extract_all=extract_all, extract_when_missing=extract_when_missing, length=length, offset=offset, **kwargs)self._filter = Noneself.filter = filter", "docstring": "Constructor.\n\n:param sort: ASCENDING or DESCENDING to set the sort order on this field.\n:param weight: Weight of the query.\n:param logic: Logic for this query. Must be equal to one of \"MUST\", \"MUST_NOT\", \"SHOULD\", or \"OPTIONAL\".\n:param simple: String with the simple search to run against all fields.\n:param simple_weight: Dictionary of relative paths to their weights for simple queries.\n:param extract_as: String with the alias to save this field under.\n:param extract_all: Boolean setting whether all values in an array should be extracted.\n:param extract_when_missing: Any valid JSON-supported object or PIF object. 
This value is returned when a value is missing that should be extracted (and the overall query is still satisfied).\n:param length: One or more :class:`FieldQuery` objects against the length field.\n:param offset: One or more :class:`FieldQuery` objects against the offset field.\n:param filter: One or more :class:`Filter` objects against this field.", "id": "f3558:c0:m0"} {"signature": "def __init__(self, logic=None, weight=None, simple=None, simple_weight=None, extract_as=None, extract_all=None,extract_when_missing=None, tags=None, length=None, offset=None, **kwargs):", "body": "self._logic = Noneself.logic = logicself._weight = Noneself.weight = weightself._simple = Noneself.simple = simpleself._simple_weight = Noneself.simple_weight = simple_weightself._extract_as = Noneself.extract_as = extract_asself._extract_all = Noneself.extract_all = extract_allself._extract_when_missing = Noneself.extract_when_missing = extract_when_missingself._tags = Noneself.tags = tagsself._length = Noneself.length = lengthself._offset = Noneself.offset = offset", "docstring": "Constructor.\n\n:param logic: Logic for this filter. Must be equal to one of \"MUST\", \"MUST_NOT\", \"SHOULD\", or \"OPTIONAL\".\n:param weight: Weight for the query.\n:param simple: String with the simple search to run against all fields.\n:param simple_weight: Dictionary of relative paths to their weights for simple queries.\n:param extract_as: String with the alias to save this field under.\n:param extract_all: Boolean setting whether all values in an array should be extracted.\n:param extract_when_missing: Any valid JSON-supported object or PIF object. This value is returned when a value is missing that should be extracted (and the overall query is still satisfied).\n:param tags: One or more :class:`FieldQuery` operations against the tags field.\n:param length: One or more :class:`FieldQuery` operations against the length field.\n:param offset: One or more :class:`FieldQuery` operations against the offset field.", "id": "f3559:c0:m0"} {"signature": "def __init__(self, id=None, dataset=None, dataset_version=None, score=None, updated_at=None, system=None, extracted=None, extracted_path=None, **kwargs):", "body": "self._id = Noneself.id = idself._dataset = Noneself.dataset = datasetself._dataset_version = Noneself.dataset_version = dataset_versionself._score = Noneself.score = scoreself._updated_at = Noneself.updated_at = updated_atself._system = Noneself.system = systemself._extracted = Noneself.extracted = extractedself._extracted_path = Noneself.extracted_path = extracted_path", "docstring": "Constructor.\n\n:param id: String with the ID of the record.\n:param dataset: Integer with the dataset of the record.\n:param dataset_version: Integer with the dataset version of the record.\n:param score: Score with the relevancy of the result.\n:param updated_at: String with the last time that the record was updated.\n:param system: Pif System object that matched.\n:param extracted: Dictionary with a map of extracted property names to values.\n:param extracted_path: Dictionary with a map of extracted property names to paths in a PIF.", "id": "f3564:c0:m0"} {"signature": "def load_file_as_json(path):", "body": "with open(path, \"\") as f:parsed_dict = json.load(f)return parsed_dict", "docstring": "Given a filepath, loads the file as a dictionary from JSON\n\n:param path: The path to a JSON file", "id": "f3568:m0"} {"signature": "def __get_ml_configuration_status(self, job_id):", "body": "failure_message = \"\"response = 
self._get_success_json(self._get('' + job_id + '', None, failure_message=failure_message))['']return response", "docstring": "After invoking the create_ml_configuration async method, you can use this method to\ncheck on the status of the builder job.\n\n:param job_id: The identifier returned from create_ml_configuration\n:return: Job status", "id": "f3579:c0:m10"} {"signature": "def create_ml_configuration(self, search_template, extract_as_keys, dataset_ids):", "body": "data = {\"\":search_template,\"\":extract_as_keys}failure_message = \"\"config_job_id = self._get_success_json(self._post_json('', data, failure_message=failure_message))['']['']['']while True:config_status = self.__get_ml_configuration_status(config_job_id)print('', config_status)if config_status[''] == '':ml_config = self.__convert_response_to_configuration(config_status[''], dataset_ids)return ml_configtime.sleep()", "docstring": "This method will spawn a server job to create a default ML configuration based on a search template and\nthe extract as keys.\nThis function will submit the request to build, and wait for the configuration to finish before returning.\n\n:param search_template: A search template defining the query (properties, datasets etc)\n:param extract_as_keys: Array of extract-as keys defining the descriptors\n:param dataset_ids: Array of dataset identifiers to make search template from\n:return: An identifier used to request the status of the builder job (get_ml_configuration_status)", "id": "f3579:c0:m7"} {"signature": "def create_ml_configuration_from_datasets(self, dataset_ids):", "body": "available_columns = self.search_template_client.get_available_columns(dataset_ids)search_template = self.search_template_client.create(dataset_ids, available_columns)return self.create_ml_configuration(search_template, available_columns, dataset_ids)", "docstring": "Creates an ml configuration from dataset_ids and extract_as_keys\n\n:param dataset_ids: Array of dataset identifiers to make search template from\n:return: An identifier used to request the status of the builder job (get_ml_configuration_status)", "id": "f3579:c0:m6"} {"signature": "def validate(self, ml_template):", "body": "data = {\"\":ml_template}failure_message = \"\"res = self._get_success_json(self._post_json('', data, failure_message=failure_message))['']if res['']:return ''return res['']", "docstring": "Runs the template against the validation endpoint, returns a message indicating status of the template\n\n:param ml_template: Template to validate\n:return: OK or error message if validation failed", "id": "f3580:c0:m1"} {"signature": "def add_descriptor(self, descriptor, role='', group_by_key=False):", "body": "descriptor.validate()if descriptor.key in self.configuration[\"\"]:raise ValueError(\"\")self.configuration[''].append(descriptor.as_dict())self.configuration[\"\"][descriptor.key] = roleif group_by_key:self.configuration[\"\"].append(descriptor.key)", "docstring": "Add a descriptor column.\n\n:param descriptor: A Descriptor instance (e.g., RealDescriptor, InorganicDescriptor, etc.)\n:param role: Specify a role (input, output, latentVariable, or ignore)\n:param group_by_key: Whether or not to group by this key during cross validation", "id": "f3581:c0:m3"} {"signature": "def dataset_ids(self, dataset_ids):", "body": "self.configuration[''] = dataset_ids", "docstring": "Sets the dataset ids to use for the view\n\n:param dataset_ids: Array of strings, one for each dataset id", "id": "f3581:c0:m1"}
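create_ml_configuration above blocks on a poll-and-sleep loop whose literals were stripped by the corpus scrubber. The same pattern in a standalone, stdlib-only form (the `get_status` callable and the 'Finished'/'Failed' status strings are illustrative assumptions, not the service's real values):

```python
import time

def wait_for_job(get_status, job_id, poll_seconds=5, timeout_seconds=600):
    """Poll get_status(job_id) until it reports a terminal state.

    get_status stands in for the scrubbed status call above; adding a
    deadline avoids the unbounded `while True` of the original sketch.
    """
    deadline = time.time() + timeout_seconds
    while time.time() < deadline:
        status = get_status(job_id)
        if status['status'] == 'Finished':
            return status['result']
        if status['status'] == 'Failed':
            raise RuntimeError('job %s failed' % job_id)
        time.sleep(poll_seconds)
    raise TimeoutError('job %s did not finish in time' % job_id)
```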
dataset_ids):", "body": "if not isinstance(dataset_ids, list):dataset_ids = [dataset_ids]data = {\"\":dataset_ids}failure_message = \"\".format(dataset_ids)return self._get_success_json(self._post_json('', data, failure_message=failure_message))['']", "docstring": "Retrieves the set of columns from the combination of dataset ids given\n\n:param dataset_ids: The id of the dataset to retrieve columns from\n:type dataset_ids: list of int\n:return: A list of column names from the dataset ids given.\n:rtype: list of str", "id": "f3582:c0:m2"} {"signature": "def __int__(self):", "body": "return self._ip_dec", "docstring": "Return the decimal representation of the address/netmask.", "id": "f3601:c0:m16"} {"signature": "def _dot_to_dec(ip, check=True):", "body": "if check and not is_dot(ip):raise ValueError('' % ip)octets = str(ip).split('')dec = dec |= int(octets[]) << dec |= int(octets[]) << dec |= int(octets[]) << dec |= int(octets[])return dec", "docstring": "Dotted decimal notation to decimal conversion.", "id": "f3601:m15"} {"signature": "def get_network_ip(self):", "body": "return self._net_ip", "docstring": "Return the network address.", "id": "f3601:c3:m9"} {"signature": "def _check_nm(nm, notation):", "body": "_NM_CHECK_FUNCT = {NM_DOT: _dot_to_dec,NM_HEX: _hex_to_dec,NM_BIN: _bin_to_dec,NM_OCT: _oct_to_dec,NM_DEC: _dec_to_dec_long}try:dec = _NM_CHECK_FUNCT[notation](nm, check=True)except ValueError:return Falseif dec in _NETMASKS_VALUES:return Truereturn False", "docstring": "Function internally used to check if the given netmask\n is of the specified notation.", "id": "f3601:m7"} {"signature": "def set_ip(self, ip):", "body": "self.set(ip=ip, netmask=self._nm)", "docstring": "Change the current IP.", "id": "f3601:c3:m3"} {"signature": "def is_hex_nm(nm):", "body": "return _check_nm(nm, NM_HEX)", "docstring": "Return true if the netmask is in hexadecimal notatation.", "id": "f3601:m9"} {"signature": "def is_dec_nm(nm):", "body": "return _check_nm(nm, NM_DEC)", "docstring": "Return true if the netmask is in decimal notatation.", "id": "f3601:m12"} {"signature": "def is_hex(ip):", "body": "try:dec = int(str(ip), )except (TypeError, ValueError):return Falseif dec > or dec < :return Falsereturn True", "docstring": "Return true if the IP address is in hexadecimal notation.", "id": "f3601:m3"} {"signature": "def detect(ip):", "body": "return _detect(ip, _isnm=False)", "docstring": "Detect the notation of an IP address.\n\n @param ip: the IP address.\n @type ip: integers, strings or object with an appropriate __str()__ method.\n @return: one of the IP_* constants; IP_UNKNOWN if undetected.", "id": "f3601:m34"} {"signature": "def p_detect(ip):", "body": "return NOTATION_MAP[detect(ip)][]", "docstring": "Return the notation of an IP address (string).", "id": "f3601:m36"} {"signature": "def _dec_to_hex(ip):", "body": "return hex(ip)", "docstring": "Decimal to hexadecimal conversion.", "id": "f3601:m18"} {"signature": "def is_bin_nm(nm):", "body": "return _check_nm(nm, NM_BIN)", "docstring": "Return true if the netmask is in binary notatation.", "id": "f3601:m10"} {"signature": "def __iadd__(self, other):", "body": "self.set(self._add(other), notation=IP_DEC)return self", "docstring": "Augmented arithmetic sum.", "id": "f3601:c1:m3"} {"signature": "def _dec_to_oct(ip):", "body": "return oct(ip)", "docstring": "Decimal to octal conversion.", "id": "f3601:m20"} {"signature": "def detect_nm(nm):", "body": "return _detect(nm, _isnm=True)", "docstring": "Detect the notation of a netmask.\n @param nm: 
the netmask.\n @type nm: integers, strings or object with an appropriate __str()__ method.\n @return: one of the NM_* constants; NM_UNKNOWN if undetected.", "id": "f3601:m35"} {"signature": "def is_dot(ip):", "body": "octets = str(ip).split('')if len(octets) != :return Falsefor i in octets:try:val = int(i)except ValueError:return Falseif val > or val < :return Falsereturn True", "docstring": "Return true if the IP address is in dotted decimal notation.", "id": "f3601:m2"} {"signature": "def is_dec(ip):", "body": "try:dec = int(str(ip))except ValueError:return Falseif dec > or dec < :return Falsereturn True", "docstring": "Return true if the IP address is in decimal notation.", "id": "f3601:m6"} {"signature": "def _hex_to_dec(ip, check=True):", "body": "if check and not is_hex(ip):raise ValueError('' % ip)if isinstance(ip, int):ip = hex(ip)return int(str(ip), )", "docstring": "Hexadecimal to decimal conversion.", "id": "f3601:m17"} {"signature": "def _cmp_prepare(self, other):", "body": "if isinstance(other, self.__class__):return other._ip_decelif isinstance(other, int):return otherreturn self.__class__(other)._ip_dec", "docstring": "Prepare the item to be compared with this address/netmask.", "id": "f3601:c0:m9"} {"signature": "def get_ip(self):", "body": "return self._ip", "docstring": "Return the given address.", "id": "f3601:c3:m4"} {"signature": "def is_notation(ip, notation):", "body": "return _is_notation(ip, notation, _isnm=False)", "docstring": "Return true if the given address is in the given notation.", "id": "f3601:m31"} {"signature": "def get_dot(self):", "body": "return self._ip", "docstring": "Return the dotted decimal notation of the address/netmask.", "id": "f3601:c0:m3"} {"signature": "def is_oct(ip):", "body": "try:dec = int(str(ip), )except (TypeError, ValueError):return Falseif dec > or dec < :return Falsereturn True", "docstring": "Return true if the IP address is in octal notation.", "id": "f3601:m5"} {"signature": "def _wildcard_to_dec(nm, check=False):", "body": "if check and not is_wildcard_nm(nm):raise ValueError('' % nm)return - _dot_to_dec(nm, check=False)", "docstring": "Wildcard bits to decimal conversion.", "id": "f3601:m28"} {"signature": "def __init__(self, ip, notation=IP_UNKNOWN):", "body": "self.set(ip, notation)", "docstring": "Initialize the object.", "id": "f3601:c0:m0"} {"signature": "def set(self, ip, netmask=None):", "body": "if isinstance(ip, str) and netmask is None:ipnm = ip.split('')if len(ipnm) != :raise ValueError('' % ip)ip = ipnm[]netmask = ipnm[]if isinstance(ip, IPv4Address):self._ip = ipelse:self._ip = IPv4Address(ip)if isinstance(netmask, IPv4NetMask):self._nm = netmaskelse:self._nm = IPv4NetMask(netmask)ipl = int(self._ip)nml = int(self._nm)base_add = ipl & nmlself._ip_num = - - nmlif self._ip_num in (-, ):if self._ip_num == -:self._ip_num = else:self._ip_num = self._net_ip = Noneself._bc_ip = Noneself._first_ip_dec = base_addself._first_ip = IPv4Address(self._first_ip_dec, notation=IP_DEC)if self._ip_num == :last_ip_dec = self._first_ip_decelse:last_ip_dec = self._first_ip_dec + self._last_ip = IPv4Address(last_ip_dec, notation=IP_DEC)returnself._net_ip = IPv4Address(base_add, notation=IP_DEC)self._bc_ip = IPv4Address(base_add + self._ip_num + , notation=IP_DEC)self._first_ip_dec = base_add + self._first_ip = IPv4Address(self._first_ip_dec, notation=IP_DEC)self._last_ip = IPv4Address(base_add + self._ip_num, notation=IP_DEC)", "docstring": "Set the IP address and the netmask.", "id": "f3601:c3:m1"}
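The _dot_to_dec/_dec_to_dot bodies in this module had their numeric literals stripped by the corpus scrubber; restored, the conversions are plain shift-and-mask arithmetic, as in this self-contained sketch:

```python
def dot_to_dec(ip):
    # '192.168.0.1' -> 3232235521: each octet occupies one byte.
    a, b, c, d = (int(octet) for octet in ip.split('.'))
    return (a << 24) | (b << 16) | (c << 8) | d

def dec_to_dot(dec):
    # 3232235521 -> '192.168.0.1': mask each byte back out.
    return '%d.%d.%d.%d' % ((dec >> 24) & 255, (dec >> 16) & 255,
                            (dec >> 8) & 255, dec & 255)

assert dot_to_dec('192.168.0.1') == 3232235521
assert dec_to_dot(3232235521) == '192.168.0.1'
```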
get_wildcard(self):", "body": "return _convert(self._ip, notation=NM_WILDCARD,inotation=IP_DOT, _check=False, _isnm=self._isnm)", "docstring": "Return the wildcard bits notation of the netmask.", "id": "f3601:c2:m1"} {"signature": "def p_detect_nm(nm):", "body": "return NOTATION_MAP[detect_nm(nm)][]", "docstring": "Return the notation of a netmask (string).", "id": "f3601:m37"} {"signature": "def _dec_to_dot(ip):", "body": "first = int((ip >> ) & )second = int((ip >> ) & )third = int((ip >> ) & )fourth = int(ip & )return '' % (first, second, third, fourth)", "docstring": "Decimal to dotted decimal notation conversion.", "id": "f3601:m16"} {"signature": "def _convert(ip, notation, inotation, _check, _isnm):", "body": "inotation_orig = inotationnotation_orig = notationinotation = _get_notation(inotation)notation = _get_notation(notation)if inotation is None:raise ValueError('' % inotation_orig)if notation is None:raise ValueError('' % notation_orig)docheck = _check or Falseif inotation == IP_UNKNOWN:inotation = _detect(ip, _isnm)if inotation == IP_UNKNOWN:raise ValueError('')if _check is None:docheck = Trueif _isnm:docheck = Falsedec = if inotation == IP_DOT:dec = _dot_to_dec(ip, docheck)elif inotation == IP_HEX:dec = _hex_to_dec(ip, docheck)elif inotation == IP_BIN:dec = _bin_to_dec(ip, docheck)elif inotation == IP_OCT:dec = _oct_to_dec(ip, docheck)elif inotation == IP_DEC:dec = _dec_to_dec_long(ip, docheck)elif _isnm and inotation == NM_BITS:dec = _bits_to_dec(ip, docheck)elif _isnm and inotation == NM_WILDCARD:dec = _wildcard_to_dec(ip, docheck)else:raise ValueError('' % inotation_orig)if _isnm and dec not in _NETMASKS_VALUES:raise ValueError('' % ip)if notation == IP_DOT:return _dec_to_dot(dec)elif notation == IP_HEX:return _dec_to_hex(dec)elif notation == IP_BIN:return _dec_to_bin(dec)elif notation == IP_OCT:return _dec_to_oct(dec)elif notation == IP_DEC:return _dec_to_dec_str(dec)elif _isnm and notation == NM_BITS:return _dec_to_bits(dec)elif _isnm and notation == NM_WILDCARD:return _dec_to_wildcard(dec)else:raise ValueError('' % notation_orig)", "docstring": "Internally used to convert IPs and netmasks to other notations.", "id": "f3601:m38"} {"signature": "def is_notation_nm(nm, notation):", "body": "return _is_notation(nm, notation, _isnm=True)", "docstring": "Return true if the given netmask is in the given notation.", "id": "f3601:m32"} {"signature": "def __isub__(self, other):", "body": "self.set(self._sub(other), notation=IP_DEC)return self", "docstring": "Augmented arithmetic subtraction.", "id": "f3601:c1:m6"} {"signature": "def convert_nm(nm, notation=IP_DOT, inotation=IP_UNKNOWN, check=True):", "body": "return _convert(nm, notation, inotation, _check=check, _isnm=True)", "docstring": "Convert a netmask to another notation.", "id": "f3601:m40"} {"signature": "def __len__(self):", "body": "return self.get_ip_number()", "docstring": "Return the number of usable IP address.", "id": "f3601:c3:m16"} {"signature": "def _oct_to_dec(ip, check=True):", "body": "if check and not is_oct(ip):raise ValueError('' % ip)if isinstance(ip, int):ip = oct(ip)return int(str(ip), )", "docstring": "Octal to decimal conversion.", "id": "f3601:m19"} {"signature": "def __str__(self):", "body": "return self.get()", "docstring": "Print this address/netmask.", "id": "f3601:c0:m8"} {"signature": "def get_ip_number(self):", "body": "return self._ip_num", "docstring": "Return the number of usable IP addresses.", "id": "f3601:c3:m11"} {"signature": "def _get_notation(notation):", "body": "return 
_NOTATION_KEYS.get(notation, None)", "docstring": "Given a numeric value or string value, returns one in IP_DOT, IP_HEX,\n IP_BIN, etc., or None if unable to convert to the internally\n used numeric convention.", "id": "f3601:m0"} {"signature": "def _dec_to_bin(ip):", "body": "bits = []while ip:bits.append(_BYTES_TO_BITS[ip & ])ip >>= bits.reverse()return ''.join(bits) or *''", "docstring": "Decimal to binary conversion.", "id": "f3601:m23"} {"signature": "def get_netmask(self):", "body": "return self._nm", "docstring": "Return the netmask.", "id": "f3601:c3:m6"} {"signature": "def generate(length=DEFAULT_LENGTH):", "body": "return ''.join(random.SystemRandom().choice(ALPHABET)for _ in range(length))", "docstring": "Generate a random string of the specified length.\n\nThe returned string is composed of an alphabet that shouldn't include any\ncharacters that are easily mistakeable for one another (I, 1, O, 0), and\nhopefully won't accidentally contain any English-language curse words.", "id": "f3605:m0"} {"signature": "@classmethoddef coerce(cls, key, value):", "body": "if not isinstance(value, MutableDict):if isinstance(value, dict):return MutableDict(value)return Mutable.coerce(key, value)else:return value", "docstring": "Convert plain dictionaries to MutableDict.", "id": "f3607:c4:m0"} {"signature": "def __setitem__(self, key, value):", "body": "dict.__setitem__(self, key, value)self.changed()", "docstring": "Detect dictionary set events and emit change events.", "id": "f3607:c4:m1"} {"signature": "@compiles(utcnow)def _default_utcnow(element, compiler, **kw):", "body": "return \"\"", "docstring": "default compilation handler.\n\n Note that there is no SQL \"utcnow()\" function; this is a\n \"fake\" string so that we can produce SQL strings that are dialect-agnostic,\n such as within tests.", "id": "f3608:m0"} {"signature": "@classmethoddef _reference_table(cls, ref_table):", "body": "cols = [(sa.Column(), refcol) for refcol in ref_table.primary_key]for col, refcol in cols:setattr(cls, \"\" % (ref_table.name, refcol.name), col)cls.__table__.append_constraint(sa.ForeignKeyConstraint(*zip(*cols)))", "docstring": "Create a foreign key reference from the local class to the given remote\n table.\n\n Adds column references to the declarative class and adds a\n ForeignKeyConstraint.", "id": "f3608:c0:m1"} {"signature": "def get_tm_session(session_factory, transaction_manager):", "body": "dbsession = session_factory()zope.sqlalchemy.register(dbsession, transaction_manager=transaction_manager)return dbsession", "docstring": "Get a ``sqlalchemy.orm.Session`` instance backed by a transaction.\n\nThis function will hook the session to the transaction manager which\nwill take care of committing any changes.\n\n- When using pyramid_tm it will automatically be committed or aborted\n depending on whether an exception is raised.\n\n- When using scripts you should wrap the session in a manager yourself.\n For example::\n\n import transaction\n\n engine = get_engine(settings)\n session_factory = get_session_factory(engine)\n with transaction.manager:\n dbsession = get_tm_session(session_factory, transaction.manager)", "id": "f3609:m2"}
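The MutableDict coerce/__setitem__ pair above is the standard SQLAlchemy mutation-tracking recipe. A self-contained sketch of how such a type is attached to a column (the model and column names are made up; recent SQLAlchemy ships an equivalent MutableDict in sqlalchemy.ext.mutable):

```python
import sqlalchemy as sa
from sqlalchemy.ext.mutable import MutableDict
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Item(Base):
    __tablename__ = 'item'
    id = sa.Column(sa.Integer, primary_key=True)
    # Wrapping the column type makes in-place dict edits observable:
    data = sa.Column(MutableDict.as_mutable(sa.JSON), default=dict)

# item.data['k'] = 'v' now triggers changed(), so the session sees the
# mutation on flush instead of silently persisting a stale dict.
```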
table.", "id": "f3611:m0"} {"signature": "def render_git_describe_long(pieces):", "body": "if pieces[\"\"]:rendered = pieces[\"\"]rendered += \"\" % (pieces[\"\"], pieces[\"\"])else:rendered = pieces[\"\"]if pieces[\"\"]:rendered += \"\"return rendered", "docstring": "TAG-DISTANCE-gHEX[-dirty].\n\n Like 'git describe --tags --dirty --always -long'.\n The distance/hash is unconditional.\n\n Exceptions:\n 1: no tags. HEX[-dirty] (note: no 'g' prefix)", "id": "f3617:m14"} {"signature": "def render_pep440(pieces):", "body": "if pieces[\"\"]:rendered = pieces[\"\"]if pieces[\"\"] or pieces[\"\"]:rendered += plus_or_dot(pieces)rendered += \"\" % (pieces[\"\"], pieces[\"\"])if pieces[\"\"]:rendered += \"\"else:rendered = \"\" % (pieces[\"\"],pieces[\"\"])if pieces[\"\"]:rendered += \"\"return rendered", "docstring": "Build up version string, with post-release \"local version identifier\".\n\n Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you\n get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty\n\n Exceptions:\n 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]", "id": "f3617:m9"} {"signature": "def register_vcs_handler(vcs, method): ", "body": "def decorate(f):\"\"\"\"\"\"if vcs not in HANDLERS:HANDLERS[vcs] = {}HANDLERS[vcs][method] = freturn freturn decorate", "docstring": "Decorator to mark a method as the handler for a particular VCS.", "id": "f3617:m2"} {"signature": "@register_vcs_handler(\"\", \"\")def git_versions_from_keywords(keywords, tag_prefix, verbose):", "body": "if not keywords:raise NotThisMethod(\"\")date = keywords.get(\"\")if date is not None:date = date.strip().replace(\"\", \"\", ).replace(\"\", \"\", )refnames = keywords[\"\"].strip()if refnames.startswith(\"\"):if verbose:print(\"\")raise NotThisMethod(\"\")refs = set([r.strip() for r in refnames.strip(\"\").split(\"\")])TAG = \"\"tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])if not tags:tags = set([r for r in refs if re.search(r'', r)])if verbose:print(\"\" % \"\".join(refs - tags))if verbose:print(\"\" % \"\".join(sorted(tags)))for ref in sorted(tags):if ref.startswith(tag_prefix):r = ref[len(tag_prefix):]if verbose:print(\"\" % r)return {\"\": r,\"\": keywords[\"\"].strip(),\"\": False, \"\": None,\"\": date}if verbose:print(\"\")return {\"\": \"\",\"\": keywords[\"\"].strip(),\"\": False, \"\": \"\", \"\": None}", "docstring": "Get version information from git keywords.", "id": "f3617:m6"} {"signature": "def render_git_describe(pieces):", "body": "if pieces[\"\"]:rendered = pieces[\"\"]if pieces[\"\"]:rendered += \"\" % (pieces[\"\"], pieces[\"\"])else:rendered = pieces[\"\"]if pieces[\"\"]:rendered += \"\"return rendered", "docstring": "TAG[-DISTANCE-gHEX][-dirty].\n\n Like 'git describe --tags --dirty --always'.\n\n Exceptions:\n 1: no tags. 
HEX[-dirty] (note: no 'g' prefix)", "id": "f3617:m13"} {"signature": "@register_vcs_handler(\"\", \"\")def git_get_keywords(versionfile_abs):", "body": "keywords = {}try:f = open(versionfile_abs, \"\")for line in f.readlines():if line.strip().startswith(\"\"):mo = re.search(r'', line)if mo:keywords[\"\"] = mo.group()if line.strip().startswith(\"\"):mo = re.search(r'', line)if mo:keywords[\"\"] = mo.group()if line.strip().startswith(\"\"):mo = re.search(r'', line)if mo:keywords[\"\"] = mo.group()f.close()except EnvironmentError:passreturn keywords", "docstring": "Extract version information from the given file.", "id": "f3617:m5"} {"signature": "def get_config():", "body": "cfg = VersioneerConfig()cfg.VCS = \"\"cfg.style = \"\"cfg.tag_prefix = \"\"cfg.parentdir_prefix = \"\"cfg.versionfile_source = \"\"cfg.verbose = Falsereturn cfg", "docstring": "Create, populate and return the VersioneerConfig() object.", "id": "f3617:m1"} {"signature": "def get_keywords():", "body": "git_refnames = \"\"git_full = \"\"git_date = \"\"keywords = {\"\": git_refnames, \"\": git_full, \"\": git_date}return keywords", "docstring": "Get the keywords needed to look up the version information.", "id": "f3617:m0"} {"signature": "def parse_line(line, document=None):", "body": "result = re.match(line_pattern, line)if result:_, lineno, offset, severity, msg = result.groups()lineno = int(lineno or )offset = int(offset or )errno = if severity == '':errno = diag = {'': '','': {'': {'': lineno - , '': offset},'': {'': lineno - , '': offset + }},'': msg,'': errno}if document:word = document.word_at_position(diag[''][''])if word:diag[''][''][''] = (diag[''][''][''] + len(word))return diag", "docstring": "Return a language-server diagnostic from a line of the Mypy error report;\noptionally, use the whole document to provide more context on it.", "id": "f3619:m0"} {"signature": "def versions_from_file(filename):", "body": "try:with open(filename) as f:contents = f.read()except EnvironmentError:raise NotThisMethod(\"\")mo = re.search(r\"\",contents, re.M | re.S)if not mo:mo = re.search(r\"\",contents, re.M | re.S)if not mo:raise NotThisMethod(\"\")return json.loads(mo.group())", "docstring": "Try to determine the version from _version.py if present.", "id": "f3620:m9"}
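parse_line above matches a scrubbed `line_pattern` and converts the hit into an LSP diagnostic. A runnable reconstruction, assuming Mypy's usual `file:line:col: severity: message` report format (the exact pattern is an assumption, not the project's original literal):

```python
import re

# Assumed shape of the scrubbed pattern: file, line, optional column,
# severity keyword, then the message text.
line_pattern = r'([^:]+):(\d+):(?:(\d+):)? (error|warning|note): (.*)'

def parse_line(line):
    m = re.match(line_pattern, line)
    if not m:
        return None
    _path, lineno, offset, severity, msg = m.groups()
    lineno, offset = int(lineno), int(offset or 1)
    return {
        'source': 'mypy',
        'range': {'start': {'line': lineno - 1, 'character': offset},
                  'end': {'line': lineno - 1, 'character': offset + 1}},
        'message': msg,
        'severity': 1 if severity == 'error' else 2,  # LSP: 1=Error, 2=Warning
    }

print(parse_line('app.py:12:5: error: Name "x" is not defined'))
```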
0.postDISTANCE[.dev0]", "id": "f3620:m14"} {"signature": "def scan_setup_py():", "body": "found = set()setters = Falseerrors = with open(\"\", \"\") as f:for line in f.readlines():if \"\" in line:found.add(\"\")if \"\" in line:found.add(\"\")if \"\" in line:found.add(\"\")if \"\" in line:setters = Trueif \"\" in line:setters = Trueif len(found) != :print(\"\")print(\"\")print(\"\")print(\"\")print(\"\")print(\"\")print(\"\")print(\"\")print(\"\")errors += if setters:print(\"\")print(\"\")print(\"\")print(\"\")errors += return errors", "docstring": "Validate the contents of setup.py against Versioneer's expectations.", "id": "f3620:m23"} {"signature": "def plus_or_dot(pieces):", "body": "if \"\" in pieces.get(\"\", \"\"):return \"\"return \"\"", "docstring": "Return a + if we don't already have one, else return a .", "id": "f3620:m11"} {"signature": "def render_pep440(pieces):", "body": "if pieces[\"\"]:rendered = pieces[\"\"]if pieces[\"\"] or pieces[\"\"]:rendered += plus_or_dot(pieces)rendered += \"\" % (pieces[\"\"], pieces[\"\"])if pieces[\"\"]:rendered += \"\"else:rendered = \"\" % (pieces[\"\"],pieces[\"\"])if pieces[\"\"]:rendered += \"\"return rendered", "docstring": "Build up version string, with post-release \"local version identifier\".\n\n Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you\n get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty\n\n Exceptions:\n 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]", "id": "f3620:m12"} {"signature": "def do_vcs_install(manifest_in, versionfile_source, ipy):", "body": "GITS = [\"\"]if sys.platform == \"\":GITS = [\"\", \"\"]files = [manifest_in, versionfile_source]if ipy:files.append(ipy)try:me = __file__if me.endswith(\"\") or me.endswith(\"\"):me = os.path.splitext(me)[] + \"\"versioneer_file = os.path.relpath(me)except NameError:versioneer_file = \"\"files.append(versioneer_file)present = Falsetry:f = open(\"\", \"\")for line in f.readlines():if line.strip().startswith(versionfile_source):if \"\" in line.strip().split()[:]:present = Truef.close()except EnvironmentError:passif not present:f = open(\"\", \"\")f.write(\"\" % versionfile_source)f.close()files.append(\"\")run_command(GITS, [\"\", \"\"] + files)", "docstring": "Git-specific installation logic for Versioneer.\n\n For Git, this means creating/changing .gitattributes to mark _version.py\n for export-subst keyword substitution.", "id": "f3620:m7"} {"signature": "def render(pieces, style):", "body": "if pieces[\"\"]:return {\"\": \"\",\"\": pieces.get(\"\"),\"\": None,\"\": pieces[\"\"],\"\": None}if not style or style == \"\":style = \"\" if style == \"\":rendered = render_pep440(pieces)elif style == \"\":rendered = render_pep440_pre(pieces)elif style == \"\":rendered = render_pep440_post(pieces)elif style == \"\":rendered = render_pep440_old(pieces)elif style == \"\":rendered = render_git_describe(pieces)elif style == \"\":rendered = render_git_describe_long(pieces)else:raise ValueError(\"\" % style)return {\"\": rendered, \"\": pieces[\"\"],\"\": pieces[\"\"], \"\": None,\"\": pieces.get(\"\")}", "docstring": "Render the given version pieces into the requested style.", "id": "f3620:m18"} {"signature": "def write_to_version_file(filename, versions):", "body": "os.unlink(filename)contents = json.dumps(versions, sort_keys=True,indent=, separators=(\"\", \"\"))with open(filename, \"\") as f:f.write(SHORT_VERSION_PY % contents)print(\"\" % (filename, versions[\"\"]))", "docstring": "Write the given version number to the given 
_version.py file.", "id": "f3620:m10"} {"signature": "def render_git_describe_long(pieces):", "body": "if pieces[\"\"]:rendered = pieces[\"\"]rendered += \"\" % (pieces[\"\"], pieces[\"\"])else:rendered = pieces[\"\"]if pieces[\"\"]:rendered += \"\"return rendered", "docstring": "TAG-DISTANCE-gHEX[-dirty].\n\n Like 'git describe --tags --dirty --always -long'.\n The distance/hash is unconditional.\n\n Exceptions:\n 1: no tags. HEX[-dirty] (note: no 'g' prefix)", "id": "f3620:m17"} {"signature": "def render_pep440_pre(pieces):", "body": "if pieces[\"\"]:rendered = pieces[\"\"]if pieces[\"\"]:rendered += \"\" % pieces[\"\"]else:rendered = \"\" % pieces[\"\"]return rendered", "docstring": "TAG[.post.devDISTANCE] -- No -dirty.\n\n Exceptions:\n 1: no tags. 0.post.devDISTANCE", "id": "f3620:m13"} {"signature": "def get_config_from_root(root):", "body": "setup_cfg = os.path.join(root, \"\")parser = configparser.SafeConfigParser()with open(setup_cfg, \"\") as f:parser.readfp(f)VCS = parser.get(\"\", \"\") def get(parser, name):if parser.has_option(\"\", name):return parser.get(\"\", name)return Nonecfg = VersioneerConfig()cfg.VCS = VCScfg.style = get(parser, \"\") or \"\"cfg.versionfile_source = get(parser, \"\")cfg.versionfile_build = get(parser, \"\")cfg.tag_prefix = get(parser, \"\")if cfg.tag_prefix in (\"\", ''):cfg.tag_prefix = \"\"cfg.parentdir_prefix = get(parser, \"\")cfg.verbose = get(parser, \"\")return cfg", "docstring": "Read the project setup.cfg file to determine Versioneer config.", "id": "f3620:m1"} {"signature": "def get_versions(verbose=False):", "body": "if \"\" in sys.modules:del sys.modules[\"\"]root = get_root()cfg = get_config_from_root(root)assert cfg.VCS is not None, \"\"handlers = HANDLERS.get(cfg.VCS)assert handlers, \"\" % cfg.VCSverbose = verbose or cfg.verboseassert cfg.versionfile_source is not None,\"\"assert cfg.tag_prefix is not None, \"\"versionfile_abs = os.path.join(root, cfg.versionfile_source)get_keywords_f = handlers.get(\"\")from_keywords_f = handlers.get(\"\")if get_keywords_f and from_keywords_f:try:keywords = get_keywords_f(versionfile_abs)ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)if verbose:print(\"\" % ver)return verexcept NotThisMethod:passtry:ver = versions_from_file(versionfile_abs)if verbose:print(\"\" % (versionfile_abs, ver))return verexcept NotThisMethod:passfrom_vcs_f = handlers.get(\"\")if from_vcs_f:try:pieces = from_vcs_f(cfg.tag_prefix, root, verbose)ver = render(pieces, cfg.style)if verbose:print(\"\" % ver)return verexcept NotThisMethod:passtry:if cfg.parentdir_prefix:ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)if verbose:print(\"\" % ver)return verexcept NotThisMethod:passif verbose:print(\"\")return {\"\": \"\", \"\": None,\"\": None, \"\": \"\",\"\": None}", "docstring": "Get the project version from whatever source is available.\n\n Returns dict with two keys: 'version' and 'full'.", "id": "f3620:m19"} {"signature": "def _default_json_default(obj):", "body": "if isinstance(obj, (datetime.datetime, datetime.date, datetime.time)):return obj.strftime(default_date_fmt)else:return str(obj)", "docstring": "Coerce everything to strings.\n All objects representing time get output according to default_date_fmt.", "id": "f3624:m0"} {"signature": "@_log_fn()def notice(**kwargs):", "body": "pass", "docstring": "log with pyzlog level NOTICE", "id": "f3624:m6"} {"signature": "def init_logs(self, path=None, target=None, level=None,server_hostname=None, extra=None):", "body": "path = path if path is not None else 
self.pathtarget = target if target is not None else self.targetlevel = level if level is not None else logging.DEBUGserver_hostname = (server_hostname if server_hostname is not Noneelse '')extra = extra if extra is not None else {'': None}init_logs(path=path, target=target, level=level,server_hostname=server_hostname, fields=extra)", "docstring": "Simple canned way to initialize pyzlog.\n\n Initialize pyzlog for tests. If path or target are not\n specified, will default to path and target properties on the\n object. level will default to logging.DEBUG, server_hostname\n defaults to localhost, and extra defaults to {'extra': None}\n\n :param path: path to find the log file\n :param target: name of the log file\n :param level: log level for this instance\n :param server_hostname: hostname to put in each entry\n :param extra: whitelist/defaults of extra fields to add to each entry\n :type path: string\n :type target: string\n :type level: int\n :type server_hostname: string\n :type extra: dict", "id": "f3624:c0:m2"} {"signature": "@_log_fn()def critical(**kwargs):", "body": "pass", "docstring": "log with pyzlog level CRITICAL", "id": "f3624:m10"} {"signature": "@_log_fn()def emergency(**kwargs):", "body": "pass", "docstring": "log with pyzlog level EMERGENCY", "id": "f3624:m4"} {"signature": "def init_logs(path=None,target=None,logger_name='',level=logging.DEBUG,maxBytes=**,backupCount=,application_name='',server_hostname=None,fields=None):", "body": "log_file = os.path.abspath(os.path.join(path, target))logger = logging.getLogger(logger_name)logger.setLevel(level)handler = logging.handlers.RotatingFileHandler(log_file, maxBytes=maxBytes, backupCount=backupCount)handler.setLevel(level)handler.setFormatter(JsonFormatter(application_name=application_name,server_hostname=server_hostname,fields=fields))logger.addHandler(handler)", "docstring": "Initialize the zlogger.\n\n Sets up a rotating file handler to the specified path and file with\n the given size and backup count limits, sets the default\n application_name, server_hostname, and default/whitelist fields.\n\n :param path: path to write the log file\n :param target: name of the log file\n :param logger_name: name of the logger (defaults to root)\n :param level: log level for this logger (defaults to logging.DEBUG)\n :param maxBytes: size of the file before rotation (default 1MB)\n :param application_name: app name to add to each log entry\n :param server_hostname: hostname to add to each log entry\n :param fields: default/whitelist fields.\n :type path: string\n :type target: string\n :type logger_name: string\n :type level: int\n :type maxBytes: int\n :type backupCount: int\n :type application_name: string\n :type server_hostname: string\n :type fields: dict", "id": "f3624:m1"}
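init_logs above wires a RotatingFileHandler to a JSON formatter; its size and count literals were scrubbed. A minimal standalone equivalent (the JSON field names here are illustrative, not pyzlog's actual schema):

```python
import json
import logging
import logging.handlers

class JsonFormatter(logging.Formatter):
    def format(self, record):
        # Serialize a few standard attributes; pyzlog adds more fields.
        return json.dumps({'level': record.levelname,
                           'name': record.name,
                           'message': record.getMessage()})

handler = logging.handlers.RotatingFileHandler(
    'app.log', maxBytes=2**20, backupCount=5)  # rotate at ~1 MB, keep 5
handler.setFormatter(JsonFormatter())

logger = logging.getLogger('demo')
logger.addHandler(handler)
logger.warning('hello')  # -> {"level": "WARNING", "name": "demo", ...}
```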
{"signature": "def render_to_response(self, context, **response_kwargs):", "body": "if self.request.is_ajax():template = self.page_templateelse:template = self.get_template_names()return self.response_class(request=self.request,template=template,context=context,**response_kwargs)", "docstring": "Returns a response with a template depending on whether the request is ajax\nor not, and renders it with the given context.", "id": "f3630:c2:m3"} {"signature": "def aggregate_history(self, ip, days_limit=None):", "body": "first_date = Nonelast_date = Noneprec_asn = Noneprec_block = Nonefor entry in self.history(ip, days_limit):if entry is None:continuedate, asn, block = entryif first_date is None:last_date = datefirst_date = dateprec_asn = asnprec_block = blockelif prec_asn == asn and prec_block == block:first_date = dateelse:yield first_date, last_date, prec_asn, prec_blocklast_date = datefirst_date = dateprec_asn = asnprec_block = blockif first_date is not None:yield first_date, last_date, prec_asn, prec_block", "docstring": "Get the full history of an IP, aggregate the result instead of\nreturning one line per day.\n\n:param ip: IP address to search for\n:param days_limit: Max amount of days to query. (None means no limit)\n\n:rtype: list. For each change: FirstDay, LastDay, ASN, Block", "id": "f3639:c0:m9"} {"signature": "def downloadURL(url):", "body": "urlretrieve(url, os.path.join(c.raw_data, path_temp_bviewfile))os.rename(os.path.join(c.raw_data, path_temp_bviewfile), c.path_bviewfile)", "docstring": "Unconditionally download the URL into a temporary directory.\nWhen finished, the file is moved into the real directory.\nThis way another process will not attempt to extract an incomplete file.", "id": "f3641:m1"} {"signature": "def downloadURL(url, filename):", "body": "path_temp_bviewfile = os.path.join(c.raw_data, c.bview_dir, '', filename)path_bviewfile = os.path.join(c.raw_data, c.bview_dir, filename)try:f = urlopen(url)except:return Falseif f.getcode() != :publisher.warning(''.format(url, f.getcode()))return Falsetry:with open(path_temp_bviewfile, '') as outfile:outfile.write(f.read())os.rename(path_temp_bviewfile, path_bviewfile)except:os.remove(path_temp_bviewfile)return Falsereturn True", "docstring": "Unconditionally download the URL into a temporary directory.\nWhen finished, the file is moved into the real directory.\nThis way another process will not attempt to extract an incomplete file.", "id": "f3643:m1"} {"signature": "def already_downloaded(filename):", "body": "cur_file = os.path.join(c.bview_dir, filename)old_file = os.path.join(c.bview_dir, '', filename)if not os.path.exists(cur_file) and not os.path.exists(old_file):return Falsereturn True", "docstring": "Verify that the file has not already been downloaded.", "id": "f3643:m2"} {"signature": "def run_splitted_processing(max_simultaneous_processes, process_name,filenames):", "body": "pids = []while len(filenames) > :while len(filenames) > and len(pids) < max_simultaneous_processes:filename = filenames.pop()pids.append(service_start(service=process_name,param=['', filename, '',imported_day]))while len(pids) == max_simultaneous_processes:time.sleep(sleep_timer)pids = update_running_pids(pids)while len(pids) > :time.sleep(sleep_timer)pids = update_running_pids(pids)", "docstring": "Run processes which push the routing dump of the RIPE in a redis\ndatabase.\nThe dump has been split into multiple files and each process runs\non one of these files.", "id": "f3646:m3"}
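Both downloadURL variants above rely on the write-to-temp-then-rename idiom so that a concurrently running extractor never sees a half-written dump. The idiom in isolation (URL and paths are illustrative):

```python
import os
from urllib.request import urlopen

def download_atomic(url, final_path):
    tmp_path = final_path + '.tmp'
    with urlopen(url) as resp, open(tmp_path, 'wb') as out:
        out.write(resp.read())
    # rename() is atomic on POSIX within one filesystem, so the file
    # only appears under its final name once it is complete.
    os.rename(tmp_path, final_path)
```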
{"signature": "@app.route('', methods=[''])def __entry_point():", "body": "ip = request.remote_addrua = request.headers.get('', '')method = request.json.get('')if method is None:__query_logging(ip, ua, method, level='')return json.dumps({'': ''})if method not in authorized_methods:__query_logging(ip, ua, method, level='')return json.dumps({'': ''})fct = globals().get(method)if fct is None:__query_logging(ip, ua, method, level='')return json.dumps({'': ''})if request.json.get('') is None:__query_logging(ip, ua, method, level='')return json.dumps({'': ''})try:result = fct(request.json)__query_logging(ip, ua, method, request.json.get(''),request.json.get(''), request.json.get(''))return resultexcept Exception:__query_logging(ip, ua, method, request.json.get(''), level='')return json.dumps({'': ''})", "docstring": "Function called when a query is made on /json. Expects a JSON\nobject with at least a 'method' entry.", "id": "f3650:m2"} {"signature": "def compile(fmt, names=None):", "body": "if names is None:return CompiledFormat(fmt)else:return CompiledFormatDict(fmt, names)", "docstring": "Compile given format string `fmt` and return a compiled format\n object that can be used to pack and/or unpack data multiple times.\n\n Returns a :class:`~bitstruct.CompiledFormat` object if `names` is\n ``None``, and otherwise a :class:`~bitstruct.CompiledFormatDict`\n object.\n\n See :func:`~bitstruct.pack_dict()` for details on `names`.", "id": "f3652:m13"} {"signature": "def pack_into(fmt, buf, offset, *args, **kwargs):", "body": "return CompiledFormat(fmt).pack_into(buf,offset,*args,**kwargs)", "docstring": "Pack given values v1, v2, ... into given bytearray `buf`, starting\n at given bit offset `offset`. Pack according to given format\n string `fmt`. Give `fill_padding` as ``False`` to leave padding\n bits in `buf` unmodified.", "id": "f3652:m5"} {"signature": "def pack(fmt, *args):", "body": "return CompiledFormat(fmt).pack(*args)", "docstring": "Return a bytes object containing the values v1, v2, ... packed\n according to given format string `fmt`. If the total number of\n bits are not a multiple of 8, padding will be added at the end of\n the last byte.\n\n `fmt` is a string of bitorder-type-length groups, and optionally a\n byteorder identifier after the groups. Bitorder and byteorder may\n be omitted.\n\n Bitorder is either ``>`` or ``<``, where ``>`` means MSB first and\n ``<`` means LSB first. If bitorder is omitted, the previous\n values' bitorder is used for the current value. For example, in\n the format string ``'u1<u2'``, LSB first is used for ``u2``.\n\n Byteorder is either ``>`` or ``<``, where ``>`` means most\n significant byte first and ``<`` means least significant byte\n first. If byteorder is omitted, most significant byte first is\n used.\n\n There are eight types; ``u``, ``s``, ``f``, ``b``, ``t``, ``r``,\n ``p`` and ``P``.\n\n - ``u`` -- unsigned integer\n - ``s`` -- signed integer\n - ``f`` -- floating point number of 16, 32, or 64 bits\n - ``b`` -- boolean\n - ``t`` -- text (ascii or utf-8)\n - ``r`` -- raw, bytes\n - ``p`` -- padding with zeros, ignore\n - ``P`` -- padding with ones, ignore\n\n Length is the number of bits to pack the value into.\n\n Example format string with default bit and byte ordering:\n ``'u1u3p7s16'``\n\n Same format string, but with least significant byte first:\n ``'u1u3p7s16<'``\n\n Same format string, but with LSB first (``<`` prefix) and least\n significant byte first (``<`` suffix): ``'<u1u3p7s16<'``", "id": "f3652:m3"} {"signature": "def pack_into(self, buf, offset, *args, **kwargs):", "body": "if len(args) < self._number_of_arguments:raise Error(\"\".format(self._number_of_arguments,len(args)))self.pack_into_any(buf, offset, args, **kwargs)", "docstring": "See :func:`~bitstruct.pack_into()`.", "id": "f3652:c12:m3"}
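The pack/unpack grammar documented above comes from the bitstruct package; a quick round-trip with a real format string:

```python
import bitstruct

# 1 + 3 + 4 (padding) + 16 bits = 24 bits = exactly 3 bytes;
# the p4 padding group consumes no argument.
packed = bitstruct.pack('u1u3p4s16', 1, 2, -500)
print(len(packed))                            # 3
print(bitstruct.unpack('u1u3p4s16', packed))  # (1, 2, -500)
```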
The result is a\n tuple even if it contains exactly one item.", "id": "f3652:m6"} {"signature": "def unpack_from_dict(fmt, names, data, offset=):", "body": "return CompiledFormatDict(fmt, names).unpack_from(data, offset)", "docstring": "Same as :func:`~bitstruct.unpack_from()`, but returns a\n dictionary.\n\n See :func:`~bitstruct.pack_dict()` for details on `names`.", "id": "f3652:m10"} {"signature": "def make_seekable(fileobj):", "body": "if sys.version_info < (, ) and isinstance(fileobj, file):filename = fileobj.namefileobj = io.FileIO(fileobj.fileno(), closefd=False)fileobj.name = filenameassert isinstance(fileobj, io.IOBase),"\"% type(fileobj)return fileobj if fileobj.seekable()else ArchiveTemp(fileobj)", "docstring": "If the file-object is not seekable, return ArchiveTemp of the file-object,\notherwise return the file-object itself", "id": "f3671:m0"} {"signature": "def __init__(self, field, *byfields):", "body": "fieldstrs = []if len(byfields) == and isinstance(byfields[], type) andissubclass(byfields[], SortDirection):byfields = [byfields[](field)]for f in byfields:fieldstrs += [f.field, f.DIRSTRING]args = [field]if fieldstrs:args += [''] + fieldstrssuper(first_value, self).__init__(*args)self._field = field", "docstring": "Selects the first value of the given field within the group.\n\n### Parameter\n\n- **field**: Source field used for the value\n- **byfields**: How to sort the results. This can be either the\n *class* of `aggregation.Asc` or `aggregation.Desc` in which\n case the field `field` is also used as the sort input.\n\n `byfields` can also be one or more *instances* of `Asc` or `Desc`\n indicating the sort order for these fields", "id": "f3676:c11:m0"} {"signature": "def drop_index(self):", "body": "return self.redis.execute_command(self.DROP_CMD, self.index_name)", "docstring": "Drop the index if it exists", "id": "f3679:c5:m3"} {"signature": "def search(self, query):", "body": "args, query = self._mk_query_args(query)st = time.time()res = self.redis.execute_command(self.SEARCH_CMD, *args)return Result(res,not query._no_content,duration=(time.time() - st) * ,has_payload=query._with_payloads)", "docstring": "Search the index for a given query, and return a result of documents\n\n### Parameters\n\n- **query**: the search query. Either a text for simple queries with default parameters, or a Query object for complex queries.\n See RediSearch's documentation on query format\n- **snippet_sizes**: A dictionary of {field: snippet_size} used to trim and format the result. e.g. {'body': 500}", "id": "f3679:c5:m10"} {"signature": "def add_document(self, doc_id, nosave=False, score=, payload=None,replace=False, partial=False, language=None, **fields):", "body": "return self._add_document(doc_id, conn=None, nosave=nosave, score=score, payload=payload, replace=replace,partial=partial, language=language, **fields)", "docstring": "Add a single document to the index.\n\n### Parameters\n\n- **doc_id**: the id of the saved document.\n- **nosave**: if set to true, we just index the document, and don't save a copy of it.
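
The pack/unpack/compile records above are from the bitstruct module. A short usage sketch of the format-string groups described in the pack docstring (requires the bitstruct package):

```
import bitstruct

# 'u1u3p4u8': 1-bit uint, 3-bit uint, 4 padding bits, 8-bit uint = 16 bits.
packed = bitstruct.pack('u1u3p4u8', 1, 5, 200)      # -> 2 bytes
assert bitstruct.unpack('u1u3p4u8', packed) == (1, 5, 200)

# Compile once when the same format is used many times.
cf = bitstruct.compile('u1u3p4u8')
assert cf.unpack(cf.pack(1, 5, 200)) == (1, 5, 200)
```
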
This means that searches will just return ids.\n- **score**: the document ranking, between 0.0 and 1.0 \n- **payload**: optional inner-index payload we can save for fast access in scoring functions\n- **replace**: if True, and the document already is in the index, we perform an update and reindex the document\n- **partial**: if True, the fields specified will be added to the existing document.\n This has the added benefit that any fields specified with `no_index`\n will not be reindexed again. Implies `replace`\n- **language**: Specify the language used for document tokenization.\n- **fields** kwargs dictionary of the document fields to be saved and/or indexed. \n NOTE: Geo points should be encoded as strings of \"lon,lat\"", "id": "f3679:c5:m5"} {"signature": "def in_order(self):", "body": "self._in_order = Truereturn self", "docstring": "Match only documents where the query terms appear in the same order in the document.\ni.e. for the query 'hello world', we do not match 'world hello'", "id": "f3680:c0:m9"} {"signature": "def slop(self, slop):", "body": "self._slop = slopreturn self", "docstring": "Allow a maximum of N intervening non-matched terms between phrase terms (0 means exact phrase)", "id": "f3680:c0:m8"} {"signature": "def paging(self, offset, num):", "body": "self._offset = offsetself._num = numreturn self", "docstring": "Set the paging for the query (defaults to 0..10).\n\n- **offset**: Paging offset for the results. Defaults to 0\n- **num**: How many results do we want", "id": "f3680:c0:m11"} {"signature": "def get_args(self):", "body": "args = [self._query_string]if self._no_content:args.append('')if self._fields:args.append('')args.append(len(self._fields))args += self._fieldsif self._verbatim:args.append('')if self._no_stopwords:args.append('')if self._filters:for flt in self._filters:assert isinstance(flt, Filter)args += flt.argsif self._with_payloads:args.append('')if self._ids:args.append('')args.append(len(self._ids))args += self._idsif self._slop >= :args += ['', self._slop]if self._in_order:args.append('')if self._return_fields:args.append('')args.append(len(self._return_fields))args += self._return_fieldsif self._sortby:assert isinstance(self._sortby, SortbyField)args.append('')args += self._sortby.argsif self._language:args += ['', self._language]args += self._summarize_fields + self._highlight_fieldsargs += [\"\", self._offset, self._num]return args", "docstring": "Format the redis arguments for this query and return them", "id": "f3680:c0:m10"} {"signature": "def summarize(self, fields=None, context_len=None, num_frags=None, sep=None):", "body": "args = ['']fields = self._mk_field_list(fields)if fields:args += ['', str(len(fields))] + fieldsif context_len is not None:args += ['', str(context_len)]if num_frags is not None:args += ['', str(num_frags)]if sep is not None:args += ['', sep]self._summarize_fields = argsreturn self", "docstring": "Return an abridged format of the field, containing only the segments of\nthe field which contain the matching term(s).\n\nIf `fields` is specified, then only the mentioned fields are\nsummarized; otherwise all results are summarized.\n\nServer side defaults are used for each option (except `fields`) if not specified\n\n- **fields** List of fields to summarize.
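
The add_document/search records above mirror the redisearch-py client. A minimal usage sketch, assuming a RediSearch-enabled Redis on localhost and an illustrative index name and schema:

```
from redisearch import Client, Query, TextField

client = Client('myIdx')                      # index name is illustrative
client.create_index([TextField('body')])
client.add_document('doc1', body='hello world', score=1.0)
res = client.search(Query('hello').paging(0, 10))
print(res.total, [d.id for d in res.docs])
```
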
All fields are summarized if not specified\n- **context_len** Amount of context to include with each fragment\n- **num_frags** Number of fragments per document\n- **sep** Separator string to separate fragments", "id": "f3680:c0:m5"} {"signature": "def add_filter(self, flt):", "body": "self._filters.append(flt)return self", "docstring": "Add a numeric or geo filter to the query. \n**Currently only one of each filter is supported by the engine**\n\n- **flt**: A NumericFilter or GeoFilter object, used on a corresponding field", "id": "f3680:c0:m17"} {"signature": "def highlight(self, fields=None, tags=None):", "body": "args = ['']fields = self._mk_field_list(fields)if fields:args += ['', str(len(fields))] + fieldsif tags:args += [''] + list(tags)self._highlight_fields = argsreturn self", "docstring": "Apply specified markup to matched term(s) within the returned field(s)\n\n- **fields** If specified then only those mentioned fields are highlighted, otherwise all fields are highlighted\n- **tags** A list of two strings to surround the match.", "id": "f3680:c0:m6"} {"signature": "def __init__(self, query_string):", "body": "self._query_string = query_stringself._offset = self._num = self._no_content = Falseself._no_stopwords = Falseself._fields = Noneself._verbatim = Falseself._with_payloads = Falseself._filters = list()self._ids = Noneself._slop = -self._in_order = Falseself._sortby = Noneself._return_fields = []self._summarize_fields = []self._highlight_fields = []self._language = None", "docstring": "Create a new query object. \nThe query string is set in the constructor, and other options have setter functions.", "id": "f3680:c0:m0"} {"signature": "def query_string(self):", "body": "return self._query_string", "docstring": "Return the query string of this query only", "id": "f3680:c0:m1"} {"signature": "def sort_by(self, field, asc=True):", "body": "self._sortby = SortbyField(field, asc)return self", "docstring": "Add a sortby field to the query\n\n- **field** - the name of the field to sort by\n- **asc** - when `True`, sorting will be done in ascending order", "id": "f3680:c0:m18"} {"signature": "def limit_ids(self, *ids):", "body": "self._ids = idsreturn self", "docstring": "Limit the results to a specific set of pre-known document ids of any length", "id": "f3680:c0:m2"} {"signature": "def lt(n):", "body": "return between(None, n, inclusive_max=False)", "docstring": "Match any value less than n", "id": "f3682:m3"} {"signature": "def delete(self, string):", "body": "return self.redis.execute_command(AutoCompleter.SUGDEL_COMMAND, self.key, string)", "docstring": "Delete a string from the AutoCompleter index.\nReturns 1 if the string was found and deleted, 0 otherwise", "id": "f3684:c2:m3"} {"signature": "def __init__(self, key, host='', port=, conn = None):", "body": "self.key = keyself.redis = conn if conn is not None else Redis(connection_pool = ConnectionPool(host=host, port=port))", "docstring": "Create a new AutoCompleter client for the given key, and optional host and port\n\nIf conn is not None, we employ an already existing redis connection", "id": "f3684:c2:m0"} {"signature": "def load(self, *fields):", "body": "self._loadfields.extend(fields)return self", "docstring": "Indicate the fields to be returned in the response.
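
Each Query setter above returns self, so options compose as a fluent chain. A sketch combining several of the records above (the 'date' field is illustrative and assumed sortable):

```
from redisearch import Query

# Build the query offline; it only becomes FT.SEARCH arguments when executed.
q = (Query('hello world')
     .slop(1)                      # allow one intervening term between phrase terms
     .paging(0, 5)                 # return results 0..4
     .sort_by('date', asc=False)   # newest first
     .highlight(fields=['body'], tags=('<b>', '</b>')))
```
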
These fields are\nreturned in addition to any others implicitly specified.\n\n### Parameters\n\n- **fields**: One or more fields in the format of `@field`", "id": "f3685:c6:m1"} {"signature": "def sort_by(self, *fields, **kwargs):", "body": "self._max = kwargs.get('', )if isinstance(fields, (string_types, SortDirection)):fields = [fields]for f in fields:if isinstance(f, SortDirection):self._sortby += [f.field, f.DIRSTRING]else:self._sortby.append(f)return self", "docstring": "Indicate how the results should be sorted. This can also be used for\n*top-N* style queries\n\n### Parameters\n\n- **fields**: The fields by which to sort. This can be either a single\n field or a list of fields. If you wish to specify order, you can\n use the `Asc` or `Desc` wrapper classes.\n- **max**: Maximum number of results to return. This can be used instead\n of `LIMIT` and is also faster.\n\n\nExample of sorting by `foo` ascending and `bar` descending:\n\n```\nsort_by(Asc('@foo'), Desc('@bar'))\n```\n\nReturn the top 10 customers:\n\n```\nAggregateRequest()\\\n .group_by('@customer', r.sum('@paid').alias(FIELDNAME))\\\n .sort_by(Desc('@paid'), max=10)\n```", "id": "f3685:c6:m5"} {"signature": "@require_template_debug@register.simple_tagdef attributes(var):", "body": "attrs = get_attributes(var)pprint(attrs)return attrs", "docstring": "Given a variable in the template's context, print and return the list of\nattributes thare accessible inside of the template. For example, private\nattributes or callables that require arguments are excluded.", "id": "f3701:m3"} {"signature": "@require_template_debug@register.simple_tag(takes_context=True)def set_trace(context):", "body": "try:import ipdb as pdbexcept ImportError:import pdbprint(\"\")print(\"\")render = lambda s: template.Template(s).render(context)availables = get_variables(context)pprint(availables)print('')print('')print('')for var in availables:locals()[var] = context[var]pdb.set_trace()return ''", "docstring": "Start a pdb set_trace inside of the template with the context available as\n'context'. 
Uses ipdb if available.", "id": "f3701:m5"} {"signature": "def _display_details(var_data):", "body": "meta_keys = (key for key in list(var_data.keys())if key.startswith(''))for key in meta_keys:display_key = key[:].capitalize()pprint(''.format(display_key, var_data.pop(key)))pprint(var_data)", "docstring": "Given a dictionary of variable attribute data from get_details, display the\ndata in the terminal.", "id": "f3701:m1"} {"signature": "def get_attributes(var):", "body": "is_valid = partial(is_valid_in_template, var)return list(filter(is_valid, dir(var)))", "docstring": "Given a variable, return the list of attributes that are available inside\nof a template", "id": "f3703:m4"} {"signature": "def _get_detail_value(var, attr):", "body": "value = getattr(var, attr)kls = getattr(getattr(value, '', ''), '', '')if kls in ('', '', ''):return klsif callable(value):return ''return value", "docstring": "Given a variable and one of its attributes that are available inside of\na template, return its 'method' if it is a callable, its class name if it\nis a model manager, otherwise return its value", "id": "f3703:m3"} {"signature": "def get_details(var):", "body": "var_data = {}module = getattr(var, '', '')kls = getattr(getattr(var, '', ''), '', '')if module:var_data[''] = moduleif kls:var_data[''] = klsfor attr in get_attributes(var):value = _get_detail_value(var, attr)if value is not None:var_data[attr] = valuereturn var_data", "docstring": "Given a variable inside the context, obtain the attributes/callables,\ntheir values where possible, and the module name and class name if possible", "id": "f3703:m2"} {"signature": "def _flatten(iterable):", "body": "for i in iterable:if isinstance(i, Iterable) and not isinstance(i, string_types):for sub_i in _flatten(i):yield sub_ielse:yield i", "docstring": "Given an iterable with nested iterables, generate a flat iterable", "id": "f3703:m0"} {"signature": "def volcano(differential_dfs, title='', scripts_mode=\"\", data_mode=\"\",organism=\"\", q_value_column_name=\"\", log2FC_column_name=\"\",output_dir=\"\", filename=\"\", version=this_version):", "body": "output_dir = Path(output_dir)output_dir.mkdir(exist_ok=True, parents=True)if isinstance(differential_dfs, pd.DataFrame):differential_dfs = {'': differential_dfs}for name, df in differential_dfs.items():df = df[[q_value_column_name, log2FC_column_name]]df.columns = ['', '']df = df.round()_verify_differential_df(df)del differential_dfs[name]differential_dfs[_sanitize(name)] = dfnames_and_differentials = f\"\"data_block = _data_block(data_mode, [('', names_and_differentials)], output_dir, include_gene_sets=False, organism=organism)scripts = third_party_scripts + [CDN_url(version)+\"\", CDN_url(version)+\"\", CDN_url(version)+\"\"]scripts_block = _scripts_block(scripts, scripts_mode, output_dir)html = templateEnv.get_template('').render(title=title, scripts_block=scripts_block+''+data_block, organism=\"\")(output_dir / filename).write_text(html)return (output_dir / filename).resolve()", "docstring": "Arguments:\n differential_dfs (dict or pandas.DataFrame): python dict of names to pandas dataframes, or a single dataframe, indexed by gene symbols which must have columns named log2FC and qval.\n title (str): The title of the plot (to be embedded in the html).\n scripts_mode (str): Choose from [`\"CDN\"`, `\"directory\"`, `\"inline\"`]:\n\n - `\"CDN\"` compiles a single HTML page with links to scripts hosted on a CDN,\n\n - `\"directory\"` compiles a directory with all scripts locally cached,\n\n - `\"inline\"`
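
get_attributes (f3703:m4) above filters dir() output down to names a Django template could actually use. A standalone sketch of the same introspection idea; the is_valid_in_template check is simplified here relative to the real one:

```
from functools import partial

def is_valid_in_template(var, attr):
    # Simplified stand-in: skip private names and anything whose access raises.
    if attr.startswith('_'):
        return False
    try:
        getattr(var, attr)
    except Exception:
        return False
    return True

def get_attributes(var):
    is_valid = partial(is_valid_in_template, var)
    return list(filter(is_valid, dir(var)))

print(get_attributes('abc')[:3])  # e.g. ['capitalize', 'casefold', 'center']
```
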
compiles a single HTML file with all scripts/styles inlined.\n\n data_mode (str): Choose from [\"directory\", \"inline\"]:\n\n - \"directory\" compiles a directory with all data locally cached,\n\n - \"inline\" compiles a single HTML file with all data inlined.\n\n organism (str): `\"human\"` or `\"mouse\"`\n q_value_column_name (str):\n log2FC_column_name (str):\n output_dir (str): the directory in which to output the file\n filename (str): the filename of the output file\n version (str): the version of the javascripts to use.\n Leave the default to pin the version, or choose \"latest\" to get updates,\n or choose part of the version string to get minor updates.\nReturns:\n Path: The filepath which the html was outputted to.", "id": "f3712:m9"} {"signature": "def heatmap(genes_by_samples_matrix, sample_attributes, title='', scripts_mode=\"\", data_mode=\"\",organism=\"\", separate_zscore_by=[\"\"],output_dir=\"\", filename=\"\", version=this_version):", "body": "output_dir = Path(output_dir)output_dir.mkdir(exist_ok=True, parents=True)_verify_sample_by_genes_matrix(genes_by_samples_matrix)_verify_sample_attributes(genes_by_samples_matrix, sample_attributes)genes_by_samples_matrix = genes_by_samples_matrix.round()matrix = f\"\"classes = f\"\"data_block = _data_block(data_mode, [('', matrix), ('', classes)], output_dir, organism=organism)scripts = third_party_scripts + [CDN_url(version)+\"\", CDN_url(version)+\"\", CDN_url(version)+\"\"]scripts_block = _scripts_block(scripts, scripts_mode, output_dir)html = templateEnv.get_template('').render(title=title, scripts_block=scripts_block+''+data_block, separate_zscore_by=separate_zscore_by)(output_dir / filename).write_text(html)return (output_dir / filename).resolve()", "docstring": "Arguments:\n genes_by_samples_matrix (pandas.DataFrame): dataframe indexed by genes, columns are samples\n sample_attributes (pandas.DataFrame): dataframe indexed by samples, columns are sample attributes (e.g. 
classes)\n title (str): The title of the plot (to be embedded in the html).\n scripts_mode (str): Choose from [`\"CDN\"`, `\"directory\"`, `\"inline\"`]:\n\n - `\"CDN\"` compiles a single HTML page with links to scripts hosted on a CDN,\n\n - `\"directory\"` compiles a directory with all scripts locally cached,\n\n - `\"inline\"` compiles a single HTML file with all scripts/styles inlined.\n\n data_mode (str): Choose from [\"directory\", \"inline\"]:\n\n - \"directory\" compiles a directory with all data locally cached,\n\n - \"inline\" compiles a single HTML file with all data inlined.\n\n organism (str): `\"human\"` or `\"mouse\"`\n separate_zscore_by (list):\n output_dir (str): the directory in which to output the file\n filename (str): the filename of the output file\n version (str): the version of the javascripts to use.\n Leave the default to pin the version, or choose \"latest\" to get updates,\n or choose part of the version string to get minor updates.\nReturns:\n Path: The filepath which the html was outputted to.", "id": "f3712:m12"} {"signature": "def graph(networkx_graph, title='', scripts_mode=\"\", data_mode=\"\",output_dir=\"\", filename=\"\", version=this_version):", "body": "output_dir = Path(output_dir)output_dir.mkdir(exist_ok=True, parents=True)scripts = third_party_scripts + [CDN_url(version)+\"\", CDN_url(version)+\"\"]scripts_block = _scripts_block(scripts, scripts_mode, output_dir)graph_json = nx_json.node_link_data(networkx_graph)for node in graph_json['']:for attr, val in node.items():if isinstance(val, numbers.Number):node[attr] = round(val, )for link in graph_json['']:for attr, val in link.items():if isinstance(val, numbers.Number):link[attr] = round(val, )graph_json = f\"\"data_block = _data_block(data_mode, [('', graph_json)], output_dir)html = templateEnv.get_template('').render(title=title, scripts_block=scripts_block+''+data_block, nodes=networkx_graph.nodes())(output_dir / filename).write_text(html)return (output_dir / filename).resolve()", "docstring": "Arguments:\n networkx_graph (networkx.Graph): any instance of networkx.Graph\n title (str): The title of the plot (to be embedded in the html).\n scripts_mode (str): Choose from [`\"CDN\"`, `\"directory\"`, `\"inline\"`]:\n\n - `\"CDN\"` compiles a single HTML page with links to scripts hosted on a CDN,\n\n - `\"directory\"` compiles a directory with all scripts locally cached,\n\n - `\"inline\"` compiles a single HTML file with all scripts/styles inlined.\n\n data_mode (str): Choose from [\"directory\", \"inline\"]:\n\n - \"directory\" compiles a directory with all data locally cached,\n\n - \"inline\" compiles a single HTML file with all data inlined.\n\n output_dir (str): the directory in which to output the file\n filename (str): the filename of the output file\n version (str): the version of the javascripts to use.\n Leave the default to pin the version, or choose \"latest\" to get updates,\n or choose part of the version string to get minor updates.\nReturns:\n Path: The filepath which the html was outputted to.", "id": "f3712:m13"} {"signature": "def version(self):", "body": "ver = Version()ver.conn = self.connver.attrs = {'': self.attrs[''],}ver.save()return ver", "docstring": "Create a new version under this service.", "id": "f3722:c1:m2"} {"signature": "def vcl(self, name, content):", "body": "vcl = VCL()vcl.conn = self.connvcl.attrs = {'': self.attrs[''],'': self.attrs[''],'': name,'': content,}vcl.save()return vcl", "docstring": "Create a new VCL under this version.", "id": "f3722:c2:m8"} 
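
The graph record (f3712:m13) above serializes the networkx graph with node_link_data and rounds numeric attributes before embedding the JSON in the rendered page. A standalone sketch of that serialization step:

```
import json
import numbers
import networkx as nx
from networkx.readwrite import json_graph

g = nx.karate_club_graph()
data = json_graph.node_link_data(g)
# Round numeric node attributes so the embedded JSON stays compact.
for node in data['nodes']:
    for attr, val in node.items():
        if isinstance(val, numbers.Number):
            node[attr] = round(val, 2)
payload = json.dumps(data)  # ready to splice into the HTML template
```
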
{"signature": "def update(dst, src):", "body": "stack = [(dst, src)]def isdict(o):return hasattr(o, '')while stack:current_dst, current_src = stack.pop()for key in current_src:if key not in current_dst:current_dst[key] = current_src[key]else:if isdict(current_src[key]) and isdict(current_dst[key]):stack.append((current_dst[key], current_src[key]))else:current_dst[key] = current_src[key]return dst", "docstring": "Recursively update the destination dict-like object with the source dict-like object.\n\n Useful for merging options and Bunches together!\n\n Based on:\n http://code.activestate.com/recipes/499335-recursively-update-a-dictionary-without-hitting-py/#c1", "id": "f3724:m6"} {"signature": "def rmDirPatterns(*patterns, **kwargs):", "body": "kwargs[''] = ''kwargs[''] = ''return _walkWithAction(*patterns, **kwargs)", "docstring": "Remove all directories under the current path with the given patterns.", "id": "f3724:m3"} {"signature": "def pip_install(*args):", "body": "download_cache = ('' % options.paved.pip.download_cache) if options.paved.pip.download_cache else ''shv('' % (download_cache, ''.join(args)))", "docstring": "Send the given arguments to `pip install`.", "id": "f3724:m7"} {"signature": "@taskdef shell(info):", "body": "cmd = ''try:import django_extensionscmd = ''except ImportError:info(\"\")call_manage(cmd)", "docstring": "Run the ipython shell. Shorthand for `paver manage shell`.\n\n Uses `django_extensions `, if\n available, to provide `shell_plus`.", "id": "f3725:m4"} {"signature": "@task@consume_argsdef schema(args):", "body": "try:import southcmd = args and '' % ''.join(options.args) or ''call_manage(cmd)except ImportError:error('')", "docstring": "Run South's schemamigration command.", "id": "f3725:m6"} {"signature": "@taskdef pychecker():", "body": "packages = [x for x in options.setup.packages if '' not in x]sh(''.format(param=options.paved.pycheck.pychecker.param, files=''.join(packages)))", "docstring": "check of python programs by pychecker.\n\n requirements:\n - pychecker_ should be installed.\n\n options.paved.pycheck.pychecker.param\n\n .. _pychecker: http://pychecker.sourceforge.net/", "id": "f3726:m3"} {"signature": "@taskdef findimports():", "body": "packages = [x for x in options.setup.packages if '' not in x]sh(''.format(param=options.paved.pycheck.findimports.param, files=''.join(packages)))", "docstring": "print python module dependencies by findimports.\n\n requirements:\n - findimports_ should be installed. ``easy_install findimports``\n\n options.paved.pycheck.findimports.param\n\n .. _findimports: http://pypi.python.org/pypi/findimports", "id": "f3726:m1"} {"signature": "@taskdef nose():", "body": "sh(''.format(param=options.paved.pycheck.nose.param))", "docstring": "Run unit tests using nosetests.\n\n requirements:\n - nose_ should be installed.\n\n options.paved.pycheck.nose.param\n\n .. 
_nose: http://somethingaboutorange.com/mrl/projects/nose/1.0.0/", "id": "f3726:m4"} {"signature": "@task@needs('', '', '', '', '')def pycheckall():", "body": "", "docstring": "All pycheck tasks.", "id": "f3726:m5"} {"signature": "@task@needs('')def clean(options, info):", "body": "info(\"\", options.paved.clean.patterns)for wd in options.paved.clean.dirs:info(\"\", wd)for p in options.paved.clean.patterns:for f in wd.walkfiles(p):f.remove()", "docstring": "Clean up extra files littering the source tree.\n\n options.paved.clean.dirs: directories to search recursively\n options.paved.clean.patterns: patterns to search for and remove", "id": "f3727:m0"} {"signature": "@taskdef printoptions():", "body": "x = json.dumps(environment.options,indent=,sort_keys=True,skipkeys=True,cls=MyEncoder)print(x)", "docstring": "Print paver options.\n\n Prettified by json.\n `long_description` is removed", "id": "f3727:m1"} {"signature": "def open_s3(bucket):", "body": "conn = boto.connect_s3(options.paved.s3.access_id, options.paved.s3.secret)try:bucket = conn.get_bucket(bucket)except boto.exception.S3ResponseError:bucket = conn.create_bucket(bucket)return bucket", "docstring": "Open a connection to S3 and return the bucket.", "id": "f3729:m0"} {"signature": "def download_s3(bucket_name, file_key, file_path, force=False):", "body": "file_path = path(file_path)bucket = open_s3(bucket_name)file_dir = file_path.dirname()file_dir.makedirs()s3_key = bucket.get_key(file_key)if file_path.exists():file_data = file_path.bytes()file_md5, file_md5_64 = s3_key.get_md5_from_hexdigest(hashlib.md5(file_data).hexdigest())try:s3_md5 = s3_key.etag.replace('', '')except KeyError:passelse:if s3_md5 == file_md5:info('' % file_path)returnelif not force:s3_datetime = datetime.datetime(*time.strptime(s3_key.last_modified, '')[:])local_datetime = datetime.datetime.utcfromtimestamp(file_path.stat().st_mtime)if s3_datetime < local_datetime:info(\"\" % (file_key))returninfo(\"\" % (file_key))try:with open(file_path, '') as fo:s3_key.get_contents_to_file(fo)except Exception as e:error(\"\" % e)raise", "docstring": "Download a remote file from S3.", "id": "f3729:m2"} {"signature": "def sphinx_make(*targets):", "body": "sh('' % ''.join(targets), cwd=options.paved.docs.path)", "docstring": "Call the Sphinx Makefile with the specified targets.\n\n `options.paved.docs.path`: the path to the Sphinx folder (where the Makefile resides).", "id": "f3730:m0"} {"signature": "@taskdef docs():", "body": "sphinx_make(*options.paved.docs.targets)", "docstring": "Make Sphinx docs.\n\n `options.paved.docs.path`: the path to the Sphinx folder (where the Makefile resides).\n\n `options.paved.docs.targets`: the Make targets to send to `sphinx_make`.
Default is `html`.", "id": "f3730:m1"} {"signature": "@task@needs('')def rsync_docs():", "body": "assert options.paved.docs.rsync_location, \"\"sh('' % (path(options.paved.docs.path) / options.paved.docs.build_rel,options.paved.docs.rsync_location))", "docstring": "Upload the docs to a remote location via rsync.\n\n `options.paved.docs.rsync_location`: the target location to rsync files to.\n\n `options.paved.docs.path`: the path to the Sphinx folder (where the Makefile resides).\n\n `options.paved.docs.build_rel`: the path of the documentation\n build folder, relative to `options.paved.docs.path`.", "id": "f3730:m3"} {"signature": "@task@consume_argsdef pip_install(args):", "body": "util.pip_install(*args)", "docstring": "Send the given arguments to `pip install`.", "id": "f3732:m0"} {"signature": "@taskdef easy_install(args):", "body": "util.easy_install(*args)", "docstring": "Send the given arguments to `easy_install`.", "id": "f3732:m1"} {"signature": "def validate(self, messages):", "body": "messages = self.validate_creators(messages)messages = self.validate_created(messages)return messages", "docstring": "Returns True if the fields are valid according to the SPDX standard.\n Appends user friendly messages to the messages parameter.", "id": "f3735:c4:m6"} {"signature": "def load_license_list(file_name):", "body": "licenses_map = {}with codecs.open(file_name, '', encoding='') as lics:licenses = json.load(lics)version = licenses[''].split('')for lic in licenses['']:if lic.get(''):continuename = lic['']identifier = lic['']licenses_map[name] = identifierlicenses_map[identifier] = namereturn version, licenses_map", "docstring": "Return the licenses list version tuple and a mapping of licenses\nname->id and id->name loaded from a JSON file\nfrom https://github.com/spdx/license-list-data", "id": "f3736:m0"} {"signature": "def _add_parens(required, text):", "body": "return ''.format(text) if required else text", "docstring": "Add parens around a license expression if `required` is True, otherwise\nreturn `text` unmodified.", "id": "f3738:m0"} {"signature": "def validate(self, messages):", "body": "messages = self.validate_version(messages)messages = self.validate_data_lics(messages)messages = self.validate_name(messages)messages = self.validate_spdx_id(messages)messages = self.validate_namespace(messages)messages = self.validate_ext_document_references(messages)messages = self.validate_creation_info(messages)messages = self.validate_package(messages)messages = self.validate_extracted_licenses(messages)messages = self.validate_reviews(messages)return messages", "docstring": "Validate all fields of the document and update the\nmessages list with user friendly error messages for display.", "id": "f3738:c5:m8"} {"signature": "@classmethoddef from_identifier(cls, identifier):", "body": "if identifier in config.LICENSE_MAP.keys():return cls(config.LICENSE_MAP[identifier], identifier)else:return cls(identifier, identifier)", "docstring": "If identifier exists in config.LICENSE_MAP\n the full_name is retrieved from it. Otherwise\n the full_name is the same as the identifier.", "id": "f3738:c1:m1"} {"signature": "@classmethoddef from_full_name(cls, full_name):", "body": "if full_name in config.LICENSE_MAP.keys():return cls(full_name, config.LICENSE_MAP[full_name])else:return cls(full_name, full_name)", "docstring": "Return a new License for a full_name.
If the full_name exists in\nconfig.LICENSE_MAP the identifier is retrieved from it.\nOtherwise the identifier is the same as the full_name.", "id": "f3738:c1:m2"} {"signature": "def validate(self, messages):", "body": "messages = self.validate_ext_doc_id(messages)messages = self.validate_spdx_doc_uri(messages)messages = self.validate_checksum(messages)return messages", "docstring": "Validate all fields of the ExternalDocumentRef class and update the\nmessages list with user friendly error messages for display.", "id": "f3738:c0:m3"} {"signature": "def tv_to_rdf(infile_name, outfile_name):", "body": "parser = Parser(Builder(), StandardLogger())parser.build()with open(infile_name) as infile:data = infile.read()document, error = parser.parse(data)if not error:with open(outfile_name, mode='') as outfile:write_document(document, outfile)return Trueelse:print('')messages = []document.validate(messages)print(''.join(messages))return False", "docstring": "Convert a SPDX file from tag/value format to RDF format.\nReturn True on success, False otherwise.", "id": "f3740:m0"} {"signature": "def create_doc(self):", "body": "doc_node = URIRef('')self.graph.add((doc_node, RDF.type, self.spdx_namespace.SpdxDocument))vers_literal = Literal(str(self.document.version))self.graph.add((doc_node, self.spdx_namespace.specVersion, vers_literal))data_lics = URIRef(self.document.data_license.url)self.graph.add((doc_node, self.spdx_namespace.dataLicense, data_lics))doc_name = URIRef(self.document.name)self.graph.add((doc_node, self.spdx_namespace.name, doc_name))return doc_node", "docstring": "Add and return the root document node to graph.", "id": "f3742:c8:m1"} {"signature": "def __init__(self, document, out):", "body": "super(Writer, self).__init__(document, out)", "docstring": "- document is spdx.document instance that will be written.\n- out is a file-like object that will be written to.", "id": "f3742:c8:m0"} {"signature": "def add_file_dependencies(self):", "body": "for doc_file in self.document.files:self.add_file_dependencies_helper(doc_file)", "docstring": "Add file dependencies to the graph.\nCalled after all files have been added.", "id": "f3742:c2:m4"} {"signature": "def create_conjunction_node(self, conjunction):", "body": "node = BNode()type_triple = (node, RDF.type, self.spdx_namespace.ConjunctiveLicenseSet)self.graph.add(type_triple)licenses = self.licenses_from_tree(conjunction)for lic in licenses:member_triple = (node, self.spdx_namespace.member, lic)self.graph.add(member_triple)return node", "docstring": "Return a node representing a conjunction of licenses.", "id": "f3742:c1:m3"} {"signature": "def create_extracted_license(self, lic):", "body": "licenses = list(self.graph.triples((None, self.spdx_namespace.licenseId, lic.identifier)))if len(licenses) != :return licenses[][] else:license_node = BNode()type_triple = (license_node, RDF.type, self.spdx_namespace.ExtractedLicensingInfo)self.graph.add(type_triple)ident_triple = (license_node, self.spdx_namespace.licenseId, Literal(lic.identifier))self.graph.add(ident_triple)text_triple = (license_node, self.spdx_namespace.extractedText, Literal(lic.text))self.graph.add(text_triple)if lic.full_name is not None:name_triple = (license_node, self.spdx_namespace.licenseName, self.to_special_value(lic.full_name))self.graph.add(name_triple)for ref in lic.cross_ref:triple = (license_node, RDFS.seeAlso, URIRef(ref))self.graph.add(triple)if lic.comment is not None:comment_triple = (license_node, RDFS.comment,
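
load_license_list (f3736:m0) above stores both directions (name->id and id->name) in a single dict, which is what lets from_identifier and from_full_name fall back to the input when a key is missing. A compact sketch of that lookup behavior with an illustrative two-entry map:

```
# One dict holds both directions, so a single lookup serves both helpers.
LICENSE_MAP = {
    'Apache License 2.0': 'Apache-2.0',
    'Apache-2.0': 'Apache License 2.0',
}

def from_identifier(identifier):
    # (full_name, identifier); unknown identifiers map to themselves
    return (LICENSE_MAP.get(identifier, identifier), identifier)

print(from_identifier('Apache-2.0'))  # ('Apache License 2.0', 'Apache-2.0')
print(from_identifier('Custom-1.0'))  # ('Custom-1.0', 'Custom-1.0')
```
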
Literal(lic.comment))self.graph.add(comment_triple)return license_node", "docstring": "Handle extracted license.\nReturn the license node.", "id": "f3742:c1:m6"} {"signature": "def create_review_node(self, review):", "body": "review_node = BNode()type_triple = (review_node, RDF.type, self.spdx_namespace.Review)self.graph.add(type_triple)reviewer_node = Literal(review.reviewer.to_value())self.graph.add((review_node, self.spdx_namespace.reviewer, reviewer_node))reviewed_date_node = Literal(review.review_date_iso_format)reviewed_triple = (review_node, self.spdx_namespace.reviewDate, reviewed_date_node)self.graph.add(reviewed_triple)if review.has_comment:comment_node = Literal(review.comment)comment_triple = (review_node, RDFS.comment, comment_node)self.graph.add(comment_triple)return review_node", "docstring": "Return a review node.", "id": "f3742:c3:m1"} {"signature": "def handle_package_has_file_helper(self, pkg_file):", "body": "nodes = list(self.graph.triples((None, self.spdx_namespace.fileName, Literal(pkg_file.name))))if len(nodes) == :return nodes[][]else:raise InvalidDocumentError('' +''.format(pkg_file.name))", "docstring": "Return node representing pkg_file\npkg_file should be instance of spdx.file.", "id": "f3742:c7:m6"} {"signature": "def reviews(self):", "body": "return map(self.create_review_node, self.document.reviews)", "docstring": "Returns a list of review nodes", "id": "f3742:c3:m2"} {"signature": "def to_special_value(self, value):", "body": "if isinstance(value, utils.NoAssert):return self.spdx_namespace.noassertionelif isinstance(value, utils.SPDXNone):return self.spdx_namespace.noneelse:return Literal(value)", "docstring": "Return proper spdx term or Literal", "id": "f3742:c0:m2"} {"signature": "def write_document(document, out, validate=True):", "body": "if validate:messages = []messages = document.validate(messages)if messages:raise InvalidDocumentError(messages)writer = Writer(document, out)writer.write()", "docstring": "Write an SPDX RDF document.\n- document - spdx.document instance.\n- out - file like object that will be written to.\nOptionally `validate` the document before writing and raise\nInvalidDocumentError if document.validate returns False.", "id": "f3742:m0"} {"signature": "def files(self):", "body": "return map(self.create_file_node, self.document.files)", "docstring": "Return list of file nodes.", "id": "f3742:c2:m2"} {"signature": "def creators(self):", "body": "return map(lambda c: Literal(c.to_value()), self.document.creation_info.creators)", "docstring": "Return a list of creator nodes.\nNote: Does not add anything to the graph.", "id": "f3742:c5:m1"} {"signature": "def create_file_node(self, doc_file):", "body": "file_node = URIRef(''.format(id=str(doc_file.spdx_id)))type_triple = (file_node, RDF.type, self.spdx_namespace.File)self.graph.add(type_triple)name_triple = (file_node, self.spdx_namespace.fileName, Literal(doc_file.name))self.graph.add(name_triple)if doc_file.has_optional_field(''):comment_triple = (file_node, RDFS.comment, Literal(doc_file.comment))self.graph.add(comment_triple)if doc_file.has_optional_field(''):ftype = self.spdx_namespace[self.FILE_TYPES[doc_file.type]]ftype_triple = (file_node, self.spdx_namespace.fileType, ftype)self.graph.add(ftype_triple)self.graph.add((file_node, self.spdx_namespace.checksum, self.create_checksum_node(doc_file.chk_sum)))conc_lic_node = self.license_or_special(doc_file.conc_lics)conc_lic_triple = (file_node, self.spdx_namespace.licenseConcluded, 
conc_lic_node)self.graph.add(conc_lic_triple)license_info_nodes = map(self.license_or_special, doc_file.licenses_in_file)for lic in license_info_nodes:triple = (file_node, self.spdx_namespace.licenseInfoInFile, lic)self.graph.add(triple)if doc_file.has_optional_field(''):comment_triple = (file_node, self.spdx_namespace.licenseComments, Literal(doc_file.license_comment))self.graph.add(comment_triple)cr_text_node = self.to_special_value(doc_file.copyright)cr_text_triple = (file_node, self.spdx_namespace.copyrightText, cr_text_node)self.graph.add(cr_text_triple)if doc_file.has_optional_field(''):notice_triple = (file_node, self.spdx_namespace.noticeText, doc_file.notice)self.graph.add(notice_triple)contrib_nodes = map(lambda c: Literal(c), doc_file.contributors)contrib_triples = [(file_node, self.spdx_namespace.fileContributor, node) for node in contrib_nodes]for triple in contrib_triples:self.graph.add(triple)return file_node", "docstring": "Create a node for spdx.file.", "id": "f3742:c2:m1"} {"signature": "def add_file_dependencies_helper(self, doc_file):", "body": "subj_triples = list(self.graph.triples((None, self.spdx_namespace.fileName, Literal(doc_file.name))))if len(subj_triples) != :raise InvalidDocumentError(''.format(doc_file.name))subject_node = subj_triples[][]for dependency in doc_file.dependencies:dep_triples = list(self.graph.triples((None, self.spdx_namespace.fileName, Literal(dependency))))if len(dep_triples) == :dep_node = dep_triples[][]dep_triple = (subject_node, self.spdx_namespace.fileDependency, dep_node)self.graph.add(dep_triple)else:print(''.format(doc_file.name, dependency))", "docstring": "Handle dependencies for a single file.\n- doc_file - instance of spdx.file.File.", "id": "f3742:c2:m3"} {"signature": "def write_package(package, out):", "body": "out.write('')write_value('', package.name, out)if package.has_optional_field(''):write_value('', package.version, out)write_value('', package.download_location, out)if package.has_optional_field(''):write_text_value('', package.summary, out)if package.has_optional_field(''):write_text_value('', package.source_info, out)if package.has_optional_field(''):write_value('', package.file_name, out)if package.has_optional_field(''):write_value('', package.supplier, out)if package.has_optional_field(''):write_value('', package.originator, out)if package.has_optional_field(''):write_value('', package.check_sum.to_tv(), out)write_value('', format_verif_code(package), out)if package.has_optional_field(''):write_text_value('', package.description, out)if isinstance(package.license_declared, (document.LicenseConjunction,document.LicenseDisjunction)):write_value('', u''.format(package.license_declared), out)else:write_value('', package.license_declared, out)if isinstance(package.conc_lics, (document.LicenseConjunction,document.LicenseDisjunction)):write_value('', u''.format(package.conc_lics), out)else:write_value('', package.conc_lics, out)for lics in sorted(package.licenses_from_files):write_value('', lics, out)if package.has_optional_field(''):write_text_value('', package.license_comment, out)if isinstance(package.cr_text, six.string_types):write_text_value('', package.cr_text, out)else:write_value('', package.cr_text, out)if package.has_optional_field(''):write_value('', package.homepage, out)for spdx_file in sorted(package.files):write_separators(out)write_file(spdx_file, out)", "docstring": "Write a package fields to out.", "id": "f3743:m9"} {"signature": "def write_extracted_licenses(lics, out):", "body": "write_value('', 
lics.identifier, out)if lics.full_name is not None:write_value('', lics.full_name, out)if lics.comment is not None:write_text_value('', lics.comment, out)for xref in sorted(lics.cross_ref):write_value('', xref, out)write_text_value('', lics.text, out)", "docstring": "Write extracted licenses fields to out.", "id": "f3743:m10"} {"signature": "def write_review(review, out):", "body": "out.write('')write_value('', review.reviewer, out)write_value('', review.review_date_iso_format, out)if review.has_comment:write_text_value('', review.comment, out)", "docstring": "Write the fields of a single review to out.", "id": "f3743:m5"} {"signature": "def write_annotation(annotation, out):", "body": "out.write('')write_value('', annotation.annotator, out)write_value('', annotation.annotation_date_iso_format, out)if annotation.has_comment:write_text_value('', annotation.comment, out)write_value('', annotation.annotation_type, out)write_value('', annotation.spdx_id, out)", "docstring": "Write the fields of a single annotation to out.", "id": "f3743:m6"} {"signature": "def add_artifact(self, symbol, value):", "body": "symbol = ''.format(symbol)artifact = getattr(self, symbol)artifact.append(value)", "docstring": "Add value as artifact_of_project{symbol}.", "id": "f3745:c1:m6"} {"signature": "def validate_optional_str_fields(self, messages):", "body": "FIELDS = ['','','','','','']messages = self.validate_str_fields(FIELDS, True, messages)return messages", "docstring": "Fields marked as optional and of type string in class\n docstring must be of a type that provides __str__ method.", "id": "f3746:c0:m8"} {"signature": "def validate_str_fields(self, fields, optional, messages):", "body": "for field_str in fields:field = getattr(self, field_str)if field is not None:attr = getattr(field, '', None)if not callable(attr):messages = messages + [''.format(field)]elif not optional:messages = messages + [''.format(field_str)]return messages", "docstring": "Helper for validate_mandatory_str_field and\n validate_optional_str_fields", "id": "f3746:c0:m10"} {"signature": "def set_file_license_comment(self, doc, text):", "body": "if self.has_package(doc) and self.has_file(doc):if not self.file_license_comment_set:self.file_license_comment_set = Trueself.file(doc).license_comment = textreturn Trueelse:raise CardinalityError('')else:raise OrderError('')", "docstring": "Raises OrderError if no package or file defined.\nRaises CardinalityError if more than one per file.", "id": "f3750:c5:m2"} {"signature": "def reset_document(self):", "body": "self.doc_version_set = Falseself.doc_comment_set = Falseself.doc_namespace_set = Falseself.doc_data_lics_set = Falseself.doc_name_set = Falseself.doc_spdx_id_set = False", "docstring": "Reset the internal state to allow building new document", "id": "f3750:c0:m7"} {"signature": "def set_pkg_source_info(self, doc, text):", "body": "self.assert_package_exists()if not self.package_source_info_set:self.package_source_info_set = Truedoc.package.source_info = textreturn Trueelse:raise CardinalityError('')", "docstring": "Sets the package's source information, if not already set.\n text - Free form text.\n Raises CardinalityError if already defined.\n Raises OrderError if no package previously defined.", "id": "f3750:c4:m2"} {"signature": "def set_doc_comment(self, doc, comment):", "body": "if not self.doc_comment_set:self.doc_comment_set = Truedoc.comment = commentelse:raise CardinalityError('')", "docstring": "Sets document comment, Raises CardinalityError if\n comment already set.", "id": 
"f3750:c0:m5"} {"signature": "def set_pkg_chk_sum(self, doc, chk_sum):", "body": "self.assert_package_exists()if not self.package_chk_sum_set:self.package_chk_sum_set = Truedoc.package.check_sum = checksum.Algorithm('', chk_sum)else:raise CardinalityError('')", "docstring": "Sets the package check sum, if not already set.\n chk_sum - A string\n Raises CardinalityError if already defined.\n Raises OrderError if no package previously defined.", "id": "f3750:c4:m1"} {"signature": "def set_pkg_cr_text(self, doc, text):", "body": "self.assert_package_exists()if not self.package_cr_text_set:self.package_cr_text_set = Truedoc.package.cr_text = textelse:raise CardinalityError('')", "docstring": "Sets the package's license comment.\n Raises OrderError if no package previously defined.\n Raises CardinalityError if already set.", "id": "f3750:c4:m6"} {"signature": "def reset(self):", "body": "self.reset_creation_info()self.reset_document()self.reset_package()self.reset_file_stat()self.reset_reviews()self.reset_annotations()", "docstring": "Resets builder's state for building new documents.\n Must be called between usage with different documents.", "id": "f3750:c8:m1"} {"signature": "def set_chksum(self, doc, chk_sum):", "body": "if chk_sum:doc.ext_document_references[-].check_sum = checksum.Algorithm('', chk_sum)else:raise SPDXValueError('')", "docstring": "Sets the external document reference's check sum, if not already set.\nchk_sum - The checksum value in the form of a string.", "id": "f3750:c1:m0"} {"signature": "def set_file_notice(self, doc, text):", "body": "if self.has_package(doc) and self.has_file(doc):if not self.file_notice_set:self.file_notice_set = Trueself.file(doc).notice = tagvaluebuilders.str_from_text(text)return Trueelse:raise CardinalityError('')else:raise OrderError('')", "docstring": "Raises OrderError if no package or file defined.\n Raises CardinalityError if more than one.", "id": "f3750:c5:m5"} {"signature": "def set_pkg_desc(self, doc, text):", "body": "self.assert_package_exists()if not self.package_desc_set:self.package_desc_set = Truedoc.package.description = textelse:raise CardinalityError('')", "docstring": "Set's the package's description.\n Raises CardinalityError if description already set.\n Raises OrderError if no package previously defined.", "id": "f3750:c4:m8"} {"signature": "def set_pkg_verif_code(self, doc, code):", "body": "self.assert_package_exists()if not self.package_verif_set:self.package_verif_set = Truedoc.package.verif_code = codeelse:raise CardinalityError('')", "docstring": "Sets the package verification code, if not already set.\n code - A string.\n Raises CardinalityError if already defined.\n Raises OrderError if no package previously defined.", "id": "f3750:c4:m3"} {"signature": "def get_extr_lics_comment(self, extr_lics):", "body": "comment_list = list(self.graph.triples((extr_lics, RDFS.comment, None)))if len(comment_list) > :self.more_than_one_error('')returnelif len(comment_list) == :return comment_list[][]else:return", "docstring": "Return license comment or None.", "id": "f3751:c1:m6"} {"signature": "def p_file_notice(self, f_term, predicate):", "body": "try:for _, _, notice in self.graph.triples((f_term, predicate, None)):self.builder.set_file_notice(self.doc, six.text_type(notice))except CardinalityError:self.more_than_one_error('')", "docstring": "Sets file notice text.", "id": "f3751:c3:m5"} {"signature": "def get_annotation_date(self, r_term):", "body": "annotation_date_list = list(self.graph.triples((r_term, 
self.spdx_namespace[''], None)))if len(annotation_date_list) != :self.error = Truemsg = ''self.logger.log(msg)returnreturn six.text_type(annotation_date_list[][])", "docstring": "Returns annotation date or None if not found.\n Reports error on failure.\n Note: does not check the value format.", "id": "f3751:c5:m4"} {"signature": "def p_file_comment(self, f_term, predicate):", "body": "try:for _, _, comment in self.graph.triples((f_term, predicate, None)):self.builder.set_file_comment(self.doc, six.text_type(comment))except CardinalityError:self.more_than_one_error('')", "docstring": "Sets file comment text.", "id": "f3751:c3:m6"} {"signature": "def p_file_lic_info(self, f_term, predicate):", "body": "for _, _, info in self.graph.triples((f_term, predicate, None)):lic = self.handle_lics(info)if lic is not None:self.builder.set_file_license_in_file(self.doc, lic)", "docstring": "Sets file license information.", "id": "f3751:c3:m11"} {"signature": "def get_file_name(self, f_term):", "body": "for _, _, name in self.graph.triples((f_term, self.spdx_namespace[''], None)):return namereturn", "docstring": "Returns first found fileName property or None if not found.", "id": "f3751:c3:m2"} {"signature": "def p_file_contributor(self, f_term, predicate):", "body": "for _, _, contributor in self.graph.triples((f_term, predicate, None)):self.builder.add_file_contribution(self.doc, six.text_type(contributor))", "docstring": "Parses all file contributors and adds them to the model.", "id": "f3751:c3:m4"} {"signature": "def parse_only_extr_license(self, extr_lic):", "body": "ident = self.get_extr_license_ident(extr_lic)text = self.get_extr_license_text(extr_lic)comment = self.get_extr_lics_comment(extr_lic)xrefs = self.get_extr_lics_xref(extr_lic)name = self.get_extr_lic_name(extr_lic)if not ident:returnlic = document.ExtractedLicense(ident)if text is not None:lic.text = textif name is not None:lic.full_name = nameif comment is not None:lic.comment = commentlic.cross_ref = map(lambda x: six.text_type(x), xrefs)return lic", "docstring": "Return an ExtractedLicense object to represent a license.\nDoes not add it to the SPDXDocument model.\nReturn None if failed.", "id": "f3751:c1:m7"} {"signature": "def handle_pkg_lic(self, p_term, predicate, builder_func):", "body": "try:for _, _, licenses in self.graph.triples((p_term, predicate, None)):if (licenses, RDF.type, self.spdx_namespace['']) in self.graph:lics = self.handle_conjunctive_list(licenses)builder_func(self.doc, lics)elif (licenses, RDF.type, self.spdx_namespace['']) in self.graph:lics = self.handle_disjunctive_list(licenses)builder_func(self.doc, lics)else:try:lics = self.handle_lics(licenses)builder_func(self.doc, lics)except SPDXValueError:self.value_error('', licenses)except CardinalityError:self.more_than_one_error(''.format(predicate))", "docstring": "Handles package lics concluded or declared.", "id": "f3751:c2:m8"} {"signature": "def p_file_comments_on_lics(self, f_term, predicate):", "body": "try:for _, _, comment in self.graph.triples((f_term, predicate, None)):self.builder.set_file_license_comment(self.doc, six.text_type(comment))except CardinalityError:self.more_than_one_error('')", "docstring": "Sets file license comment.", "id": "f3751:c3:m10"} {"signature": "def p_file_lic_conc(self, f_term, predicate):", "body": "try:for _, _, licenses in self.graph.triples((f_term, predicate, None)):if (licenses, RDF.type, self.spdx_namespace['']) in self.graph:lics =
self.handle_conjunctive_list(licenses)self.builder.set_concluded_license(self.doc, lics)elif (licenses, RDF.type, self.spdx_namespace['']) in self.graph:lics = self.handle_disjunctive_list(licenses)self.builder.set_concluded_license(self.doc, lics)else:try:lics = self.handle_lics(licenses)self.builder.set_concluded_license(self.doc, lics)except SPDXValueError:self.value_error('', licenses)except CardinalityError:self.more_than_one_error(''.format(predicate))", "docstring": "Sets file licenses concluded.", "id": "f3751:c3:m15"} {"signature": "def get_annotation_type(self, r_term):", "body": "for _, _, typ in self.graph.triples((r_term, self.spdx_namespace[''], None)):if typ is not None:return typelse:self.error = Truemsg = ''self.logger.log(msg)return", "docstring": "Returns the annotation type, or None if none or more than one is found.\n Reports errors on failure.", "id": "f3751:c5:m2"} {"signature": "def get_review_date(self, r_term):", "body": "reviewed_list = list(self.graph.triples((r_term, self.spdx_namespace[''], None)))if len(reviewed_list) != :self.error = Truemsg = ''self.logger.log(msg)returnreturn six.text_type(reviewed_list[][])", "docstring": "Returns review date or None if not found.\n Reports error on failure.\n Note: does not check the value format.", "id": "f3751:c4:m3"} {"signature": "def get_extr_lic_name(self, extr_lic):", "body": "extr_name_list = list(self.graph.triples((extr_lic, self.spdx_namespace[''], None)))if len(extr_name_list) > :self.more_than_one_error('')returnelif len(extr_name_list) == :returnreturn self.to_special_value(extr_name_list[][])", "docstring": "Return the license name from an ExtractedLicense or None", "id": "f3751:c1:m4"} {"signature": "def parse_creation_info(self, ci_term):", "body": "for _s, _p, o in self.graph.triples((ci_term, self.spdx_namespace[''], None)):try:ent = self.builder.create_entity(self.doc, six.text_type(o))self.builder.add_creator(self.doc, ent)except SPDXValueError:self.value_error('', o)for _s, _p, o in self.graph.triples((ci_term, self.spdx_namespace[''], None)):try:self.builder.set_created_date(self.doc, six.text_type(o))except SPDXValueError:self.value_error('', o)except CardinalityError:self.more_than_one_error('')breakfor _s, _p, o in self.graph.triples((ci_term, RDFS.comment, None)):try:self.builder.set_creation_comment(self.doc, six.text_type(o))except CardinalityError:self.more_than_one_error('')breakfor _s, _p, o in self.graph.triples((ci_term, self.spdx_namespace[''], None)):try:self.builder.set_lics_list_ver(self.doc, six.text_type(o))except CardinalityError:self.more_than_one_error('')breakexcept SPDXValueError:self.value_error('', o)", "docstring": "Parse creators, created and comment.", "id": "f3751:c6:m2"} {"signature": "def to_special_value(self, value):", "body": "if value == self.spdx_namespace.none:return utils.SPDXNone()elif value == self.spdx_namespace.noassertion:return utils.NoAssert()elif value == self.spdx_namespace.unknown:return utils.UnKnown()else:return value", "docstring": "Checks if value is a special SPDX value such as\n NONE, NOASSERTION or UNKNOWN; if so, returns the proper model,\n else returns value", "id": "f3751:c0:m3"} {"signature": "def parse_doc_fields(self, doc_term):", "body": "try:self.builder.set_doc_spdx_id(self.doc, doc_term)except SPDXValueError:self.value_error('', doc_term)try:if doc_term.count('', , len(doc_term)) <= :doc_namespace = doc_term.split('')[]self.builder.set_doc_namespace(self.doc, doc_namespace)else:self.value_error('', doc_term)except SPDXValueError:self.value_error('',
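
The parser records above repeatedly wildcard the object slot of graph.triples((subject, predicate, None)) and branch on the number of matches. A standalone rdflib sketch of that access pattern:

```
from rdflib import Graph, Literal, Namespace, URIRef

SPDX = Namespace('http://spdx.org/rdf/terms#')
g = Graph()
doc = URIRef('http://example.org/doc')
g.add((doc, SPDX.specVersion, Literal('SPDX-2.1')))

# Wildcard the object slot, then check the match count, as the parser does.
versions = list(g.triples((doc, SPDX.specVersion, None)))
if len(versions) != 1:
    print('expected exactly one specVersion')
else:
    _s, _p, o = versions[0]
    print(str(o))  # 'SPDX-2.1'
```
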
doc_term)for _s, _p, o in self.graph.triples((doc_term, self.spdx_namespace[''], None)):try:self.builder.set_doc_version(self.doc, six.text_type(o))except SPDXValueError:self.value_error('', o)except CardinalityError:self.more_than_one_error('')breakfor _s, _p, o in self.graph.triples((doc_term, self.spdx_namespace[''], None)):try:self.builder.set_doc_data_lic(self.doc, six.text_type(o))except SPDXValueError:self.value_error('', o)except CardinalityError:self.more_than_one_error('')breakfor _s, _p, o in self.graph.triples((doc_term, self.spdx_namespace[''], None)):try:self.builder.set_doc_name(self.doc, six.text_type(o))except CardinalityError:self.more_than_one_error('')breakfor _s, _p, o in self.graph.triples((doc_term, RDFS.comment, None)):try:self.builder.set_doc_comment(self.doc, six.text_type(o))except CardinalityError:self.more_than_one_error('')break", "docstring": "Parses the version, data license, name, SPDX Identifier, namespace,\n and comment.", "id": "f3751:c6:m3"} {"signature": "def more_than_one_error(self, field):", "body": "msg = ''.format(field)self.logger.log(msg)self.error = True", "docstring": "Logs a 'more than one' error.\n field is the field/property that has more than one defined.", "id": "f3751:c0:m1"} {"signature": "def parse_ext_doc_ref(self, ext_doc_ref_term):", "body": "for _s, _p, o in self.graph.triples((ext_doc_ref_term,self.spdx_namespace[''],None)):try:self.builder.set_ext_doc_id(self.doc, six.text_type(o))except SPDXValueError:self.value_error('', '')breakfor _s, _p, o in self.graph.triples((ext_doc_ref_term,self.spdx_namespace[''],None)):try:self.builder.set_spdx_doc_uri(self.doc, six.text_type(o))except SPDXValueError:self.value_error('', '')breakfor _s, _p, checksum in self.graph.triples((ext_doc_ref_term, self.spdx_namespace[''], None)):for _, _, value in self.graph.triples((checksum, self.spdx_namespace[''], None)):try:self.builder.set_chksum(self.doc, six.text_type(value))except SPDXValueError:self.value_error('', '')break", "docstring": "Parses the External Document ID, SPDX Document URI and Checksum.", "id": "f3751:c6:m4"} {"signature": "def p_file_type(self, f_term, predicate):", "body": "try:for _, _, ftype in self.graph.triples((f_term, predicate, None)):try:if ftype.endswith(''):ftype = ''elif ftype.endswith(''):ftype = ''elif ftype.endswith(''):ftype = ''elif ftype.endswith(''):ftype = ''self.builder.set_file_type(self.doc, ftype)except SPDXValueError:self.value_error('', ftype)except CardinalityError:self.more_than_one_error('')", "docstring": "Sets file type.", "id": "f3751:c3:m13"} {"signature": "def p_file_project(self, project):", "body": "for _, _, name in self.graph.triples((project, self.doap_namespace[''], None)):self.builder.set_file_atrificat_of_project(self.doc, '', six.text_type(name))for _, _, homepage in self.graph.triples((project, self.doap_namespace[''], None)):self.builder.set_file_atrificat_of_project(self.doc, '', six.text_type(homepage))", "docstring": "Helper function for parsing doap:project name and homepage\n and setting them using the file builder.", "id": "f3751:c3:m8"} {"signature": "def p_entity_3(self, p):", "body": "try:if six.PY2:value = p[].decode(encoding='')else:value = p[]p[] = self.builder.build_person(self.document, value)except SPDXValueError:msg = ERROR_MESSAGES[''].format(p[], p.lineno())self.logger.log(msg)self.error = Truep[] = None", "docstring": "entity : PERSON_VALUE", "id": "f3753:c0:m149"} {"signature": "def p_review_comment_1(self, p):", "body": "try:if six.PY2:value =
p[].decode(encoding='')else:value = p[]self.builder.add_review_comment(self.document, value)except CardinalityError:self.more_than_one_error('', p.lineno())except OrderError:self.order_error('', '', p.lineno())", "docstring": "review_comment : REVIEW_COMMENT TEXT", "id": "f3753:c0:m115"} {"signature": "def p_creator_comment_2(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "creator_comment : CREATOR_COMMENT error", "id": "f3753:c0:m142"} {"signature": "def p_file_artificat_2(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "file_artifact : prj_name_art error", "id": "f3753:c0:m20"} {"signature": "def p_extr_lic_id_2(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "extr_lic_id : LICS_ID error", "id": "f3753:c0:m17"} {"signature": "def p_package_version_2(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "package_version : PKG_VERSION error", "id": "f3753:c0:m108"} {"signature": "def p_prj_uri_art_1(self, p):", "body": "try:self.builder.set_file_atrificat_of_project(self.document,'', utils.UnKnown())except OrderError:self.order_error('', '', p.lineno())", "docstring": "prj_uri_art : ART_PRJ_URI UN_KNOWN", "id": "f3753:c0:m22"} {"signature": "def p_file_name_2(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "file_name : FILE_NAME error", "id": "f3753:c0:m52"} {"signature": "def p_extr_lic_text_1(self, p):", "body": "try:if six.PY2:value = p[].decode(encoding='')else:value = p[]self.builder.set_lic_text(self.document, value)except OrderError:self.order_error('', '', p.lineno())except CardinalityError:self.more_than_one_error('', p.lineno())", "docstring": "extr_lic_text : LICS_TEXT TEXT", "id": "f3753:c0:m14"} {"signature": "def p_annotation_type_2(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "annotation_type : ANNOTATION_TYPE error", "id": "f3753:c0:m124"} {"signature": "def p_conc_license_1(self, p):", "body": "p[] = utils.NoAssert()", "docstring": "conc_license : NO_ASSERT", "id": "f3753:c0:m48"} {"signature": "def p_extr_lic_name_value_2(self, p):", "body": "p[] = utils.NoAssert()", "docstring": "extr_lic_name_value : NO_ASSERT", "id": "f3753:c0:m13"} {"signature": "def p_file_artifact_1(self, p):", "body": "pass", "docstring": "file_artifact : prj_name_art file_art_rest\n | prj_name_art", "id": "f3753:c0:m19"} {"signature": "def p_annotation_date_1(self, p):", "body": "try:if six.PY2:value = p[].decode(encoding='')else:value = p[]self.builder.add_annotation_date(self.document, value)except CardinalityError:self.more_than_one_error('', p.lineno())except OrderError:self.order_error('', '', p.lineno())", "docstring": "annotation_date : ANNOTATION_DATE DATE", "id": "f3753:c0:m119"} {"signature": "def p_ext_doc_refs_1(self, p):", "body": "try:if six.PY2:doc_ref_id = p[].decode(encoding='')doc_uri = p[].decode(encoding='')ext_doc_chksum = p[].decode(encoding='')else:doc_ref_id = p[]doc_uri = p[]ext_doc_chksum = p[]self.builder.add_ext_doc_refs(self.document, doc_ref_id, doc_uri,ext_doc_chksum)except SPDXValueError:self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "ext_doc_ref : EXT_DOC_REF DOC_REF_ID DOC_URI EXT_DOC_REF_CHKSUM", "id": 
"f3753:c0:m137"} {"signature": "def p_entity_2(self, p):", "body": "try:if six.PY2:value = p[].decode(encoding='')else:value = p[]p[] = self.builder.build_org(self.document, value)except SPDXValueError:msg = ERROR_MESSAGES[''].format(p[], p.lineno())self.logger.log(msg)self.error = Truep[] = None", "docstring": "entity : ORG_VALUE", "id": "f3753:c0:m148"} {"signature": "def p_pkg_cr_text_1(self, p):", "body": "try:self.builder.set_pkg_cr_text(self.document, p[])except OrderError:self.order_error('', '', p.lineno())except CardinalityError:self.more_than_one_error('', p.lineno())", "docstring": "pkg_cr_text : PKG_CPY_TEXT pkg_cr_text_value", "id": "f3753:c0:m67"} {"signature": "def p_creator_comment_1(self, p):", "body": "try:if six.PY2:value = p[].decode(encoding='')else:value = p[]self.builder.set_creation_comment(self.document, value)except CardinalityError:self.more_than_one_error('', p.lineno())", "docstring": "creator_comment : CREATOR_COMMENT TEXT", "id": "f3753:c0:m141"} {"signature": "def p_file_cr_value_2(self, p):", "body": "p[] = utils.SPDXNone()", "docstring": "file_cr_value : NONE", "id": "f3753:c0:m39"} {"signature": "def p_pkg_down_location_1(self, p):", "body": "try:self.builder.set_pkg_down_location(self.document, p[])except OrderError:self.order_error('', '', p.lineno())except CardinalityError:self.more_than_one_error('', p.lineno())", "docstring": "pkg_down_location : PKG_DOWN pkg_down_value", "id": "f3753:c0:m94"} {"signature": "def p_annotation_date_2(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "annotation_date : ANNOTATION_DATE error", "id": "f3753:c0:m120"} {"signature": "def p_annotation_spdx_id_2(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "annotation_spdx_id : ANNOTATION_SPDX_ID error", "id": "f3753:c0:m126"} {"signature": "def p_file_cr_text_2(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "file_cr_text : FILE_CR_TEXT error", "id": "f3753:c0:m37"} {"signature": "def p_package_name_1(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "package_name : PKG_NAME error", "id": "f3753:c0:m110"} {"signature": "def p_attrib(self, p):", "body": "pass", "docstring": "attrib : spdx_version\n | spdx_id\n | data_lics\n | doc_name\n | ext_doc_ref\n | doc_comment\n | doc_namespace\n | creator\n | created\n | creator_comment\n | locs_list_ver\n | reviewer\n | review_date\n | review_comment\n | annotator\n | annotation_date\n | annotation_comment\n | annotation_type\n | annotation_spdx_id\n | package_name\n | package_version\n | pkg_down_location\n | pkg_home\n | pkg_summary\n | pkg_src_info\n | pkg_file_name\n | pkg_supplier\n | pkg_orig\n | pkg_chksum\n | pkg_verif\n | pkg_desc\n | pkg_lic_decl\n | pkg_lic_conc\n | pkg_lic_ff\n | pkg_lic_comment\n | pkg_cr_text\n | file_name\n | file_type\n | file_chksum\n | file_conc\n | file_lics_info\n | file_cr_text\n | file_lics_comment\n | file_notice\n | file_comment\n | file_contrib\n | file_dep\n | file_artifact\n | extr_lic_id\n | extr_lic_text\n | extr_lic_name\n | lic_xref\n | lic_comment\n | unknown_tag", "id": "f3753:c0:m3"} {"signature": "def p_file_dep_1(self, p):", "body": "try:if six.PY2:value = p[].decode(encoding='')else:value = p[]self.builder.add_file_dep(self.document, value)except OrderError:self.order_error('', '', p.lineno())", "docstring": 
"file_dep : FILE_DEP LINE", "id": "f3753:c0:m30"} {"signature": "def p_lics_list_ver_2(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "locs_list_ver : LIC_LIST_VER error", "id": "f3753:c0:m128"} {"signature": "def p_file_lic_info_value_3(self, p):", "body": "if six.PY2:value = p[].decode(encoding='')else:value = p[]p[] = document.License.from_identifier(value)", "docstring": "file_lic_info_value : LINE", "id": "f3753:c0:m47"} {"signature": "def p_pkg_src_info_1(self, p):", "body": "try:if six.PY2:value = p[].decode(encoding='')else:value = p[]self.builder.set_pkg_source_info(self.document, value)except CardinalityError:self.more_than_one_error('', p.lineno())except OrderError:self.order_error('', '', p.lineno())", "docstring": "pkg_src_info : PKG_SRC_INFO TEXT", "id": "f3753:c0:m83"} {"signature": "def p_file_dep_2(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "file_dep : FILE_DEP error", "id": "f3753:c0:m31"} {"signature": "def p_pkg_lic_decl_2(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "pkg_lic_decl : PKG_LICS_DECL error", "id": "f3753:c0:m75"} {"signature": "def p_lic_xref_2(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "lic_xref : LICS_CRS_REF error", "id": "f3753:c0:m7"} {"signature": "def p_file_cr_value_3(self, p):", "body": "p[] = utils.NoAssert()", "docstring": "file_cr_value : NO_ASSERT", "id": "f3753:c0:m40"} {"signature": "def p_file_cr_text_1(self, p):", "body": "try:self.builder.set_file_copyright(self.document, p[])except OrderError:self.order_error('', '', p.lineno())except CardinalityError:self.more_than_one_error('', p.lineno())", "docstring": "file_cr_text : FILE_CR_TEXT file_cr_value", "id": "f3753:c0:m36"} {"signature": "def p_doc_namespace_2(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "doc_namespace : DOC_NAMESPACE error", "id": "f3753:c0:m132"} {"signature": "def p_created_2(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "created : CREATED error", "id": "f3753:c0:m146"} {"signature": "def p_file_conc_2(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "file_conc : FILE_LICS_CONC error", "id": "f3753:c0:m61"} {"signature": "def p_lics_list_ver_1(self, p):", "body": "try:if six.PY2:value = p[].decode(encoding='')else:value = p[]self.builder.set_lics_list_ver(self.document, value)except SPDXValueError:self.error = Truemsg = ERROR_MESSAGES[''].format(p[], p.lineno())self.logger.log(msg)except CardinalityError:self.more_than_one_error('', p.lineno())", "docstring": "locs_list_ver : LIC_LIST_VER LINE", "id": "f3753:c0:m127"} {"signature": "def p_doc_name_1(self, p):", "body": "try:if six.PY2:value = p[].decode(encoding='')else:value = p[]self.builder.set_doc_name(self.document, value)except CardinalityError:self.more_than_one_error('', p.lineno())", "docstring": "doc_name : DOC_NAME LINE", "id": "f3753:c0:m135"} {"signature": "def p_file_type_2(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "file_type : FILE_TYPE error", "id": "f3753:c0:m57"} {"signature": "def p_pkg_lic_conc_2(self, p):", "body": "self.error = 
Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "pkg_lic_conc : PKG_LICS_CONC error", "id": "f3753:c0:m82"} {"signature": "def p_pkg_home_value_1(self, p):", "body": "if six.PY2:p[] = p[].decode(encoding='')else:p[] = p[]", "docstring": "pkg_home_value : LINE", "id": "f3753:c0:m91"} {"signature": "def p_file_conc_1(self, p):", "body": "try:self.builder.set_concluded_license(self.document, p[])except SPDXValueError:self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)except OrderError:self.order_error('', '', p.lineno())except CardinalityError:self.more_than_one_error('', p.lineno())", "docstring": "file_conc : FILE_LICS_CONC conc_license", "id": "f3753:c0:m60"} {"signature": "def p_file_comment_2(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "file_comment : FILE_COMMENT error", "id": "f3753:c0:m55"} {"signature": "def p_package_name(self, p):", "body": "try:if six.PY2:value = p[].decode(encoding='')else:value = p[]self.builder.create_package(self.document, value)except CardinalityError:self.more_than_one_error('', p.lineno())", "docstring": "package_name : PKG_NAME LINE", "id": "f3753:c0:m109"} {"signature": "def p_file_notice_2(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "file_notice : FILE_NOTICE error", "id": "f3753:c0:m35"} {"signature": "def p_pkg_lic_ff_1(self, p):", "body": "try:self.builder.set_pkg_license_from_file(self.document, p[])except OrderError:self.order_error('', '', p.lineno())except SPDXValueError:self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "pkg_lic_ff : PKG_LICS_FFILE pkg_lic_ff_value", "id": "f3753:c0:m76"} {"signature": "def p_file_lics_comment_1(self, p):", "body": "try:if six.PY2:value = p[].decode(encoding='')else:value = p[]self.builder.set_file_license_comment(self.document, value)except OrderError:self.order_error('', '', p.lineno())except CardinalityError:self.more_than_one_error('', p.lineno())", "docstring": "file_lics_comment : FILE_LICS_COMMENT TEXT", "id": "f3753:c0:m41"} {"signature": "def p_annotation_comment_1(self, p):", "body": "try:if six.PY2:value = p[].decode(encoding='')else:value = p[]self.builder.add_annotation_comment(self.document, value)except CardinalityError:self.more_than_one_error('', p.lineno())except OrderError:self.order_error('', '', p.lineno())", "docstring": "annotation_comment : ANNOTATION_COMMENT TEXT", "id": "f3753:c0:m121"} {"signature": "def p_pkg_supplier_values_1(self, p):", "body": "p[] = utils.NoAssert()", "docstring": "pkg_supplier_values : NO_ASSERT", "id": "f3753:c0:m103"} {"signature": "def p_reviewer_2(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "reviewer : REVIEWER error", "id": "f3753:c0:m112"} {"signature": "def p_prj_home_art_1(self, p):", "body": "try:self.builder.set_file_atrificat_of_project(self.document, '', p[])except OrderError:self.order_error('', '', p.lineno())", "docstring": "prj_home_art : ART_PRJ_HOME LINE", "id": "f3753:c0:m25"} {"signature": "def p_pkg_lic_ff_value_1(self, p):", "body": "p[] = utils.SPDXNone()", "docstring": "pkg_lic_ff_value : NONE", "id": "f3753:c0:m77"} {"signature": "def p_pkg_desc_2(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "pkg_desc : PKG_DESC error", "id": "f3753:c0:m64"} 
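
The p_* handlers above follow PLY's convention: each parser method carries its grammar production in its docstring, and every tag gets a companion `error` production that flags and logs failure. A minimal self-contained sketch of that pattern, assuming the `ply` package is installed; the DOC_NAME token and the tiny grammar are illustrative stand-ins, not the real SPDX token set:

import ply.lex as lex
import ply.yacc as yacc

tokens = ('DOC_NAME', 'LINE')

def t_DOC_NAME(t):
    r'DocumentName'
    return t

def t_LINE(t):
    r':.+'
    t.value = t.value[1:].strip()  # drop the ':' separator, as the lexer rules above do
    return t

t_ignore = ' \t'

def t_error(t):
    t.lexer.skip(1)

def p_doc_name(p):
    """doc_name : DOC_NAME LINE"""
    p[0] = ('doc_name', p[2])  # the production lives in the docstring, as above

def p_error(p):
    pass

lexer = lex.lex()
parser = yacc.yacc()
print(parser.parse('DocumentName: Example', lexer=lexer))  # ('doc_name', 'Example')

The `*_2` variants seen throughout hook a second production ending in PLY's special `error` token onto the same rule, which is how one malformed tag is reported without aborting the whole parse.
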
{"signature": "def p_annotation_spdx_id_1(self, p):", "body": "try:if six.PY2:value = p[].decode(encoding='')else:value = p[]self.builder.set_annotation_spdx_id(self.document, value)except CardinalityError:self.more_than_one_error('', p.lineno())except OrderError:self.order_error('', '', p.lineno())", "docstring": "annotation_spdx_id : ANNOTATION_SPDX_ID LINE", "id": "f3753:c0:m125"} {"signature": "def p_file_chksum_1(self, p):", "body": "try:if six.PY2:value = p[].decode(encoding='')else:value = p[]self.builder.set_file_chksum(self.document, value)except OrderError:self.order_error('', '', p.lineno())except CardinalityError:self.more_than_one_error('', p.lineno())", "docstring": "file_chksum : FILE_CHKSUM CHKSUM", "id": "f3753:c0:m58"} {"signature": "def p_reviewer_1(self, p):", "body": "self.builder.add_reviewer(self.document, p[])", "docstring": "reviewer : REVIEWER entity", "id": "f3753:c0:m111"} {"signature": "def p_pkg_file_name_1(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "pkg_file_name : PKG_FILE_NAME error", "id": "f3753:c0:m106"} {"signature": "def p_file_comment_1(self, p):", "body": "try:if six.PY2:value = p[].decode(encoding='')else:value = p[]self.builder.set_file_comment(self.document, value)except OrderError:self.order_error('', '', p.lineno())except CardinalityError:self.more_than_one_error('', p.lineno())", "docstring": "file_comment : FILE_COMMENT TEXT", "id": "f3753:c0:m54"} {"signature": "def p_pkg_down_value_2(self, p):", "body": "p[] = utils.SPDXNone()", "docstring": "pkg_down_value : NONE", "id": "f3753:c0:m97"} {"signature": "def p_creator_2(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "creator : CREATOR error", "id": "f3753:c0:m144"} {"signature": "def p_extr_lic_text_2(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "extr_lic_text : LICS_TEXT error", "id": "f3753:c0:m15"} {"signature": "def p_file_lic_info_value_2(self, p):", "body": "p[] = utils.NoAssert()", "docstring": "file_lic_info_value : NO_ASSERT", "id": "f3753:c0:m46"} {"signature": "def p_pkg_down_value_3(self, p):", "body": "p[] = utils.NoAssert()", "docstring": "pkg_down_value : NO_ASSERT", "id": "f3753:c0:m98"} {"signature": "def p_file_contrib_1(self, p):", "body": "try:if six.PY2:value = p[].decode(encoding='')else:value = p[]self.builder.add_file_contribution(self.document, value)except OrderError:self.order_error('', '', p.lineno())", "docstring": "file_contrib : FILE_CONTRIB LINE", "id": "f3753:c0:m32"} {"signature": "def p_pkg_chksum_1(self, p):", "body": "try:if six.PY2:value = p[].decode(encoding='')else:value = p[]self.builder.set_pkg_chk_sum(self.document, value)except OrderError:self.order_error('', '', p.lineno())except CardinalityError:self.more_than_one_error('', p.lineno())", "docstring": "pkg_chksum : PKG_CHKSUM CHKSUM", "id": "f3753:c0:m85"} {"signature": "def p_doc_comment_2(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "doc_comment : DOC_COMMENT error", "id": "f3753:c0:m130"} {"signature": "def p_pkg_verif_2(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "pkg_verif : PKG_VERF_CODE error", "id": "f3753:c0:m88"} {"signature": "def p_pkg_verif_1(self, p):", "body": "try:if six.PY2:value = p[].decode(encoding='')else:value = 
p[]self.builder.set_pkg_verif_code(self.document, value)except OrderError:self.order_error('', '', p.lineno())except CardinalityError:self.more_than_one_error('', p.lineno())except SPDXValueError:self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "pkg_verif : PKG_VERF_CODE LINE", "id": "f3753:c0:m87"} {"signature": "def p_spdx_id(self, p):", "body": "if six.PY2:value = p[].decode(encoding='')else:value = p[]if not self.builder.doc_spdx_id_set:self.builder.set_doc_spdx_id(self.document, value)else:self.builder.set_file_spdx_id(self.document, value)", "docstring": "spdx_id : SPDX_ID LINE", "id": "f3753:c0:m53"} {"signature": "def p_pkg_file_name(self, p):", "body": "try:if six.PY2:value = p[].decode(encoding='')else:value = p[]self.builder.set_pkg_file_name(self.document, value)except OrderError:self.order_error('', '', p.lineno())except CardinalityError:self.more_than_one_error('', p.lineno())", "docstring": "pkg_file_name : PKG_FILE_NAME LINE", "id": "f3753:c0:m105"} {"signature": "def p_pkg_supplier_2(self, p):", "body": "self.error = Truemsg = ERROR_MESSAGES[''].format(p.lineno())self.logger.log(msg)", "docstring": "pkg_supplier : PKG_SUPPL error", "id": "f3753:c0:m102"} {"signature": "def has_package(self, doc):", "body": "return doc.package is not None", "docstring": "Returns true if the document has a package.", "id": "f3754:c7:m16"} {"signature": "def set_doc_data_lics(self, doc, lics):", "body": "if not self.doc_data_lics_set:self.doc_data_lics_set = Trueif validations.validate_data_lics(lics):doc.data_license = document.License.from_identifier(lics)return Trueelse:raise SPDXValueError('')else:raise CardinalityError('')", "docstring": "Sets the document data license.\n Raises value error if malformed value, CardinalityError\n if already defined.", "id": "f3754:c0:m2"} {"signature": "def add_lic_xref(self, doc, ref):", "body": "if self.has_extr_lic(doc):self.extr_lic(doc).add_xref(ref)return Trueelse:raise OrderError('')", "docstring": "Adds a license cross reference.\n Raises OrderError if no License ID defined.", "id": "f3754:c8:m7"} {"signature": "def add_review_date(self, doc, reviewed):", "body": "if len(doc.reviews) != :if not self.review_date_set:self.review_date_set = Truedate = utils.datetime_from_iso_format(reviewed)if date is not None:doc.reviews[-].review_date = datereturn Trueelse:raise SPDXValueError('')else:raise CardinalityError('')else:raise OrderError('')", "docstring": "Sets the review date. Raises CardinalityError if\n already set. 
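
All of these builder setters share one guard shape: a boolean `*_set` flag enforces the at-most-once cardinality rules, and prerequisite checks raise an ordering error first. A condensed, hypothetical sketch of that pattern (the dict-based `doc` is a stand-in for the real document model):

class CardinalityError(Exception):
    pass

class OrderError(Exception):
    pass

class ReviewBuilder:
    def __init__(self):
        self.review_date_set = False

    def add_review_date(self, doc, reviewed):
        # OrderError: a reviewer must exist before a date can be attached
        if not doc.get('reviews'):
            raise OrderError('Review::ReviewDate')
        # CardinalityError: at most one date per review
        if self.review_date_set:
            raise CardinalityError('Review::ReviewDate')
        self.review_date_set = True
        doc['reviews'][-1]['date'] = reviewed
        return True

builder = ReviewBuilder()
doc = {'reviews': [{}]}
builder.add_review_date(doc, '2024-01-01T00:00:00Z')
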
OrderError if no reviewer defined before.\n Raises SPDXValueError if invalid reviewed value.", "id": "f3754:c4:m3"} {"signature": "def set_pkg_file_name(self, doc, name):", "body": "self.assert_package_exists()if not self.package_file_name_set:self.package_file_name_set = Truedoc.package.file_name = namereturn Trueelse:raise CardinalityError('')", "docstring": "Sets the package file name, if not already set.\n name - Any string.\n Raises CardinalityError if already has a file_name.\n Raises OrderError if no pacakge previously defined.", "id": "f3754:c6:m4"} {"signature": "def set_file_spdx_id(self, doc, spdx_id):", "body": "if self.has_package(doc) and self.has_file(doc):if not self.file_spdx_id_set:self.file_spdx_id_set = Trueif validations.validate_file_spdx_id(spdx_id):self.file(doc).spdx_id = spdx_idreturn Trueelse:raise SPDXValueError('')else:raise CardinalityError('')else:raise OrderError('')", "docstring": "Sets the file SPDX Identifier.\nRaises OrderError if no package or no file defined.\nRaises SPDXValueError if malformed value.\nRaises CardinalityError if more than one spdx_id set.", "id": "f3754:c7:m2"} {"signature": "def build_org(self, doc, entity):", "body": "match = self.org_re.match(entity)if match and validations.validate_org_name(match.group(self.ORG_NAME_GROUP)):name = match.group(self.ORG_NAME_GROUP).strip()email = match.group(self.ORG_EMAIL_GROUP)if (email is not None) and (len(email) != ):return creationinfo.Organization(name=name, email=email.strip())else:return creationinfo.Organization(name=name, email=None)else:raise SPDXValueError('')", "docstring": "Builds an organization object of of a string representation.\n Returns built organization. Raises SPDXValueError if failed to extract\n name.", "id": "f3754:c2:m1"} {"signature": "def set_lic_name(self, doc, name):", "body": "if self.has_extr_lic(doc):if not self.extr_lic_name_set:self.extr_lic_name_set = Trueif validations.validate_extr_lic_name(name):self.extr_lic(doc).full_name = namereturn Trueelse:raise SPDXValueError('')else:raise CardinalityError('')else:raise OrderError('')", "docstring": "Sets license name.\n Raises SPDXValueError if name is not str or utils.NoAssert\n Raises OrderError if no license id defined.", "id": "f3754:c8:m5"} {"signature": "def reset(self):", "body": "self.reset_creation_info()self.reset_document()self.reset_package()self.reset_file_stat()self.reset_reviews()self.reset_annotations()self.reset_extr_lics()", "docstring": "Resets builder's state for building new documents.\n Must be called between usage with different documents.", "id": "f3754:c9:m1"} {"signature": "def set_chksum(self, doc, chksum):", "body": "doc.ext_document_references[-].check_sum = checksum_from_sha1(chksum)", "docstring": "Sets the `check_sum` attribute of the `ExternalDocumentRef`\nobject.", "id": "f3754:c1:m2"} {"signature": "def set_doc_namespace(self, doc, namespace):", "body": "if not self.doc_namespace_set:self.doc_namespace_set = Trueif validations.validate_doc_namespace(namespace):doc.namespace = namespacereturn Trueelse:raise SPDXValueError('')else:raise CardinalityError('')", "docstring": "Sets the document namespace.\n Raise SPDXValueError if malformed value, CardinalityError\n if already defined.", "id": "f3754:c0:m6"} {"signature": "def set_pkg_source_info(self, doc, text):", "body": "self.assert_package_exists()if not self.package_source_info_set:self.package_source_info_set = Trueif validations.validate_pkg_src_info(text):doc.package.source_info = str_from_text(text)return Trueelse:raise 
SPDXValueError('')else:raise CardinalityError('')", "docstring": "Sets the package's source information, if not already set.\n text - Free form text.\n Raises CardinalityError if already defined.\n Raises OrderError if no package previously defined.\n SPDXValueError if text is not free form text.", "id": "f3754:c6:m11"} {"signature": "def set_file_copyright(self, doc, text):", "body": "if self.has_package(doc) and self.has_file(doc):if not self.file_copytext_set:self.file_copytext_set = Trueif validations.validate_file_cpyright(text):if isinstance(text, string_types):self.file(doc).copyright = str_from_text(text)else:self.file(doc).copyright = text return Trueelse:raise SPDXValueError('')else:raise CardinalityError('')else:raise OrderError('')", "docstring": "Raises OrderError if no package or file defined.\n Raises SPDXValueError if not free form text or NONE or NO_ASSERT.\n Raises CardinalityError if more than one.", "id": "f3754:c7:m9"} {"signature": "def set_pkg_summary(self, doc, text):", "body": "self.assert_package_exists()if not self.package_summary_set:self.package_summary_set = Trueif validations.validate_pkg_summary(text):doc.package.summary = str_from_text(text)else:raise SPDXValueError('')else:raise CardinalityError('')", "docstring": "Set's the package summary.\n Raises SPDXValueError if text is not free form text.\n Raises CardinalityError if summary already set.\n Raises OrderError if no package previously defined.", "id": "f3754:c6:m17"} {"signature": "def add_reviewer(self, doc, reviewer):", "body": "self.reset_reviews()if validations.validate_reviewer(reviewer):doc.add_review(review.Review(reviewer=reviewer))return Trueelse:raise SPDXValueError('')", "docstring": "Adds a reviewer to the SPDX Document.\n Reviwer is an entity created by an EntityBuilder.\n Raises SPDXValueError if not a valid reviewer type.", "id": "f3754:c4:m2"} {"signature": "def set_annotation_spdx_id(self, doc, spdx_id):", "body": "if len(doc.annotations) != :if not self.annotation_spdx_id_set:self.annotation_spdx_id_set = Truedoc.annotations[-].spdx_id = spdx_idreturn Trueelse:raise CardinalityError('')else:raise OrderError('')", "docstring": "Sets the annotation SPDX Identifier.\n Raises CardinalityError if already set. 
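
Several setters here (source info, copyright, summary, comments) funnel their input through `str_from_text`, which is not shown in this excerpt. A plausible sketch, assuming SPDX tag/value's `<text>...</text>` wrapper for free-form multi-line values; the regex is an inference, not the library's verbatim code:

import re

TEXT_RE = re.compile(r'<text>(.*)</text>', re.UNICODE | re.DOTALL)

def str_from_text(text):
    """Return the string wrapped by <text>...</text>, or None if not free-form text."""
    match = TEXT_RE.match(text)
    if match:
        return match.group(1)
    return None

print(str_from_text('<text>multi\nline summary</text>'))  # 'multi\nline summary'
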
OrderError if no annotator\n defined before.", "id": "f3754:c5:m6"} {"signature": "def checksum_from_sha1(value):", "body": "CHECKSUM_RE = re.compile('', re.UNICODE)match = CHECKSUM_RE.match(value)if match:return checksum.Algorithm(identifier='', value=match.group())else:return None", "docstring": "Return an spdx.checksum.Algorithm instance representing the SHA1\nchecksum or None if does not match CHECKSUM_RE.", "id": "f3754:m0"} {"signature": "def set_file_comment(self, doc, text):", "body": "if self.has_package(doc) and self.has_file(doc):if not self.file_comment_set:self.file_comment_set = Trueif validations.validate_file_comment(text):self.file(doc).comment = str_from_text(text)return Trueelse:raise SPDXValueError('')else:raise CardinalityError('')else:raise OrderError('')", "docstring": "Raises OrderError if no package or no file defined.\nRaises CardinalityError if more than one comment set.\nRaises SPDXValueError if text is not free form text.", "id": "f3754:c7:m3"} {"signature": "def reset_reviews(self):", "body": "self.review_date_set = Falseself.review_comment_set = False", "docstring": "Resets the builder's state to allow building new reviews.", "id": "f3754:c4:m1"} {"signature": "def extr_lic(self, doc):", "body": "return doc.extracted_licenses[-]", "docstring": "Retrieves last license in extracted license list", "id": "f3754:c8:m1"} {"signature": "def set_created_date(self, doc, created):", "body": "if not self.created_date_set:self.created_date_set = Truedate = utils.datetime_from_iso_format(created)if date is not None:doc.creation_info.created = datereturn Trueelse:raise SPDXValueError('')else:raise CardinalityError('')", "docstring": "Sets created date, Raises CardinalityError if\n created date already set.\n Raises SPDXValueError if created is not a date.", "id": "f3754:c3:m2"} {"signature": "def set_file_license_in_file(self, doc, lic):", "body": "if self.has_package(doc) and self.has_file(doc):if validations.validate_file_lics_in_file(lic):self.file(doc).add_lics(lic)return Trueelse:raise SPDXValueError('')else:raise OrderError('')", "docstring": "Raises OrderError if no package or file defined.\nRaises SPDXValueError if malformed value.", "id": "f3754:c7:m7"} {"signature": "def add_file_dep(self, doc, value):", "body": "if self.has_package(doc) and self.has_file(doc):self.file(doc).add_depend(value)else:raise OrderError('')", "docstring": "Raises OrderError if no package or file defined.", "id": "f3754:c7:m12"} {"signature": "def add_annotation_type(self, doc, annotation_type):", "body": "if len(doc.annotations) != :if not self.annotation_type_set:self.annotation_type_set = Trueif validations.validate_annotation_type(annotation_type):doc.annotations[-].annotation_type = annotation_typereturn Trueelse:raise SPDXValueError('')else:raise CardinalityError('')else:raise OrderError('')", "docstring": "Sets the annotation type. Raises CardinalityError if\n already set. 
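
The `CHECKSUM_RE` pattern in `checksum_from_sha1` above was elided in extraction. A plausible reconstruction given the stated purpose (the exact pattern, group index, and identifier string are assumptions):

import re
from collections import namedtuple

# Stand-in for spdx.checksum.Algorithm
Algorithm = namedtuple('Algorithm', ['identifier', 'value'])

CHECKSUM_RE = re.compile(r'SHA1:\s*([a-fA-F0-9]{40})', re.UNICODE)

def checksum_from_sha1(value):
    match = CHECKSUM_RE.match(value)
    if match:
        return Algorithm(identifier='SHA1', value=match.group(1))
    return None

print(checksum_from_sha1('SHA1: ' + 'ab' * 20).value)  # 40 hex digits
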
OrderError if no annotator defined before.\n Raises SPDXValueError if invalid value.", "id": "f3754:c5:m5"} {"signature": "def build_person(self, doc, entity):", "body": "match = self.person_re.match(entity)if match and validations.validate_person_name(match.group(self.PERSON_NAME_GROUP)):name = match.group(self.PERSON_NAME_GROUP).strip()email = match.group(self.PERSON_EMAIL_GROUP)if (email is not None) and (len(email) != ):return creationinfo.Person(name=name, email=email.strip())else:return creationinfo.Person(name=name, email=None)else:raise SPDXValueError('')", "docstring": "Builds an organization object of of a string representation.\n Returns built organization. Raises SPDXValueError if failed to extract\n name.", "id": "f3754:c2:m2"} {"signature": "def set_doc_comment(self, doc, comment):", "body": "if not self.doc_comment_set:self.doc_comment_set = Trueif validations.validate_doc_comment(comment):doc.comment = str_from_text(comment)return Trueelse:raise SPDXValueError('')else:raise CardinalityError('')", "docstring": "Sets document comment, Raises CardinalityError if\n comment already set.\n Raises SPDXValueError if comment is not free form text.", "id": "f3754:c0:m5"} {"signature": "def reset_annotations(self):", "body": "self.annotation_date_set = Falseself.annotation_comment_set = Falseself.annotation_type_set = Falseself.annotation_spdx_id_set = False", "docstring": "Resets the builder's state to allow building new annotations.", "id": "f3754:c5:m1"} {"signature": "def set_pkg_license_declared(self, doc, lic):", "body": "self.assert_package_exists()if not self.package_license_declared_set:self.package_license_declared_set = Trueif validations.validate_lics_conc(lic):doc.package.license_declared = licreturn Trueelse:raise SPDXValueError('')else:raise CardinalityError('')", "docstring": "Sets the package's declared license.\n Raises SPDXValueError if data malformed.\n Raises OrderError if no package previously defined.\n Raises CardinalityError if already set.", "id": "f3754:c6:m14"} {"signature": "def set_doc_name(self, doc, name):", "body": "if not self.doc_name_set:doc.name = nameself.doc_name_set = Truereturn Trueelse:raise CardinalityError('')", "docstring": "Sets the document name.\n Raises CardinalityError if already defined.", "id": "f3754:c0:m3"} {"signature": "def set_pkg_chk_sum(self, doc, chk_sum):", "body": "self.assert_package_exists()if not self.package_chk_sum_set:self.package_chk_sum_set = Truedoc.package.check_sum = checksum_from_sha1(chk_sum)return Trueelse:raise CardinalityError('')", "docstring": "Sets the package check sum, if not already set.\n chk_sum - A string\n Raises CardinalityError if already defined.\n Raises OrderError if no package previously defined.", "id": "f3754:c6:m10"} {"signature": "def add_review_comment(self, doc, comment):", "body": "if len(doc.reviews) != :if not self.review_comment_set:self.review_comment_set = Trueif validations.validate_review_comment(comment):doc.reviews[-].comment = str_from_text(comment)return Trueelse:raise SPDXValueError('')else:raise CardinalityError('')else:raise OrderError('')", "docstring": "Sets the review comment. Raises CardinalityError if\n already set. 
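
`build_person` and `build_org` above match the SPDX `Name (email)` entity form against regexes that were likewise elided. A sketch of the person variant under the assumption of that form (the regex and group names are guesses):

import re

PERSON_RE = re.compile(r'Person:\s*(?P<name>[^(]+)(\((?P<email>.*)\))?', re.UNICODE)

def build_person(entity):
    match = PERSON_RE.match(entity)
    if match is None or not match.group('name').strip():
        raise ValueError('Failed to extract person name')
    name = match.group('name').strip()
    email = match.group('email')  # None when no parenthesized email is present
    return name, (email.strip() if email else None)

print(build_person('Person: Jane Doe (jane@example.com)'))
# ('Jane Doe', 'jane@example.com')
print(build_person('Person: Jane Doe'))
# ('Jane Doe', None)
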
OrderError if no reviewer defined before.\n Raises SPDXValueError if comment is not free form text.", "id": "f3754:c4:m4"} {"signature": "def t_text(self, t):", "body": "t.lexer.text_start = t.lexer.lexpos - len('')t.lexer.begin('')", "docstring": "r':\\s*", "id": "f3755:c0:m0"} {"signature": "def t_newline(self, t):", "body": "t.lexer.lineno += len(t.value)", "docstring": "r'\\n+", "id": "f3755:c0:m15"} {"signature": "def t_PERSON_VALUE(self, t):", "body": "t.value = t.value[:].strip()return t", "docstring": "r':\\s*Person:.+", "id": "f3755:c0:m10"} {"signature": "def t_DOC_URI(self, t):", "body": "t.value = t.value.strip()return t", "docstring": "r'\\s*((ht|f)tps?:\\/\\/\\S*)", "id": "f3755:c0:m6"} {"signature": "def t_comment(self, t):", "body": "pass", "docstring": "r'\\#.*", "id": "f3755:c0:m14"} {"signature": "def t_LINE_OR_KEYWORD_VALUE(self, t):", "body": "t.value = t.value[:].strip()if t.value in self.reserved.keys():t.type = self.reserved[t.value]else:t.type = ''return t", "docstring": "r':.+", "id": "f3755:c0:m13"} {"signature": "def build(self, **kwargs):", "body": "self.yacc = yacc.yacc(module=self, **kwargs)", "docstring": "Must be called before parse.", "id": "f3756:c4:m8"} {"signature": "def t_LP(self, t):", "body": "return t", "docstring": "r'\\(", "id": "f3756:c3:m0"} {"signature": "def parse(self, data):", "body": "try:return self.yacc.parse(data, lexer=self.lex)except:return None", "docstring": "Parses a license list and returns a License or None if it failed.", "id": "f3756:c4:m9"} {"signature": "def t_whitespace(self, t):", "body": "pass", "docstring": "r'\\s+", "id": "f3756:c3:m4"} {"signature": "def t_AND(self, t):", "body": "t.value = t.value.strip()return t", "docstring": "r'\\s(and|AND)\\s", "id": "f3756:c3:m2"} {"signature": "def p_conjunction_1(self, p):", "body": "p[] = document.LicenseConjunction(p[], p[])", "docstring": "conjunction : conjunction AND license_atom", "id": "f3756:c4:m3"} {"signature": "def p_disjunction_1(self, p):", "body": "p[] = document.LicenseDisjunction(p[], p[])", "docstring": "disjunction : disjunction OR conjunction", "id": "f3756:c4:m1"} {"signature": "def p_conjunction_2(self, p):", "body": "p[] = p[]", "docstring": "conjunction : license_atom", "id": "f3756:c4:m4"} {"signature": "def t_LICENSE(self, t):", "body": "t.value = t.value.strip()return t", "docstring": "r'[A-Za-z.0-9\\-+]+", "id": "f3756:c3:m5"} {"signature": "def datetime_from_iso_format(string):", "body": "match = DATE_ISO_REGEX.match(string)if match:date = datetime.datetime(year=int(match.group(DATE_ISO_YEAR_GRP)),month=int(match.group(DATE_ISO_MONTH_GRP)),day=int(match.group(DATE_ISO_DAY_GRP)),hour=int(match.group(DATE_ISO_HOUR_GRP)),second=int(match.group(DATE_ISO_SEC_GRP)),minute=int(match.group(DATE_ISO_MIN_GRP)))return dateelse:return None", "docstring": "Return a datetime object from an iso 8601 representation.\nReturn None if string is non conforming.", "id": "f3756:m1"} {"signature": "def token(self):", "body": "return self.lexer.token()", "docstring": "Get the next token or None if exhausted input.", "id": "f3756:c3:m8"} {"signature": "def p_license_atom_1(self, p):", "body": "p[] = document.License.from_identifier(p[])", "docstring": "license_atom : LICENSE", "id": "f3756:c4:m5"} {"signature": "def validate(self, messages):", "body": "messages = self.validate_annotator(messages)messages = self.validate_annotation_date(messages)messages = self.validate_annotation_type(messages)messages = self.validate_spdx_id(messages)return messages", "docstring": "Returns 
True if all the fields are valid.\n Appends any error messages to messages parameter.", "id": "f3757:c0:m6"} {"signature": "def get_temp_file(extension=''):", "body": "if extension and not extension.startswith(''):extension = '' + extensionfile_name = '' + extensiontemp_dir = tempfile.mkdtemp()return os.path.join(temp_dir, file_name)", "docstring": "Return a unique new temporary file location to a non-existing\ntemporary file that can safely be created without a risk of name\ncollision.", "id": "f3762:m0"} {"signature": "def sort_nested(data):", "body": "if isinstance(data, dict):new_data = {}for k, v in data.items():if isinstance(v, list):v = sorted(v)if isinstance(v, dict):v = sort_nested(v)new_data[k] = vreturn new_dataelif isinstance(data, list):new_data = []for v in sorted(data):if isinstance(v, list):v = sort_nested(v)if isinstance(v, dict):v = sort_nested(v)new_data.append(v)return new_data", "docstring": "Return a new dict with any nested list sorted recursively.", "id": "f3764:m4"} {"signature": "def check_rdf_scan(expected_file, result_file, regen=False):", "body": "import jsonresult = load_and_clean_rdf(result_file)if regen:expected = resultwith codecs.open(expected_file, '', encoding='') as o:json.dump(expected, o, indent=)else:with codecs.open(expected_file, '', encoding='') as i:expected = sort_nested(json.load(i))assert expected == result", "docstring": "Check that expected and result_file are equal.\nBoth are paths to SPDX RDF XML files, UTF-8 encoded.", "id": "f3764:m5"} {"signature": "def load_and_clean_tv(location):", "body": "content = codecs.open(location, encoding='').read()content = [l for l in content.splitlines(False)if l and l.strip() and not l.startswith(('', '',))]return ''.join(content)", "docstring": "Return a mapping for the SPDX TV file at location suitable for\ncomparison. The file content is cleaned from variable parts such as\ndates, generated UUIDs and versions", "id": "f3764:m6"} {"signature": "def strip_variable_text(rdf_text):", "body": "replace_nid = re.compile('').subrdf_text = replace_nid('', rdf_text)replace_creation = re.compile('', re.DOTALL).subrdf_text = replace_creation('', rdf_text)replace_pcc = re.compile('', re.DOTALL).subrdf_text = replace_pcc('', rdf_text)return rdf_text", "docstring": "Return rdf_text stripped from variable parts such as rdf nodeids", "id": "f3764:m2"} {"signature": "def load_and_clean_rdf(location):", "body": "content = codecs.open(location, encoding='').read()content = strip_variable_text(content)data = xmltodict.parse(content, dict_constructor=dict)return sort_nested(data)", "docstring": "Return plain Python nested data for the SPDX RDF file at location\nsuitable for comparison. The file content is cleaned from variable\nparts such as dates, generated UUIDs and versions\n\nNOTE: we use plain dicts to avoid ordering issues in XML. 
the SPDX\ntool and lxml do not seem to return a consistent ordering that is\nneeded for tests.", "id": "f3764:m3"} {"signature": "def __init__(self, default_args):", "body": "self._default_args = default_args", "docstring": ":param default_args: default arguments\n:type default_args: string or list of string", "id": "f3777:c0:m0"} {"signature": "@propertydef timeout(self):", "body": "return self._timeout", "docstring": ":return: seconds to wait I/O.\n:rtype: float", "id": "f3780:c0:m20"} {"signature": "def send_keys(self, keys, wait=True):", "body": "self._process.stdin.write(bytearray(keys, self._encoding))self._process.stdin.flush()if wait:self.wait()", "docstring": "Send a raw key sequence to *Vim*.\n\n.. note:: *Vim* style key sequence notation (like ````)\n is not recognized.\n Use escaped characters (like ``'\\033'``) instead.\n\nExample:\n\n>>> import headlessvim\n>>> with headlessvim.open() as vim:\n... vim.send_keys('ispam\\033')\n... str(vim.display_lines()[0].strip())\n...\n'spam'\n\n:param string keys: key sequence to send\n:param boolean wait: whether to wait for a response", "id": "f3780:c0:m9"} {"signature": "def close(self):", "body": "self._tempfile.close()self._process.terminate()if self._process.is_alive():self._process.kill()", "docstring": "Disconnect and close *Vim*.", "id": "f3780:c0:m5"} {"signature": "def install_plugin(self, dir, entry_script=None):", "body": "self.runtimepath.append(dir)if entry_script is not None:self.command(''.format(entry_script), False)", "docstring": "Install a *Vim* plugin.\n\n:param string dir: the root directory that contains the *Vim* script\n:param string entry_script: path to the initializing script", "id": "f3780:c0:m11"} {"signature": "def wait(self, timeout=None):", "body": "if timeout is None:timeout = self._timeoutwhile self._process.check_readable(timeout):self._flush()", "docstring": "Wait for a response until timeout.\nIf timeout is None, ``self.timeout`` is used.\n\n:param float timeout: seconds to wait for I/O", "id": "f3780:c0:m10"} {"signature": "def display_lines(self):", "body": "return self._screen.display", "docstring": "Shows the terminal screen split by newlines.\n\nAlmost equivalent to ``self.display().splitlines()``\n\n:return: screen as a list of strings\n:rtype: list of string", "id": "f3780:c0:m8"} {"signature": "@propertydef encoding(self):", "body": "return self._encoding", "docstring": ":return: internal encoding of *Vim*.\n:rtype: string", "id": "f3780:c0:m17"} {"signature": "@propertydef executable(self):", "body": "return self._process.executable", "docstring": ":return: the absolute path to the process.\n:rtype: string", "id": "f3780:c0:m15"} {"signature": "@propertydef stdin(self):", "body": "return self._stdin", "docstring": ":return: file-like object representing the standard input\n of the process\n:rtype: file-like object", "id": "f3781:c0:m7"} {"signature": "@propertydef executable(self):", "body": "return self._executable", "docstring": ":return: the absolute path to the process.\n:rtype: string", "id": "f3781:c0:m5"} {"signature": "def terminate(self):", "body": "with self._close():self._process.terminate()", "docstring": "Terminate this process.\nUse this method rather than ``self.kill``.", "id": "f3781:c0:m1"} {"signature": "def is_alive(self):", "body": "return self._process.poll() is None", "docstring": "Check if the process is alive.\n\n:return: True if the process is alive, else False\n:rtype: boolean", "id": "f3781:c0:m4"} {"signature": "@propertydef args(self):", "body": "return self._args", 
"docstring": ":return: launch arguments of the process.\n:rtype: string or list of string", "id": "f3781:c0:m6"} {"signature": "@propertydef stdout(self):", "body": "return self._stdout", "docstring": ":return: non blocking file-like object\n representing the standard output of the process\n:rtype: file-like object", "id": "f3781:c0:m8"} {"signature": "def __init__(self, executable, args, env):", "body": "self._executable = distutils.spawn.find_executable(executable)self._args = argsself._env = envself._open_process()", "docstring": ":param str executable: command name to execute *Vim*\n:param args: arguments to execute *Vim*\n:type args: None or string or list of string\n:param env: environment variables to execute *Vim*\n:type env: None or dict of (string, string)", "id": "f3781:c0:m0"} {"signature": "def run(self):", "body": "run_once = Truewhile (run_once or self._threaded) and self.end is False:self.service_tx_queue()self.parse_messages()run_once = Falseif self._threaded:time.sleep(self._timeout)if self._threaded:logger.info('')", "docstring": "Receives the serial data into the self._raw buffer\n:return:", "id": "f3786:c0:m23"} {"signature": "def _remove_esc_chars(self, raw_message):", "body": "message = []escape_next = Falsefor c in raw_message:if escape_next:message.append(c ^ self._ESC_XOR)escape_next = Falseelse:if c == self._ESC:escape_next = Trueelse:message.append(c)return message", "docstring": "Removes any escape characters from the message\n:param raw_message: a list of bytes containing the un-processed data\n:return: a message that has the escaped characters appropriately un-escaped", "id": "f3788:c0:m6"} {"signature": "def rx(self):", "body": "if not self._threaded:self.run()try:return tuple(self._messages.pop())except IndexError:return None", "docstring": "Receive a series of bytes that have been verified\n:return: a series of bytes as a tuple or None if empty", "id": "f3788:c0:m2"} {"signature": "def run(self):", "body": "run_once = Truewhile run_once or self._threaded:waiting = self._port.in_waitingif waiting > :temp = [int(c) for c in self._port.read(waiting)]self._raw += tempself._parse_raw_data()run_once = Falseif self._threaded:time.sleep(self._timeout)", "docstring": "Receives the serial data into the self._raw buffer\n:return:", "id": "f3788:c0:m7"} {"signature": "def _parse_raw_data(self):", "body": "if self._START_OF_FRAME in self._raw and self._END_OF_FRAME in self._raw:while self._raw[] != self._START_OF_FRAME and len(self._raw) > :self._raw.pop()if self._raw[] == self._START_OF_FRAME:self._raw.pop()eof_index = self._raw.index(self._END_OF_FRAME)raw_message = self._raw[:eof_index]self._raw = self._raw[eof_index:]logger.debug(''.format(raw_message))message = self._remove_esc_chars(raw_message)logger.debug(''.format(message))expected_checksum = (message[-] << ) | message[-]logger.debug(''.format(expected_checksum))message = message[:-] logger.debug(''.format(message))sum1, sum2 = self._fletcher16_checksum(message)calculated_checksum = (sum2 << ) | sum1if expected_checksum == calculated_checksum:message = message[:] logger.debug(''.format(message))self._messages.append(message)else:logger.warning(''.format(message))logger.debug(''.format(expected_checksum, calculated_checksum))try:while self._raw[] != self._START_OF_FRAME and len(self._raw) > :self._raw.pop()except IndexError:pass", "docstring": "Parses the incoming data and determines if it is valid. 
Valid\ndata gets placed into self._messages\n:return: None", "id": "f3788:c0:m4"} {"signature": "def convert_to_float(value):", "body": "try:ret_val = float(value)return ret_val, Trueexcept ValueError:return , False", "docstring": "Convert a string to a float", "id": "f3794:m5"} {"signature": "def convert_words_to_uint(high_word, low_word):", "body": "try:low_num = int(low_word)if low_num < :low_num = abs(low_num) + **number = (int(high_word) << ) | low_numreturn number, Trueexcept:return , False", "docstring": "Convert two words to an unsigned integer", "id": "f3794:m7"} {"signature": "def check_pid(pid, debug):", "body": "try:os.kill(pid, )if debug > :print(\"\")return Trueexcept OSError:if debug > :print(\"\")return False", "docstring": "This function will check whether a PID is currently running", "id": "f3794:m2"} {"signature": "def convert_int32(high_word, low_word):", "body": "return convert_words_to_uint(high_word, low_word)", "docstring": "Convert two words to a 32-bit unsigned integer", "id": "f3794:m6"} {"signature": "def check_pidfile(pidfile, debug):", "body": "if os.path.isfile(pidfile):pidfile_handle = open(pidfile, '')try:pid = int(pidfile_handle.read())pidfile_handle.close()if check_pid(pid, debug):return Trueexcept:passos.unlink(pidfile)pid = str(os.getpid())open(pidfile, '').write(pid)return False", "docstring": "Check that a process is not running more than once, using PIDFILE", "id": "f3794:m1"} {"signature": "def run_program(prog_list, debug, shell):", "body": "try:if not shell:process = Popen(prog_list, stdout=PIPE, stderr=PIPE)stdout, stderr = process.communicate()retcode = process.returncodeif debug >= :print(\"\", \"\".join(prog_list))print(\"\", retcode)print(\"\", stdout)print(\"\", stderr)return bool(retcode)else:command = \"\".join(prog_list)os.system(command)return Trueexcept:return False", "docstring": "Run a program and check the program return code. Note that some commands don't work\n well with Popen. So if this function is specifically called with 'shell=True',\n it will run the old 'os.system', in which case there is no program output", "id": "f3794:m10"} {"signature": "def contains(self, logger, level, message, is_regex=False):", "body": "for record in self.records:if record.name != logger or record.levelno != level:continueif is_regex:if re.search(message, (record.msg % record.args)):return Trueelse:if message in (record.msg % record.args):return Truereturn False", "docstring": "Checks whether a message has been logged to a specific logger with a\nspecific level.\n\n:param logger: The logger.\n:param level: The log level.\n:param message: The message contents.\n:param is_regex: Whether the expected message is a regex or not.\n Non-regex messages are simply tested for inclusion.", "id": "f3795:c0:m4"} {"signature": "def emit(self, record):", "body": "self.records.append(record)", "docstring": "Overrides :py:meth:`logging.Handler.emit`.", "id": "f3795:c0:m1"} {"signature": "@contextmanagerdef environment(**kwargs):", "body": "old_values = {}nonexistent = set()for key in kwargs:if key not in os.environ:nonexistent.add(key)else:old_values[key] = os.environ[key]os.environ[key] = kwargs[key]try:yieldfinally:for key in old_values:os.environ[key] = old_values[key]for key in nonexistent:os.environ.pop(key)", "docstring": "Context manager to temporarily change environment variables. 
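
A short usage sketch of the `environment` context manager defined just above, assuming it is importable; the variable names are illustrative:

import os

os.environ['EXISTING'] = 'old'
with environment(EXISTING='new', TEMPORARY='1'):
    assert os.environ['EXISTING'] == 'new'
    assert os.environ['TEMPORARY'] == '1'
# on exit, pre-existing values are restored and added keys removed
assert os.environ['EXISTING'] == 'old'
assert 'TEMPORARY' not in os.environ
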
On exit all\nvariables are set to their original value.", "id": "f3795:m0"} {"signature": "def check_file(self, filename):", "body": "can_read = super(SecuredConfig, self).check_file(filename)if not can_read:return Falsemode = get_stat(filename).st_modeif (mode & stat.S_IRGRP) or (mode & stat.S_IROTH):msg = \"\"self._log.warning(msg, filename)return Falsereturn True", "docstring": "Overrides :py:meth:`.Config.check_file`", "id": "f3801:c1:m0"} {"signature": "def load(self, reload=False, require_load=False):", "body": "if reload: self.config = Noneif self.config: self._log.debug('''')returnpath = self._effective_path()config_filename = self._effective_filename()self._active_path = [join(_, config_filename) for _ in path]for dirname in path:conf_name = join(dirname, config_filename)readable = self.check_file(conf_name)if readable:action = '' if self._loaded_files else ''self._log.info('', action, conf_name)self.read(conf_name)if conf_name == expanduser(\"\" % (self.group_name, self.app_name, self.filename)):self._log.warning(\"\"\"\"\"\"\"\"\"\"\"\"\"\", expanduser(\"\"), self.group_name,self.app_name, expanduser(\"\"), self.group_name,self.app_name)self._loaded_files.append(conf_name)if not self._loaded_files and not require_load:self._log.warning(\"\",config_filename,path)elif not self._loaded_files and require_load:raise IOError(\"\"\"\" % (config_filename, path))", "docstring": "Searches for an appropriate config file. If found, loads the file into\nthe current instance. This method can also be used to reload a\nconfiguration. Note that you may want to set ``reload`` to ``True`` to\nclear the configuration before loading in that case. Without doing\nthat, values will remain available even if they have been removed from\nthe config files.\n\n:param reload: if set to ``True``, the existing values are cleared\n before reloading.\n:param require_load: If set to ``True`` this will raise a\n :py:exc:`IOError` if no config file has been found\n to load.", "id": "f3801:c0:m9"} {"signature": "def get_xdg_dirs(self):", "body": "config_dirs = getenv('', '')if config_dirs:self._log.debug('', config_dirs)output = []for path in reversed(config_dirs.split('')):output.append(join(path, self.group_name, self.app_name))return outputreturn ['' % (self.group_name, self.app_name)]", "docstring": "Returns a list of paths specified by the XDG_CONFIG_DIRS environment\nvariable or the appropriate default.\n\nThe list is sorted by precedence, with the most important item coming\n*last* (required by the existing config_resolver logic).", "id": "f3801:c0:m3"} {"signature": "def get_xdg_home(self):", "body": "config_home = getenv('', '')if config_home:self._log.debug('', config_home)return expanduser(join(config_home, self.group_name, self.app_name))return expanduser('' % (self.group_name, self.app_name))", "docstring": "Returns the value specified in the XDG_CONFIG_HOME environment variable\nor the appropriate default.", "id": "f3801:c0:m4"} {"signature": "def _effective_path(self):", "body": "path = (['' % (self.group_name, self.app_name)] +self.get_xdg_dirs() +[expanduser('' % (self.group_name, self.app_name)),self.get_xdg_home(),join(getcwd(), ''.format(self.group_name), self.app_name)])if self.search_path:path = self.search_path.split(pathsep)env_path = getenv(self.env_path_name)if env_path and env_path.startswith(''):additional_paths = env_path[:].split(pathsep)self._log.info('''',additional_paths,self.env_path_name)path.extend(additional_paths)elif 
env_path:self._log.info(\"\"\"\",env_path,self.env_path_name)path = env_path.split(pathsep)return path", "docstring": "Returns a list of paths to search for config files in reverse order of\nprecedence. In other words: the last path element will override the\nsettings from the first one.", "id": "f3801:c0:m6"} {"signature": "def _validate_num_channels(input_filepath_list, combine_type):", "body": "channels = [file_info.channels(f) for f in input_filepath_list]if not core.all_equal(channels):raise IOError(\"\"\"\"\"\".format(combine_type))", "docstring": "Check if files in input file list have the same number of channels", "id": "f3808:m2"} {"signature": "def set_input_format(self, file_type=None, rate=None, bits=None,channels=None, encoding=None, ignore_length=None):", "body": "if file_type is not None and not isinstance(file_type, list):raise ValueError(\"\")if file_type is not None:if not all([f in VALID_FORMATS for f in file_type]):raise ValueError(''''.format(VALID_FORMATS))else:file_type = []if rate is not None and not isinstance(rate, list):raise ValueError(\"\")if rate is not None:if not all([is_number(r) and r > for r in rate]):raise ValueError('')else:rate = []if bits is not None and not isinstance(bits, list):raise ValueError(\"\")if bits is not None:if not all([isinstance(b, int) and b > for b in bits]):raise ValueError('')else:bits = []if channels is not None and not isinstance(channels, list):raise ValueError(\"\")if channels is not None:if not all([isinstance(c, int) and c > for c in channels]):raise ValueError('')else:channels = []if encoding is not None and not isinstance(encoding, list):raise ValueError(\"\")if encoding is not None:if not all([e in ENCODING_VALS for e in encoding]):raise ValueError(''''.format(ENCODING_VALS))else:encoding = []if ignore_length is not None and not isinstance(ignore_length, list):raise ValueError(\"\")if ignore_length is not None:if not all([isinstance(l, bool) for l in ignore_length]):raise ValueError(\"\")else:ignore_length = []max_input_arg_len = max([len(file_type), len(rate), len(bits), len(channels),len(encoding), len(ignore_length)])input_format = []for _ in range(max_input_arg_len):input_format.append([])for i, f in enumerate(file_type):input_format[i].extend(['', ''.format(f)])for i, r in enumerate(rate):input_format[i].extend(['', ''.format(r)])for i, b in enumerate(bits):input_format[i].extend(['', ''.format(b)])for i, c in enumerate(channels):input_format[i].extend(['', ''.format(c)])for i, e in enumerate(encoding):input_format[i].extend(['', ''.format(e)])for i, l in enumerate(ignore_length):if l is True:input_format[i].append('')self.input_format = input_formatreturn self", "docstring": "Sets input file format arguments. This is primarily useful when\n dealing with audio files without a file extension. Overwrites any\n previously set input file arguments.\n\n If this function is not explicity called the input format is inferred\n from the file extension or the file's header.\n\n Parameters\n ----------\n file_type : list of str or None, default=None\n The file type of the input audio file. Should be the same as what\n the file extension would be, for ex. 'mp3' or 'wav'.\n rate : list of float or None, default=None\n The sample rate of the input audio file. If None the sample rate\n is inferred.\n bits : list of int or None, default=None\n The number of bits per sample. If None, the number of bits per\n sample is inferred.\n channels : list of int or None, default=None\n The number of channels in the audio file. 
If None the number of\n channels is inferred.\n encoding : list of str or None, default=None\n The audio encoding type. Sometimes needed with file-types that\n support more than one encoding type. One of:\n * signed-integer : PCM data stored as signed (\u2018two\u2019s\n complement\u2019) integers. Commonly used with a 16 or 24\u2212bit\n encoding size. A value of 0 represents minimum signal\n power.\n * unsigned-integer : PCM data stored as unsigned integers.\n Commonly used with an 8-bit encoding size. A value of 0\n represents maximum signal power.\n * floating-point : PCM data stored as IEEE 753 single precision\n (32-bit) or double precision (64-bit) floating-point\n (\u2018real\u2019) numbers. A value of 0 represents minimum signal\n power.\n * a-law : International telephony standard for logarithmic\n encoding to 8 bits per sample. It has a precision\n equivalent to roughly 13-bit PCM and is sometimes encoded\n with reversed bit-ordering.\n * u-law : North American telephony standard for logarithmic\n encoding to 8 bits per sample. A.k.a. \u03bc-law. It has a\n precision equivalent to roughly 14-bit PCM and is sometimes\n encoded with reversed bit-ordering.\n * oki-adpcm : OKI (a.k.a. VOX, Dialogic, or Intel) 4-bit ADPCM;\n it has a precision equivalent to roughly 12-bit PCM. ADPCM\n is a form of audio compression that has a good compromise\n between audio quality and encoding/decoding speed.\n * ima-adpcm : IMA (a.k.a. DVI) 4-bit ADPCM; it has a precision\n equivalent to roughly 13-bit PCM.\n * ms-adpcm : Microsoft 4-bit ADPCM; it has a precision\n equivalent to roughly 14-bit PCM.\n * gsm-full-rate : GSM is currently used for the vast majority\n of the world\u2019s digital wireless telephone calls. It\n utilises several audio formats with different bit-rates and\n associated speech quality. SoX has support for GSM\u2019s\n original 13kbps \u2018Full Rate\u2019 audio format. It is usually\n CPU-intensive to work with GSM audio.\n ignore_length : list of bool or None, default=None\n If True, overrides an (incorrect) audio length given in an audio\n file\u2019s header. If this option is given then SoX will keep reading\n audio until it reaches the end of the input file.", "id": "f3808:c0:m3"} {"signature": "def rate(self, samplerate, quality=''):", "body": "quality_vals = ['', '', '', '', '']if not is_number(samplerate) or samplerate <= :raise ValueError(\"\")if quality not in quality_vals:raise ValueError(\"\".format(''.join(quality_vals)))effect_args = ['',''.format(quality),''.format(samplerate)]self.effects.extend(effect_args)self.effects_log.append('')return self", "docstring": "Change the audio sampling rate (i.e. resample the audio) to any\n given `samplerate`. Better the resampling quality = slower runtime.\n\n Parameters\n ----------\n samplerate : float\n Desired sample rate.\n quality : str\n Resampling quality. 
One of:\n * q : Quick - very low quality,\n * l : Low,\n * m : Medium,\n * h : High (default),\n * v : Very high\n\n See Also\n --------\n upsample, downsample, convert", "id": "f3809:c0:m43"} {"signature": "def delay(self, positions):", "body": "if not isinstance(positions, list):raise ValueError(\"\")if not all((is_number(p) and p >= ) for p in positions):raise ValueError(\"\")effect_args = ['']effect_args.extend([''.format(p) for p in positions])self.effects.extend(effect_args)self.effects_log.append('')return self", "docstring": "Delay one or more audio channels such that they start at the given\n positions.\n\n Parameters\n ----------\n positions: list of floats\n List of times (in seconds) to delay each audio channel.\n If fewer positions are given than the number of channels, the\n remaining channels will be unaffected.", "id": "f3809:c0:m20"} {"signature": "def noisered(self, profile_path, amount=):", "body": "if not os.path.exists(profile_path):raise IOError(\"\".format(profile_path))if not is_number(amount) or amount < or amount > :raise ValueError(\"\")effect_args = ['',profile_path,''.format(amount)]self.effects.extend(effect_args)self.effects_log.append('')return self", "docstring": "Reduce noise in the audio signal by profiling and filtering.\n This effect is moderately effective at removing consistent\n background noise such as hiss or hum.\n\n Parameters\n ----------\n profile_path : str\n Path to a noise profile file.\n This file can be generated using the `noiseprof` effect.\n amount : float, default=0.5\n How much noise should be removed is specified by amount. Should\n be between 0 and 1. Higher numbers will remove more noise but\n present a greater likelihood of removing wanted components of\n the audio signal.\n\n See Also\n --------\n noiseprof", "id": "f3809:c0:m36"} {"signature": "def vad(self, location=, normalize=True, activity_threshold=,min_activity_duration=, initial_search_buffer=,max_gap=, initial_pad=):", "body": "if location not in [-, ]:raise ValueError(\"\")if not isinstance(normalize, bool):raise ValueError(\"\")if not is_number(activity_threshold):raise ValueError(\"\")if not is_number(min_activity_duration) or min_activity_duration < :raise ValueError(\"\")if not is_number(initial_search_buffer) or initial_search_buffer < :raise ValueError(\"\")if not is_number(max_gap) or max_gap < :raise ValueError(\"\")if not is_number(initial_pad) or initial_pad < :raise ValueError(\"\")effect_args = []if normalize:effect_args.append('')if location == -:effect_args.append('')effect_args.extend(['','', ''.format(activity_threshold),'', ''.format(min_activity_duration),'', ''.format(initial_search_buffer),'', ''.format(max_gap),'', ''.format(initial_pad)])if location == -:effect_args.append('')self.effects.extend(effect_args)self.effects_log.append('')return self", "docstring": "Voice Activity Detector. Attempts to trim silence and quiet\n background sounds from the ends of recordings of speech. 
The algorithm\n currently uses a simple cepstral power measurement to detect voice, so\n may be fooled by other things, especially music.\n\n The effect can trim only from the front of the audio, so in order to\n trim from the back, the reverse effect must also be used.\n\n Parameters\n ----------\n location : 1 or -1, default=1\n If 1, trims silence from the beginning\n If -1, trims silence from the end\n normalize : bool, default=True\n If true, normalizes audio before processing.\n activity_threshold : float, default=7.0\n The measurement level used to trigger activity detection. This may\n need to be changed depending on the noise level, signal level, and\n other characteristics of the input audio.\n min_activity_duration : float, default=0.25\n The time constant (in seconds) used to help ignore short bursts of\n sound.\n initial_search_buffer : float, default=1.0\n The amount of audio (in seconds) to search for quieter/shorter\n bursts of audio to include prior to the detected trigger point.\n max_gap : float, default=0.25\n The allowed gap (in seconds) between quieter/shorter bursts of\n audio to include prior to the detected trigger point.\n initial_pad : float, default=0.0\n The amount of audio (in seconds) to preserve before the trigger\n point and any found quieter/shorter bursts.\n\n See Also\n --------\n silence\n\n Examples\n --------\n >>> tfm = sox.Transformer()\n\n Remove silence from the beginning of speech\n\n >>> tfm.vad(initial_pad=0.3)\n\n Remove silence from the end of speech\n\n >>> tfm.vad(location=-1, initial_pad=0.2)", "id": "f3809:c0:m61"}
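A minimal usage sketch for the vad effect documented above, assuming these records come from the pysox package and that it is importable as sox; the file names are hypothetical. Trimming both ends chains two calls, since vad only trims from one end at a time:

    import sox

    # 'speech.wav' and 'trimmed.wav' are hypothetical paths.
    tfm = sox.Transformer()
    tfm.vad(location=1, initial_pad=0.3)    # trim silence from the beginning
    tfm.vad(location=-1, initial_pad=0.2)   # then trim silence from the end
    tfm.build('speech.wav', 'trimmed.wav')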
{"signature": "def set_input_format(self, file_type=None, rate=None, bits=None,channels=None, encoding=None, ignore_length=False):", "body": "if file_type not in VALID_FORMATS + [None]:raise ValueError(''.format(VALID_FORMATS))if not is_number(rate) and rate is not None:raise ValueError('')if rate is not None and rate <= :raise ValueError('')if not isinstance(bits, int) and bits is not None:raise ValueError('')if bits is not None and bits <= :raise ValueError('')if not isinstance(channels, int) and channels is not None:raise ValueError('')if channels is not None and channels <= :raise ValueError('')if encoding not in ENCODING_VALS + [None]:raise ValueError(''.format(ENCODING_VALS))if not isinstance(ignore_length, bool):raise ValueError('')input_format = []if file_type is not None:input_format.extend(['', ''.format(file_type)])if rate is not None:input_format.extend(['', ''.format(rate)])if bits is not None:input_format.extend(['', ''.format(bits)])if channels is not None:input_format.extend(['', ''.format(channels)])if encoding is not None:input_format.extend(['', ''.format(encoding)])if ignore_length:input_format.append('')self.input_format = input_formatreturn self", "docstring": "Sets input file format arguments. This is primarily useful when\n dealing with audio files without a file extension. Overwrites any\n previously set input file arguments.\n\n If this function is not explicitly called the input format is inferred\n from the file extension or the file's header.\n\n Parameters\n ----------\n file_type : str or None, default=None\n The file type of the input audio file. Should be the same as what\n the file extension would be, for ex. 'mp3' or 'wav'.\n rate : float or None, default=None\n The sample rate of the input audio file. If None the sample rate\n is inferred.\n bits : int or None, default=None\n The number of bits per sample. If None, the number of bits per\n sample is inferred.\n channels : int or None, default=None\n The number of channels in the audio file. If None the number of\n channels is inferred.\n encoding : str or None, default=None\n The audio encoding type. Sometimes needed with file-types that\n support more than one encoding type. One of:\n * signed-integer : PCM data stored as signed (\u2018two\u2019s\n complement\u2019) integers. Commonly used with a 16 or 24\u2212bit\n encoding size. A value of 0 represents minimum signal\n power.\n * unsigned-integer : PCM data stored as unsigned integers.\n Commonly used with an 8-bit encoding size. A value of 0\n represents maximum signal power.\n * floating-point : PCM data stored as IEEE 754 single precision\n (32-bit) or double precision (64-bit) floating-point\n (\u2018real\u2019) numbers. A value of 0 represents minimum signal\n power.\n * a-law : International telephony standard for logarithmic\n encoding to 8 bits per sample. It has a precision\n equivalent to roughly 13-bit PCM and is sometimes encoded\n with reversed bit-ordering.\n * u-law : North American telephony standard for logarithmic\n encoding to 8 bits per sample. A.k.a. \u03bc-law. It has a\n precision equivalent to roughly 14-bit PCM and is sometimes\n encoded with reversed bit-ordering.\n * oki-adpcm : OKI (a.k.a. VOX, Dialogic, or Intel) 4-bit ADPCM;\n it has a precision equivalent to roughly 12-bit PCM. ADPCM\n is a form of audio compression that has a good compromise\n between audio quality and encoding/decoding speed.\n * ima-adpcm : IMA (a.k.a. DVI) 4-bit ADPCM; it has a precision\n equivalent to roughly 13-bit PCM.\n * ms-adpcm : Microsoft 4-bit ADPCM; it has a precision\n equivalent to roughly 14-bit PCM.\n * gsm-full-rate : GSM is currently used for the vast majority\n of the world\u2019s digital wireless telephone calls. It\n utilises several audio formats with different bit-rates and\n associated speech quality. SoX has support for GSM\u2019s\n original 13kbps \u2018Full Rate\u2019 audio format. It is usually\n CPU-intensive to work with GSM audio.\n ignore_length : bool, default=False\n If True, overrides an (incorrect) audio length given in an audio\n file\u2019s header. If this option is given then SoX will keep reading\n audio until it reaches the end of the input file.", "id": "f3809:c0:m2"} {"signature": "def dcshift(self, shift=):", "body": "if not is_number(shift) or shift < - or shift > :raise ValueError('')effect_args = ['', ''.format(shift)]self.effects.extend(effect_args)self.effects_log.append('')return self", "docstring": "Apply a DC shift to the audio.\n\n Parameters\n ----------\n shift : float\n Amount to shift audio between -2 and 2. 
(Audio is between -1 and 1)\n\n See Also\n --------\n highpass", "id": "f3809:c0:m18"} {"signature": "def contrast(self, amount=):", "body": "if not is_number(amount) or amount < or amount > :raise ValueError('')effect_args = ['', ''.format(amount)]self.effects.extend(effect_args)self.effects_log.append('')return self", "docstring": "Comparable with compression, this effect modifies an audio signal to\n make it sound louder.\n\n Parameters\n ----------\n amount : float\n Amount of enhancement between 0 and 100.\n\n See Also\n --------\n compand, mcompand", "id": "f3809:c0:m16"} {"signature": "def gain(self, gain_db=, normalize=True, limiter=False, balance=None):", "body": "if not is_number(gain_db):raise ValueError(\"\")if not isinstance(normalize, bool):raise ValueError(\"\")if not isinstance(limiter, bool):raise ValueError(\"\")if balance not in [None, '', '', '']:raise ValueError(\"\")effect_args = ['']if balance is not None:effect_args.append(''.format(balance))if normalize:effect_args.append('')if limiter:effect_args.append('')effect_args.append(''.format(gain_db))self.effects.extend(effect_args)self.effects_log.append('')return self", "docstring": "Apply amplification or attenuation to the audio signal.\n\n Parameters\n ----------\n gain_db : float, default=0.0\n Gain adjustment in decibels (dB).\n normalize : bool, default=True\n If True, audio is normalized to gain_db relative to full scale.\n If False, simply adjusts the audio power level by gain_db.\n limiter : bool, default=False\n If True, a simple limiter is invoked to prevent clipping.\n balance : str or None, default=None\n Balance gain across channels. Can be one of:\n * None applies no balancing (default)\n * 'e' applies gain to all channels other than that with the\n highest peak level, such that all channels attain the same\n peak level\n * 'B' applies gain to all channels other than that with the\n highest RMS level, such that all channels attain the same\n RMS level\n * 'b' applies gain with clipping protection to all channels other\n than that with the highest RMS level, such that all channels\n attain the same RMS level\n If normalize=True, 'B' and 'b' are equivalent.\n\n See Also\n --------\n loudness", "id": "f3809:c0:m29"} {"signature": "def pitch(self, n_semitones, quick=False):", "body": "if not is_number(n_semitones):raise ValueError(\"\")if n_semitones < - or n_semitones > :logger.warning(\"\"\"\")if not isinstance(quick, bool):raise ValueError(\"\")effect_args = ['']if quick:effect_args.append('')effect_args.append(''.format(n_semitones * ))self.effects.extend(effect_args)self.effects_log.append('')return self", "docstring": "Pitch shift the audio without changing the tempo.\n\n This effect uses the WSOLA algorithm. The audio is chopped up into\n segments which are then shifted in the time domain and overlapped\n (cross-faded) at points where their waveforms are most similar as\n determined by measurement of least squares.\n\n Parameters\n ----------\n n_semitones : float\n The number of semitones to shift. 
Can be positive or negative.\n quick : bool, default=False\n If True, this effect will run faster but with lower sound quality.\n\n See Also\n --------\n bend, speed, tempo", "id": "f3809:c0:m42"} {"signature": "def stat(self, input_filepath, scale=None, rms=False):", "body": "effect_args = ['', '', '']if scale is not None:if not is_number(scale) or scale <= :raise ValueError(\"\")effect_args.extend(['', ''.format(scale)])if rms:effect_args.append('')_, _, stat_output = self.build(input_filepath, None, extra_args=effect_args, return_output=True)stat_dict = {}lines = stat_output.split('')for line in lines:split_line = line.split()if len(split_line) == :continuevalue = split_line[-]key = ''.join(split_line[:-])stat_dict[key.strip('')] = valuereturn stat_dict", "docstring": "Display time and frequency domain statistical information about the\n audio. Audio is passed unmodified through the SoX processing chain.\n\n Unlike other Transformer methods, this does not modify the transformer\n effects chain. Instead it computes statistics on the output file that\n would be created if the build command were invoked.\n\n Note: The file is downmixed to mono prior to computation.\n\n Parameters\n ----------\n input_filepath : str\n Path to input file to compute stats on.\n scale : float or None, default=None\n If not None, scales the input by the given scale factor.\n rms : bool, default=False\n If True, scales all values by the average rms amplitude.\n\n Returns\n -------\n stat_dict : dict\n Dictionary of statistics.\n\n See Also\n --------\n stats, power_spectrum, sox.file_info", "id": "f3809:c0:m51"} {"signature": "def bandreject(self, frequency, width_q=):", "body": "if not is_number(frequency) or frequency <= :raise ValueError(\"\")if not is_number(width_q) or width_q <= :raise ValueError(\"\")effect_args = ['', ''.format(frequency), ''.format(width_q)]self.effects.extend(effect_args)self.effects_log.append('')return self", "docstring": "Apply a two-pole Butterworth band-reject filter with the given\n central frequency, and (3dB-point) band-width. 
The filter rolls off at\n 6dB per octave (20dB per decade) and is described in detail in\n http://musicdsp.org/files/Audio-EQ-Cookbook.txt\n\n Parameters\n ----------\n frequency : float\n The filter's center frequency in Hz.\n width_q : float, default=2.0\n The filter's width as a Q-factor.\n\n See Also\n --------\n bandpass, sinc", "id": "f3809:c0:m9"} {"signature": "def echos(self, gain_in=, gain_out=, n_echos=, delays=[],decays=[]):", "body": "if not is_number(gain_in) or gain_in <= or gain_in > :raise ValueError(\"\")if not is_number(gain_out) or gain_out <= or gain_out > :raise ValueError(\"\")if not isinstance(n_echos, int) or n_echos <= :raise ValueError(\"\")if not isinstance(delays, list):raise ValueError(\"\")if len(delays) != n_echos:raise ValueError(\"\")if any((not is_number(p) or p <= ) for p in delays):raise ValueError(\"\")if not isinstance(decays, list):raise ValueError(\"\")if len(decays) != n_echos:raise ValueError(\"\")if any((not is_number(p) or p <= or p > ) for p in decays):raise ValueError(\"\")effect_args = ['', ''.format(gain_in), ''.format(gain_out)]for i in range(n_echos):effect_args.extend([''.format(delays[i]),''.format(decays[i])])self.effects.extend(effect_args)self.effects_log.append('')return self", "docstring": "Add a sequence of echoes to the audio.\n\n Like the echo effect, the echos effect stands for \u2018ECHO in Sequel\u2019,\n that is the first echos takes the input, the second the input and the\n first echos, the third the input and the first and the second echos,\n ... and so on.\n Care should be taken using many echos; a single echos has the same\n effect as a single echo.\n\n Parameters\n ----------\n gain_in : float, default=0.8\n Input volume, between 0 and 1\n gain_out : float, default=0.9\n Output volume, between 0 and 1\n n_echos : int, default=1\n Number of reflections\n delays : list, default=[60]\n List of delays in milliseconds\n decays : list, default=[0.4]\n List of decays, relative to gain_in, between 0 and 1\n\n See Also\n --------\n echo, reverb, chorus", "id": "f3809:c0:m24"}
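A short, hedged sketch of the echos effect above, again assuming pysox; file names and parameter values are illustrative only. Note that delays and decays must each contain exactly n_echos entries:

    import sox

    # 'dry.wav' and 'echoed.wav' are hypothetical paths; two reflections.
    tfm = sox.Transformer()
    tfm.echos(gain_in=0.8, gain_out=0.9, n_echos=2,
              delays=[60, 120], decays=[0.4, 0.25])
    tfm.build('dry.wav', 'echoed.wav')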
{"signature": "def reverb(self, reverberance=, high_freq_damping=, room_scale=,stereo_depth=, pre_delay=, wet_gain=, wet_only=False):", "body": "if (not is_number(reverberance) or reverberance < orreverberance > ):raise ValueError(\"\")if (not is_number(high_freq_damping) or high_freq_damping < orhigh_freq_damping > ):raise ValueError(\"\")if (not is_number(room_scale) or room_scale < orroom_scale > ):raise ValueError(\"\")if (not is_number(stereo_depth) or stereo_depth < orstereo_depth > ):raise ValueError(\"\")if not is_number(pre_delay) or pre_delay < :raise ValueError(\"\")if not is_number(wet_gain):raise ValueError(\"\")if not isinstance(wet_only, bool):raise ValueError(\"\")effect_args = ['']if wet_only:effect_args.append('')effect_args.extend([''.format(reverberance),''.format(high_freq_damping),''.format(room_scale),''.format(stereo_depth),''.format(pre_delay),''.format(wet_gain)])self.effects.extend(effect_args)self.effects_log.append('')return self", "docstring": "Add reverberation to the audio using the \u2018freeverb\u2019 algorithm.\n A reverberation effect is sometimes desirable for concert halls that\n are too small or contain so many people that the hall\u2019s natural\n reverberance is diminished. Applying a small amount of stereo reverb\n to a (dry) mono signal will usually make it sound more natural.\n\n Parameters\n ----------\n reverberance : float, default=50\n Percentage of reverberance\n high_freq_damping : float, default=50\n Percentage of high-frequency damping.\n room_scale : float, default=100\n Scale of the room as a percentage.\n stereo_depth : float, default=100\n Stereo depth as a percentage.\n pre_delay : float, default=0\n Pre-delay in milliseconds.\n wet_gain : float, default=0\n Amount of wet gain in dB\n wet_only : bool, default=False\n If True, only outputs the wet signal.\n\n See Also\n --------\n echo", "id": "f3809:c0:m46"} {"signature": "def channels(self, n_channels):", "body": "if not isinstance(n_channels, int) or n_channels <= :raise ValueError('')effect_args = ['', ''.format(n_channels)]self.effects.extend(effect_args)self.effects_log.append('')return self", "docstring": "Change the number of channels in the audio signal. If decreasing the\n number of channels, channels are mixed together; if increasing the\n number of channels, existing channels are duplicated.\n\n Note: This overrides arguments used in the convert effect!\n\n Parameters\n ----------\n n_channels : int\n Desired number of channels.\n\n See Also\n --------\n convert", "id": "f3809:c0:m13"} {"signature": "def stats(self, input_filepath):", "body": "effect_args = ['', '', '']_, _, stats_output = self.build(input_filepath, None, extra_args=effect_args, return_output=True)stats_dict = {}lines = stats_output.split('')for line in lines:split_line = line.split()if len(split_line) == :continuevalue = split_line[-]key = ''.join(split_line[:-])stats_dict[key] = valuereturn stats_dict", "docstring": "Display time domain statistical information about the audio\n channels. Audio is passed unmodified through the SoX processing chain.\n Statistics are calculated and displayed for each audio channel.\n\n Unlike other Transformer methods, this does not modify the transformer\n effects chain. Instead it computes statistics on the output file that\n would be created if the build command were invoked.\n\n Note: The file is downmixed to mono prior to computation.\n\n Parameters\n ----------\n input_filepath : str\n Path to input file to compute stats on.\n\n Returns\n -------\n stats_dict : dict\n Dictionary of time domain statistics.\n\n See Also\n --------\n stat, sox.file_info", "id": "f3809:c0:m53"}
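A hedged sketch of the stats method above (assuming pysox): it computes statistics without writing an output file and returns a dictionary of string values; 'input.wav' is a hypothetical path:

    import sox

    tfm = sox.Transformer()
    stats = tfm.stats('input.wav')  # no output file is written
    for key, value in stats.items():
        print(key, value)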
{"signature": "def power_spectrum(self, input_filepath):", "body": "effect_args = ['', '', '', '']_, _, stat_output = self.build(input_filepath, None, extra_args=effect_args, return_output=True)power_spectrum = []lines = stat_output.split('')for line in lines:split_line = line.split()if len(split_line) != :continuefreq, amp = split_linepower_spectrum.append([float(freq), float(amp)])return power_spectrum", "docstring": "Calculates the power spectrum (4096 point DFT). This method\n internally invokes the stat command with the -freq option.\n\n Note: The file is downmixed to mono prior to computation.\n\n Parameters\n ----------\n input_filepath : str\n Path to input file to compute stats on.\n\n Returns\n -------\n power_spectrum : list\n List of frequency (Hz), amplitude pairs.\n\n See Also\n --------\n stat, stats, sox.file_info", "id": "f3809:c0:m52"} {"signature": "def chorus(self, gain_in=, gain_out=, n_voices=, delays=None,decays=None, speeds=None, depths=None, shapes=None):", "body": "if not is_number(gain_in) or gain_in <= or gain_in > :raise ValueError(\"\")if not is_number(gain_out) or gain_out <= or gain_out > :raise ValueError(\"\")if not isinstance(n_voices, int) or n_voices <= :raise ValueError(\"\")if not (delays is None or isinstance(delays, list)):raise ValueError(\"\")if delays is not None:if len(delays) != n_voices:raise ValueError(\"\")if any((not is_number(p) or p < ) for p in delays):raise ValueError(\"\")else:delays = [random.uniform(, ) for _ in range(n_voices)]if not (decays is None or isinstance(decays, list)):raise ValueError(\"\")if decays is not None:if len(decays) != n_voices:raise ValueError(\"\")if any((not is_number(p) or p <= or p > ) for p in decays):raise ValueError(\"\")else:decays = [random.uniform(, ) for _ in range(n_voices)]if not (speeds is None or isinstance(speeds, list)):raise ValueError(\"\")if speeds is not None:if len(speeds) != n_voices:raise ValueError(\"\")if any((not is_number(p) or p <= ) for p in speeds):raise ValueError(\"\")else:speeds = [random.uniform(, ) for _ in range(n_voices)]if not (depths is None or isinstance(depths, list)):raise ValueError(\"\")if depths is not None:if len(depths) != n_voices:raise ValueError(\"\")if any((not is_number(p) or p <= ) for p in depths):raise ValueError(\"\")else:depths = [random.uniform(, ) for _ in range(n_voices)]if not (shapes is None or isinstance(shapes, list)):raise ValueError(\"\")if shapes is not None:if len(shapes) != n_voices:raise ValueError(\"\")if any((p not in ['', '']) for p in shapes):raise ValueError(\"\")else:shapes = [random.choice(['', '']) for _ in range(n_voices)]effect_args = ['', ''.format(gain_in), ''.format(gain_out)]for i in range(n_voices):effect_args.extend([''.format(delays[i]),''.format(decays[i]),''.format(speeds[i]),''.format(depths[i]),''.format(shapes[i])])self.effects.extend(effect_args)self.effects_log.append('')return self", "docstring": "Add a chorus effect to the audio. This can make a single vocal sound\n like a chorus, but can also be applied to instrumentation.\n\n Chorus resembles an echo effect with a short delay, but whereas with\n echo the delay is constant, with chorus, it is varied using sinusoidal\n or triangular modulation. The modulation depth defines the range the\n modulated delay is played before or after the delay. 
Hence the delayed\n sound will sound slower or faster, that is the delayed sound tuned\n around the original one, like in a chorus where some vocals are\n slightly off key.\n\n Parameters\n ----------\n gain_in : float, default=0.3\n The input volume, between 0 and 1.\n gain_out : float, default=0.8\n The output volume, between 0 and 1.\n n_voices : int, default=3\n The number of voices in the chorus effect.\n delays : list of floats > 20 or None, default=None\n If a list, the list of delays (in milliseconds) of length n_voices.\n If None, the individual delay parameters are chosen automatically\n to be between 40 and 60 milliseconds.\n decays : list of floats or None, default=None\n If a list, the list of decays (as a fraction of gain_in) of length\n n_voices.\n If None, the individual decay parameters are chosen automatically\n to be between 0.3 and 0.4.\n speeds : list of floats or None, default=None\n If a list, the list of modulation speeds (in Hz) of length n_voices.\n If None, the individual speed parameters are chosen automatically\n to be between 0.25 and 0.4 Hz.\n depths : list of floats or None, default=None\n If a list, the list of depths (in milliseconds) of length n_voices.\n If None, the individual depth parameters are chosen automatically\n to be between 1 and 3 milliseconds.\n shapes : list of 's' or 't' or None, default=None\n If a list, the list of modulation shapes - 's' for sinusoidal or\n 't' for triangular - of length n_voices.\n If None, the individual shapes are chosen automatically.", "id": "f3809:c0:m14"} {"signature": "def trim(self, start_time, end_time=None):", "body": "if not is_number(start_time) or start_time < :raise ValueError(\"\")effect_args = ['',''.format(start_time)]if end_time is not None:if not is_number(end_time) or end_time < :raise ValueError(\"\")if start_time >= end_time:raise ValueError(\"\")effect_args.append(''.format(end_time - start_time))self.effects.extend(effect_args)self.effects_log.append('')return self", "docstring": "Excerpt a clip from an audio file, given the start timestamp and end timestamp of the clip within the file, expressed in seconds. 
If the end timestamp is set to `None` or left unspecified, it defaults to the duration of the audio file.\n\n Parameters\n ----------\n start_time : float\n Start time of the clip (seconds)\n end_time : float or None, default=None\n End time of the clip (seconds)", "id": "f3809:c0:m59"} {"signature": "def overdrive(self, gain_db=, colour=):", "body": "if not is_number(gain_db):raise ValueError('')if not is_number(colour):raise ValueError('')effect_args = ['',''.format(gain_db),''.format(colour)]self.effects.extend(effect_args)self.effects_log.append('')return self", "docstring": "Apply non-linear distortion.\n\n Parameters\n ----------\n gain_db : float, default=20\n Controls the amount of distortion (dB).\n colour : float, default=20\n Controls the amount of even harmonic content in the output (dB).", "id": "f3809:c0:m39"} {"signature": "def sinc(self, filter_type='', cutoff_freq=,stop_band_attenuation=, transition_bw=None,phase_response=None):", "body": "filter_types = ['', '', '', '']if filter_type not in filter_types:raise ValueError(\"\".format(''.join(filter_types)))if not (is_number(cutoff_freq) or isinstance(cutoff_freq, list)):raise ValueError(\"\")if filter_type in ['', ''] and isinstance(cutoff_freq, list):raise ValueError(\"\"\"\")if filter_type in ['', ''] and is_number(cutoff_freq):raise ValueError(\"\"\"\")if is_number(cutoff_freq) and cutoff_freq <= :raise ValueError(\"\")if isinstance(cutoff_freq, list):if len(cutoff_freq) != :raise ValueError(\"\")if any([not is_number(f) or f <= for f in cutoff_freq]):raise ValueError(\"\")cutoff_freq = sorted(cutoff_freq)if not is_number(stop_band_attenuation) or stop_band_attenuation < :raise ValueError(\"\")if not (is_number(transition_bw) orisinstance(transition_bw, list) or transition_bw is None):raise ValueError(\"\")if filter_type in ['', ''] and isinstance(transition_bw, list):raise ValueError(\"\"\"\")if is_number(transition_bw) and transition_bw <= :raise ValueError(\"\")if isinstance(transition_bw, list):if any([not is_number(f) or f <= for f in transition_bw]):raise ValueError(\"\")if len(transition_bw) != :raise ValueError(\"\")if phase_response is not None and not is_number(phase_response):raise ValueError(\"\")if (is_number(phase_response) and(phase_response < or phase_response > )):raise ValueError(\"\")effect_args = ['']effect_args.extend(['', ''.format(stop_band_attenuation)])if phase_response is not None:effect_args.extend(['', ''.format(phase_response)])if filter_type == '':if transition_bw is not None:effect_args.extend(['', ''.format(transition_bw)])effect_args.append(''.format(cutoff_freq))elif filter_type == '':effect_args.append(''.format(cutoff_freq))if transition_bw is not None:effect_args.extend(['', ''.format(transition_bw)])else:if is_number(transition_bw):effect_args.extend(['', ''.format(transition_bw)])elif isinstance(transition_bw, list):effect_args.extend(['', ''.format(transition_bw[])])if filter_type == '':effect_args.append(''.format(cutoff_freq[], cutoff_freq[]))elif filter_type == '':effect_args.append(''.format(cutoff_freq[], cutoff_freq[]))if isinstance(transition_bw, list):effect_args.extend(['', ''.format(transition_bw[])])self.effects.extend(effect_args)self.effects_log.append('')return self", "docstring": "Apply a sinc kaiser-windowed low-pass, high-pass, band-pass, or\n band-reject filter to the signal.\n\n Parameters\n ----------\n filter_type : str, default='high'\n Type of filter. 
One of:\n - 'high' for a high-pass filter\n - 'low' for a low-pass filter\n - 'pass' for a band-pass filter\n - 'reject' for a band-reject filter\n cutoff_freq : float or list, default=3000\n A scalar or length 2 list indicating the filter's critical\n frequencies. The critical frequencies are given in Hz and must be\n positive. For a high-pass or low-pass filter, cutoff_freq\n must be a scalar. For a band-pass or band-reject filter, it must be\n a length 2 list.\n stop_band_attenuation : float, default=120\n The stop band attenuation in dB\n transition_bw : float, list or None, default=None\n The transition band-width in Hz.\n If None, sox's default of 5% of the total bandwidth is used.\n If a float, the given transition bandwidth is used for both the\n upper and lower bands (if applicable).\n If a list, the first argument is used for the lower band and the\n second for the upper band.\n phase_response : float or None\n The filter's phase response between 0 (minimum) and 100 (maximum).\n If None, sox's default phase response is used.\n\n See Also\n --------\n band, bandpass, bandreject, highpass, lowpass", "id": "f3809:c0:m49"}
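A hedged sketch of the sinc effect above (assuming pysox): a band-pass filter requires a length-2 cutoff_freq list, and a scalar transition_bw applies to both bands; the frequencies, bandwidth, and file names are illustrative:

    import sox

    # Hypothetical telephone-band band-pass between 300 Hz and 3000 Hz.
    tfm = sox.Transformer()
    tfm.sinc(filter_type='pass', cutoff_freq=[300, 3000], transition_bw=100)
    tfm.build('input.wav', 'filtered.wav')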
{"signature": "def fade(self, fade_in_len=, fade_out_len=, fade_shape=''):", "body": "fade_shapes = ['', '', '', '', '']if fade_shape not in fade_shapes:raise ValueError(\"\".format(\"\".join(fade_shapes)))if not is_number(fade_in_len) or fade_in_len < :raise ValueError(\"\")if not is_number(fade_out_len) or fade_out_len < :raise ValueError(\"\")effect_args = []if fade_in_len > :effect_args.extend(['', ''.format(fade_shape), ''.format(fade_in_len)])if fade_out_len > :effect_args.extend(['', '', ''.format(fade_shape),''.format(fade_out_len), ''])if len(effect_args) > :self.effects.extend(effect_args)self.effects_log.append('')return self", "docstring": "Add a fade in and/or fade out to an audio file.\n Default fade shape is 1/4 sine wave.\n\n Parameters\n ----------\n fade_in_len : float, default=0.0\n Length of fade-in (seconds). If fade_in_len = 0,\n no fade in is applied.\n fade_out_len : float, default=0.0\n Length of fade-out (seconds). If fade_out_len = 0,\n no fade out is applied.\n fade_shape : str, default='q'\n Shape of fade. Must be one of\n * 'q' for quarter sine (default),\n * 'h' for half sine,\n * 't' for linear,\n * 'l' for logarithmic\n * 'p' for inverted parabola.\n\n See Also\n --------\n splice", "id": "f3809:c0:m26"} {"signature": "def vol(self, gain, gain_type='', limiter_gain=None):", "body": "if not is_number(gain):raise ValueError('')if limiter_gain is not None:if (not is_number(limiter_gain) orlimiter_gain <= or limiter_gain >= ):raise ValueError('')if gain_type in ['', ''] and gain < :raise ValueError(\"\")effect_args = ['']effect_args.append(''.format(gain))if gain_type == '':effect_args.append('')elif gain_type == '':effect_args.append('')elif gain_type == '':effect_args.append('')else:raise ValueError('')if limiter_gain is not None:if gain_type in ['', ''] and gain > :effect_args.append(''.format(limiter_gain))elif gain_type == '' and gain > :effect_args.append(''.format(limiter_gain))self.effects.extend(effect_args)self.effects_log.append('')return self", "docstring": "Apply an amplification or an attenuation to the audio signal.\n\n Parameters\n ----------\n gain : float\n Interpreted according to the given `gain_type`.\n If `gain_type` = 'amplitude', `gain` is a positive amplitude ratio.\n If `gain_type` = 'power', `gain` is a power (voltage squared).\n If `gain_type` = 'db', `gain` is in decibels.\n gain_type : string, default='amplitude'\n Type of gain. One of:\n - 'amplitude'\n - 'power'\n - 'db'\n limiter_gain : float or None, default=None\n If specified, a limiter is invoked on peaks greater than\n `limiter_gain` to prevent clipping.\n `limiter_gain` should be a positive value much less than 1.\n\n See Also\n --------\n gain, compand", "id": "f3809:c0:m62"}
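A hedged sketch of the vol effect above (assuming pysox); the gain value and file names are illustrative:

    import sox

    # 'input.wav' and 'quieter.wav' are hypothetical paths.
    tfm = sox.Transformer()
    tfm.vol(0.5, gain_type='amplitude')  # halve the amplitude
    tfm.build('input.wav', 'quieter.wav')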
{"signature": "def stretch(self, factor, window=):", "body": "if not is_number(factor) or factor <= :raise ValueError(\"\")if factor < or factor > :logger.warning(\"\"\"\")if abs(factor - ) > :logger.warning(\"\"\"\")if not is_number(window) or window <= :raise ValueError(\"\")effect_args = ['', ''.format(factor), ''.format(window)]self.effects.extend(effect_args)self.effects_log.append('')return self", "docstring": "Change the audio duration (but not its pitch).\n **Unless factor is close to 1, use the tempo effect instead.**\n\n This effect is broadly equivalent to the tempo effect with search set\n to zero, so in general, its results are comparatively poor; it is\n retained as it can sometimes out-perform tempo for small factors.\n\n Parameters\n ----------\n factor : float\n The ratio of the new tempo to the old tempo.\n For ex. 1.1 speeds up the tempo by 10%; 0.9 slows it down by 10%.\n Note - this argument is the inverse of what is passed to the sox\n stretch effect for consistency with tempo.\n window : float, default=20\n Window size in milliseconds\n\n See Also\n --------\n tempo, speed, pitch", "id": "f3809:c0:m54"} {"signature": "def set_output_format(self, file_type=None, rate=None, bits=None,channels=None, encoding=None, comments=None,append_comments=True):", "body": "if file_type not in VALID_FORMATS + [None]:raise ValueError(''.format(VALID_FORMATS))if not is_number(rate) and rate is not None:raise ValueError('')if rate is not None and rate <= :raise ValueError('')if not isinstance(bits, int) and bits is not None:raise ValueError('')if bits is not None and bits <= :raise ValueError('')if not isinstance(channels, int) and channels is not None:raise ValueError('')if channels is not None and channels <= :raise ValueError('')if encoding not in ENCODING_VALS + [None]:raise ValueError(''.format(ENCODING_VALS))if comments is not None and not isinstance(comments, str):raise ValueError('')if not isinstance(append_comments, bool):raise ValueError('')output_format = []if file_type is not None:output_format.extend(['', ''.format(file_type)])if rate is not None:output_format.extend(['', ''.format(rate)])if bits is not None:output_format.extend(['', ''.format(bits)])if channels is not None:output_format.extend(['', ''.format(channels)])if encoding is not None:output_format.extend(['', ''.format(encoding)])if comments is not None:if append_comments:output_format.extend(['', comments])else:output_format.extend(['', comments])self.output_format = output_formatreturn self", "docstring": "Sets output file format arguments. These arguments will overwrite\n any format related arguments supplied by other effects (e.g. rate).\n\n If this function is not explicitly called the output format is inferred\n from the file extension or the file's header.\n\n Parameters\n ----------\n file_type : str or None, default=None\n The file type of the output audio file. Should be the same as what\n the file extension would be, for ex. 'mp3' or 'wav'.\n rate : float or None, default=None\n The sample rate of the output audio file. If None the sample rate\n is inferred.\n bits : int or None, default=None\n The number of bits per sample. If None, the number of bits per\n sample is inferred.\n channels : int or None, default=None\n The number of channels in the audio file. If None the number of\n channels is inferred.\n encoding : str or None, default=None\n The audio encoding type. Sometimes needed with file-types that\n support more than one encoding type. One of:\n * signed-integer : PCM data stored as signed (\u2018two\u2019s\n complement\u2019) integers. Commonly used with a 16 or 24\u2212bit\n encoding size. A value of 0 represents minimum signal\n power.\n * unsigned-integer : PCM data stored as unsigned integers.\n Commonly used with an 8-bit encoding size. A value of 0\n represents maximum signal power.\n * floating-point : PCM data stored as IEEE 754 single precision\n (32-bit) or double precision (64-bit) floating-point\n (\u2018real\u2019) numbers. A value of 0 represents minimum signal\n power.\n * a-law : International telephony standard for logarithmic\n encoding to 8 bits per sample. It has a precision\n equivalent to roughly 13-bit PCM and is sometimes encoded\n with reversed bit-ordering.\n * u-law : North American telephony standard for logarithmic\n encoding to 8 bits per sample. A.k.a. \u03bc-law. 
It has a\n precision equivalent to roughly 14-bit PCM and is sometimes\n encoded with reversed bit-ordering.\n * oki-adpcm : OKI (a.k.a. VOX, Dialogic, or Intel) 4-bit ADPCM;\n it has a precision equivalent to roughly 12-bit PCM. ADPCM\n is a form of audio compression that has a good compromise\n between audio quality and encoding/decoding speed.\n * ima-adpcm : IMA (a.k.a. DVI) 4-bit ADPCM; it has a precision\n equivalent to roughly 13-bit PCM.\n * ms-adpcm : Microsoft 4-bit ADPCM; it has a precision\n equivalent to roughly 14-bit PCM.\n * gsm-full-rate : GSM is currently used for the vast majority\n of the world\u2019s digital wireless telephone calls. It\n utilises several audio formats with different bit-rates and\n associated speech quality. SoX has support for GSM\u2019s\n original 13kbps \u2018Full Rate\u2019 audio format. It is usually\n CPU-intensive to work with GSM audio.\n comments : str or None, default=None\n If not None, the string is added as a comment in the header of the\n output audio file. If None, no comments are added.\n append_comments : bool, default=True\n If True, comment strings are appended to SoX's default comments. If\n False, the supplied comment replaces the existing comment.", "id": "f3809:c0:m3"} {"signature": "def oops(self):", "body": "effect_args = ['']self.effects.extend(effect_args)self.effects_log.append('')return self", "docstring": "Out Of Phase Stereo effect. Mixes stereo to twin-mono where each\n mono channel contains the difference between the left and right stereo\n channels. This is sometimes known as the 'karaoke' effect as it often\n has the effect of removing most or all of the vocals from a recording.", "id": "f3809:c0:m38"}
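A hedged sketch combining the set_output_format method above with build (assuming pysox) to convert a file to a fixed sample rate, bit depth, and channel count; the file names and values are hypothetical:

    import sox

    # Convert to 16 kHz, 16-bit, mono WAV regardless of the input format.
    tfm = sox.Transformer()
    tfm.set_output_format(file_type='wav', rate=16000, bits=16, channels=1)
    tfm.build('input.mp3', 'output.wav')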
{"signature": "def compand(self, attack_time=, decay_time=, soft_knee_db=,tf_points=[(-, -), (-, -), (, )],):", "body": "if not is_number(attack_time) or attack_time <= :raise ValueError(\"\")if not is_number(decay_time) or decay_time <= :raise ValueError(\"\")if attack_time > decay_time:logger.warning(\"\"\"\"\"\"\"\")if not (is_number(soft_knee_db) or soft_knee_db is None):raise ValueError(\"\")if not isinstance(tf_points, list):raise TypeError(\"\")if len(tf_points) == :raise ValueError(\"\")if any(not isinstance(pair, tuple) for pair in tf_points):raise ValueError(\"\")if any(len(pair) != for pair in tf_points):raise ValueError(\"\")if any(not (is_number(p[]) and is_number(p[])) for p in tf_points):raise ValueError(\"\")if any((p[] > or p[] > ) for p in tf_points):raise ValueError(\"\")if len(tf_points) > len(set([p[] for p in tf_points])):raise ValueError(\"\")tf_points = sorted(tf_points,key=lambda tf_points: tf_points[])transfer_list = []for point in tf_points:transfer_list.extend([\"\".format(point[]), \"\".format(point[])])effect_args = ['',\"\".format(attack_time, decay_time)]if soft_knee_db is not None:effect_args.append(\"\".format(soft_knee_db, \"\".join(transfer_list)))else:effect_args.append(\"\".join(transfer_list))self.effects.extend(effect_args)self.effects_log.append('')return self", "docstring": "Compand (compress or expand) the dynamic range of the audio.\n\n Parameters\n ----------\n attack_time : float, default=0.3\n The time in seconds over which the instantaneous level of the input\n signal is averaged to determine increases in volume.\n decay_time : float, default=0.8\n The time in seconds over which the instantaneous level of the input\n signal is averaged to determine decreases in volume.\n soft_knee_db : float or None, default=6.0\n The amount (in dB) for which the points where adjacent line\n segments on the transfer function meet will be rounded.\n If None, no soft_knee is applied.\n tf_points : list of tuples\n Transfer function points as a list of tuples corresponding to\n points in (dB, dB) defining the compander's transfer function.\n\n See Also\n --------\n mcompand, contrast", "id": "f3809:c0:m15"} {"signature": "def allpass(self, frequency, width_q=):", "body": "if not is_number(frequency) or frequency <= :raise ValueError(\"\")if not is_number(width_q) or width_q <= :raise ValueError(\"\")effect_args = ['', ''.format(frequency), ''.format(width_q)]self.effects.extend(effect_args)self.effects_log.append('')return self", "docstring": "Apply a two-pole all-pass filter. An all-pass filter changes the\n audio\u2019s frequency to phase relationship without changing its frequency\n to amplitude relationship. The filter is described in detail at\n http://musicdsp.org/files/Audio-EQ-Cookbook.txt\n\n Parameters\n ----------\n frequency : float\n The filter's center frequency in Hz.\n width_q : float, default=2.0\n The filter's width as a Q-factor.\n\n See Also\n --------\n equalizer, highpass, lowpass, sinc", "id": "f3809:c0:m7"} {"signature": "def phaser(self, gain_in=, gain_out=, delay=, decay=, speed=,modulation_shape=''):", "body": "if not is_number(gain_in) or gain_in <= or gain_in > :raise ValueError(\"\")if not is_number(gain_out) or gain_out <= or gain_out > :raise ValueError(\"\")if not is_number(delay) or delay <= or delay > :raise ValueError(\"\")if not is_number(decay) or decay < or decay > :raise ValueError(\"\")if not is_number(speed) or speed < or speed > :raise ValueError(\"\")if modulation_shape not in ['', '']:raise ValueError(\"\")effect_args = ['',''.format(gain_in),''.format(gain_out),''.format(delay),''.format(decay),''.format(speed)]if modulation_shape == '':effect_args.append('')elif modulation_shape == '':effect_args.append('')self.effects.extend(effect_args)self.effects_log.append('')return self", "docstring": "Apply a phasing effect to the audio.\n\n Parameters\n ----------\n gain_in : float, default=0.8\n Input volume between 0 and 1\n gain_out : float, default=0.74\n Output volume between 0 and 1\n delay : float, default=3\n Delay in milliseconds between 0 and 5\n decay : float, default=0.4\n Decay relative to gain_in, between 0.1 and 0.5.\n speed : float, default=0.5\n Modulation speed in Hz, between 0.1 and 2\n modulation_shape : str, default='sinusoidal'\n Modulation shape. 
One of 'sinusoidal' or 'triangular'\n\n See Also\n --------\n flanger, tremolo", "id": "f3809:c0:m41"} {"signature": "def build(self, input_filepath, output_filepath, extra_args=None,return_output=False):", "body": "file_info.validate_input_file(input_filepath)if output_filepath is not None:file_info.validate_output_file(output_filepath)else:output_filepath = ''if input_filepath == output_filepath:raise ValueError(\"\")args = []args.extend(self.globals)args.extend(self.input_format)args.append(input_filepath)args.extend(self.output_format)args.append(output_filepath)args.extend(self.effects)if extra_args is not None:if not isinstance(extra_args, list):raise ValueError(\"\")args.extend(extra_args)status, out, err = sox(args)if status != :raise SoxError(\"\".format(out, err))else:logger.info(\"\",output_filepath,\"\".join(self.effects_log))if out is not None:logger.info(\"\".format(out))if return_output:return status, out, errelse:return True", "docstring": "Builds the output_file by executing the current set of commands.\n\n Parameters\n ----------\n input_filepath : str\n Path to input audio file.\n output_filepath : str or None\n Path to desired output file. If a file already exists at the given\n path, the file will be overwritten.\n If None, no file will be created.\n extra_args : list or None, default=None\n If a list is given, these additional arguments are passed to SoX\n at the end of the list of effects.\n Don't use this argument unless you know exactly what you're doing!\n return_output : bool, default=False\n If True, returns the status and information sent to stderr and\n stdout as a tuple (status, stdout, stderr).\n Otherwise returns True on success.", "id": "f3809:c0:m5"} {"signature": "def num_samples(input_filepath):", "body": "validate_input_file(input_filepath)output = soxi(input_filepath, '')if output == '':logger.warning(\"\", input_filepath)return int(output)", "docstring": "Show number of samples (0 if unavailable).\n\nParameters\n----------\ninput_filepath : str\n Path to audio file.\n\nReturns\n-------\nn_samples : int\n total number of samples in audio file.\n Returns 0 if empty or unavailable", "id": "f3811:m6"} {"signature": "def validate_input_file_list(input_filepath_list):", "body": "if not isinstance(input_filepath_list, list):raise TypeError(\"\")elif len(input_filepath_list) < :raise ValueError(\"\")for input_filepath in input_filepath_list:validate_input_file(input_filepath)", "docstring": "Input file list validation function. 
Checks that object is a list and\n contains valid filepaths that can be processed by SoX.\n\n Parameters\n ----------\n input_filepath_list : list\n A list of filepaths.", "id": "f3811:m10"} {"signature": "def comments(input_filepath):", "body": "validate_input_file(input_filepath)output = soxi(input_filepath, '')return str(output)", "docstring": "Show file comments (annotations) if available.\n\nParameters\n----------\ninput_filepath : str\n Path to audio file.\n\nReturns\n-------\ncomments : str\n File comments from header.\n If no comments are present, returns an empty string.", "id": "f3811:m2"} {"signature": "def stat(filepath):", "body": "stat_output = _stat_call(filepath)stat_dictionary = _parse_stat(stat_output)return stat_dictionary", "docstring": "Returns a dictionary of audio statistics.\n\n Parameters\n ----------\n filepath : str\n File path.\n\n Returns\n -------\n stat_dictionary : dict\n Dictionary of audio statistics.", "id": "f3811:m14"} {"signature": "def validate_output_file(output_filepath):", "body": "nowrite_conditions = [bool(os.path.dirname(output_filepath)) ornot os.access(os.getcwd(), os.W_OK),not os.access(os.path.dirname(output_filepath), os.W_OK)]if all(nowrite_conditions):raise IOError(\"\".format(output_filepath))ext = file_extension(output_filepath)if ext not in VALID_FORMATS:logger.info(\"\", \"\".join(VALID_FORMATS))logger.warning(\"\".format(ext))if os.path.exists(output_filepath):logger.warning('',output_filepath)", "docstring": "Output file validation function. Checks that file can be written, and\n has a valid file extension. Throws a warning if the path already exists,\n as it will be overwritten on build.\n\n Parameters\n ----------\n output_filepath : str\n The output filepath.\n\n Returns:\n --------\n output_filepath : str\n The output filepath.", "id": "f3811:m11"} {"signature": "def info(filepath):", "body": "info_dictionary = {'': channels(filepath),'': sample_rate(filepath),'': bitrate(filepath),'': duration(filepath),'': num_samples(filepath),'': encoding(filepath),'': silent(filepath)}return info_dictionary", "docstring": "Get a dictionary of file information\n\n Parameters\n ----------\n filepath : str\n File path.\n\n Returns:\n --------\n info_dictionary : dict\n Dictionary of file information. 
Fields are:\n * channels\n * sample_rate\n * bitrate\n * duration\n * num_samples\n * encoding\n * silent", "id": "f3811:m13"} {"signature": "def channels(input_filepath):", "body": "validate_input_file(input_filepath)output = soxi(input_filepath, '')return int(output)", "docstring": "Show number of channels.\n\nParameters\n----------\ninput_filepath : str\n Path to audio file.\n\nReturns\n-------\nchannels : int\n number of channels", "id": "f3811:m1"} {"signature": "def sample_rate(input_filepath):", "body": "validate_input_file(input_filepath)output = soxi(input_filepath, '')return float(output)", "docstring": "Show sample-rate.\n\nParameters\n----------\ninput_filepath : str\n Path to audio file.\n\nReturns\n-------\nsamplerate : float\n number of samples/second", "id": "f3811:m7"} {"signature": "def _stat_call(filepath):", "body": "validate_input_file(filepath)args = ['', filepath, '', '']_, _, stat_output = sox(args)return stat_output", "docstring": "Call sox's stat function.\n\n Parameters\n ----------\n filepath : str\n File path.\n\n Returns\n -------\n stat_output : str\n Sox output from stderr.", "id": "f3811:m15"} {"signature": "def silent(input_filepath, threshold=):", "body": "validate_input_file(input_filepath)stat_dictionary = stat(input_filepath)mean_norm = stat_dictionary['']if mean_norm is not float(''):if mean_norm >= threshold:return Falseelse:return Trueelse:return True", "docstring": "Determine if an input file is silent.\n\nParameters\n----------\ninput_filepath : str\n The input filepath.\nthreshold : float\n Threshold for determining silence\n\nReturns\n-------\nis_silent : bool\n True if file is determined silent.", "id": "f3811:m8"} {"signature": "def duration(input_filepath):", "body": "validate_input_file(input_filepath)output = soxi(input_filepath, '')if output == '':logger.warning(\"\", input_filepath)return float(output)", "docstring": "Show duration in seconds (0 if unavailable).\n\nParameters\n----------\ninput_filepath : str\n Path to audio file.\n\nReturns\n-------\nduration : float\n Duration of audio file in seconds.\n If unavailable or empty, returns 0.", "id": "f3811:m3"} {"signature": "def _get_valid_formats():", "body": "if NO_SOX:return []so = subprocess.check_output(['', ''])if type(so) is not str:so = str(so, encoding='')so = so.split('')idx = [i for i in range(len(so)) if '' in so[i]][]formats = so[idx].split('')[:]return formats", "docstring": "Calls SoX help for a list of audio formats available with the current\n install of SoX.\n\n Returns:\n --------\n formats : list\n List of audio file extensions that SoX can process.", "id": "f3813:m1"} {"signature": "def is_number(var):", "body": "try:float(var)return Trueexcept ValueError:return Falseexcept TypeError:return False", "docstring": "Check if variable is a numeric value.\n\n Parameters\n ----------\n var : object\n\n Returns:\n --------\n bool\n True if var is numeric, False otherwise.", "id": "f3813:m4"} {"signature": "def get(self, request):", "body": "pass", "docstring": "responses:\n 200:\n description: A list of organisations.\n examples:\n [{\"name\": \"Foo Corp.\"}, {\"name\": \"Acme Ltd.\"}]", "id": "f3834:c0:m0"} {"signature": "@app.route(\"\")def regular_docstring_and_schema(request):", "body": "pass", "docstring": "This is a regular docstring example (not included in schema)\n\n---\n\nresponses:\n 200:\n description: This is included in the schema.", "id": "f3834:m3"}
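The two schema-test records above embed YAML after a "---" marker in their docstrings. A hedged sketch, from memory of the Starlette API, of how such docstrings are typically collected into an OpenAPI document with SchemaGenerator; the title, version, and route path are illustrative:

    from starlette.applications import Starlette
    from starlette.schemas import SchemaGenerator

    app = Starlette()
    schemas = SchemaGenerator(
        {"openapi": "3.0.0", "info": {"title": "Example API", "version": "1.0"}}
    )

    @app.route("/schema", include_in_schema=False)
    def openapi_schema(request):
        # Everything after the "---" line in each endpoint docstring is
        # parsed as YAML and merged into the generated schema.
        return schemas.OpenAPIResponse(request=request)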
{"signature": "@app.route(\"\")def regular_docstring(request):", "body": "pass", "docstring": "This is a regular docstring example (not included in schema)", "id": "f3834:m4"} {"signature": "def get_long_description():", "body": "with open(\"\", encoding=\"\") as f:return f.read()", "docstring": "Return the README.", "id": "f3842:m1"} {"signature": "def get_version(package):", "body": "with open(os.path.join(package, \"\")) as f:return re.search(\"\", f.read()).group()", "docstring": "Return package version as listed in `__version__` in `__init__.py`.", "id": "f3842:m0"} {"signature": "async def on_connect(self, websocket: WebSocket) -> None:", "body": "await websocket.accept()", "docstring": "Override to handle an incoming websocket connection", "id": "f3845:c1:m4"} {"signature": "def append(self, key: str, value: str) -> None:", "body": "append_key = key.lower().encode(\"\")append_value = value.encode(\"\")self._list.append((append_key, append_value))", "docstring": "Append a header, preserving any duplicate entries.", "id": "f3859:c10:m5"} {"signature": "def setdefault(self, key: str, value: str) -> str:", "body": "set_key = key.lower().encode(\"\")set_value = value.encode(\"\")for idx, (item_key, item_value) in enumerate(self._list):if item_key == set_key:return item_value.decode(\"\")self._list.append((set_key, set_value))return value", "docstring": "If the header `key` does not exist, then set it to `value`.\nReturns the header value.", "id": "f3859:c10:m3"} {"signature": "def parse_docstring(self, func_or_method: typing.Callable) -> dict:", "body": "docstring = func_or_method.__doc__if not docstring:return {}docstring = docstring.split(\"\")[-]parsed = yaml.safe_load(docstring)if not isinstance(parsed, dict):return {}return parsed", "docstring": "Given a function, parse the docstring as YAML and return a dictionary of info.", "id": "f3862:c2:m2"} {"signature": "async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:", "body": "assert scope[\"\"] == \"\"if not self.config_checked:await self.check_config()self.config_checked = Truepath = self.get_path(scope)response = await self.get_response(path, scope)await response(scope, receive, send)", "docstring": "The ASGI entry point.", "id": "f3863:c1:m2"} {"signature": "def get_path(self, scope: Scope) -> str:", "body": "return os.path.normpath(os.path.join(*scope[\"\"].split(\"\")))", "docstring": "Given the ASGI scope, return the `path` string to serve up,\nwith OS specific path separators, and any '..', '.' 
components removed.", "id": "f3863:c1:m3"} {"signature": "def is_not_modified(self, response_headers: Headers, request_headers: Headers) -> bool:", "body": "try:if_none_match = request_headers[\"\"]etag = response_headers[\"\"]if if_none_match == etag:return Trueexcept KeyError:passtry:if_modified_since = parsedate(request_headers[\"\"])last_modified = parsedate(response_headers[\"\"])if (if_modified_since is not Noneand last_modified is not Noneand if_modified_since >= last_modified):return Trueexcept KeyError:passreturn False", "docstring": "Given the request and response headers, return `True` if an HTTP\n\"Not Modified\" response could be returned instead.", "id": "f3863:c1:m8"} {"signature": "def _run(self) -> None:", "body": "scope = self.scopereceive = self._asgi_receivesend = self._asgi_sendtry:self._loop.run_until_complete(self.app(scope, receive, send))except BaseException as exc:self._send_queue.put(exc)", "docstring": "The sub-thread in which the websocket session runs.", "id": "f3871:c5:m3"} {"signature": "async def send(self, message: Message) -> None:", "body": "if self.application_state == WebSocketState.CONNECTING:message_type = message[\"\"]assert message_type in {\"\", \"\"}if message_type == \"\":self.application_state = WebSocketState.DISCONNECTEDelse:self.application_state = WebSocketState.CONNECTEDawait self._send(message)elif self.application_state == WebSocketState.CONNECTED:message_type = message[\"\"]assert message_type in {\"\", \"\"}if message_type == \"\":self.application_state = WebSocketState.DISCONNECTEDawait self._send(message)else:raise RuntimeError('')", "docstring": "Send ASGI websocket messages, ensuring valid state transitions.", "id": "f3872:c2:m2"} {"signature": "@propertydef width(self):", "body": "return self.size[]", "docstring": "Get the current terminal width.", "id": "f3874:c0:m5"} {"signature": "def set_text(self, point, text):", "body": "if not self.option.legend:returnif not isinstance(point, Point):point = Point(point)for offset, char in enumerate(str(text)):self.screen.canvas[point.y][point.x + offset] = char", "docstring": "Set a text value in the screen canvas.", "id": "f3874:c3:m13"} {"signature": "@propertydef colors(self):", "body": "number = curses.tigetnum('') or return if number == else number", "docstring": "Get the number of colors supported by this terminal.", "id": "f3874:c0:m1"} {"signature": "def __init__(self, dg_option=None, ostream=None, data=None):", "body": "self.dg_option = dg_optionif self.dg_option == None:self.dg_option = DOption()self.ostream = ostreamif self.ostream == None:try:self.ostream = sys.stdout.bufferexcept AttributeError:self.ostream = sys.stdoutif self.dg_option.mode == '':self.dg = HorizontalBarGraph(self.dg_option.size,self.dg_option)elif self.dg_option.mode == '':self.dg = VerticalBarGraph(self.dg_option.size,self.dg_option)else:self.dg = AxisGraph(self.dg_option.size,self.dg_option)self.dg.update(data[], data[])", "docstring": "Handle some of the setup functions for the graph in the\n diagram package. 
Specifically hide all of the requirements that\n are computed in run() inside diagram.py.", "id": "f3874:c10:m0"} {"signature": "def human(self, size, base=, units=''):", "body": "sign = '' if size >= else ''size = abs(size)if size < :return '' % (sign, size)for i, suffix in enumerate(units):unit = ** (i + )if size < unit:return ('' % (sign,size / float(unit) * base,suffix,)).strip()raise OverflowError", "docstring": "Convert the input ``size`` to human readable, short form.", "id": "f3874:c3:m6"} {"signature": "def set(self, point):", "body": "if not isinstance(point, Point):point = Point(point)rx = self.round(point.x)ry = self.round(point.y)item = Point((rx >> , min(ry >> , self.size.y)))self.screen[item] |= self.pixels[ry & ][rx & ]", "docstring": "Set pixel at (x, y) point.", "id": "f3874:c5:m7"} {"signature": "def render(self, stream):", "body": "raise NotImplementedError()", "docstring": "Render the graph to the selected output stream.", "id": "f3874:c3:m11"} {"signature": "def usage_function(parser):", "body": "parser.print_usage()print('')print('')for function in sorted(FUNCTION):doc = FUNCTION[function].__doc__.strip().splitlines()[]print('' % (function + '', doc))return ", "docstring": "Show usage and available curve functions.", "id": "f3874:m2"} {"signature": "def __getitem__(self, point):", "body": "if not isinstance(point, Point):point = Point(point)return self.canvas[point.y][point.x]", "docstring": "Get a point value or None.", "id": "f3874:c2:m6"} {"signature": "@propertydef height(self):", "body": "return self.size.y", "docstring": "Get the buffer height.", "id": "f3874:c2:m2"} {"signature": "@propertydef encoding(self):", "body": "_, encoding = locale.getdefaultlocale()return encoding", "docstring": "Get the current terminal encoding.", "id": "f3874:c0:m2"} {"signature": "@propertydef null(self):", "body": "if not self.option.axis:return -else:return self.screen.height - (-self.minimum * / self.extents * self.size.y)", "docstring": "Zero crossing value.", "id": "f3874:c5:m6"} {"signature": "def usage_palette(parser):", "body": "parser.print_usage()print('')print('')for palette in sorted(PALETTE):print('' % (palette,))return ", "docstring": "Show usage and available palettes.", "id": "f3874:m3"} {"signature": "def _normalised_python(self):", "body": "dx = (self.screen.width / float(len(self.points)))oy = (self.screen.height)for x, point in enumerate(self.points):y = (point - self.minimum) * / self.extents * self.size.yyield Point((dx * x,min(oy, oy - y),))", "docstring": "Normalised data points using pure Python.", "id": "f3874:c5:m4"} {"signature": "@propertydef maximum_points(self):", "body": "return self.size.x", "docstring": "Maximum width.", "id": "f3874:c5:m5"} {"signature": "def consume_line(self, line):", "body": "data = RE_VALUE_KEY.split(line.strip(), )if len(data) == :return float(data[]), Noneelse:return float(data[]), data[].strip()", "docstring": "Consume data from a line.", "id": "f3874:c3:m2"} {"signature": "def color(self, index):", "body": "if self.colors == :if index >= :return self.csi('') + self.csi('', index - )else:return self.csi('') + self.csi('', index)else:return self.csi('', index)", "docstring": "Get the escape sequence for indexed color ``index``.\n\n The ``index`` is a color index in the 256 color space. 
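Every numeric and string literal in `human` above is redacted, so this is only a sketch of the size-formatting idea it implements; base 1024 and the suffix table are assumptions, not values recovered from the source:

    def human(size, base=1024, units='KMGTPE'):
        # Walk the suffixes; each step covers one more power of `base`.
        sign = '-' if size < 0 else ''
        size = abs(size)
        if size < base:
            return '%s%d' % (sign, size)
        for i, suffix in enumerate(units):
            unit = base ** (i + 2)
            if size < unit:
                return '%s%.1f%s' % (sign, size * base / unit, suffix)
        raise OverflowError('value exceeds the unit table')

    print(human(5 * 1024 * 1024))  # '5.0M'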
The color space\n consists of:\n\n * 0x00-0x0f: default EGA colors\n * 0x10-0xe7: 6x6x6 RGB cubes\n * 0xe8-0xff: gray scale ramp", "id": "f3874:c0:m7"} {"signature": "def __init__(self):", "body": "curses.setupterm()", "docstring": "Initialize curses.", "id": "f3874:c0:m0"} {"signature": "def __contains__(self, point):", "body": "if not isinstance(point, Point):point = Point(point)if point.y not in self.canvas:return Falseelse:return point.x in self.canvas[point.y]", "docstring": "Check if a point has a value.", "id": "f3874:c2:m3"} {"signature": "def _function_argument(self, value):", "body": "if value in FUNCTION_CONSTANT:return FUNCTION_CONSTANT[value]else:return float(value)", "docstring": "Resolve function, convert to float if not found.", "id": "f3874:c3:m8"} {"signature": "def filter_savitzky_golay(y, window_size=, order=, deriv=, rate=):", "body": "try:window_size = np.abs(np.int(window_size))order = np.abs(np.int(order))except ValueError:raise ValueError('')if window_size % != or window_size < :raise ValueError('')if window_size < order + :raise ValueError('')order_range = range(order + )half_window = (window_size - ) // minimum = np.min(y)maximum = np.max(y)b = np.mat([[k ** i for i in order_range]for k in range(-half_window, half_window + )])m = np.linalg.pinv(b).A[deriv] * rate ** deriv * math.factorial(deriv)firstvals = y[] - np.abs(y[:half_window+][::-] - y[])lastvals = y[-] + np.abs(y[-half_window-:-][::-] - y[-])y = np.concatenate((firstvals, y, lastvals))return np.clip(np.convolve(m[::-], y, mode=''),minimum,maximum,)", "docstring": "Smooth (and optionally differentiate) with a Savitzky-Golay filter.", "id": "f3874:m1"} {"signature": "@propertydef normalised(self):", "body": "if np is None:return self._normalised_python()else:return self._normalised_numpy()", "docstring": "Normalised data points.", "id": "f3874:c5:m2"} {"signature": "@propertydef scale(self):", "body": "return ", "docstring": "Graph scale.", "id": "f3874:c3:m3"} {"signature": "@propertydef size(self):", "body": "for fd in range():cr = self._ioctl_GWINSZ(fd)if cr:breakif not cr:try:fd = os.open(os.ctermid(), os.O_RDONLY)cr = self._ioctl_GWINSZ(fd)os.close(fd)except Exception:passif not cr:env = os.environcr = (env.get('', ), env.get('', ))return int(cr[]), int(cr[])", "docstring": "Get the current terminal size.", "id": "f3874:c0:m4"} {"signature": "def fetch_deputies(data_dir):", "body": "deputies = DeputiesDataset()df = deputies.fetch()save_to_csv(df, data_dir, \"\")holders = df.condition == ''substitutes = df.condition == ''log.info(\"\", len(df))log.info(\"\", len(df[holders]))log.info(\"\", len(df[substitutes]))return df", "docstring": ":param data_dir: (str) directory in which the output file will be saved", "id": "f3892:m0"} {"signature": "def fetch(self):", "body": "xml = urllib.request.urlopen(self.URL)tree = ET.ElementTree(file=xml)records = self._parse_deputies(tree.getroot())df = pd.DataFrame(records, columns=('','','','','','','','','','','',''))return self._translate(df)", "docstring": "Fetches the list of deputies for the current term.", "id": "f3892:c0:m0"} {"signature": "def fetch_session_start_times(data_dir, pivot, session_dates):", "body": "session_start_times = SessionStartTimesDataset()df = session_start_times.fetch(pivot, session_dates)save_to_csv(df, data_dir, \"\")log.info(\"\", len(session_dates))found = pd.to_datetime(df[''], format=\"\").dt.date.unique()log.info(\"\", len(found))return df", "docstring": ":param data_dir: (str) directory in which the output file will be 
saved\n:param pivot: (int) congressperson document to use as a pivot for scraping the data\n:param session_dates: (list) datetime objects to fetch the start times for", "id": "f3894:m0"} {"signature": "def fetch(self, pivot, session_dates):", "body": "records = self._all_start_times(pivot, session_dates)return pd.DataFrame(records, columns=('','',''))", "docstring": ":param pivot: (int) a congressperson document to use as a pivot for scraping the data\n:param session_dates: (list) datetime objects to fetch the start times for", "id": "f3894:c0:m0"} {"signature": "def fetch(self, start_date, end_date):", "body": "records = []for two_months_range in self._generate_ranges(start_date, end_date):log.debug(two_months_range)for record in self._fetch_missions_for_range(two_months_range[], two_months_range[]):records.append(record)df = pd.DataFrame(records, columns=['','','','','','','',''])translate_column(df, '', {'': '','': '','': '','': ''})translate_column(df, '', {'': '','': ''})return df.drop_duplicates()", "docstring": "Fetches official missions within the given date range", "id": "f3895:c0:m0"} {"signature": "def fetch(self, deputies, start_date, end_date):", "body": "log.debug(\"\".format(len(deputies), start_date, end_date))records = self._all_presences(deputies, start_date, end_date)df = pd.DataFrame(records, columns=('','','','','','','','','',''))return self._translate(df)", "docstring": ":param deputies: (pandas.DataFrame) a dataframe with deputies data\n:param date_start: (str) date in the format dd/mm/yyyy\n:param date_end: (str) date in the format dd/mm/yyyy", "id": "f3897:c0:m1"} {"signature": "def fetch_presences(data_dir, deputies, date_start, date_end):", "body": "presences = PresencesDataset()df = presences.fetch(deputies, date_start, date_end)save_to_csv(df, data_dir, \"\")log.info(\"\", len(df))log.info(\"\", len(df[df.presence == '']))log.info(\"\", len(df[df.presence == '']))return df", "docstring": ":param data_dir: (str) directory in which the output file will be saved\n:param deputies: (pandas.DataFrame) a dataframe with deputies data\n:param date_start: (str) a date in the format dd/mm/yyyy\n:param date_end: (str) a date in the format dd/mm/yyyy", "id": "f3897:m0"} {"signature": "def translate_column(df, column, translations):", "body": "df[column] = df[column].astype('')translations = [translations[cat]for cat in df[column].cat.categories]df[column].cat.rename_categories(translations, inplace=True)", "docstring": ":param df: (pandas.Dataframe) the dataframe to be translated\n:param column: (str) the column to be translated\n:param translations: (dict) a dictionary of the strings to be categorized and translated", "id": "f3902:m3"} {"signature": "def render(self, obj):", "body": "self.obj = objattrs = ''.join(['' % (attr_name, attr.resolve(obj))if isinstance(attr, Accessor)else '' % (attr_name, attr)for attr_name, attr in self.attrs.items()])return mark_safe(u'' % (attrs, self.text))", "docstring": "Render link as HTML output tag .", "id": "f3914:c1:m4"} {"signature": "def get_days_span(self, month_index):", "body": "is_first_month = month_index == is_last_month = month_index == self.__len__() - y = int(self.start_date.year + (self.start_date.month + month_index) / )m = int((self.start_date.month + month_index) % or )total = calendar.monthrange(y, m)[]if is_first_month and is_last_month:return (self.end_date - self.start_date).days + else:if is_first_month:return total - self.start_date.day + elif is_last_month:return self.end_date.dayelse:return total", "docstring": 
"Calculate how many days the month spans.", "id": "f3915:c5:m4"} {"signature": "def get_context_data(self, **kwargs):", "body": "sEcho = self.query_data[\"\"]context = super(BaseListView, self).get_context_data(**kwargs)queryset = context[\"\"]if queryset is not None:total_length = self.get_queryset_length(queryset)queryset = self.filter_queryset(queryset)display_length = self.get_queryset_length(queryset)queryset = self.sort_queryset(queryset)queryset = self.paging_queryset(queryset)values_list = self.convert_queryset_to_values_list(queryset)context = {\"\": sEcho,\"\": total_length,\"\": display_length,\"\": values_list,}else:context = {\"\": sEcho,\"\": ,\"\": ,\"\": [],}return context", "docstring": "Get context data for datatable server-side response.\nSee http://www.datatables.net/usage/server-side", "id": "f3923:c1:m7"} {"signature": "def render_to_json_response(self, context, **response_kwargs):", "body": "return HttpResponse(self.convert_context_to_json(context),content_type='',**response_kwargs)", "docstring": "Returns a JSON response, transforming 'context' to make the payload.", "id": "f3923:c0:m0"} {"signature": "def get_meta_image(self):", "body": "return None", "docstring": "Get the image to use for this object.\nCan be None if there is no relevant image.", "id": "f3960:c0:m3"} {"signature": "def train_encoder(X, y, fold_count, encoder):", "body": "kf = StratifiedKFold(n_splits=fold_count, shuffle=True, random_state=)encoder = deepcopy(encoder) imputer = SimpleImputer(strategy='')scaler = StandardScaler()folds = []fit_encoder_time = score_encoder_time = for train_index, test_index in kf.split(X, y):X_train, X_test = X.iloc[train_index, :].reset_index(drop=True), X.iloc[test_index, :].reset_index(drop=True)y_train, y_test = y[train_index].reset_index(drop=True), y[test_index].reset_index(drop=True)start_time = time.time()X_train = encoder.fit_transform(X_train, y_train)fit_encoder_time += time.time() - start_timeX_train = imputer.fit_transform(X_train)X_train = scaler.fit_transform(X_train)start_time = time.time()X_test = encoder.transform(X_test)score_encoder_time += time.time() - start_timeX_test = imputer.transform(X_test)X_test = scaler.transform(X_test)folds.append([X_train, y_train, X_test, y_test])return folds, fit_encoder_time/fold_count, score_encoder_time/fold_count", "docstring": "Defines folds and performs the data preprocessing (categorical encoding, NaN imputation, normalization)\nReturns a list with {X_train, y_train, X_test, y_test}, average fit_encoder_time and average score_encoder_time\n\nNote: We normalize all features (not only numerical features) because otherwise SVM would\n get stuck for hours on ordinal encoded cylinder.bands.arff dataset due to presence of\n unproportionally high values.\n\nNote: The fold count is variable because there are datasets, which have less than 10 samples in the minority class.\n\nNote: We do not use pipelines because of:\n https://github.com/scikit-learn/scikit-learn/issues/11832", "id": "f3962:m0"} {"signature": "def train_model(folds, model):", "body": "scores = []fit_model_time = score_model_time = for X_train, y_train, X_test, y_test in folds:start_time = time.time()with ignore_warnings(category=ConvergenceWarning): model.fit(X_train, y_train)fit_model_time += time.time() - start_timeprediction_train_proba = model.predict_proba(X_train)[:, ]prediction_train = (prediction_train_proba >= ).astype('')start_time = time.time()prediction_test_proba = model.predict_proba(X_test)[:, ]score_model_time += time.time() - 
start_timeprediction_test = (prediction_test_proba >= ).astype('')with warnings.catch_warnings():warnings.simplefilter(\"\")scores.append([sklearn.metrics.matthews_corrcoef(y_test, prediction_test),sklearn.metrics.matthews_corrcoef(y_train, prediction_train),sklearn.metrics.roc_auc_score(y_test, prediction_test_proba),sklearn.metrics.roc_auc_score(y_train, prediction_train_proba),sklearn.metrics.brier_score_loss(y_test, prediction_test_proba),sklearn.metrics.brier_score_loss(y_train, prediction_train_proba)])return np.mean(scores, axis=), fit_model_time/len(folds), score_model_time/len(folds)", "docstring": "Evaluation with:\n Matthews correlation coefficient: represents thresholding measures\n AUC: represents ranking measures\n Brier score: represents calibration measures", "id": "f3962:m1"} {"signature": "def main(loader, name):", "body": "scores = []raw_scores_ds = {}X, y, mapping = loader()clf = linear_model.LogisticRegression(solver='', multi_class='', max_iter=, random_state=)encoders = (set(category_encoders.__all__) - {''}) for encoder_name in encoders:encoder = getattr(category_encoders, encoder_name)start_time = time.time()score, stds, raw_scores, dim = score_models(clf, X, y, encoder)scores.append([encoder_name, name, dim, score, stds, time.time() - start_time])raw_scores_ds[encoder_name] = raw_scoresgc.collect()results = pd.DataFrame(scores, columns=['', '', '', '', '', ''])raw = pd.DataFrame.from_dict(raw_scores_ds)ax = raw.plot(kind='', return_type='')plt.title('' % (name,))plt.ylabel('')for tick in ax.get_xticklabels():tick.set_rotation()plt.grid()plt.tight_layout()plt.show()return results, raw", "docstring": "Here we iterate through the datasets and score them with a classifier using different encodings.", "id": "f3967:m1"} {"signature": "def get_splice_data():", "body": "df = pd.read_csv('')X = df.reindex(columns=[x for x in df.columns.values if x != ''])X[''] = X[''].map(lambda x: list(str(x).strip()))for idx in range():X['' % (idx, )] = X[''].map(lambda x: x[idx])del X['']y = df.reindex(columns=[''])y = preprocessing.LabelEncoder().fit_transform(y.values.reshape(-, ))mapping = Nonereturn X, y, mapping", "docstring": "Load the splice dataset, split it into X and y, and then call the label encoder to get an integer y column.\n\n:return:", "id": "f3969:m2"} {"signature": "def create_dataset(n_rows=, extras=False, has_none=True):", "body": "random.seed()ds = [[random.random(), random.choice([float(''), float(''), float(''), -, , , -, math.pi]), row, str(row), random.choice(['', '']) if extras else '', random.choice(['', '', '']), random.choice(['', '', '', None]) if has_none else random.choice(['', '', '']), random.choice(['', '', '', '']) if extras else random.choice(['', '', '']), random.choice([, , -]), random.choice(['', '', '']), random.choice(['', '', '', np.nan]) ] for row in range(n_rows)]df = pd.DataFrame(ds, columns=['', '', '', '', '', '', '', '', , '', ''])df[''] = pd.Categorical(df[''], categories=['', '', ''])df[''] = pd.Categorical(df[''], categories=['', '', ''])return df", "docstring": "Creates a dataset with some categorical variables.", "id": "f3981:m2"} {"signature": "def verify_inverse_transform(x, x_inv):", "body": "assert x.equals(x_inv)", "docstring": "Verify x is equal to x_inv.
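The cutoffs and column indices in `train_model` above are redacted; here is a self-contained sketch of the same three-way evaluation (thresholding, ranking, calibration), where the 0.5 cutoff and the toy data are assumptions:

    from sklearn.datasets import make_classification
    from sklearn.linear_model import LogisticRegression
    from sklearn.metrics import brier_score_loss, matthews_corrcoef, roc_auc_score
    from sklearn.model_selection import train_test_split

    X, y = make_classification(n_samples=200, random_state=0)
    X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
    model = LogisticRegression().fit(X_train, y_train)

    proba = model.predict_proba(X_test)[:, 1]  # probability of the positive class
    pred = (proba >= 0.5).astype(int)          # hard labels via an assumed 0.5 cutoff
    print(matthews_corrcoef(y_test, pred))     # thresholding measure
    print(roc_auc_score(y_test, proba))        # ranking measure
    print(brier_score_loss(y_test, proba))     # calibration measure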
The test returns true for NaN.equals(NaN) as it should.", "id": "f3981:m3"} {"signature": "def fit(self, X, y=None, **kwargs):", "body": "X = util.convert_input(X)self._dim = X.shape[]if self.cols is None:self.cols = util.get_obj_cols(X)else:self.cols = util.convert_cols_to_list(self.cols)if self.handle_missing == '':if X[self.cols].isnull().any().bool():raise ValueError('')_, categories = self.ordinal_encoding(X,mapping=self.mapping,cols=self.cols,handle_unknown=self.handle_unknown,handle_missing=self.handle_missing)self.mapping = categoriesX_temp = self.transform(X, override_return_df=True)self.feature_names = X_temp.columns.tolist()if self.drop_invariant:self.drop_cols = []generated_cols = util.get_generated_cols(X, X_temp, self.cols)self.drop_cols = [x for x in generated_cols if X_temp[x].var() <= ]try:[self.feature_names.remove(x) for x in self.drop_cols]except KeyError as e:if self.verbose > :print(\"\"\"\".format(e))return self", "docstring": "Fit encoder according to X and y.\n\n Parameters\n ----------\n\n X : array-like, shape = [n_samples, n_features]\n Training vectors, where n_samples is the number of samples\n and n_features is the number of features.\n y : array-like, shape = [n_samples]\n Target values.\n\n Returns\n -------\n\n self : encoder\n Returns self.", "id": "f3990:c0:m2"} {"signature": "def get_feature_names(self):", "body": "if not isinstance(self.feature_names, list):raise ValueError(\"\")else:return self.feature_names", "docstring": "Returns the names of all transformed / added columns.\n\nReturns\n-------\nfeature_names: list\n A list with all feature names transformed or added.\n Note: potentially dropped features are not included!", "id": "f3990:c0:m6"} {"signature": "def fit(self, X, y, **kwargs):", "body": "X = util.convert_input(X)y = util.convert_input_vector(y, X.index)if X.shape[] != y.shape[]:raise ValueError(\"\" + str(X.shape[]) + \"\" + str(y.shape[]) + \"\")self._dim = X.shape[]if self.cols is None:self.cols = util.get_obj_cols(X)else:self.cols = util.convert_cols_to_list(self.cols)if self.handle_missing == '':if X[self.cols].isnull().any().bool():raise ValueError('')self.ordinal_encoder = OrdinalEncoder(verbose=self.verbose,cols=self.cols,handle_unknown='',handle_missing='')self.ordinal_encoder = self.ordinal_encoder.fit(X)X_ordinal = self.ordinal_encoder.transform(X)self.mapping = self.fit_target_encoding(X_ordinal, y)X_temp = self.transform(X, override_return_df=True)self.feature_names = list(X_temp.columns)if self.drop_invariant:self.drop_cols = []X_temp = self.transform(X)generated_cols = util.get_generated_cols(X, X_temp, self.cols)self.drop_cols = [x for x in generated_cols if X_temp[x].var() <= ]try:[self.feature_names.remove(x) for x in self.drop_cols]except KeyError as e:if self.verbose > :print(\"\"\"\".format(e))return self", "docstring": "Fit encoder according to X and y.\n\n Parameters\n ----------\n X : array-like, shape = [n_samples, n_features]\n Training vectors, where n_samples is the number of samples\n and n_features is the number of features.\n y : array-like, shape = [n_samples]\n Target values.\n\n Returns\n -------\n self : encoder\n Returns self.", "id": "f3991:c0:m1"} {"signature": "def fit_transform(self, X, y=None, **fit_params):", "body": "return self.fit(X, y, **fit_params).transform(X, y)", "docstring": "Encoders that utilize the target must make sure that the training data are transformed with:\n transform(X, y)\nand not with:\n transform(X)", "id": "f3991:c0:m4"} {"signature": "def transform(self, X, y=None, 
override_return_df=False):", "body": "if self.handle_missing == '':if X[self.cols].isnull().any().bool():raise ValueError('')if self._dim is None:raise ValueError('')X = util.convert_input(X)if X.shape[] != self._dim:raise ValueError('' % (X.shape[], self._dim,))if y is not None:y = util.convert_input_vector(y, X.index)if X.shape[] != y.shape[]:raise ValueError(\"\" + str(X.shape[]) + \"\" + str(y.shape[]) + \"\")if not self.cols:return XX = self.ordinal_encoder.transform(X)if self.handle_unknown == '':if X[self.cols].isin([-]).any().any():raise ValueError('')X = self.target_encode(X)if self.drop_invariant:for col in self.drop_cols:X.drop(col, , inplace=True)if self.return_df or override_return_df:return Xelse:return X.values", "docstring": "Perform the transformation to new categorical data.\n\n Parameters\n ----------\n X : array-like, shape = [n_samples, n_features]\n y : array-like, shape = [n_samples] when transform by leave one out\n None, when transform without target info (such as transform test set)\n\n Returns\n -------\n p : array, shape = [n_samples, n_numeric + N]\n Transformed values with encoding applied.", "id": "f3991:c0:m3"} {"signature": "def fit(self, X, y=None, **kwargs):", "body": "X = util.convert_input(X)self._dim = X.shape[]if self.cols is None:self.cols = util.get_obj_cols(X)else:self.cols = util.convert_cols_to_list(self.cols)if self.handle_missing == '':if X[self.cols].isnull().any().bool():raise ValueError('')self.ordinal_encoder = OrdinalEncoder(verbose=self.verbose,cols=self.cols,handle_unknown='',handle_missing='')self.ordinal_encoder = self.ordinal_encoder.fit(X)self.mapping = self.fit_base_n_encoding(X)X_temp = self.transform(X, override_return_df=True)self._encoded_columns = X_temp.columns.valuesself.feature_names = list(X_temp.columns)if self.drop_invariant:self.drop_cols = []generated_cols = util.get_generated_cols(X, X_temp, self.cols)self.drop_cols = [x for x in generated_cols if X_temp[x].var() <= ]try:[self.feature_names.remove(x) for x in self.drop_cols]except KeyError as e:if self.verbose > :print(\"\"\"\".format(e))return self", "docstring": "Fit encoder according to X and y.\n\n Parameters\n ----------\n\n X : array-like, shape = [n_samples, n_features]\n Training vectors, where n_samples is the number of samples\n and n_features is the number of features.\n y : array-like, shape = [n_samples]\n Target values.\n\n Returns\n -------\n\n self : encoder\n Returns self.", "id": "f3992:c0:m1"} {"signature": "def get_feature_names(self):", "body": "if not isinstance(self.feature_names, list):raise ValueError('')else:return self.feature_names", "docstring": "Returns the names of all transformed / added columns.\n\nReturns\n-------\nfeature_names: list\n A list with all feature names transformed or added.\n Note: potentially dropped features are not included!", "id": "f3992:c0:m10"} {"signature": "def basen_encode(self, X_in, cols=None):", "body": "X = X_in.copy(deep=True)cols = X.columns.values.tolist()for switch in self.mapping:col = switch.get('')mod = switch.get('')base_df = mod.reindex(X[col])base_df.set_index(X.index, inplace=True)X = pd.concat([base_df, X], axis=)old_column_index = cols.index(col)cols[old_column_index: old_column_index + ] = mod.columnsreturn X.reindex(columns=cols)", "docstring": "Basen encoding encodes the integers as basen code with one column per digit.\n\nParameters\n----------\nX_in: DataFrame\ncols: list-like, default None\n Column names in the DataFrame to be encoded\n\nReturns\n-------\ndummies : DataFrame", "id": 
"f3992:c0:m6"} {"signature": "def transform(self, X, override_return_df=False):", "body": "if self.handle_missing == '':if X[self.cols].isnull().any().bool():raise ValueError('')if self._dim is None:raise ValueError('')X = util.convert_input(X)if X.shape[] != self._dim:raise ValueError('' % (X.shape[], self._dim,))if not self.cols:return XX_out = self.ordinal_encoder.transform(X)if self.handle_unknown == '':if X_out[self.cols].isin([-]).any().any():raise ValueError('')X_out = self.basen_encode(X_out, cols=self.cols)if self.drop_invariant:for col in self.drop_cols:X_out.drop(col, , inplace=True)if self.return_df or override_return_df:return X_outelse:return X_out.values", "docstring": "Perform the transformation to new categorical data.\n\n Parameters\n ----------\n\n X : array-like, shape = [n_samples, n_features]\n\n Returns\n -------\n\n p : array, shape = [n_samples, n_numeric + N]\n Transformed values with encoding applied.", "id": "f3992:c0:m3"} {"signature": "def col_transform(self, col, digits):", "body": "if col is None or float(col) < :return Noneelse:col = self.number_to_base(int(col), self.base, digits)if len(col) == digits:return colelse:return [ for _ in range(digits - len(col))] + col", "docstring": "The lambda body to transform the column values", "id": "f3992:c0:m8"} {"signature": "def inverse_transform(self, X_in):", "body": "X = X_in.copy(deep=True)X = util.convert_input(X)if self._dim is None:raise ValueError('')X = self.basen_to_integer(X, self.cols, self.base)if X.shape[] != self._dim:if self.drop_invariant:raise ValueError(\"\"\"\" % (X.shape[],))else:raise ValueError('' % (X.shape[], self._dim,))if not self.cols:return X if self.return_df else X.valuesfor switch in self.ordinal_encoder.mapping:column_mapping = switch.get('')inverse = pd.Series(data=column_mapping.index, index=column_mapping.get_values())X[switch.get('')] = X[switch.get('')].map(inverse).astype(switch.get(''))if self.handle_unknown == '' and self.handle_missing == '':for col in self.cols:if X[switch.get('')].isnull().any():warnings.warn(\"\"\"\" % (col,))return X if self.return_df else X.values", "docstring": "Perform the inverse transformation to encoded data.\n\nParameters\n----------\nX_in : array-like, shape = [n_samples, n_features]\n\nReturns\n-------\np: array, the same size of X_in", "id": "f3992:c0:m4"} {"signature": "def fit(self, X, y=None, **kwargs):", "body": "X = util.convert_input(X)self._dim = X.shape[]if self.cols is None:self.cols = util.get_obj_cols(X)else:self.cols = util.convert_cols_to_list(self.cols)if self.handle_missing == '':if X[self.cols].isnull().any().bool():raise ValueError('')self.ordinal_encoder = OrdinalEncoder(verbose=self.verbose,cols=self.cols,handle_unknown='',handle_missing='')self.ordinal_encoder = self.ordinal_encoder.fit(X)ordinal_mapping = self.ordinal_encoder.category_mappingmappings_out = []for switch in ordinal_mapping:values = switch.get('')col = switch.get('')column_mapping = self.fit_polynomial_coding(col, values, self.handle_missing, self.handle_unknown)mappings_out.append({'': switch.get(''), '': column_mapping, })self.mapping = mappings_outX_temp = self.transform(X, override_return_df=True)self.feature_names = X_temp.columns.tolist()if self.drop_invariant:self.drop_cols = []generated_cols = util.get_generated_cols(X, X_temp, self.cols)self.drop_cols = [x for x in generated_cols if X_temp[x].var() <= ]try:[self.feature_names.remove(x) for x in self.drop_cols]except KeyError as e:if self.verbose > :print(\"\"\"\".format(e))return self", "docstring": 
"Fit encoder according to X and y.\n\n Parameters\n ----------\n\n X : array-like, shape = [n_samples, n_features]\n Training vectors, where n_samples is the number of samples\n and n_features is the number of features.\n y : array-like, shape = [n_samples]\n Target values.\n\n Returns\n -------\n\n self : encoder\n Returns self.", "id": "f3994:c0:m1"} {"signature": "def get_feature_names(self):", "body": "if not isinstance(self.feature_names, list):raise ValueError(\"\")else:return self.feature_names", "docstring": "Returns the names of all transformed / added columns.\n\nReturns\n-------\nfeature_names: list\n A list with all feature names transformed or added.\n Note: potentially dropped features are not included!", "id": "f3995:c0:m5"} {"signature": "def fit(self, X, y, **kwargs):", "body": "X = util.convert_input(X)y = util.convert_input_vector(y, X.index).astype(float)if X.shape[] != y.shape[]:raise ValueError(\"\" + str(X.shape[]) + \"\" + str(y.shape[]) + \"\")self._dim = X.shape[]if self.use_default_cols:self.cols = util.get_obj_cols(X)else:self.cols = util.convert_cols_to_list(self.cols)if self.handle_missing == '':if X[self.cols].isnull().any().bool():raise ValueError('')categories = self.fit_leave_one_out(X, y,cols=self.cols)self.mapping = categoriesX_temp = self.transform(X, override_return_df=True)self.feature_names = X_temp.columns.tolist()if self.drop_invariant:self.drop_cols = []generated_cols = util.get_generated_cols(X, X_temp, self.cols)self.drop_cols = [x for x in generated_cols if X_temp[x].var() <= ]try:[self.feature_names.remove(x) for x in self.drop_cols]except KeyError as e:if self.verbose > :print(\"\"\"\".format(e))return self", "docstring": "Fit encoder according to X and y.\n\n Parameters\n ----------\n\n X : array-like, shape = [n_samples, n_features]\n Training vectors, where n_samples is the number of samples\n and n_features is the number of features.\n y : array-like, shape = [n_samples]\n Target values.\n\n Returns\n -------\n\n self : encoder\n Returns self.", "id": "f3996:c0:m1"} {"signature": "def get_feature_names(self):", "body": "if not isinstance(self.feature_names, list):raise ValueError('')else:return self.feature_names", "docstring": "Returns the names of all transformed / added columns.\n\nReturns\n-------\nfeature_names: list\n A list with all feature names transformed or added.\n Note: potentially dropped features are not included!", "id": "f3996:c0:m7"} {"signature": "def fit(self, X, y, **kwargs):", "body": "X = util.convert_input(X)y = util.convert_input_vector(y, X.index).astype(float)if X.shape[] != y.shape[]:raise ValueError(\"\" + str(X.shape[]) + \"\" + str(y.shape[]) + \"\")unique = y.unique()if len(unique) != :raise ValueError(\"\" + str(len(unique)) + \"\")if y.isnull().any():raise ValueError(\"\")if np.max(unique) < :raise ValueError(\"\")if np.min(unique) > :raise ValueError(\"\")self._dim = X.shape[]if self.cols is None:self.cols = util.get_obj_cols(X)else:self.cols = util.convert_cols_to_list(self.cols)if self.handle_missing == '':if X[self.cols].isnull().any().bool():raise ValueError('')self.ordinal_encoder = OrdinalEncoder(verbose=self.verbose,cols=self.cols,handle_unknown='',handle_missing='')self.ordinal_encoder = self.ordinal_encoder.fit(X)X_ordinal = self.ordinal_encoder.transform(X)self.mapping = self._train(X_ordinal, y)X_temp = self.transform(X, override_return_df=True)self.feature_names = X_temp.columns.tolist()if self.drop_invariant:self.drop_cols = []generated_cols = util.get_generated_cols(X, X_temp, 
self.cols)self.drop_cols = [x for x in generated_cols if X_temp[x].var() <= ]try:[self.feature_names.remove(x) for x in self.drop_cols]except KeyError as e:if self.verbose > :print(\"\"\"\".format(e))return self", "docstring": "Fit encoder according to X and binary y.\n\n Parameters\n ----------\n\n X : array-like, shape = [n_samples, n_features]\n Training vectors, where n_samples is the number of samples\n and n_features is the number of features.\n y : array-like, shape = [n_samples]\n Binary target values.\n\n Returns\n -------\n\n self : encoder\n Returns self.", "id": "f3997:c0:m1"} {"signature": "def get_feature_names(self):", "body": "if not isinstance(self.feature_names, list):raise ValueError('')else:return self.feature_names", "docstring": "Returns the names of all transformed / added columns.\n\nReturns\n-------\nfeature_names: list\n A list with all feature names transformed or added.\n Note: potentially dropped features are not included!", "id": "f3998:c0:m8"} {"signature": "def fit(self, X, y=None, **kwargs):", "body": "X = util.convert_input(X)self._dim = X.shape[]if self.cols is None:self.cols = util.get_obj_cols(X)else:self.cols = util.convert_cols_to_list(self.cols)if self.handle_missing == '':if X[self.cols].isnull().any().bool():raise ValueError('')self.ordinal_encoder = OrdinalEncoder(verbose=self.verbose,cols=self.cols,handle_unknown='',handle_missing='')self.ordinal_encoder = self.ordinal_encoder.fit(X)self.mapping = self.generate_mapping()X_temp = self.transform(X, override_return_df=True)self.feature_names = list(X_temp.columns)if self.drop_invariant:self.drop_cols = []generated_cols = util.get_generated_cols(X, X_temp, self.cols)self.drop_cols = [x for x in generated_cols if X_temp[x].var() <= ]try:[self.feature_names.remove(x) for x in self.drop_cols]except KeyError as e:if self.verbose > :print(\"\"\"\".format(e))return self", "docstring": "Fit encoder according to X and y.\n\n Parameters\n ----------\n\n X : array-like, shape = [n_samples, n_features]\n Training vectors, where n_samples is the number of samples\n and n_features is the number of features.\n y : array-like, shape = [n_samples]\n Target values.\n\n Returns\n -------\n\n self : encoder\n Returns self.", "id": "f3998:c0:m2"} {"signature": "def get_generated_cols(X_original, X_transformed, to_transform):", "body": "original_cols = list(X_original.columns)if len(to_transform) > :[original_cols.remove(c) for c in to_transform]current_cols = list(X_transformed.columns)if len(original_cols) > :[current_cols.remove(c) for c in original_cols]return current_cols", "docstring": "Returns a list of the generated/transformed columns.\n\nArguments:\n X_original: df\n the original (input) DataFrame.\n X_transformed: df\n the transformed (current) DataFrame.\n to_transform: [str]\n a list of columns that were transformed (as in the original DataFrame), commonly self.cols.\n\nOutput:\n a list of columns that were transformed (as in the current DataFrame).", "id": "f3999:m5"} {"signature": "def convert_input_vector(y, index):", "body": "if y is None:return Noneif isinstance(y, pd.Series):return yelif isinstance(y, np.ndarray):if len(np.shape(y))==: return pd.Series(y, name='', index=index)elif len(np.shape(y))== and np.shape(y)[]==: return pd.Series(y[, :], name='', index=index)elif len(np.shape(y))== and np.shape(y)[]==: return pd.Series(y[:, ], name='', index=index)else:raise ValueError('' % (str(np.shape(y))))elif np.isscalar(y):return pd.Series([y], name='', index=index)elif isinstance(y, list):if len(y)== or 
(len(y)> and not isinstance(y[], list)): return pd.Series(y, name='', index=index)elif len(y)> and isinstance(y[], list) and len(y[])==: flatten = lambda y: [item for sublist in y for item in sublist]return pd.Series(flatten(y), name='', index=index)elif len(y)== and isinstance(y[], list): return pd.Series(y[], name='', index=index)else:raise ValueError('')elif isinstance(y, pd.DataFrame):if len(list(y))==: return pd.Series(y, name='')if len(list(y))==: return y.iloc[:, ]else:raise ValueError('' % (str(y.shape)))else:return pd.Series(y, name='', index=index)", "docstring": "Unite target data type into a Series.\nIf the target is a Series or a DataFrame, we preserve its index.\nBut if the target does not contain index attribute, we use the index from the argument.", "id": "f3999:m4"} {"signature": "def get_obj_cols(df):", "body": "obj_cols = []for idx, dt in enumerate(df.dtypes):if dt == '' or is_category(dt):obj_cols.append(df.columns.values[idx])return obj_cols", "docstring": "Returns names of 'object' columns in the DataFrame.", "id": "f3999:m1"} {"signature": "def get_feature_names(self):", "body": "if not isinstance(self.feature_names, list):raise ValueError('')else:return self.feature_names", "docstring": "Returns the names of all transformed / added columns.\n\nReturns\n-------\nfeature_names: list\n A list with all feature names transformed or added.\n Note: potentially dropped features are not included!", "id": "f4000:c0:m4"} {"signature": "@staticmethoddef hashing_trick(X_in, hashing_method='', N=, cols=None, make_copy=False):", "body": "try:if hashing_method not in hashlib.algorithms_available:raise ValueError('' % (hashing_method,''.join([str(x) for x in hashlib.algorithms_available])))except Exception as e:try:_ = hashlib.new(hashing_method)except Exception as e:raise ValueError('')if make_copy:X = X_in.copy(deep=True)else:X = X_inif cols is None:cols = X.columns.valuesdef hash_fn(x):tmp = [ for _ in range(N)]for val in x.values:if val is not None:hasher = hashlib.new(hashing_method)if sys.version_info[] == :hasher.update(str(val))else:hasher.update(bytes(str(val), ''))tmp[int(hasher.hexdigest(), ) % N] += return pd.Series(tmp, index=new_cols)new_cols = ['' % d for d in range(N)]X_cat = X.loc[:, cols]X_num = X.loc[:, [x for x in X.columns.values if x not in cols]]X_cat = X_cat.apply(hash_fn, axis=)X_cat.columns = new_colsX = pd.concat([X_cat, X_num], axis=)return X", "docstring": "A basic hashing implementation with configurable dimensionality/precision\n\n Performs the hashing trick on a pandas dataframe, `X`, using the hashing method from hashlib\n identified by `hashing_method`. The number of output dimensions (`N`), and columns to hash (`cols`) are\n also configurable.\n\n Parameters\n ----------\n\n X_in: pandas dataframe\n description text\n hashing_method: string, optional\n description text\n N: int, optional\n description text\n cols: list, optional\n description text\n make_copy: bool, optional\n description text\n\n Returns\n -------\n\n out : dataframe\n A hashing encoded dataframe.\n\n References\n ----------\n Cite the relevant literature, e.g. [1]_. You may also cite these\n references in the notes section above.\n .. [1] Kilian Weinberger; Anirban Dasgupta; John Langford; Alex Smola; Josh Attenberg (2009). Feature Hashing\n for Large Scale Multitask Learning. Proc. 
ICML.", "id": "f4000:c0:m3"} {"signature": "def get_feature_names(self):", "body": "if not isinstance(self.feature_names, list):raise ValueError(\"\")else:return self.feature_names", "docstring": "Returns the names of all transformed / added columns.\n\nReturns\n-------\nfeature_names: list\n A list with all feature names transformed or added.\n Note: potentially dropped features are not included!", "id": "f4001:c0:m6"} {"signature": "def get_feature_names(self):", "body": "if not isinstance(self.feature_names, list):raise ValueError('')else:return self.feature_names", "docstring": "Returns the names of all transformed / added columns.\n\nReturns\n-------\nfeature_names: list\n A list with all feature names transformed or added.\n Note: potentially dropped features are not included!", "id": "f4002:c0:m5"} {"signature": "def fit_transform(self, X, y=None, **fit_params):", "body": "return self.fit(X, y, **fit_params).transform(X, y)", "docstring": "Encoders that utilize the target must make sure that the training data are transformed with:\n transform(X, y)\nand not with:\n transform(X)", "id": "f4003:c0:m3"} {"signature": "def get_feature_names(self):", "body": "if not isinstance(self.feature_names, list):raise ValueError(\"\")else:return self.feature_names", "docstring": "Returns the names of all transformed / added columns.\n\nReturns\n-------\nfeature_names: list\n A list with all feature names transformed or added.\n Note: potentially dropped features are not included!", "id": "f4003:c0:m9"} {"signature": "def transform(self, X, y=None, override_return_df=False):", "body": "if self.handle_missing == '':if X[self.cols].isnull().any().bool():raise ValueError('')if self._dim is None:raise ValueError('')X = util.convert_input(X)if X.shape[] != self._dim:raise ValueError('' % (X.shape[], self._dim,))if y is not None:y = util.convert_input_vector(y, X.index).astype(float)if X.shape[] != y.shape[]:raise ValueError(\"\" + str(X.shape[]) + \"\" + str(y.shape[]) + \"\")if not self.cols:return XX = X.copy(deep=True)X = self.ordinal_encoder.transform(X)if self.handle_unknown == '':if X[self.cols].isin([-]).any().any():raise ValueError('')X = self._score(X, y)if self.drop_invariant:for col in self.drop_cols:X.drop(col, , inplace=True)if self.return_df or override_return_df:return Xelse:return X.values", "docstring": "Perform the transformation to new categorical data. 
When the data are used for model training,\n it is important to also pass the target in order to apply leave one out.\n\n Parameters\n ----------\n\n X : array-like, shape = [n_samples, n_features]\n y : array-like, shape = [n_samples] when transform by leave one out\n None, when transform without target information (such as transform test set)\n\n\n\n Returns\n -------\n\n p : array, shape = [n_samples, n_numeric + N]\n Transformed values with encoding applied.", "id": "f4003:c0:m2"} {"signature": "def get_feature_names(self):", "body": "if not isinstance(self.feature_names, list):raise ValueError('')else:return self.feature_names", "docstring": "Returns the names of all transformed / added columns.\n\nReturns\n-------\nfeature_names: list\n A list with all feature names transformed or added.\n Note: potentially dropped features are not included!", "id": "f4004:c0:m7"} {"signature": "def fit(self, X, y, **kwargs):", "body": "X = util.convert_input(X)y = util.convert_input_vector(y, X.index).astype(float)if X.shape[] != y.shape[]:raise ValueError(\"\" + str(X.shape[]) + \"\" + str(y.shape[]) + \"\")self._dim = X.shape[]if self.use_default_cols:self.cols = util.get_obj_cols(X)else:self.cols = util.convert_cols_to_list(self.cols)if self.handle_missing == '':if X[self.cols].isnull().any().bool():raise ValueError('')categories = self.fit_leave_one_out(X, y,cols=self.cols)self.mapping = categoriesX_temp = self.transform(X, y, override_return_df=True)self.feature_names = X_temp.columns.tolist()if self.drop_invariant:self.drop_cols = []generated_cols = util.get_generated_cols(X, X_temp, self.cols)self.drop_cols = [x for x in generated_cols if X_temp[x].var() <= ]try:[self.feature_names.remove(x) for x in self.drop_cols]except KeyError as e:if self.verbose > :print(\"\"\"\".format(e))return self", "docstring": "Fit encoder according to X and y.\n\n Parameters\n ----------\n\n X : array-like, shape = [n_samples, n_features]\n Training vectors, where n_samples is the number of samples\n and n_features is the number of features.\n y : array-like, shape = [n_samples]\n Target values.\n\n Returns\n -------\n\n self : encoder\n Returns self.", "id": "f4004:c0:m1"} {"signature": "def fit_transform(self, X, y=None, **fit_params):", "body": "return self.fit(X, y, **fit_params).transform(X, y)", "docstring": "Encoders that utilize the target must make sure that the training data are transformed with:\n transform(X, y)\nand not with:\n transform(X)", "id": "f4004:c0:m3"} {"signature": "def inverse_transform(self, X_in):", "body": "return self.base_n_encoder.inverse_transform(X_in)", "docstring": "Perform the inverse transformation to encoded data.\n\nParameters\n----------\nX_in : array-like, shape = [n_samples, n_features]\n\nReturns\n-------\np: array, the same size of X_in", "id": "f4005:c0:m3"} {"signature": "def transform(self, X, override_return_df=False):", "body": "return self.base_n_encoder.transform(X)", "docstring": "Perform the transformation to new categorical data.\n\n Parameters\n ----------\n\n X : array-like, shape = [n_samples, n_features]\n\n Returns\n -------\n\n p : array, shape = [n_samples, n_numeric + N]\n Transformed values with encoding applied.", "id": "f4005:c0:m2"} {"signature": "def transform(self, X, override_return_df=False):", "body": "if self.handle_missing == '':if X[self.cols].isnull().any().bool():raise ValueError('')if self._dim is None:raise ValueError('')X = util.convert_input(X)if X.shape[] != self._dim:raise ValueError('' % (X.shape[], self._dim, ))if not 
self.cols:return XX = self.ordinal_encoder.transform(X)if self.handle_unknown == '':if X[self.cols].isin([-]).any().any():raise ValueError('')X = self.backward_difference_coding(X, mapping=self.mapping)if self.drop_invariant:for col in self.drop_cols:X.drop(col, , inplace=True)if self.return_df or override_return_df:return Xelse:return X.values", "docstring": "Perform the transformation to new categorical data.\n\n Parameters\n ----------\n\n X : array-like, shape = [n_samples, n_features]\n\n Returns\n -------\n\n p : array, shape = [n_samples, n_numeric + N]\n Transformed values with encoding applied.", "id": "f4006:c0:m2"} {"signature": "def get_feature_names(self):", "body": "if not isinstance(self.feature_names, list):raise ValueError('')else:return self.feature_names", "docstring": "Returns the names of all transformed / added columns.\n\nReturns\n-------\nfeature_names: list\n A list with all feature names transformed or added.\n Note: potentially dropped features are not included!", "id": "f4006:c0:m5"} {"signature": "def fit(self, X, y=None, **kwargs):", "body": "X = util.convert_input(X)self._dim = X.shape[]if self.cols is None:self.cols = util.get_obj_cols(X)else:self.cols = util.convert_cols_to_list(self.cols)if self.handle_missing == '':if X[self.cols].isnull().any().bool():raise ValueError('')self.ordinal_encoder = OrdinalEncoder(verbose=self.verbose,cols=self.cols,handle_unknown='',handle_missing='')self.ordinal_encoder = self.ordinal_encoder.fit(X)ordinal_mapping = self.ordinal_encoder.category_mappingmappings_out = []for switch in ordinal_mapping:values = switch.get('')col = switch.get('')column_mapping = self.fit_backward_difference_coding(col, values, self.handle_missing, self.handle_unknown)mappings_out.append({'': col, '': column_mapping, })self.mapping = mappings_outX_temp = self.transform(X, override_return_df=True)self.feature_names = X_temp.columns.tolist()if self.drop_invariant:self.drop_cols = []generated_cols = util.get_generated_cols(X, X_temp, self.cols)self.drop_cols = [x for x in generated_cols if X_temp[x].var() <= ]try:[self.feature_names.remove(x) for x in self.drop_cols]except KeyError as e:if self.verbose > :print(\"\"\"\".format(e))return self", "docstring": "Fits an ordinal encoder to produce a consistent mapping across applications and optionally finds\n generally invariant columns to drop consistently.\n\n Parameters\n ----------\n\n X : array-like, shape = [n_samples, n_features]\n Training vectors, where n_samples is the number of samples\n and n_features is the number of features.\n y : array-like, shape = [n_samples]\n Target values.\n\n Returns\n -------\n\n self : encoder\n Returns self.", "id": "f4006:c0:m1"} {"signature": "def djeffify_html(rendered_string):", "body": "parser = DjeffParser()parser.feed(rendered_string)return parser.djhtml", "docstring": "This function contains the core logic for a\nmiddleware, template tag or Template engine approach", "id": "f4011:m1"} {"signature": "def djeffify_string(string_to_djeff):", "body": "string_to_djeff = re.sub(r'', '', string_to_djeff, flags=re.IGNORECASE) string_to_djeff = re.sub(r'', '', string_to_djeff, flags=re.IGNORECASE) string_to_djeff = re.sub(r'', '', string_to_djeff, flags=re.IGNORECASE) return string_to_djeff", "docstring": "Djeffifies string_to_djeff", "id": "f4011:m0"} {"signature": "def __init__(self, convert_charrefs=True, *args, **kwargs):", "body": "try:HTMLParser.__init__(self, convert_charrefs=convert_charrefs)except TypeError:HTMLParser.__init__(self)self.djhtml = ''", 
"docstring": "Explicitly set convert_charrefs to keep deprecation warnings at bay.\n\nSee:\nhttps://docs.python.org/3/library/html.parser.html#html.parser.HTMLParser", "id": "f4011:c2:m0"} {"signature": "def prepend_line(filepath, line):", "body": "with open(filepath) as f:lines = f.readlines()lines.insert(, line)with open(filepath, '') as f:f.writelines(lines)", "docstring": "Rewrite a file adding a line to its beginning.", "id": "f4013:m3"} {"signature": "def facts(self, **kwargs):", "body": "return self.__api.facts(query=EqualsOperator(\"\", self.name),**kwargs)", "docstring": "Get all facts of this node. Additional arguments may also be\n specified that will be passed to the query function.", "id": "f4024:c4:m4"} {"signature": "def events(self, **kwargs):", "body": "return self.__api.events(query=EqualsOperator(\"\", self.hash_),**kwargs)", "docstring": "Get all events for this report. Additional arguments may also be\n specified that will be passed to the query function.", "id": "f4024:c1:m4"} {"signature": "def resource(self, type_, title, **kwargs):", "body": "resources = self.__api.resources(type_=type_,title=title,query=EqualsOperator(\"\", self.name),**kwargs)return next(resource for resource in resources)", "docstring": "Get a resource matching the supplied type and title. Additional\n arguments may also be specified that will be passed to the query\n function.", "id": "f4024:c4:m7"} {"signature": "def connect(host='', port=, ssl_verify=False, ssl_key=None,ssl_cert=None, timeout=, protocol=None, url_path='',username=None, password=None, token=None):", "body": "return BaseAPI(host=host, port=port,timeout=timeout, ssl_verify=ssl_verify, ssl_key=ssl_key,ssl_cert=ssl_cert, protocol=protocol, url_path=url_path,username=username, password=password, token=token)", "docstring": "Connect with PuppetDB. 
This will return an object allowing you\n to query the API through its methods.\n\n :param host: (Default: 'localhost') Hostname or IP of PuppetDB.\n :type host: :obj:`string`\n\n :param port: (Default: '8080') Port on which to talk to PuppetDB.\n :type port: :obj:`int`\n\n :param ssl_verify: (optional) Verify PuppetDB server certificate.\n :type ssl_verify: :obj:`bool` or :obj:`string` True, False or filesystem \\\n path to CA certificate.\n\n :param ssl_key: (optional) Path to our client secret key.\n :type ssl_key: :obj:`None` or :obj:`string` representing a filesystem\\\n path.\n\n :param ssl_cert: (optional) Path to our client certificate.\n :type ssl_cert: :obj:`None` or :obj:`string` representing a filesystem\\\n path.\n\n :param timeout: (Default: 10) Number of seconds to wait for a response.\n :type timeout: :obj:`int`\n\n :param protocol: (optional) Explicitly specify the protocol to be used\n (especially handy when using HTTPS with ssl_verify=False and\n without certs)\n :type protocol: :obj:`None` or :obj:`string`\n\n :param url_path: (Default: '/') The URL path where PuppetDB is served\n :type url_path: :obj:`None` or :obj:`string`\n\n :param username: (optional) The username to use for HTTP basic\n authentication\n :type username: :obj:`None` or :obj:`string`\n\n :param password: (optional) The password to use for HTTP basic\n authentication\n :type password: :obj:`None` or :obj:`string`\n\n :param token: (optional) The x-auth token to use for X-Authentication\n :type token: :obj:`None` or :obj:`string`", "id": "f4025:m0"} {"signature": "def versioncmp(v1, v2):", "body": "def normalize(v):\"\"\"\"\"\"return [int(x) for x in re.sub(r'', '', v).split(\"\")]try:return cmp(normalize(v1), normalize(v2))except NameError:return (normalize(v1) > normalize(v2)) - (normalize(v1) < normalize(v2))", "docstring": "Compares two version strings, v1 and v2, and returns an integer according to the\n outcome. The return value is negative if v1 < v2, zero if v1 == v2 and\n positive if v1 > v2.\n\n :param v1: The first object to compare.\n :param v2: The second object to compare.\n\n :returns: -1, 0 or 1.\n :rtype: :obj:`int`", "id": "f4027:m1"} {"signature": "def _normalize_resource_type(self, type_):", "body": "return ''.join([s.capitalize() for s in type_.split('')])", "docstring": "Normalizes the type passed to the api by capitalizing each part\n of the type.
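The regex and separator inside `versioncmp`'s `normalize` helper are redacted above; a plausible reconstruction (the exact pattern is an assumption) strips trailing '.0' components before comparing the dot-separated parts numerically:

    import re

    def versioncmp(v1, v2):
        def normalize(v):
            # Drop trailing '.0' parts so '1.9.0' and '1.9' compare equal.
            return [int(x) for x in re.sub(r'(\.0+)*$', '', v).split('.')]
        # Python 3 has no cmp(); this idiom yields -1, 0 or 1.
        return (normalize(v1) > normalize(v2)) - (normalize(v1) < normalize(v2))

    print(versioncmp('1.10', '1.9'))   # 1, numeric rather than lexicographic
    print(versioncmp('1.9.0', '1.9'))  # 0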
For example:\n\n sysctl::value -> Sysctl::Value\n user -> User", "id": "f4028:c0:m4"} {"signature": "def _query(self, endpoint, path=None, query=None,order_by=None, limit=None, offset=None, include_total=False,summarize_by=None, count_by=None, count_filter=None,request_method=''):", "body": "log.debug(''''''.format(endpoint, path, query, limit,offset, summarize_by, count_by,count_filter))url = self._url(endpoint, path=path)payload = {}if query is not None:payload[''] = queryif order_by is not None:payload[PARAMETERS['']] = order_byif limit is not None:payload[''] = limitif include_total is True:payload[PARAMETERS['']] =json.dumps(include_total)if offset is not None:payload[''] = offsetif summarize_by is not None:payload[PARAMETERS['']] = summarize_byif count_by is not None:payload[PARAMETERS['']] = count_byif count_filter is not None:payload[PARAMETERS['']] = count_filterif not (payload):payload = Noneif not self.token:auth = (self.username, self.password)else:auth = Nonetry:if request_method.upper() == '':r = self._session.get(url, params=payload,verify=self.ssl_verify,cert=(self.ssl_cert, self.ssl_key),timeout=self.timeout,auth=auth)elif request_method.upper() == '':r = self._session.post(url,data=json.dumps(payload, default=str),verify=self.ssl_verify,cert=(self.ssl_cert, self.ssl_key),timeout=self.timeout,auth=auth)else:log.error(\"\".format(request_method))raise APIErrorr.raise_for_status()if '' in r.headers:self.last_total = r.headers['']else:self.last_total = Nonejson_body = r.json()if json_body is not None:return json_bodyelse:del json_bodyraise EmptyResponseErrorexcept requests.exceptions.Timeout:log.error(\"\".format(ERROR_STRINGS[''],self.host, self.port,self.protocol.upper()))raiseexcept requests.exceptions.ConnectionError:log.error(\"\".format(ERROR_STRINGS[''],self.host, self.port,self.protocol.upper()))raiseexcept requests.exceptions.HTTPError as err:log.error(\"\".format(err.response.text,self.host, self.port,self.protocol.upper()))raise", "docstring": "This method actually queries PuppetDB. Provided an endpoint and an\n optional path and/or query it will fire a request at PuppetDB. If\n PuppetDB can be reached and answers within the timeout we'll decode\n the response and give it back or raise for the HTTP Status Code\n PuppetDB gave back.\n\n :param endpoint: The PuppetDB API endpoint we want to query.\n :type endpoint: :obj:`string`\n :param path: An additional path if we don't wish to query the\\\n bare endpoint.\n :type path: :obj:`string`\n :param query: (optional) A query to further narrow down the resultset.\n :type query: :obj:`string`\n :param order_by: (optional) Set the order parameters for the resultset.\n :type order_by: :obj:`string`\n :param limit: (optional) Tell PuppetDB to limit its response to this\\\n number of objects.\n :type limit: :obj:`int`\n :param offset: (optional) Tell PuppetDB to start its response from\\\n the given offset.
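The split/join separator in `_normalize_resource_type` (a few records above) is blanked out, but its docstring examples pin it down to '::'; a reconstruction:

    def _normalize_resource_type(type_):
        # 'sysctl::value' -> 'Sysctl::Value', 'user' -> 'User'
        return '::'.join(s.capitalize() for s in type_.split('::'))

    print(_normalize_resource_type('sysctl::value'))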
This is useful for implementing pagination\\\n but is not supported just yet.\n :type offset: :obj:`string`\n :param include_total: (optional) Include the total number of results\n :type order_by: :obj:`bool`\n :param summarize_by: (optional) Specify what type of object you'd like\\\n to see counts at the event-counts and aggregate-event-counts \\\n endpoints\n :type summarize_by: :obj:`string`\n :param count_by: (optional) Specify what type of object is counted\n :type count_by: :obj:`string`\n :param count_filter: (optional) Specify a filter for the results\n :type count_filter: :obj:`string`\n\n :raises: :class:`~pypuppetdb.errors.EmptyResponseError`\n\n :returns: The decoded response from PuppetDB\n :rtype: :obj:`dict` or :obj:`list`", "id": "f4028:c0:m6"} {"signature": "def event_counts(self, summarize_by, **kwargs):", "body": "return self._query('',summarize_by=summarize_by,**kwargs)", "docstring": "Get event counts from puppetdb.\n\n :param summarize_by: (Required) The object type to be counted on.\n Valid values are 'containing_class', 'resource'\n and 'certname'.\n :type summarize_by: :obj:`string`\n :param count_by: (Optional) The object type that is counted when\n building the counts of 'successes', 'failures',\n 'noops' and 'skips'. Support values are 'certname'\n and 'resource' (default)\n :type count_by: :obj:`string`\n :param count_filter: (Optional) A JSON query that is applied to the\n event-counts output but before the results are\n aggregated. Supported operators are `=`, `>`,\n `<`, `>=`, and `<=`. Supported fields are\n `failures`, `successes`, `noops`, and `skips`.\n :type count_filter: :obj:`string`\n :param \\*\\*kwargs: The rest of the keyword arguments are passed\n to the _query function.\n\n :returns: A list of dictionaries containing the results.\n :rtype: :obj:`list`", "id": "f4028:c0:m19"} {"signature": "def current_version(self):", "body": "return self._query('')['']", "docstring": "Get version information about the running PuppetDB server.\n\n :returns: A string representation of the PuppetDB version.\n :rtype: :obj:`string`", "id": "f4028:c0:m22"} {"signature": "def fact_names(self):", "body": "return self._query('')", "docstring": "Get a list of all known facts.", "id": "f4028:c0:m23"} {"signature": "def _url(self, endpoint, path=None):", "body": "log.debug(''.format(endpoint, path))try:endpoint = ENDPOINTS[endpoint]except KeyError:raise APIErrorurl = ''.format(base_url=self.base_url,endpoint=endpoint,)if path is not None:url = ''.format(url, quote(path))return url", "docstring": "The complete URL we will end up querying. 
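A hypothetical call to `event_counts` above, assuming an `api` object obtained from `connect()` and the operator helpers this library ships; the host and the 'latest_report?' field are illustrative assumptions:

    from pypuppetdb import connect
    from pypuppetdb.QueryBuilder import EqualsOperator

    api = connect(host='puppetdb.example.com')  # hypothetical host
    counts = api.event_counts(
        query=EqualsOperator('latest_report?', True),
        summarize_by='certname',
        count_by='resource',
    )
    for entry in counts:
        print(entry)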
Depending on the\n endpoint we pass in this will result in different URLs with\n different prefixes.\n\n :param endpoint: The PuppetDB API endpoint we want to query.\n :type endpoint: :obj:`string`\n :param path: An additional path if we don't wish to query the\\\n bare endpoint.\n :type path: :obj:`string`\n\n :returns: A URL constructed from :func:`base_url` with the\\\n appropriate API version/prefix and the rest of the path added\\\n to it.\n :rtype: :obj:`string`", "id": "f4028:c0:m5"} {"signature": "def nodes(self, unreported=, with_status=False, **kwargs):", "body": "nodes = self._query('', **kwargs)now = datetime.datetime.utcnow()if type(nodes) == dict:nodes = [nodes, ]if with_status:latest_events = self.event_counts(query=EqualsOperator(\"\", True),summarize_by='')for node in nodes:node[''] = Nonenode[''] = Noneif with_status:status = [s for s in latest_eventsif s[''][''] == node['']]try:node[''] = node['']if status:node[''] = status[]except KeyError:if status:node[''] = status = status[]if status[''] > :node[''] = ''if status[''] > :node[''] = ''if status[''] > :node[''] = ''else:node[''] = ''if node[''] is not None:try:last_report = json_to_datetime(node[''])last_report = last_report.replace(tzinfo=None)unreported_border = now - timedelta(hours=unreported)if last_report < unreported_border:delta = (now - last_report)node[''] = Truenode[''] = ''.format(delta.days,int(delta.seconds / ),int((delta.seconds % ) / ))except AttributeError:node[''] = Trueif not node['']:node[''] = Trueyield Node(self,name=node[''],deactivated=node[''],expired=node[''],report_timestamp=node[''],catalog_timestamp=node[''],facts_timestamp=node[''],status_report=node[''],noop=node.get(''),noop_pending=node.get(''),events=node[''],unreported=node.get(''),unreported_time=node.get(''),report_environment=node[''],catalog_environment=node[''],facts_environment=node[''],latest_report_hash=node.get(''),cached_catalog_status=node.get(''))", "docstring": "Query for nodes by either name or query. If neither is\n provided this will return a list of all nodes. This method\n also fetches each node's status and the event counts of the latest\n report from puppetdb.\n\n :param with_status: (optional) include the node status in the\\\n returned nodes\n :type with_status: :bool:\n :param unreported: (optional) amount of hours when a node gets\n marked as unreported\n :type unreported: :obj:`None` or integer\n :param \*\*kwargs: The rest of the keyword arguments are passed\n to the _query function\n\n :returns: A generator yielding Nodes.\n :rtype: :class:`pypuppetdb.types.Node`", "id": "f4028:c0:m7"}
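Since nodes() is a generator, a typical consumer iterates it lazily. A short usage sketch, assuming pypuppetdb's top-level connect() helper and a reachable PuppetDB server (the hostname is a placeholder):

from pypuppetdb import connect

db = connect(host='puppetdb.example.com')  # placeholder host
# Flag nodes whose last report is older than 4 hours as unreported and
# annotate each node with the status of its latest report.
for node in db.nodes(unreported=4, with_status=True):
    print(node.name)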
{"signature": "@propertydef version(self):", "body": "return self.api_version", "docstring": "The version of the API we're querying against.\n\n :returns: Current API version.\n :rtype: :obj:`string`", "id": "f4028:c0:m1"} {"signature": "def catalog(self, node):", "body": "catalogs = self.catalogs(path=node)return next(x for x in catalogs)", "docstring": "Get the available catalog for a given node.\n\n :param node: (Required) The name of the PuppetDB node.\n :type node: :obj:`string`\n\n :returns: An instance of Catalog\n :rtype: :class:`pypuppetdb.types.Catalog`", "id": "f4028:c0:m16"} {"signature": "def node(self, name):", "body": "nodes = self.nodes(path=name)return next(node for node in nodes)", "docstring": "Gets a single node from PuppetDB.\n\n :param name: The name of the node to search for.\n :type name: :obj:`string`\n\n :return: An instance of Node\n :rtype: :class:`pypuppetdb.types.Node`", "id": "f4028:c0:m8"} {"signature": "def metric(self, metric=None):", "body": "return self._query('', path=metric)", "docstring": "Query for a specific metric.\n\n :param metric: The name of the metric we want.\n :type metric: :obj:`string`\n\n :returns: The return of :meth:`~pypuppetdb.api.BaseAPI._query`.", "id": "f4028:c0:m24"} {"signature": "def recursive_glob(base_directory, regex=None):", "body": "if regex is None:regex = ''files = glob(os.path.join(base_directory, regex))for path, dirlist, filelist in os.walk(base_directory):for ignored in IGNORE:try:dirlist.remove(ignored)except:passfor dir_name in dirlist:files.extend(glob(os.path.join(path, dir_name, regex)))return files", "docstring": "Uses glob to find all files that match the regex in base_directory.\n\n @param base_directory: string\n\n @param regex: string\n\n @return: list", "id": "f4053:m1"} {"signature": "def get_requirements(*args):", "body": "install_deps = []try:for fpath in args:install_deps.extend([str(d.req or d.url) for d in parse_requirements(fpath)])except:print(''.format(fpath))return [dep for dep in install_deps if dep != '']", "docstring": "Parse all requirements files given and return a list of the dependencies", "id": "f4053:m0"} {"signature": "def read_xl(xl_path: str):", "body": "xl_path, choice = _check_xl_path(xl_path)reader = XL_READERS[choice]return reader(xl_path)", "docstring": "Return the workbook from the Excel file in `xl_path`.", "id": "f4056:m4"} {"signature": "def concat_sheets(xl_path: str, sheetnames=None, add_tab_names=False):", "body": "xl_path, choice = _check_xl_path(xl_path)if sheetnames is None:sheetnames = get_sheet_list(xl_path)sheets = pd.read_excel(xl_path, sheetname=sheetnames)if add_tab_names:for tab in sheets:sheets[tab][''] = [tab] * len(sheets[tab])return pd.concat([sheets[tab] for tab in sheets])", "docstring": "Return a pandas DataFrame with the concat'ed\n content of the `sheetnames` from the Excel file in\n `xl_path`.\n\n Parameters\n ----------\n xl_path: str\n Path to the Excel file\n\n sheetnames: list of str\n List of existing sheet names of `xl_path`.\n If 
None, will use all sheets from `xl_path`.\n\n add_tab_names: bool\n If True will add a 'Tab' column which says from which\n tab the row comes from.\n\n Returns\n -------\n df: pandas.DataFrame", "id": "f4056:m6"} {"signature": "def duplicated(values: Sequence):", "body": "vals = pd.Series(values)return vals[vals.duplicated()]", "docstring": "Return the duplicated items in `values`", "id": "f4056:m10"} {"signature": "def _check_cols(df, col_names):", "body": "for col in col_names:if not hasattr(df, col):raise AttributeError(\"\".format(col,df.columns))", "docstring": "Raise an AttributeError if `df` does not have a column named as an item of\n the list of strings `col_names`.", "id": "f4056:m7"} {"signature": "def _openpyxl_read_xl(xl_path: str):", "body": "try:wb = load_workbook(filename=xl_path, read_only=True)except:raiseelse:return wb", "docstring": "Use openpyxl to read an Excel file.", "id": "f4056:m0"} {"signature": "def get_sheet_list(xl_path: str) -> List:", "body": "wb = read_xl(xl_path)if hasattr(wb, ''):return wb.sheetnameselse:return wb.sheet_names()", "docstring": "Return a list with the name of the sheets in\n the Excel file in `xl_path`.", "id": "f4056:m5"} {"signature": "def col_values(df, col_name):", "body": "_check_cols(df, [col_name])if '' in df[col_name] or pd.np.issubdtype(df[col_name].dtype, str): return [nom.lower() for nom in df[pd.notnull(df)][col_name] if not pd.isnull(nom)]else:return [nom for nom in df[pd.notnull(df)][col_name] if not pd.isnull(nom)]", "docstring": "Return a list of not null values from the `col_name` column of `df`.", "id": "f4056:m8"} {"signature": "def duplicated_rows(df, col_name):", "body": "_check_cols(df, [col_name])dups = df[pd.notnull(df[col_name]) & df.duplicated(subset=[col_name])]return dups", "docstring": "Return a DataFrame with the duplicated values of the column `col_name`\n in `df`.", "id": "f4056:m9"} {"signature": "def get_last_created_file(input_dir, glob_pattern=''):", "body": "return get_last_file(input_dir, glob_pattern, key=op.getctime)", "docstring": "Return the path to the last created file in `input_dir`.\n See `get_last_file` docstring for description of the parameters.", "id": "f4057:m16"} {"signature": "def recursive_glob(base_directory, regex=''):", "body": "files = glob(op.join(base_directory, regex))for path, dirlist, filelist in os.walk(base_directory):for dir_name in dirlist:files.extend(glob(op.join(path, dir_name, regex)))return files", "docstring": "Uses glob to find all files or folders that match the regex\nstarting from the base_directory.\n\nParameters\n----------\nbase_directory: str\n\nregex: str\n\nReturns\n-------\nfiles: list", "id": "f4057:m11"} {"signature": "def find_match(base_directory, regex=''):", "body": "return glob(op.join(base_directory, regex))", "docstring": "Uses glob to find all files that match the regex\nin base_directory.\n\n@param base_directory: string\n\n@param regex: string\n\n@return: set", "id": "f4057:m10"} {"signature": "def get_file_list(file_dir, regex=''):", "body": "file_list = os.listdir(file_dir)file_list.sort()if regex:file_list = search_list(file_list, regex)file_list = [op.join(file_dir, fname) for fname in file_list]return file_list", "docstring": "Creates a list of files that match the search_regex within file_dir.\nThe list of files will have file_dir as path prefix.\n\nParameters\n----------\n@param file_dir:\n\n@param search_regex:\n\nReturns:\n--------\nList of paths to files that match the search_regex", "id": "f4057:m4"} {"signature": "def 
recursive_dir_match(folder_path, regex=''):", "body": "outlist = []for root, dirs, files in os.walk(folder_path):outlist.extend([op.join(root, f) for f in dirsif re.match(regex, f)])return outlist", "docstring": "Returns absolute paths of folders that match the regex within folder_path and\nall its children folders.\n\nNote: The regex matching is done using the match function\nof the re module.\n\nParameters\n----------\nfolder_path: string\n\nregex: string\n\nReturns\n-------\nA list of strings.", "id": "f4057:m3"} {"signature": "def recursive_find_match(folder_path, regex=''):", "body": "outlist = []for root, dirs, files in os.walk(folder_path):outlist.extend([op.join(root, f) for f in filesif re.match(regex, f)])return outlist", "docstring": "Returns absolute paths of files that match the regex within folder_path and\nall its children folders.\n\nNote: The regex matching is done using the match function\nof the re module.\n\nParameters\n----------\nfolder_path: string\n\nregex: string\n\nReturns\n-------\nA list of strings.", "id": "f4057:m6"} {"signature": "def recursive_find_search(folder_path, regex=''):", "body": "outlist = []for root, dirs, files in os.walk(folder_path):outlist.extend([op.join(root, f) for f in filesif re.search(regex, f)])return outlist", "docstring": "Returns absolute paths of files that match the regex within file_dir and\nall its children folders.\n\nNote: The regex matching is done using the search function\nof the re module.\n\nParameters\n----------\nfolder_path: string\n\nregex: string\n\nReturns\n-------\nA list of strings.", "id": "f4057:m7"} {"signature": "def get_common_filepath(self, nodepath):", "body": "return commonprefix(self.get_node_filepaths(nodepath))", "docstring": "Returns the common filepath between all leaves in the filetree.", "id": "f4058:c1:m11"} {"signature": "def remove_hidden_files(file_lst):", "body": "return [fnom for fnom in file_lst if not fnom.startswith('')]", "docstring": "Removes the filenames that start with '.'\n\n:param file_lst: list of strings\n\n:return: list of strings", "id": "f4058:m1"} {"signature": "@staticmethoddef _import_config(filepath):", "body": "if not op.isfile(filepath):raise IOError(''''.format(filepath))cfg = import_pyfile(filepath)if not hasattr(cfg, ''):raise KeyError('')if not hasattr(cfg, ''):raise KeyError('')return cfg.root_path, cfg.filetree", "docstring": "Imports filetree and root_path variable values from the filepath.\n\n:param filepath:\n:return: root_path and filetree", "id": "f4058:c1:m7"} {"signature": "def get_root_nodes(self):", "body": "return self._filetree.keys()", "docstring": "Return a list of the names of the root nodes.", "id": "f4058:c1:m8"} {"signature": "def remove_nodes(self, pattern, adict):", "body": "mydict = self._filetree if adict is None else adictif isinstance(mydict, dict):for nom in mydict.keys():if isinstance(mydict[nom], dict):matchs = filter_list(mydict[nom], pattern)for nom in matchs:mydict = self.remove_nodes(pattern, mydict[nom])mydict.pop(nom)else:mydict[nom] = filter_list(mydict[nom], pattern)else:matchs = set(filter_list(mydict, pattern))mydict = set(mydict) - matchsreturn mydict", "docstring": "Remove the nodes that match the pattern.", "id": "f4058:c1:m12"} {"signature": "def filter_list(lst, pattern):", "body": "if is_fnmatch_regex(pattern) and not is_regex(pattern):log.info(''.format(pattern))filst = fnmatch.filter(lst, pattern)else:log.info(''.format(pattern))filst = match_list(lst, pattern)if filst:filst.sort()return filst", "docstring": "Filters the lst 
using pattern.\nIf pattern starts with '(' it will be considered a re regular expression,\notherwise it will use fnmatch filter.\n\n:param lst: list of strings\n\n:param pattern: string\n\n:return: list of strings\nFiltered list of strings", "id": "f4058:m0"} {"signature": "def get_possible_paths(base_path, path_regex):", "body": "if not path_regex:return []if len(path_regex) < :return []if path_regex[] == os.sep:path_regex = path_regex[:]rest_files = ''if os.sep in path_regex:node_names = path_regex.partition(os.sep)first_node = node_names[]rest_nodes = node_names[]folder_names = filter_list(os.listdir(base_path), first_node)for nom in folder_names:new_base = op.join(base_path, nom)if op.isdir(new_base):rest_files = get_possible_paths(new_base, rest_nodes)else:rest_files = filter_list(os.listdir(base_path), path_regex)files = []if rest_files:files = [op.join(base_path, f) for f in rest_files]return files", "docstring": "Looks for path_regex within base_path. Each match is appended\nto the returned list.\npath_regex may contain subfolder structure.\nIf any part of the folder structure is a fnmatch or regex pattern, it is\nexpanded against the existing folder names.\n\n:param base_path: str\n\n:param path_regex: str\n\n:return list of strings", "id": "f4058:m5"} {"signature": "def populate_subtree(basepath, treemap, verbose=False):", "body": "file_nodes = OrderedDict()if isinstance(treemap, tuple):try:file_nodes = process_tuple_node(basepath, treemap)except:raise FileTreeMapError(''''.format(basepath, treemap))if isinstance(treemap, list):for node in treemap:try:file_nodes.update(process_tuple_node(basepath, node))except:raise FileTreeMapError(''''.format(basepath, node))elif isinstance(treemap, dict):for k in treemap.keys():cname = kchild_map = treemap[k]if isinstance(child_map, tuple) or isinstance(child_map, dict):try:file_nodes[cname] = populate_subtree(basepath, child_map)except:raise FileTreeMapError(''''.format(basepath,child_map))elif isinstance(child_map, str):if child_map[] == os.sep:raise FileTreeMapError(''''''.format(str(child_map),os.sep))subpaths = get_possible_paths(basepath, child_map)if subpaths:file_nodes[cname] = subpathsif verbose:log.info(''.format(basepath, file_nodes.keys()))return file_nodes", "docstring": ":param basepath: str\n\n:param treemap: dict\n\n:return: dict", "id": "f4058:m7"} {"signature": "def get_dict_leaves(data):", "body": "result = []if isinstance(data, dict):for item in data.values():result.extend(get_dict_leaves(item))elif isinstance(data, list):result.extend(data)else:result.append(data)return result", "docstring": "Given a nested dictionary, this returns all its leaf elements in a list.\n\n:param data:\n\n:return: list", "id": "f4058:m4"} {"signature": "@staticmethoddef create_folder(dirpath, overwrite=False):", "body": "if not overwrite:while op.exists(dirpath):dirpath += ''os.makedirs(dirpath, exist_ok=overwrite)return dirpath", "docstring": "Will create dirpath folder. If dirpath already exists and overwrite is False,\n will append a '+' suffix to dirpath until dirpath does not exist.", "id": "f4058:c1:m6"}
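The create_folder body above has its suffix literal stripped by the dataset's anonymization; per its docstring the suffix is '+'. A minimal runnable sketch of the described behavior:

import os
import os.path as op

def create_folder(dirpath, overwrite=False):
    # Keep appending '+' until the path is free, then create it.
    if not overwrite:
        while op.exists(dirpath):
            dirpath += '+'
    os.makedirs(dirpath, exist_ok=overwrite)
    return dirpath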
{"signature": "def get_node_filepaths(self, nodepath):", "body": "files = self.get_node(nodepath)return get_dict_leaves(files)", "docstring": "Returns all leaves in filetree.", "id": "f4058:c1:m10"} {"signature": "def count_node_match(self, pattern, adict=None):", "body": "mydict = self._filetree if adict is None else adictk = if isinstance(mydict, dict):names = mydict.keys()k += len(filter_list(names, pattern))for nom in names:k += self.count_node_match(pattern, mydict[nom])else:k = len(filter_list(mydict, pattern))return k", "docstring": "Return the number of nodes that match the pattern.\n\n:param pattern:\n\n:param adict:\n:return: int", "id": "f4058:c1:m13"} {"signature": "def __init__(self, config_map):", "body": "for key in config_map:if config_map[key] == '':config_map[key] = Nonesetattr(self, key, config_map[key])", "docstring": ":param config_map: dict", "id": "f4059:c0:m0"} {"signature": "def ux_file_len(filepath):", "body": "p = subprocess.Popen(['', '', filepath], stdout=subprocess.PIPE,stderr=subprocess.PIPE)result, err = p.communicate()if p.returncode != :raise IOError(err)l = result.strip()l = int(l.split()[])return l", "docstring": "Returns the length of the file using the 'wc' GNU command\n\n Parameters\n ----------\n filepath: str\n\n Returns\n -------\n int", "id": "f4061:m15"} {"signature": "def get_abspath(folderpath):", "body": "if not op.exists(folderpath):raise FolderNotFound(folderpath)return op.abspath(folderpath)", "docstring": "Returns the absolute path of folderpath.\n If the path does not exist, will raise IOError.", "id": "f4061:m0"} {"signature": "def remove_all(filelist, folder=''):", "body": "if not folder:for f in filelist:os.remove(f)else:for f in filelist:os.remove(op.join(folder, f))", "docstring": "Deletes all files in filelist\n\n Parameters\n ----------\n filelist: list of str\n List of the file paths to be removed\n\n folder: str\n Path to be used as common directory for all file paths in filelist", "id": "f4061:m11"} {"signature": "def fileobj_size(file_obj):", "body": "file_obj.seek(, os.SEEK_END)return file_obj.tell()", "docstring": "Returns the size of the file\n\n Parameters\n ----------\n file_obj: file-like object\n\n Returns\n -------\n int", "id": "f4061:m18"} {"signature": "def create_subjects_file(filelist, labels, output_file, split=''):", "body": "if len(filelist) != len(labels):raise ValueError(''''.format(len(filelist), len(labels)))lines = []for i, subj in enumerate(filelist):lab = labels[i]line = subj + split + str(lab)lines.append(line)lines = np.array(lines)np.savetxt(output_file, lines, fmt='')", "docstring": "Creates a file where each line is :.\n\n Parameters\n ----------\n filelist: list of str\n List of filepaths\n\n labels: list of int, str or labels that can be transformed with str()\n List of labels\n\n output_file: str\n Output file path\n\n split: str\n Split character for each line", "id": "f4061:m9"} {"signature": "def parse_subjects_list(filepath, datadir='', split='', labelsf=None):", "body": "labels = []subjs = []if datadir:datadir += op.sepwith open(filepath, '') as f:for s in f:line = s.strip().split(split)if len(line) == :labels.append(np.float(line[]))subjf = line[].strip()else:subjf = line.strip()if not op.isabs(subjf):subjs.append(datadir + subjf)else:subjs.append(subjf)if labelsf is not None:labels = np.loadtxt(labelsf)return [labels, subjs]", 
"docstring": "Parses a file with a list of: :.\n\n Parameters\n ----------\n filepath: str\n Path to file with a list of: :.\n Where ':' can be any split character\n\n datadir: str\n String to be path prefix of each line of the fname content,\n only in case the lines are relative file paths.\n\n split: str\n Split character for each line\n\n labelsf: str\n Path to file with a list of the labels if it is not included in\n fname. It will overwrite the labels from fname.\n\n Returns\n -------\n [labels, subjs] where labels is a list of labels and subjs a list of\n filepaths", "id": "f4061:m8"} {"signature": "def get_folder_subpath(path, folder_depth):", "body": "if path[] == op.sep:folder_depth += return op.sep.join(path.split(op.sep)[:folder_depth])", "docstring": "Returns a folder path of path with depth given by folder_dept:\n\nParameters\n----------\npath: str\n\nfolder_depth: int > 0\n\nReturns\n-------\nA folder path\n\nExample\n-------\n>>> get_folder_subpath('/home/user/mydoc/work/notes.txt', 3)\n>>> '/home/user/mydoc'", "id": "f4061:m12"} {"signature": "def get_temp_file(dirpath=None, suffix=''):", "body": "return tempfile.NamedTemporaryFile(dir=dirpath, suffix=suffix)", "docstring": "Uses tempfile to create a NamedTemporaryFile using\nthe default arguments.\n\nParameters\n----------\ndirpath: str\nDirectory where it must be created.\nIf dir is specified, the file will be created\nin that directory, otherwise, a default directory is used.\nThe default directory is chosen from a platform-dependent\nlist, but the user of the application can control the\ndirectory location by setting the TMPDIR, TEMP or TMP\nenvironment variables.\n\nsuffix: str\nFile name suffix.\nIt does not put a dot between the file name and the\nsuffix; if you need one, put it at the beginning of suffix.\n\nReturns\n-------\nfile object\n\nNote\n----\nPlease, close it once you have used the file.", "id": "f4061:m13"} {"signature": "def add_extension_if_needed(filepath, ext, check_if_exists=False):", "body": "if not filepath.endswith(ext):filepath += extif check_if_exists:if not op.exists(filepath):raise IOError('' + filepath)return filepath", "docstring": "Add the extension ext to fpath if it doesn't have it.\n\n Parameters\n ----------\n filepath: str\n File name or path\n\n ext: str\n File extension\n\n check_if_exists: bool\n\n Returns\n -------\n File name or path with extension added, if needed.", "id": "f4061:m4"} {"signature": "def grep_one(srch_str, filepath):", "body": "for line in open(filepath):if srch_str in line:return linereturn None", "docstring": "Return the first line in file defined by filepath\n that contains srch_str\n\n Parameters\n ----------\n srch_str: str\n\n filepath: str\n\n Returns\n ----------\n str", "id": "f4061:m7"} {"signature": "def join_path_to_filelist(path, filelist):", "body": "return [op.join(path, str(item)) for item in filelist]", "docstring": "Joins path to each line in filelist\n\n Parameters\n ----------\n path: str\n\n filelist: list of str\n\n Returns\n -------\n list of filepaths", "id": "f4061:m10"} {"signature": "def write_lines(filepath, lines):", "body": "with open(filepath, '') as f:f.writelines(lines)", "docstring": "Write the given lines to the file in filepath\n\n Parameters\n ----------\n filepath: str\n\n lines: list of str", "id": "f4061:m6"} {"signature": "def append_dict_values(list_of_dicts, keys=None):", "body": "if keys is None:keys = list(list_of_dicts[].keys())dict_of_lists = DefaultOrderedDict(list)for d in list_of_dicts:for k in 
{"signature": "def create_dataset(self, ds_name, data, attrs=None, dtype=None):", "body": "if ds_name in self._datasets:ds = self._datasets[ds_name]if ds.dtype != data.dtype:warnings.warn('')else:if dtype is None:dtype = data.dtypeds = self._group.create_dataset(ds_name, data.shape,dtype=dtype)if attrs is not None:for key in attrs:setattr(ds.attrs, key, attrs[key])ds.read_direct(data)self._datasets[ds_name] = dsreturn ds", "docstring": "Saves a Numpy array in a dataset in the HDF file, registers it as\nds_name and returns the h5py dataset.\n\n:param ds_name: string\nRegistration name of the dataset to be registered.\n\n:param data: Numpy ndarray\n\n:param dtype: dtype\nDatatype of the dataset\n\n:return: h5py dataset", "id": "f4063:c0:m6"} {"signature": "def __del__(self):", "body": "self._hdf_file.close()if self._remove_on_destroy:os.remove(self._fname)", "docstring": "Class destructor", "id": "f4063:c0:m2"} {"signature": "@staticmethoddef get_temp_file(dir=None, suffix=''):", "body": "return tempfile.NamedTemporaryFile(dir=dir, suffix=suffix)", "docstring": "Uses tempfile to create a NamedTemporaryFile using\nthe default arguments.\n\n@param dir: string\nDirectory where it must be created.\nIf dir is specified, the file will be created\nin that directory, otherwise, a default directory is used.\nThe default directory is chosen from a platform-dependent\nlist, but the user of the application can control the\ndirectory location by setting the TMPDIR, TEMP or TMP\nenvironment variables.\n\n@param suffix: string\nFile name suffix.\nIt does not put a dot between the file name and the\nsuffix; if you need one, put it at the beginning of suffix.\n\n@return: file object\n\n@note:\nClose it once you have used the file.", "id": "f4063:c0:m1"} {"signature": "def get(self, key):", "body": "node = self.get_node(key)if node is None:raise KeyError('' % key)if hasattr(node, ''):if '' in node.attrs:return self._read_group(node)return self._read_array(node)", "docstring": "Retrieve pandas object or group of Numpy ndarrays\nstored in file\n\nParameters\n----------\nkey : object\n\nReturns\n-------\nobj : type of object stored in file", "id": "f4063:c1:m3"} {"signature": "def create_empty_dataset(self, ds_name, dtype=np.float32):", "body": "if ds_name in self._datasets:return self._datasets[ds_name]ds = self._group.create_dataset(ds_name, (, ), maxshape=None,dtype=dtype)self._datasets[ds_name] = dsreturn ds", "docstring": "Creates a Dataset with unknown size.\nResize it before using.\n\n:param ds_name: string\n\n:param dtype: dtype\nDatatype of the dataset\n\n:return: h5py DataSet", "id": "f4063:c0:m5"} {"signature": "@staticmethoddef _fill_missing_values(df, range_values, fill_value=, fill_method=None):", "body": "idx_colnames = df.index.namesidx_colranges = [range_values[x] for x in idx_colnames]fullindex = pd.Index([p for p in product(*idx_colranges)],name=tuple(idx_colnames))fulldf = df.reindex(index=fullindex, fill_value=fill_value,method=fill_method)fulldf.index.names = idx_colnamesreturn fulldf, idx_colranges", 
"docstring": "Will get the names of the index colums of df, obtain their ranges from\nrange_values dict and return a reindexed version of df with the given\nrange values.\n\n:param df: pandas DataFrame\n\n:param range_values: dict or array-like\nMust contain for each index column of df an entry with all the values\nwithin the range of the column.\n\n:param fill_value: scalar or 'nearest', default 0\nValue to use for missing values. Defaults to 0, but can be any\n\"compatible\" value, e.g., NaN.\nThe 'nearest' mode will fill the missing value with the nearest value in\n the column.\n\n:param fill_method: {'backfill', 'bfill', 'pad', 'ffill', None}, default None\nMethod to use for filling holes in reindexed DataFrame\n'pad' / 'ffill': propagate last valid observation forward to next valid\n'backfill' / 'bfill': use NEXT valid observation to fill gap\n\n:return: pandas Dataframe and used column ranges\nreindexed DataFrame and dict with index column ranges", "id": "f4063:c1:m2"} {"signature": "def put(self, key, value, attrs=None, format=None, append=False, **kwargs):", "body": "if not isinstance(value, np.ndarray):super(NumpyHDFStore, self).put(key, value, format, append, **kwargs)else:group = self.get_node(key)if group is not None and not append:self._handle.removeNode(group, recursive=True)group = Noneif group is None:paths = key.split('')path = ''for p in paths:if not len(p):continuenew_path = pathif not path.endswith(''):new_path += ''new_path += pgroup = self.get_node(new_path)if group is None:group = self._handle.createGroup(path, p)path = new_pathds_name = kwargs.get('', self._array_dsname)ds = self._handle.createArray(group, ds_name, value)if attrs is not None:for key in attrs:setattr(ds.attrs, key, attrs[key])self._handle.flush()return ds", "docstring": "Store object in HDFStore\n\nParameters\n----------\nkey : str\n\nvalue : {Series, DataFrame, Panel, Numpy ndarray}\n\nformat : 'fixed(f)|table(t)', default is 'fixed'\n fixed(f) : Fixed format\n Fast writing/reading. Not-appendable, nor searchable\n\n table(t) : Table format\n Write as a PyTables Table structure which may perform worse but allow more flexible operations\n like searching/selecting subsets of the data\n\nappend : boolean, default False\n This will force Table format, append the input data to the\n existing.\n\nencoding : default None, provide an encoding for strings", "id": "f4063:c1:m4"} {"signature": "def save_variables_to_shelve(file_path, variables):", "body": "mashelf = shelve.open(file_path, '')for vn in variables.keys():try:mashelf[vn] = variables[vn]except KeyError as ke:raise Exception(''.format(vn)) from kemashelf.close()", "docstring": "Parameters\n----------\nfile_path: str\n\nvariables: dict\n Dictionary with objects. 
Object name -> object\n\nNotes\n-----\n Before calling this function, create a varlist this way:\n\n shelfvars = []\n for v in varnames:\n shelfvars.append(eval(v))\n\n #to_restore variables from shelf\n my_shelf = shelve.open(filename)\n for key in my_shelf:\n globals()[key]=my_shelf[key]\n my_shelf.close()", "id": "f4065:m2"} {"signature": "@staticmethoddef save_variables(filename, variables):", "body": "ext = get_extension(filename).lower()out_exts = {'', '', '', '', ''}output_file = filenameif not ext in out_exts:output_file = add_extension_if_needed(filename, '')ext = get_extension(filename)if ext == '' or ext == '':save_variables_to_shelve(output_file, variables)elif ext == '':save_variables_to_mat(output_file, variables)elif ext == '' or ext == '':from .hdf5 import save_variables_to_hdf5save_variables_to_hdf5(output_file, variables)else:raise ValueError(''.format(ext))", "docstring": "Save given variables in a file.\n Valid extensions: '.pyshelf' or '.shelf' (Python shelve)\n '.mat' (Matlab archive),\n '.hdf5' or '.h5' (HDF5 file)\n\n Parameters\n ----------\n filename: str\n Output file path.\n\n variables: dict\n Dictionary varname -> variable\n\n Raises\n ------\n ValueError: if the extension of the filesname is not recognized.", "id": "f4065:c0:m1"} {"signature": "@staticmethoddef save_varlist(filename, varnames, varlist):", "body": "variables = {}for i, vn in enumerate(varnames):variables[vn] = varlist[i]ExportData.save_variables(filename, variables)", "docstring": "Valid extensions '.pyshelf', '.mat', '.hdf5' or '.h5'\n\n@param filename: string\n\n@param varnames: list of strings\nNames of the variables\n\n@param varlist: list of objects\nThe objects to be saved", "id": "f4065:c0:m2"} {"signature": "def get_group_names(h5file, h5path=''):", "body": "return _get_node_names(h5file, h5path, node_type=h5py.Group)", "docstring": "Return the groups names within h5file/h5path\n\n Parameters\n ----------\n h5file: h5py.File or path to hdf5 file\n HDF5 file object\n\n h5path: str\n HDF5 group path to get the group names from\n\n Returns\n -------\n groupnames: list of str\n List of group names", "id": "f4066:m2"} {"signature": "def _get_node_names(h5file, h5path='', node_type=h5py.Dataset):", "body": "if isinstance(h5file, str):_h5file = get_h5file(h5file, mode='')else:_h5file = h5fileif not h5path.startswith(''):h5path = '' + h5pathnames = []try:h5group = _h5file.require_group(h5path)for node in _hdf5_walk(h5group, node_type=node_type):names.append(node.name)except:raise RuntimeError(''.format(_h5file.filename, h5path))finally:if isinstance(h5file, str):_h5file.close()return names", "docstring": "Return the node of type node_type names within h5path of h5file.\n\n Parameters\n ----------\n h5file: h5py.File\n HDF5 file object\n\n h5path: str\n HDF5 group path to get the group names from\n\n node_type: h5py object type\n HDF5 object type\n\n Returns\n -------\n names: list of str\n List of names", "id": "f4066:m7"} {"signature": "def get_h5file(file_path, mode=''):", "body": "if not op.exists(file_path):raise IOError(''.format(file_path))try:h5file = h5py.File(file_path, mode=mode)except:raiseelse:return h5file", "docstring": "Return the h5py.File given its file path.\n\n Parameters\n ----------\n file_path: string\n HDF5 file path\n\n mode: string\n r Readonly, file must exist\n r+ Read/write, file must exist\n w Create file, truncate if exists\n w- Create file, fail if exists\n a Read/write if exists, create otherwise (default)\n\n Returns\n -------\n h5file: h5py.File", "id": 
"f4066:m1"} {"signature": "def _get_nodes(h5file, h5path='', node_type=h5py.Dataset):", "body": "if isinstance(h5file, str):_h5file = get_h5file(h5file, mode='')else:_h5file = h5fileif not h5path.startswith(''):h5path = '' + h5pathnames = []try:h5group = _h5file.require_group(h5path)for node in _hdf5_walk(h5group, node_type=node_type):names.append(node)except:raise RuntimeError(''.format(str(node_type), _h5file.filename, h5path))finally:if isinstance(h5file, str):_h5file.close()return names", "docstring": "Return the nodes within h5path of the h5file.\n\n Parameters\n ----------\n h5file: h5py.File\n HDF5 file object\n\n h5path: str\n HDF5 group path to get the nodes from\n\n node_type: h5py object type\n The type of the nodes that you want to get\n\n Returns\n -------\n nodes: list of node_type objects", "id": "f4066:m8"} {"signature": "@staticmethoddef get_dcm_reader(store_metadata=True, header_fields=None):", "body": "if not store_metadata:return lambda fpath: fpathif header_fields is None:build_dcm = lambda fpath: DicomFile(fpath)else:dicom_header = namedtuple('', header_fields)build_dcm = lambda fpath: dicom_header._make(DicomFile(fpath).get_attributes(header_fields))return build_dcm", "docstring": "Creates a lambda function to read DICOM files.\nIf store_store_metadata is False, will only return the file path.\nElse if you give header_fields, will return only the set of of\nheader_fields within a DicomFile object or the whole DICOM file if\nNone.\n\n:return: function\nThis function has only one parameter: file_path", "id": "f4067:c1:m1"} {"signature": "def from_folders(self, folders):", "body": "self.items = []self._store_dicom_paths(folders)", "docstring": "Restart the self.items and stores all dicom file paths found\nwithin folders\n\nParameters\n----------\nfolders: str or list of str", "id": "f4067:c0:m2"} {"signature": "def from_set(self, fileset, check_if_dicoms=True):", "body": "if check_if_dicoms:self.items = []for f in fileset:if is_dicom_file(f):self.items.append(f)else:self.items = fileset", "docstring": "Overwrites self.items with the given set of files.\n Will filter the fileset and keep only Dicom files.\n\n Parameters\n ----------\n fileset: iterable of str\n Paths to files\n\n check_if_dicoms: bool\n Whether to check if the items in fileset are dicom file paths", "id": "f4067:c0:m3"} {"signature": "def __init__(self, folders, read_metadata=True, header_fields=None):", "body": "DicomFileSet.__init__(self, folders)self.read_dcm = self.get_dcm_reader(read_metadata, header_fields)", "docstring": ":param folders: str or list of strs\nPath or paths to folders to be searched for Dicom files\n\n:param read_metadata: bool\nIf True, will make a list of DicomFiles, otherwise will store\na simple DICOM header (namedtuples) with the fields specified\nin header_fields.\n\n:param header_fields: set of strings\nSet of header fields to be read from each DICOM file in a DicomHeader.\nIf store_metadata is False, this won't be used. 
{"signature": "def rename_file_group_to_serial_nums(file_lst):", "body": "file_lst.sort()c = for f in file_lst:dirname = get_abspath(f.dirname())fdest = f.joinpath(dirname, \"\".format(c) +OUTPUT_DICOM_EXTENSION)log.info(''.format(f, fdest))f.rename(fdest)c += ", "docstring": "Will rename all files in file_lst to a padded serial\n number plus its extension\n\n :param file_lst: list of path.py paths", "id": "f4067:m1"} {"signature": "def _store_dicom_paths(self, folders):", "body": "if isinstance(folders, str):folders = [folders]for folder in folders:if not os.path.exists(folder):raise FolderNotFound(folder)self.items.extend(list(find_all_dicom_files(folder)))", "docstring": "Search for dicoms in folders and save file paths into\n self.dicom_paths set.\n\n :param folders: str or list of str", "id": "f4067:c0:m1"} {"signature": "def update(self, dicomset):", "body": "if not isinstance(dicomset, DicomFileSet):raise ValueError('')self.items = list(set(self.items).update(dicomset))", "docstring": "Update this set with the union of itself and dicomset.\n\n Parameters\n ----------\n dicomset: DicomFileSet", "id": "f4067:c0:m4"} {"signature": "def set_dicom_file2(self, dcm_file):", "body": "self.dcmf2 = self._read_dcmfile(dcm_file)", "docstring": "Parameters\n----------\ndcm_file: str (path to file) or DicomFile or namedtuple", "id": "f4068:c1:m3"} {"signature": "def merge_groups(self, indices):", "body": "try:merged = merge_dict_of_lists(self.dicom_groups, indices,pop_later=True, copy=True)self.dicom_groups = mergedexcept IndexError:raise IndexError('')", "docstring": "Extend the lists within the DICOM groups dictionary.\n The indices will indicate which list has to be extended by which\n other list.\n\n Parameters\n ----------\n indices: list or tuple of 2 iterables of int, both having the same length\n The indices of the lists that have to be merged, both iterables\n items will be read pair by pair, the first is the index to the\n list that will be extended with the list of the second index.\n The indices can be constructed with Numpy e.g.,\n indices = np.where(square_matrix)", "id": "f4068:c3:m7"} {"signature": "def group_dicom_files(dicom_file_paths, header_fields):", "body": "dist = SimpleDicomFileDistance(field_weights=header_fields)path_list = dicom_file_paths.copy()path_groups = DefaultOrderedDict(DicomFileSet)while len(path_list) > :file_path1 = path_list.pop()file_subgroup = [file_path1]dist.set_dicom_file1(file_path1)j = len(path_list)-while j >= :file_path2 = path_list[j]dist.set_dicom_file2(file_path2)if dist.transform():file_subgroup.append(file_path2)path_list.pop(j)j -= path_groups[file_path1].from_set(file_subgroup, check_if_dicoms=False)return path_groups", "docstring": "Gets a list of DICOM file absolute paths and returns a list of lists of\nDICOM file paths. 
Each group contains a set of DICOM files that have\nexactly the same headers.\n\nParameters\n----------\ndicom_file_paths: list of str\n List or set of DICOM file paths\n\nheader_fields: list of str\n List of header field names to check on the comparisons of the DICOM files.\n\nReturns\n-------\ndict of DicomFileSets\n The key is one filepath representing the group (the first found).", "id": "f4068:m0"} {"signature": "def fit(self, dcm_file1, dcm_file2):", "body": "self.set_dicom_file1(dcm_file1)self.set_dicom_file2(dcm_file2)", "docstring": "Parameters\n----------\ndcm_file1: str (path to file) or DicomFile or namedtuple\n\ndcm_file2: str (path to file) or DicomFile or namedtuple", "id": "f4068:c1:m1"} {"signature": "def get_groups_in_same_folder(self, folder_depth=):", "body": "group_pairs = []key_dicoms = list(self.dicom_groups.keys())idx = len(key_dicoms)while idx > :group1 = key_dicoms.pop()dir_group1 = get_folder_subpath(group1, folder_depth)for group in key_dicoms:if group.startswith(dir_group1):group_pairs.append((group1, group))idx -= return group_pairs", "docstring": "Returns a list of 2-tuples with pairs of dicom groups that\nare in the same folder within given depth.\n\nParameters\n----------\nfolder_depth: int\nPath depth to check for folder equality.\n\nReturns\n-------\nlist of tuples of str", "id": "f4068:c3:m3"} {"signature": "def remove_dcm2nii_underprocessed(filepaths):", "body": "cln_flist = []len_sorted = sorted(filepaths, key=len)for idx, fpath in enumerate(len_sorted):remove = Falsefname = op.basename(fpath)rest = len_sorted[idx+:]for rest_fpath in rest:rest_file = op.basename(rest_fpath)if rest_file.endswith(fname):remove = Truebreakif not remove:cln_flist.append(fpath)return cln_flist", "docstring": "Return a subset of `filepaths`. 
Keep only the files that have a basename longer than the\n others with the same suffix.\n This relies on the fact that dcm2nii prepends a prefix character for each processing\n step it does automatically in the DICOM to NIfTI conversion.\n\n Parameters\n ----------\n filepaths: iterable of str\n\n Returns\n -------\n cleaned_paths: iterable of str", "id": "f4069:m4"} {"signature": "def convert_dcm2nii(input_dir, output_dir, filename):", "body": "if not op.exists(input_dir):raise IOError(''.format(input_dir))if not op.exists(output_dir):raise IOError(''.format(output_dir))tmpdir = tempfile.TemporaryDirectory(prefix='')arguments = ''.format(tmpdir.name)try:call_out = call_dcm2nii(input_dir, arguments)except:raiseelse:log.info(''.format(input_dir))filenames = glob(op.join(tmpdir.name, ''))cleaned_filenames = remove_dcm2nii_underprocessed(filenames)filepaths = []for srcpath in cleaned_filenames:dstpath = op.join(output_dir, filename)realpath = copy_w_plus(srcpath, dstpath)filepaths.append(realpath)basename = op.basename(remove_ext(srcpath))aux_files = set(glob(op.join(tmpdir.name, '' .format(basename)))) -set(glob(op.join(tmpdir.name, ''.format(basename))))for aux_file in aux_files:aux_dstpath = copy_w_ext(aux_file, output_dir, remove_ext(op.basename(realpath)))filepaths.append(aux_dstpath)return filepaths", "docstring": "Call MRICron's `dcm2nii` to convert the DICOM files inside `input_dir`\n to Nifti and save the Nifti file in `output_dir` with a `filename` prefix.\n\n Parameters\n ----------\n input_dir: str\n Path to the folder that contains the DICOM files\n\n output_dir: str\n Path to the folder where to save the NifTI file\n\n filename: str\n Output file basename\n\n Returns\n -------\n filepaths: list of str\n List of file paths created in `output_dir`.", "id": "f4069:m3"} {"signature": "def treefall(iterable):", "body": "num_elems = len(iterable)for i in range(num_elems, -, -):for c in combinations(iterable, i):yield c", "docstring": "Generate all combinations of the elements of iterable and its subsets.\n\nParameters\n----------\niterable: list, set or dict or any iterable object\n\nReturns\n-------\nA generator of all possible combinations of the iterable.\n\nExample:\n-------\n>>> for i in treefall([1, 2, 3]): print(i)\n>>> (1, 2, 3)\n>>> (1, 2)\n>>> (1, 3)\n>>> (2, 3)\n>>> (1,)\n>>> (2,)\n>>> (3,)\n>>> ()", "id": "f4070:m0"} {"signature": "def get_attributes(self, attributes, default=''):", "body": "if isinstance(attributes, str):attributes = [attributes]attrs = [getattr(self, attr, default) for attr in attributes]if len(attrs) == :return attrs[]return tuple(attrs)", "docstring": "Return the attributes values from this DicomFile\n\n Parameters\n ----------\n attributes: str or list of str\n DICOM field names\n\n default: str\n Default value if the attribute does not exist.\n\n Returns\n -------\n Value of the field or list of values.", "id": "f4071:c0:m1"} {"signature": "def as_ndarray(arr, copy=False, dtype=None, order=''):", "body": "if order not in ('', '', '', '', None):raise ValueError(\"\".format(str(order)))if isinstance(arr, np.memmap):if dtype is None:if order in ('', '', None):ret = np.array(np.asarray(arr), copy=True)else:ret = np.array(np.asarray(arr), copy=True, order=order)else:if order in ('', '', None):ret = np.asarray(arr).astype(dtype)else:ret = _asarray(np.array(arr, copy=True), dtype=dtype, order=order)elif isinstance(arr, np.ndarray):ret = _asarray(arr, dtype=dtype, order=order)if np.may_share_memory(ret, arr) and copy:ret = ret.T.copy().T if ret.flags[''] else 
ret.copy()elif isinstance(arr, (list, tuple)):if order in (\"\", \"\"):ret = np.asarray(arr, dtype=dtype)else:ret = np.asarray(arr, dtype=dtype, order=order)else:raise ValueError(\"\".format(arr.__class__))return ret", "docstring": "Convert an arbitrary array to numpy.ndarray.\n\n In the case of a memmap array, a copy is automatically made to break the\n link with the underlying file (whatever the value of the \"copy\" keyword).\n\n The purpose of this function is mainly to get rid of memmap objects, but\n it can be used for other purposes. In particular, combining copying and\n casting can lead to performance improvements in some cases, by avoiding\n unnecessary copies.\n\n If not specified, input array order is preserved, in all cases, even when\n a copy is requested.\n\n Caveat: this function does not copy during bool to/from 1-byte dtype\n conversions. This can lead to some surprising results in some rare cases.\n Example:\n\n a = numpy.asarray([0, 1, 2], dtype=numpy.int8)\n b = as_ndarray(a, dtype=bool) # array([False, True, True], dtype=bool)\n c = as_ndarray(b, dtype=numpy.int8) # array([0, 1, 2], dtype=numpy.int8)\n\n The usually expected result for the last line would be array([0, 1, 1])\n because True evaluates to 1. Since there is no copy made here, the original\n array is recovered.\n\n Parameters\n ----------\n arr: array-like\n input array. Any value accepted by numpy.asarray is valid.\n\n copy: bool\n if True, force a copy of the array. Always True when arr is a memmap.\n\n dtype: any numpy dtype\n dtype of the returned array. Performing copy and type conversion at the\n same time can in some cases avoid an additional copy.\n\n order: string\n gives the order of the returned array.\n Valid values are: \"C\", \"F\", \"A\", \"K\", None.\n default is \"K\". See ndarray.copy() for more information.\n\n Returns\n -------\n ret: np.ndarray\n Numpy array containing the same data as arr, always of class\n numpy.ndarray, and with no link to any underlying file.", "id": "f4072:m1"} {"signature": "def _num_samples(x):", "body": "if not hasattr(x, '') and not hasattr(x, ''):if hasattr(x, ''):x = np.asarray(x)else:raise TypeError(\"\" % x)return x.shape[] if hasattr(x, '') else len(x)", "docstring": "Return number of samples in array-like x.", "id": "f4075:m3"} {"signature": "def warn_if_not_float(X, estimator=''):", "body": "if not isinstance(estimator, str):estimator = estimator.__class__.__name__if X.dtype.kind != '':warnings.warn(\"\"\"\" % (estimator, X.dtype))return Truereturn False", "docstring": "Warning utility function to check that data type is floating point.\n\n Returns True if a warning was raised (i.e. 
the input is not float) and\n False otherwise, for easier input validation.", "id": "f4075:m10"} {"signature": "def check_array(array, accept_sparse=None, dtype=None, order=None, copy=False,force_all_finite=True, ensure_2d=True, allow_nd=False):", "body": "if isinstance(accept_sparse, str):accept_sparse = [accept_sparse]if sp.issparse(array):array = _ensure_sparse_format(array, accept_sparse, dtype, order,copy, force_all_finite)else:if ensure_2d:array = np.atleast_2d(array)array = np.array(array, dtype=dtype, order=order, copy=copy)if not allow_nd and array.ndim >= :raise ValueError(\"\" %array.ndim)if force_all_finite:_assert_all_finite(array)return array", "docstring": "Input validation on an array, list, sparse matrix or similar.\n\n By default, the input is converted to an at least 2D numpy array.\n\n Parameters\n ----------\n array : object\n Input object to check / convert.\n\n accept_sparse : string, list of string or None (default=None)\n String[s] representing allowed sparse matrix formats, such as 'csc',\n 'csr', etc. None means that sparse matrix input will raise an error.\n If the input is sparse but not in the allowed format, it will be\n converted to the first listed format.\n\n order : 'F', 'C' or None (default=None)\n Whether an array will be forced to be fortran or c-style.\n\n copy : boolean (default=False)\n Whether a forced copy will be triggered. If copy=False, a copy might\n be triggered by a conversion.\n\n force_all_finite : boolean (default=True)\n Whether to raise an error on np.inf and np.nan in X.\n\n ensure_2d : boolean (default=True)\n Whether to make X at least 2d.\n\n allow_nd : boolean (default=False)\n Whether to allow X.ndim > 2.\n\n Returns\n -------\n X_converted : object\n The converted and validated X.", "id": "f4075:m7"} {"signature": "def assert_all_finite(X):", "body": "_assert_all_finite(X.data if sp.issparse(X) else X)", "docstring": "Throw a ValueError if X contains NaN or infinity.\n\n Input MUST be an np.ndarray instance or a scipy.sparse matrix.", "id": "f4075:m1"} {"signature": "def remove_from_string(string, values):", "body": "for v in values:string = string.replace(v, '')return string", "docstring": "Parameters\n----------\nstring:\nvalues:\n\nReturns\n-------", "id": "f4077:m9"} {"signature": "def is_fnmatch_regex(string):", "body": "is_regex = Falseregex_chars = ['', '', '']for c in regex_chars:if string.find(c) > -:return Truereturn is_regex", "docstring": "Returns True if the given string is considered a fnmatch\nregular expression, False otherwise.\nIt will look for fnmatch wildcard characters in the string.\n\n:param string: str", "id": "f4077:m8"} {"signature": "def append_to_list(lst, preffix):", "body": "return [preffix + str(item) for item in lst]", "docstring": "Parameters\n----------\nlst:\npreffix:\n\nReturns\n-------", "id": "f4077:m5"} {"signature": "def match_list(lst, pattern, group_names=[]):", "body": "filtfn = re.compile(pattern).matchfiltlst = filter_list(lst, filtfn)if not group_names:return [m.string for m in filtlst]else:return [m.group(group_names) for m in filtlst]", "docstring": "Parameters\n----------\nlst: list of str\n\nregex: string\n\ngroup_names: list of strings\n See re.MatchObject group docstring\n\nReturns\n-------\nlist of strings\n Filtered list, with the strings that match the pattern", "id": "f4077:m2"} {"signature": "def is_valid_regex(string):", "body": "try:re.compile(string)is_valid = Trueexcept re.error:is_valid = Falsereturn is_valid", "docstring": "Checks whether the re module can compile the given regular expression.\n\nParameters\n----------\nstring: str\n\nReturns\n-------\nboolean", "id": "f4077:m6"}
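is_valid_regex is self-contained enough to run as-is; a quick demonstration of the compile-and-catch pattern it relies on:

import re

def is_valid_regex(string):
    # re.compile raises re.error on a malformed pattern.
    try:
        re.compile(string)
        return True
    except re.error:
        return False

print(is_valid_regex(r'(\d+'))   # False: unbalanced parenthesis
print(is_valid_regex(r'(\d+)'))  # True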
{"signature": "def search_list(lst, pattern):", "body": "filt = re.compile(pattern).searchreturn filter_list(lst, filt)", "docstring": "Parameters\n----------\npattern: string\n\nlst: list of strings\n\nReturns\n-------\nfiltered_list: list of str\n Filtered lists with the strings in which the pattern is found.", "id": "f4077:m3"} {"signature": "def merge(dict_1, dict_2):", "body": "return dict((str(key), dict_1.get(key) or dict_2.get(key))for key in set(dict_2) | set(dict_1))", "docstring": "Merge two dictionaries.\n\n Values that evaluate to true take priority over falsy values.\n `dict_1` takes priority over `dict_2`.", "id": "f4080:m0"} {"signature": "def get_rcfile_variable_value(var_name, app_name, section_name=None):", "body": "cfg = get_rcfile_section(app_name, section_name)if var_name not in cfg:raise KeyError(''''.format(var_name, section_name))return cfg[var_name]", "docstring": "Return the value of the variable in the section_name section of the\n app_name rc file.\n\n Parameters\n ----------\n var_name: str\n Name of the variable to be searched for.\n\n section_name: str\n Name of the section in the rcfiles.\n\n app_name: str\n Name of the application to look for its rcfiles.\n\n Returns\n -------\n var_value: str\n The value of the variable with given var_name.", "id": "f4080:m8"} {"signature": "def rcfile(appname, section=None, args={}, strip_dashes=True):", "body": "if strip_dashes:for k in args.keys():args[k.lstrip('')] = args.pop(k)environ = get_environment(appname)if section is None:section = appnameconfig = get_config(appname,section,args.get('', ''),args.get('', ''))config = merge(merge(args, config), environ)if not config:raise IOError(''''.format(appname))return config", "docstring": "Read environment variables and config files and return them merged with\n predefined list of arguments.\n\n Parameters\n ----------\n appname: str\n Application name, used for config files and environment variable\n names.\n\n section: str\n Name of the section to be read. 
If this is not set: appname.\n\n args:\n arguments from command line (optparse, docopt, etc).\n\n strip_dashes: bool\n Strip dashes prefixing key names from args dict.\n\n Returns\n --------\n dict\n containing the merged variables of environment variables, config\n files and args.\n\n Raises\n ------\n IOError\n In case the return value is empty.\n\n Notes\n -----\n Environment variables are read if they start with appname in uppercase\n with underscore, for example:\n\n TEST_VAR=1\n\n Config files compatible with ConfigParser are read and the section name\n appname is read, example:\n\n [appname]\n var=1\n\n We can also have host-dependent configuration values, which have\n priority over the default appname values.\n\n [appname]\n var=1\n\n [appname:mylinux]\n var=3\n\n\n For boolean flags do not try to use: 'True' or 'False',\n 'on' or 'off',\n '1' or '0'.\n Unless you are willing to parse these values by yourself.\n We recommend commenting the variables out with '#' if you want to set a\n flag to False and check if it is in the rcfile cfg dict, i.e.:\n\n flag_value = 'flag_variable' in cfg\n\n\n Files are read from: /etc/appname/config,\n /etc/appfilerc,\n ~/.config/appname/config,\n ~/.config/appname,\n ~/.appname/config,\n ~/.appnamerc,\n appnamerc,\n .appnamerc,\n appnamerc file found in 'path' folder variable in args,\n .appnamerc file found in 'path' folder variable in args,\n file provided by 'config' variable in args.\n\n Example\n -------\n args = rcfile(__name__, docopt(__doc__, version=__version__))", "id": "f4080:m6"} {"signature": "def get_rcfile_section(app_name, section_name):", "body": "try:settings = rcfile(app_name, section_name)except IOError:raiseexcept:raise KeyError(''''.format(section_name, app_name))else:return settings", "docstring": "Return the dictionary containing the rcfile section configuration\n variables.\n\n Parameters\n ----------\n section_name: str\n Name of the section in the rcfiles.\n\n app_name: str\n Name of the application to look for its rcfiles.\n\n Returns\n -------\n settings: dict\n Dict with variable values", "id": "f4080:m7"} {"signature": "def get_sys_path(rcpath, app_name, section_name=None):", "body": "if op.exists(rcpath):return op.realpath(op.expanduser(rcpath))try:settings = rcfile(app_name, section_name)except:raisetry:sys_path = op.expanduser(settings[rcpath])except KeyError:raise IOError(''''''.format(rcpath,section_name,app_name))else:if not op.exists(sys_path):raise IOError(''''''.format(rcpath, section_name, app_name,sys_path))return op.realpath(op.expanduser(sys_path))", "docstring": "Return a folder path if it exists.\n\n First will check if it is an existing system path, if it is, will return it\n expanded and made absolute.\n\n If this fails will look for the rcpath variable in the app_name rcfiles or\n exclusively within the given section_name, if given.\n\n Parameters\n ----------\n rcpath: str\n Existing folder path or variable name in app_name rcfile with an\n existing one.\n\n section_name: str\n Name of a section in the app_name rcfile to look exclusively there for\n variable names.\n\n app_name: str\n Name of the application to look for rcfile configuration files.\n\n Returns\n -------\n sys_path: str\n An expanded absolute file or folder path if the path exists.\n\n Raises\n ------\n IOError if the proposed sys_path does not exist.", "id": "f4080:m5"} {"signature": "def _cache(self, func, func_memory_level=, **kwargs):", "body": "verbose = getattr(self, '', )if not hasattr(self, \"\"):self.memory_level = if not 
hasattr(self, \"\"):self.memory = Memory(cachedir=None, verbose=verbose)if isinstance(self.memory, _basestring):self.memory = Memory(cachedir=self.memory, verbose=verbose)if self.memory_level == :if (isinstance(self.memory, _basestring)or self.memory.cachedir is not None):warnings.warn(\"\"\"\"\"\")self.memory_level = return cache(func, self.memory, func_memory_level=func_memory_level,memory_level=self.memory_level, **kwargs)", "docstring": "Return a joblib.Memory object.\n\n The memory_level determines the level above which the wrapped\n function output is cached. By specifying a numeric value for\n this level, the user can control the amount of cache memory\n used. This function will cache the function call or not\n depending on the cache level.\n\n Parameters\n ----------\n func: function\n The function the output of which is to be cached.\n\n memory_level: int\n The memory_level from which caching must be enabled for the wrapped\n function.\n\n Returns\n -------\n mem: joblib.Memory\n object that wraps the function func. This object may be\n a no-op, if the requested level is lower than the value given\n to _cache(). For consistency, a joblib.Memory object is always\n returned.", "id": "f4082:c0:m0"} {"signature": "def check_call(cmd_args):", "body": "p = subprocess.Popen(cmd_args, stdout=subprocess.PIPE)(output, err) = p.communicate()return output", "docstring": "Calls the command\n\nParameters\n----------\ncmd_args: list of str\n Command name to call and its arguments in a list.\n\nReturns\n-------\nCommand output", "id": "f4083:m6"} {"signature": "def whosdaddy():", "body": "return inspect.stack()[][]", "docstring": "Get the name of the caller of the current function", "id": "f4083:m4"} {"signature": "def call_command(cmd_name, args_strings):", "body": "if not op.isabs(cmd_name):cmd_fullpath = which(cmd_name)else:cmd_fullpath = cmd_nametry:cmd_line = [cmd_fullpath] + args_stringslog.info(''.format(cmd_line))retval = subprocess.check_call(cmd_line)except CalledProcessError as ce:log.exception(\"\"\"\".format(cmd_name, args_strings,ce.returncode))raiseelse:return retval", "docstring": "Call a CLI command with arguments and return its return value.\n\n Parameters\n ----------\n cmd_name: str\n Command name or full path to the binary file.\n\n arg_strings: list of str\n Argument strings list.\n\n Returns\n -------\n return_value\n Command return value.", "id": "f4083:m7"} {"signature": "def whoami():", "body": "return inspect.stack()[][]", "docstring": "Get the name of the current function", "id": "f4083:m3"} {"signature": "def which(program):", "body": "if (sys.version_info > (, )):return which_py3(program)else:return which_py2(program)", "docstring": "Returns the absolute path of the given CLI program name.", "id": "f4083:m0"} {"signature": "def check_compatibility(self, one_img, another_img=None):", "body": "if another_img is None:if len(self.items) > :another_img = self.items[]else:raise ValueError(''''.format(repr_imgs(one_img)))try:if self.all_compatible:check_img_compatibility(one_img, another_img)if self.mask is not None:check_img_compatibility(one_img, self.mask, only_check_3d=True)except:raise", "docstring": "Parameters\n----------\none_img: str or img-like object.\n See NeuroImage constructor docstring.\n\nanother_img: str or img-like object.\n See NeuroImage constructor docstring.\n If None will use the first image of self.images, if there is any.\n\nRaises\n------\nNiftiFilesNotCompatible\n If one_img and another_img aren't compatible.\n\nValueError\n If another_img is None and there are no other images in this set.", "id": "f4085:c0:m7"}
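Note that check_call above shadows the stdlib's subprocess.check_call and returns the command's stdout rather than a return code. A runnable sketch of the same Popen/communicate pattern, under a name that avoids the collision:

import subprocess

def capture_stdout(cmd_args):
    # Run the command and hand back whatever it printed to stdout.
    p = subprocess.Popen(cmd_args, stdout=subprocess.PIPE)
    output, _err = p.communicate()
    return output

print(capture_stdout(['echo', 'hello']))  # b'hello\n' on POSIX systems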
are no other images in this set.", "id": "f4085:c0:m7"} {"signature": "def from_dict(self, subj_files):", "body": "for group_label in subj_files:try:group_files = subj_files[group_label]self.items.extend([self._load_image(get_abspath(imgf)) for imgf in group_files])self.labels.extend([group_label]*len(group_files))except Exception as exc:raise Exception(''''.format(group_label)) from exc", "docstring": "Parameters\n----------\nsubj_files: dict of str\n file_path -> int/str", "id": "f4085:c1:m5"} {"signature": "def set_labels(self, labels):", "body": "if not isinstance(labels, string_types) and len(labels) != self.n_subjs:raise ValueError(''''.format(len(labels), self.n_subjs))self.labels = labels", "docstring": "Parameters\n----------\nlabels: list of int or str\n This list will be checked to have the same size as\n\nRaises\n------\nValueError\n if len(labels) != self.n_subjs", "id": "f4085:c0:m9"} {"signature": "def _init_subj_data(self, subj_files):", "body": "try:if isinstance(subj_files, list):self.from_list(subj_files)elif isinstance(subj_files, dict):self.from_dict(subj_files)else:raise ValueError('')except Exception as exc:raise Exception('') from exc", "docstring": "Parameters\n----------\nsubj_files: list or dict of str\n file_path -> int/str", "id": "f4085:c1:m1"} {"signature": "def to_matrix(self, smooth_fwhm=, outdtype=None):", "body": "if not self.all_compatible:raise ValueError(\"\")if not outdtype:outdtype = self.items[].dtypen_voxels = Nonemask_indices = Nonemask_shape = self.items[].shape[:]if self.has_mask:mask_arr = self.mask.get_data()mask_indices = np.nonzero(mask_arr)mask_shape = self.mask.shapen_voxels = np.count_nonzero(mask_arr)if n_voxels is None:log.debug(''.format(self.mask))n_voxels = np.prod(mask_shape)mask_indices = Nonendims = self.items[].ndimif ndims == :subj_flat_shape = (n_voxels, )elif ndims == :subj_flat_shape = (n_voxels, self.items[].shape[])else:raise NotImplementedError(''''.format(ndims))outmat = np.zeros((self.n_subjs, ) + subj_flat_shape, dtype=outdtype)try:for i, image in enumerate(self.items):if smooth_fwhm > :image.fwhm = smooth_fwhmif self.has_mask:image.set_mask(self.mask)outmat[i, :], _, _ = image.mask_and_flatten()image.clear_data()except Exception as exc:raise Exception(''.format(image)) from excelse:return outmat, mask_indices, mask_shape", "docstring": "Return numpy.ndarray with the masked or flatten image data and\n the relevant information (mask indices and volume shape).\n\n Parameters\n ----------\n smooth__fwhm: int\n Integer indicating the size of the FWHM Gaussian smoothing kernel\n to smooth the subject volumes before creating the data matrix\n\n outdtype: dtype\n Type of the elements of the array, if None will obtain the dtype from\n the first nifti file.\n\n Returns\n -------\n outmat, mask_indices, vol_shape\n\n outmat: Numpy array with shape N x prod(vol.shape)\n containing the N files as flat vectors.\n\n mask_indices: matrix with indices of the voxels in the mask\n\n vol_shape: Tuple with shape of the volumes, for reshaping.", "id": "f4085:c0:m11"} {"signature": "@mask.setterdef mask(self, image):", "body": "if image is None:self._mask = Nonetry:mask = load_mask(image)except Exception as exc:raise Exception(''.format(image)) from excelse:self._mask = mask", "docstring": "self.mask setter\n\n Parameters\n ----------\n image: str or img-like object.\n See NeuroImage constructor docstring.", "id": "f4085:c0:m4"} {"signature": "def all_childnodes_to_nifti1img(h5group):", "body": "child_nodes = []def 
append_parent_if_dataset(name, obj):if isinstance(obj, h5py.Dataset):if name.split('')[-] == '':child_nodes.append(obj.parent)vols = []h5group.visititems(append_parent_if_dataset)for c in child_nodes:vols.append(hdfgroup_to_nifti1image(c))return vols", "docstring": "Returns in a list all images found under h5group.\n\n Parameters\n ----------\n h5group: h5py.Group\n HDF group\n\n Returns\n -------\n list of nifti1Image", "id": "f4087:m6"} {"signature": "def get_nifti1hdr_from_h5attrs(h5attrs):", "body": "hdr = nib.Nifti1Header()for k in list(h5attrs.keys()):hdr[str(k)] = np.array(h5attrs[k])return hdr", "docstring": "Transforms an H5py Attributes set to a dict.\n Converts unicode string keys into standard strings\n and each value into a numpy array.\n\n Parameters\n ----------\n h5attrs: H5py Attributes\n\n Returns\n --------\n dict", "id": "f4087:m5"} {"signature": "def hdfgroup_to_nifti1image(h5group):", "body": "try:data = h5group[''][:]affine = h5group[''][:]extra = Noneif '' in h5group:extra = h5group[''][:]header = get_nifti1hdr_from_h5attrs(h5group[''].attrs)img = nib.Nifti1Image(data, affine, header=header, extra=extra)return imgexcept KeyError as ke:raise Exception('' + h5group.name) from ke", "docstring": "Returns a nibabel Nifti1Image from a HDF5 group datasets\n\n Parameters\n ----------\n h5group: h5py.Group\n HDF5 group\n\n Returns\n -------\n nibabel Nifti1Image", "id": "f4087:m4"} {"signature": "def spatialimg_to_hdfpath(file_path, spatial_img, h5path=None, append=True):", "body": "if h5path is None:h5path = ''mode = ''if os.path.exists(file_path):if append:mode = ''with h5py.File(file_path, mode) as f:try:h5img = f.create_group(h5path)spatialimg_to_hdfgroup(h5img, spatial_img)except ValueError as ve:raise Exception('' + h5path) from ve", "docstring": "Saves a Nifti1Image into an HDF5 file.\n\n Parameters\n ----------\n file_path: string\n Output HDF5 file path\n\n spatial_img: nibabel SpatialImage\n Image to be saved\n\n h5path: string\n HDF5 group path where the image data will be saved.\n Datasets will be created inside the given group path:\n 'data', 'extra', 'affine', the header information will\n be set as attributes of the 'data' dataset.\n Default: '/img'\n\n append: bool\n True if you don't want to erase the content of the file\n if it already exists, False otherwise.\n\n Note\n ----\n HDF5 open modes\n >>> 'r' Readonly, file must exist\n >>> 'r+' Read/write, file must exist\n >>> 'w' Create file, truncate if exists\n >>> 'w-' Create file, fail if exists\n >>> 'a' Read/write if exists, create otherwise (default)", "id": "f4087:m2"} {"signature": "def partition_timeseries(image, roi_img, mask_img=None, zeroe=True, roi_values=None, outdict=False):", "body": "img = read_img(image)rois = read_img(roi_img)check_img_compatibility(img, rois, only_check_3d=True)roi_data = rois.get_data()if roi_values is not None:for rv in roi_values:if not np.any(roi_data == rv):raise ValueError(''.format(rv, repr_imgs(roi_img)))else:roi_values = get_unique_nonzeros(roi_data)if mask_img is None:mask_data = Noneelse:mask = load_mask(mask_img)check_img_compatibility(img, mask, only_check_3d=True)mask_data = mask.get_data()if outdict:extract_data = _extract_timeseries_dictelse:extract_data = _extract_timeseries_listtry:return extract_data(img.get_data(), rois.get_data(), mask_data,roi_values=roi_values, zeroe=zeroe)except:raise", "docstring": "Partition the timeseries in tsvol according to the ROIs in roivol.\n If a mask is given, will use it to exclude any voxel outside of it.\n\n The 
outdict indicates whether you want a dictionary for each set of timeseries keyed by the ROI value\n or a list of timeseries sets. If True and roi_img is not None will return an OrderedDict, if False\n or roi_img or roi_list is None will return a list.\n\n Background value is assumed to be 0 and won't be used here.\n\n Parameters\n ----------\n image: img-like object or str\n 4D timeseries volume\n\n roi_img: img-like object or str\n 3D volume defining different ROIs.\n\n mask_img: img-like object or str\n 3D mask volume\n\n zeroe: bool\n If true will remove the null timeseries voxels.\n\n roi_values: list of ROI values (int?)\n List of the values of the ROIs to indicate the\n order and which ROIs will be processed.\n\n outdict: bool\n If True will return an OrderedDict of timeseries sets, otherwise a list.\n\n Returns\n -------\n timeseries: list or OrderedDict\n A dict with the timeseries as items and keys as the ROIs voxel values or\n a list where each element is the timeseries set ordered by the sorted values in roi_img or by the roi_values\n argument.", "id": "f4088:m8"} {"signature": "def create_rois_mask(roislist, filelist):", "body": "roifiles = []for roi in roislist:try:roi_file = search_list(roi, filelist)[]except Exception as exc:raise Exception(''.format(str(exc)))else:roifiles.append(roi_file)return binarise(roifiles)", "docstring": "Look for the files in filelist containing the names in roislist, these files will be opened, binarised\n and merged in one mask.\n\n Parameters\n ----------\n roislist: list of strings\n Names of the ROIs, which will have to be in the names of the files in filelist.\n\n filelist: list of strings\n List of paths to the volume files containing the ROIs.\n\n Returns\n -------\n numpy.ndarray\n Mask volume", "id": "f4088:m4"} {"signature": "def get_roilist_from_atlas(atlas_img):", "body": "return get_unique_nonzeros(check_img(atlas_img).get_data())", "docstring": "Extract unique values from the atlas and returns them as an ordered list.\n\nParameters\n----------\natlas_img: img-like object or str\n Volume defining different ROIs.\n Can either be:\n - a file path to a Nifti image\n - any object with get_data() and get_affine() methods, e.g., nibabel.Nifti1Image.\n If niimg is a string, consider it as a path to Nifti image and\n call nibabel.load on it. If it is an object, check if get_data()\n and get_affine() methods are present, raise TypeError otherwise.\n\nReturns\n-------\nnp.ndarray\n An 1D array of roi values from atlas volume.\n\nNote\n----\nThe roi with value 0 will be considered background so will be removed.", "id": "f4088:m6"} {"signature": "def _partition_data(datavol, roivol, roivalue, maskvol=None, zeroe=True):", "body": "if maskvol is not None:indices = (roivol == roivalue) * (maskvol > )else:indices = roivol == roivalueif datavol.ndim == :ts = datavol[indices, :]else:ts = datavol[indices]if zeroe:if datavol.ndim == :ts = ts[ts.sum(axis=) != , :]return ts", "docstring": "Extracts the values in `datavol` that are in the ROI with value `roivalue` in `roivol`.\n The ROI can be masked by `maskvol`.\n\n Parameters\n ----------\n datavol: numpy.ndarray\n 4D timeseries volume or a 3D volume to be partitioned\n\n roivol: numpy.ndarray\n 3D ROIs volume\n\n roivalue: int or float\n A value from roivol that represents the ROI to be used for extraction.\n\n maskvol: numpy.ndarray\n 3D mask volume\n\n zeroe: bool\n If true will remove the null timeseries voxels. 
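A minimal usage sketch for partition_timeseries as documented above; the file names and the outdict=True choice are illustrative assumptions, not from the source:

    # Split a 4D BOLD series into per-ROI voxel-timeseries sets:
    ts_sets = partition_timeseries('rest_bold.nii.gz',
                                   roi_img='atlas.nii.gz',
                                   mask_img='brain_mask.nii.gz',
                                   zeroe=True,       # drop all-zero voxel timeseries
                                   outdict=True)     # OrderedDict keyed by ROI value
    for roi_value, ts in ts_sets.items():
        mean_ts = ts.mean(axis=0)   # average over voxels -> one timeseries per ROI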
Only applied to timeseries (4D) data.\n\n Returns\n -------\n values: np.array\n An array of the values in the indicated ROI.\n A 2D matrix if `datavol` is 4D or a 1D vector if `datavol` is 3D.", "id": "f4088:m11"} {"signature": "def largest_connected_component(volume):", "body": "volume = np.asarray(volume)labels, num_labels = scn.label(volume)if not num_labels:raise ValueError('')if num_labels == :return volume.astype(np.bool)label_count = np.bincount(labels.ravel().astype(np.int))label_count[] = return labels == label_count.argmax()", "docstring": "Return the largest connected component of a 3D array.\n\n Parameters\n -----------\n volume: numpy.array\n 3D boolean array.\n\n Returns\n --------\n volume: numpy.array\n 3D boolean array with only one connected component.", "id": "f4088:m2"} {"signature": "def get_unique_nonzeros(arr):", "body": "rois = np.unique(arr)rois = rois[np.nonzero(rois)]rois.sort()return rois", "docstring": "Return a sorted list of the non-zero unique values of arr.\n\n Parameters\n ----------\n arr: numpy.ndarray\n The data array\n\n Returns\n -------\n list of items of arr.", "id": "f4088:m5"} {"signature": "def large_clusters_mask(volume, min_cluster_size):", "body": "labels, num_labels = scn.label(volume)labels_to_keep = set([i for i in range(num_labels)if np.sum(labels == i) >= min_cluster_size])clusters_mask = np.zeros_like(volume, dtype=int)for l in range(num_labels):if l in labels_to_keep:clusters_mask[labels == l] = return clusters_mask", "docstring": "Return as mask for `volume` that includes only areas where\n the connected components have a size bigger than `min_cluster_size`\n in number of voxels.\n\n Parameters\n -----------\n volume: numpy.array\n 3D boolean array.\n\n min_cluster_size: int\n Minimum size in voxels that the connected component must have.\n\n Returns\n --------\n volume: numpy.array\n 3D int array with a mask excluding small connected components.", "id": "f4088:m3"} {"signature": "def get_3D_from_4D(image, vol_idx=):", "body": "img = check_img(image)hdr, aff = get_img_info(img)if len(img.shape) != :raise AttributeError(''.format(repr_imgs(img)))if not <= vol_idx < img.shape[]:raise IndexError(''''.format(repr_imgs(img), img.shape[], vol_idx))img_data = img.get_data()new_vol = img_data[:, :, :, vol_idx].copy()hdr.set_data_shape(hdr.get_data_shape()[:])return new_vol, hdr, aff", "docstring": "Pick one 3D volume from a 4D nifti image file\n\n Parameters\n ----------\n image: img-like object or str\n Volume defining different ROIs.\n Can either be:\n - a file path to a Nifti image\n - any object with get_data() and get_affine() methods, e.g., nibabel.Nifti1Image.\n If niimg is a string, consider it as a path to Nifti image and\n call nibabel.load on it. 
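The two connected-component helpers above are built on scipy.ndimage.label; an equivalent standalone sketch of the largest-component idea (not the exact boyle code, since its numeric literals are elided in this dump):

    import numpy as np
    import scipy.ndimage as scn

    def keep_largest_component(volume):
        # Label connected components, then keep only the most populous label.
        labels, num_labels = scn.label(np.asarray(volume))
        if not num_labels:
            raise ValueError('No non-zero values: no connected components found.')
        if num_labels == 1:
            return volume.astype(bool)
        counts = np.bincount(labels.ravel())
        counts[0] = 0                 # label 0 is background; never keep it
        return labels == counts.argmax()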
If it is an object, check if get_data()\n and get_affine() methods are present, raise TypeError otherwise.\n\n vol_idx: int\n Index of the 3D volume to be extracted from the 4D volume.\n\n Returns\n -------\n vol, hdr, aff\n The data array, the image header and the affine transform matrix.", "id": "f4088:m14"} {"signature": "def _extract_timeseries_dict(tsvol, roivol, maskvol=None, roi_values=None, zeroe=True):", "body": "_check_for_partition(tsvol, roivol, maskvol)if roi_values is None:roi_values = get_unique_nonzeros(roivol)ts_dict = OrderedDict()for r in roi_values:ts = _partition_data(tsvol, roivol, r, maskvol, zeroe)if len(ts) == :ts = np.zeros(tsvol.shape[-])ts_dict[r] = tsreturn ts_dict", "docstring": "Partition the timeseries in tsvol according to the ROIs in roivol.\n If a mask is given, will use it to exclude any voxel outside of it.\n\n Parameters\n ----------\n tsvol: numpy.ndarray\n 4D timeseries volume or a 3D volume to be partitioned\n\n roivol: numpy.ndarray\n 3D ROIs volume\n\n maskvol: numpy.ndarray\n 3D mask volume\n\n zeroe: bool\n If true will remove the null timeseries voxels.\n\n roi_values: list of ROI values (int?)\n List of the values of the ROIs to indicate the\n order and which ROIs will be processed.\n\n Returns\n -------\n ts_dict: OrderedDict\n A dict with the timeseries as items and keys as the ROIs voxel values.", "id": "f4088:m12"} {"signature": "def union_mask(filelist):", "body": "firstimg = check_img(filelist[])mask = np.zeros_like(firstimg.get_data())try:for volf in filelist:roiimg = check_img(volf)check_img_compatibility(firstimg, roiimg)mask += get_img_data(roiimg)except Exception as exc:raise ValueError(''.format(repr_imgs(firstimg), repr_imgs(volf))) from excelse:return as_ndarray(mask > , dtype=bool)", "docstring": "Creates a binarised mask with the union of the files in filelist.\n\nParameters\n----------\nfilelist: list of img-like object or boyle.nifti.NeuroImage or str\n List of paths to the volume files containing the ROIs.\n Can either be:\n - a file path to a Nifti image\n - any object with get_data() and get_affine() methods, e.g., nibabel.Nifti1Image.\n If niimg is a string, consider it as a path to Nifti image and\n call nibabel.load on it. If it is an object, check if get_data()\n and get_affine() methods are present, raise TypeError otherwise.\n\nReturns\n-------\nndarray of bools\n Mask volume\n\nRaises\n------\nValueError", "id": "f4089:m3"} {"signature": "def _apply_mask_to_4d_data(vol_data, mask_img):", "body": "mask_data = load_mask_data(mask_img)return vol_data[mask_data], mask_data", "docstring": "Parameters\n----------\nvol_data:\nmask_img:\n\nReturns\n-------\nmasked_data, mask_indices\n\nmasked_data: numpy.ndarray\n 2D array of series with shape (image number, voxel number)\n\nNote\n----\nvol_data and mask_file must have the same shape.", "id": "f4089:m6"} {"signature": "def binarise(image, threshold=):", "body": "img = check_img(image)return img.get_data() > threshold", "docstring": "Binarise image with the given threshold\n\n Parameters\n ----------\n image: img-like object or boyle.nifti.NeuroImage or str\n Can either be:\n - a file path to a Nifti image\n - any object with get_data() and get_affine() methods, e.g., nibabel.Nifti1Image.\n If niimg is a string, consider it as a path to Nifti image and\n call nibabel.load on it. 
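A usage sketch for get_3D_from_4D as documented above (the file name and volume index are illustrative assumptions):

    import nibabel as nib

    # Pick the 10th volume (index 9) out of a 4D image and rewrap it:
    vol, hdr, aff = get_3D_from_4D('rest_bold.nii.gz', vol_idx=9)
    vol_img = nib.Nifti1Image(vol, aff, header=hdr)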
If it is an object, check if get_data()\n and get_affine() methods are present, raise TypeError otherwise.\n\n threshold: float\n\n Returns\n -------\n binarised_img: numpy.ndarray\n Mask volume", "id": "f4089:m2"} {"signature": "def load_mask(image, allow_empty=True):", "body": "img = check_img(image, make_it_3d=True)values = np.unique(img.get_data())if len(values) == :if values[] == and not allow_empty:raise ValueError('')elif len(values) == :if not in values:raise ValueError(''''.format(values))elif len(values) != :raise ValueError(''''.format(values))return nib.Nifti1Image(as_ndarray(get_img_data(img), dtype=bool), img.get_affine(), img.get_header())", "docstring": "Load a Nifti mask volume.\n\n Parameters\n ----------\n image: img-like object or boyle.nifti.NeuroImage or str\n Can either be:\n - a file path to a Nifti image\n - any object with get_data() and get_affine() methods, e.g., nibabel.Nifti1Image.\n If niimg is a string, consider it as a path to Nifti image and\n call nibabel.load on it. If it is an object, check if get_data()\n and get_affine() methods are present, raise TypeError otherwise.\n\n allow_empty: boolean, optional\n Allow loading an empty mask (full of 0 values)\n\n Returns\n -------\n nibabel.Nifti1Image with boolean data.", "id": "f4089:m0"} {"signature": "def vector_to_volume(arr, mask, order=''):", "body": "if mask.dtype != np.bool:raise ValueError(\"\")if arr.ndim != :raise ValueError(\"\")if arr.ndim == and any(v == for v in arr.shape):log.debug(''.format(arr.shape))arr = arr.flatten()volume = np.zeros(mask.shape[:], dtype=arr.dtype, order=order)volume[mask] = arrreturn volume", "docstring": "Transform a given vector to a volume. This is a reshape function for\n 3D flattened and maybe masked vectors.\n\n Parameters\n ----------\n arr: np.array\n 1-Dimensional array\n\n mask: numpy.ndarray\n Mask image. Must have 3 dimensions, bool dtype.\n\n Returns\n -------\n np.ndarray", "id": "f4089:m7"} {"signature": "def matrix_to_4dvolume(arr, mask, order=''):", "body": "if mask.dtype != np.bool:raise ValueError(\"\")if arr.ndim != :raise ValueError(\"\")if mask.sum() != arr.shape[]:raise ValueError(''.format(mask.sum(), arr.shape))data = np.zeros(mask.shape + (arr.shape[],), dtype=arr.dtype,order=order)data[mask, :] = arrreturn data", "docstring": "Transform a given vector to a volume. This is a reshape function for\n 4D flattened masked matrices where the second dimension of the matrix\n corresponds to the original 4th dimension.\n\n Parameters\n ----------\n arr: numpy.array\n 2D numpy.array\n\n mask: numpy.ndarray\n Mask image. 
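How the mask helpers above might be used together; the file names and the 0.5 threshold are assumptions (the default threshold literal is elided in this dump):

    import numpy as np

    mask_img = load_mask('brain_mask.nii.gz')          # Nifti1Image with boolean data
    n_mask_voxels = int(np.count_nonzero(mask_img.get_data()))

    gm_bin = binarise('gm_probability.nii.gz', threshold=0.5)   # boolean ndarray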
Must have 3 dimensions, bool dtype.\n\n dtype: return type\n If None, will get the type from vector\n\n Returns\n -------\n data: numpy.ndarray\n Unmasked data.\n Shape: (mask.shape[0], mask.shape[1], mask.shape[2], X.shape[1])", "id": "f4089:m8"} {"signature": "def smooth_volume(image, smoothmm):", "body": "return smooth_imgs(image, smoothmm)", "docstring": "See smooth_img.", "id": "f4090:m2"} {"signature": "def sigma2fwhm(sigma):", "body": "sigma = np.asarray(sigma)return np.sqrt( * np.log()) * sigma", "docstring": "Convert a sigma in a Gaussian kernel to a FWHM value.\n\n Parameters\n ----------\n sigma: float or numpy.array\n sigma value or values\n\n Returns\n -------\n fwhm: float or numpy.array\n fwhm values corresponding to `sigma` values", "id": "f4090:m1"} {"signature": "def _smooth_data_array(arr, affine, fwhm, copy=True):", "body": "if arr.dtype.kind == '':if arr.dtype == np.int64:arr = arr.astype(np.float64)else:arr = arr.astype(np.float32)if copy:arr = arr.copy()arr[np.logical_not(np.isfinite(arr))] = try:affine = affine[:, :]fwhm_sigma_ratio = np.sqrt( * np.log())vox_size = np.sqrt(np.sum(affine ** , axis=))sigma = fwhm / (fwhm_sigma_ratio * vox_size)for n, s in enumerate(sigma):ndimage.gaussian_filter1d(arr, s, output=arr, axis=n)except:raise ValueError('')else:return arr", "docstring": "Smooth images with a a Gaussian filter.\n\n Apply a Gaussian filter along the three first dimensions of arr.\n\n Parameters\n ----------\n arr: numpy.ndarray\n 3D or 4D array, with image number as last dimension.\n\n affine: numpy.ndarray\n Image affine transformation matrix for image.\n\n fwhm: scalar, numpy.ndarray\n Smoothing kernel size, as Full-Width at Half Maximum (FWHM) in millimeters.\n If a scalar is given, kernel width is identical on all three directions.\n A numpy.ndarray must have 3 elements, giving the FWHM along each axis.\n\n copy: bool\n if True, will make a copy of the input array. 
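A self-contained round-trip sketch for the two un-flattening helpers above:

    import numpy as np

    mask = np.zeros((4, 4, 4), dtype=bool)
    mask[1:3, 1:3, 1:3] = True                        # 8 voxels inside the mask

    flat = np.arange(mask.sum(), dtype=np.float32)    # one value per masked voxel
    vol = vector_to_volume(flat, mask)                # back to shape (4, 4, 4)
    assert (vol[mask] == flat).all()

    mat = np.random.rand(int(mask.sum()), 10)         # (n_voxels, n_timepoints)
    vol4d = matrix_to_4dvolume(mat, mask)             # shape (4, 4, 4, 10)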
Otherwise will directly smooth the input array.\n\n Returns\n -------\n smooth_arr: numpy.ndarray", "id": "f4090:m3"} {"signature": "def fwhm2sigma(fwhm):", "body": "fwhm = np.asarray(fwhm)return fwhm / np.sqrt( * np.log())", "docstring": "Convert a FWHM value to sigma in a Gaussian kernel.\n\n Parameters\n ----------\n fwhm: float or numpy.array\n fwhm value or values\n\n Returns\n -------\n fwhm: float or numpy.array\n sigma values", "id": "f4090:m0"} {"signature": "def smooth_img(imgs, fwhm, **kwargs):", "body": "if hasattr(imgs, \"\")and not isinstance(imgs, string_types):single_img = Falseelse:single_img = Trueimgs = [imgs]ret = []for img in imgs:img = check_niimg(img)affine = img.get_affine()filtered = _smooth_array(img.get_data(), affine, fwhm=fwhm,ensure_finite=True, copy=True, **kwargs)ret.append(new_img_like(img, filtered, affine, copy_header=True))if single_img:return ret[]else:return ret", "docstring": "Smooth images by applying a Gaussian filter.\n Apply a Gaussian filter along the three first dimensions of arr.\n In all cases, non-finite values in input image are replaced by zeros.\n\n This is copied and slightly modified from nilearn:\n https://github.com/nilearn/nilearn/blob/master/nilearn/image/image.py\n Added the **kwargs argument.\n\n Parameters\n ==========\n imgs: Niimg-like object or iterable of Niimg-like objects\n See http://nilearn.github.io/manipulating_images/manipulating_images.html#niimg.\n Image(s) to smooth.\n fwhm: scalar, numpy.ndarray, 'fast' or None\n Smoothing strength, as a Full-Width at Half Maximum, in millimeters.\n If a scalar is given, width is identical on all three directions.\n A numpy.ndarray must have 3 elements, giving the FWHM along each axis.\n If fwhm == 'fast', a fast smoothing will be performed with\n a filter [0.2, 1, 0.2] in each direction and a normalisation\n to preserve the scale.\n If fwhm is None, no filtering is performed (useful when just removal\n of non-finite values is needed)\n Returns\n =======\n filtered_img: nibabel.Nifti1Image or list of.\n Input image, filtered. 
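The numeric literals inside fwhm2sigma/sigma2fwhm are elided in this dump; the standard Gaussian relation they implement is FWHM = sigma * sqrt(8 * ln 2) ~= 2.3548 * sigma, so a quick sanity check looks like:

    import numpy as np

    sigma = fwhm2sigma(6.0)                 # ~2.548 for a 6 mm FWHM kernel
    assert np.isclose(sigma * np.sqrt(8 * np.log(2)), 6.0)
    assert np.isclose(sigma2fwhm(sigma), 6.0)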
If imgs is an iterable, then filtered_img is a\n list.", "id": "f4090:m6"} {"signature": "def are_compatible_imgs(one_img, another_img):", "body": "try:check_img_compatibility(one_img, another_img)except :return Falseelse:return True", "docstring": "Return true if one_img and another_img have the same shape.\n False otherwise.\n If both are nibabel.Nifti1Image will also check for affine matrices.\n\n Parameters\n ----------\n one_img: nibabel.Nifti1Image or np.ndarray\n\n another_img: nibabel.Nifti1Image or np.ndarray\n\n Returns\n -------\n bool", "id": "f4091:m4"} {"signature": "def have_same_affine(one_img, another_img, only_check_3d=False):", "body": "img1 = check_img(one_img)img2 = check_img(another_img)ndim1 = len(img1.shape)ndim2 = len(img2.shape)if ndim1 < :raise ValueError(''.format(repr_imgs(img1), ndim1))if ndim2 < :raise ValueError(''.format(repr_imgs(img2), ndim1))affine1 = img1.get_affine()affine2 = img2.get_affine()if only_check_3d:affine1 = affine1[:, :]affine2 = affine2[:, :]try:return np.allclose(affine1, affine2)except ValueError:return Falseexcept:raise", "docstring": "Return True if the affine matrix of one_img is close to the affine matrix of another_img.\n False otherwise.\n\n Parameters\n ----------\n one_img: nibabel.Nifti1Image\n\n another_img: nibabel.Nifti1Image\n\n only_check_3d: bool\n If True will extract only the 3D part of the affine matrices when they have more dimensions.\n\n Returns\n -------\n bool\n\n Raises\n ------\n ValueError", "id": "f4091:m6"} {"signature": "def get_shape(img):", "body": "if hasattr(img, ''):shape = img.shapeelse:shape = img.get_data().shapereturn shape", "docstring": "Return the shape of img.\n\n Paramerers\n -----------\n img:\n\n Returns\n -------\n shape: tuple", "id": "f4091:m2"} {"signature": "def have_same_geometry(fname1, fname2):", "body": "img1shape = nib.load(fname1).get_shape()img2shape = nib.load(fname2).get_shape()return have_same_shape(img1shape, img2shape)", "docstring": "@param fname1: string\nFile path of an image\n\n@param fname2: string\nFile path of an image\n\n@return: bool\nTrue if both have the same geometry", "id": "f4091:m12"} {"signature": "def have_same_shape(array1, array2, nd_to_check=None):", "body": "shape1 = array1.shapeshape2 = array2.shapeif nd_to_check is not None:if len(shape1) < nd_to_check:msg = ''.format(shape1)raise ValueError(msg)elif len(shape2) < nd_to_check:msg = ''.format(shape2)raise ValueError(msg)shape1 = shape1[:nd_to_check]shape2 = shape2[:nd_to_check]return shape1 == shape2", "docstring": "Returns true if array1 and array2 have the same shapes, false\notherwise.\n\nParameters\n----------\narray1: numpy.ndarray\n\narray2: numpy.ndarray\n\nnd_to_check: int\n Number of the dimensions to check, i.e., if == 3 then will check only the 3 first numbers of array.shape.\nReturns\n-------\nbool", "id": "f4091:m11"} {"signature": "def xfm_atlas_to_functional(atlas_filepath, anatbrain_filepath, meanfunc_filepath,atlas2anat_nonlin_xfm_filepath, is_atlas2anat_inverted,anat2func_lin_xfm_filepath,atlasinanat_out_filepath, atlasinfunc_out_filepath,interp='', rewrite=True, parallel=False):", "body": "if is_atlas2anat_inverted:anat_to_mni_nl_inv = atlas2anat_nonlin_xfm_filepathelse:output_dir = op.abspath (op.dirname(atlasinanat_out_filepath))ext = get_extension(atlas2anat_nonlin_xfm_filepath)anat_to_mni_nl_inv = op.join(output_dir, remove_ext(op.basename(atlas2anat_nonlin_xfm_filepath)) + '' + ext)invwarp_cmd = op.join('', '', '')applywarp_cmd = op.join('', '', '')fslsub_cmd = op.join('', '', 
'')if parallel:invwarp_cmd = fslsub_cmd + '' + invwarp_cmdapplywarp_cmd = fslsub_cmd + '' + applywarp_cmdif rewrite or (not is_atlas2anat_inverted and not op.exists(anat_to_mni_nl_inv)):log.debug(''.format(anat_to_mni_nl_inv))cmd = invwarp_cmd + ''cmd += ''.format(atlas2anat_nonlin_xfm_filepath)cmd += ''.format(anat_to_mni_nl_inv)cmd += ''.format(anatbrain_filepath)log.debug(''.format(cmd))check_call(cmd)if rewrite or not op.exists(atlasinanat_out_filepath):log.debug(''.format(atlasinanat_out_filepath))cmd = applywarp_cmd + ''cmd += ''.format(atlas_filepath)cmd += ''.format(anatbrain_filepath)cmd += ''.format(anat_to_mni_nl_inv)cmd += ''.format(interp)cmd += ''.format(atlasinanat_out_filepath)log.debug(''.format(cmd))check_call(cmd)if rewrite or not op.exists(atlasinfunc_out_filepath):log.debug(''.format(atlasinfunc_out_filepath))cmd = applywarp_cmd + ''cmd += ''.format(atlasinanat_out_filepath)cmd += ''.format(meanfunc_filepath)cmd += ''.format(anat2func_lin_xfm_filepath)cmd += ''.format(interp)cmd += ''.format(atlasinfunc_out_filepath)log.debug(''.format(cmd))check_call(cmd)", "docstring": "Call FSL tools to apply transformations to a given atlas to a functional image.\n Given the transformation matrices.\n\n Parameters\n ----------\n atlas_filepath: str\n Path to the 3D atlas volume file.\n\n anatbrain_filepath: str\n Path to the anatomical brain volume file (skull-stripped and registered to the same space as the atlas,\n e.g., MNI).\n\n meanfunc_filepath: str\n Path to the average functional image to be used as reference in the last applywarp step.\n\n atlas2anat_nonlin_xfm_filepath: str\n Path to the atlas to anatomical brain linear transformation .mat file.\n If you have the inverse transformation, i.e., anatomical brain to atlas, set is_atlas2anat_inverted to True.\n\n is_atlas2anat_inverted: bool\n If False will have to calculate the inverse atlas2anat transformation to apply the transformations.\n This step will be performed with FSL invwarp.\n\n anat2func_lin_xfm_filepath: str\n Path to the anatomical to functional .mat linear transformation file.\n\n atlasinanat_out_filepath: str\n Path to output file which will contain the 3D atlas in the subject anatomical space.\n\n atlasinfunc_out_filepath: str\n Path to output file which will contain the 3D atlas in the subject functional space.\n\n verbose: bool\n If verbose will show DEBUG log info.\n\n rewrite: bool\n If True will re-run all the commands overwriting any existing file. 
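The three FSL calls that this function stitches together correspond roughly to the following invwarp/applywarp invocations (a sketch; the exact flag strings are elided in this dump, these are the usual FSL options):

    # 1) Invert the nonlinear warp so it maps atlas (MNI) space -> subject anatomical,
    #    only needed when the warp was not already given inverted:
    #    $FSLDIR/bin/invwarp -w atlas2anat_warp -o atlas2anat_warp_inv -r anat_brain
    # 2) Warp the atlas into the subject anatomical space:
    #    $FSLDIR/bin/applywarp -i atlas -r anat_brain -w atlas2anat_warp_inv --interp=nn -o atlas_in_anat
    # 3) Resample into functional space with the linear anat->func matrix:
    #    $FSLDIR/bin/applywarp -i atlas_in_anat -r mean_func --premat=anat2func.mat --interp=nn -o atlas_in_func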
Otherwise will check if\n each file exists and if it does won't run the command.\n\n parallel: bool\n If True will launch the commands using ${FSLDIR}/fsl_sub to use the cluster infrastructure you have setup\n with FSL (SGE or HTCondor).", "id": "f4092:m0"} {"signature": "@nifti_outdef div_img(img1, div2):", "body": "if is_img(div2):return img1.get_data()/div2.get_data()elif isinstance(div2, (float, int)):return img1.get_data()/div2else:raise NotImplementedError(''''.format(type(img1),img1,type(div2),div2))", "docstring": "Pixelwise division or divide by a number", "id": "f4093:m8"} {"signature": "@nifti_outdef positive_img(img):", "body": "bool_img = read_img(img).get_data() > return bool_img.astype(int)", "docstring": "Return an image with the positive voxels of the data of `img`.", "id": "f4093:m4"} {"signature": "def filter_icc(icc, mask=None, thr=, zscore=True, mode=\"\"):", "body": "if zscore:icc_filt = thr_img(icc_img_to_zscore(icc), thr=thr, mode=mode)else:icc_filt = thr_img(icc, thr=thr, mode=mode)if mask is not None:icc_filt = apply_mask(icc_filt, mask)return icc_filt", "docstring": "Threshold then mask an IC correlation map.\n Parameters\n ----------\n icc: img-like\n The 'raw' ICC map.\n\n mask: img-like\n If not None. Will apply this masks in the end of the process.\n\n thr: float\n The threshold value.\n\n zscore: bool\n If True will calculate the z-score of the ICC before thresholding.\n\n mode: str\n Choices: '+' for positive threshold,\n '+-' for positive and negative threshold and\n '-' for negative threshold.\n\n Returns\n -------\n icc_filt: nibabel.NiftiImage\n Thresholded and masked ICC.", "id": "f4093:m13"} {"signature": "@nifti_outdef thr_img(img, thr=, mode=''):", "body": "vol = read_img(img).get_data()if mode == '':mask = vol > threlif mode == '' or mode == '':mask = np.abs(vol) > threlif mode == '':mask = vol < -threlse:raise ValueError(\"\"\"\".format(mode))return vol * mask", "docstring": "Use the given magic function name `func` to threshold with value `thr`\n the data of `img` and return a new nibabel.Nifti1Image.\n Parameters\n ----------\n img: img-like\n\n thr: float or int\n The threshold value.\n\n mode: str\n Choices: '+' for positive threshold,\n '+-' for positive and negative threshold and\n '-' for negative threshold.\n Returns\n -------\n thr_img: nibabel.Nifti1Image\n Thresholded image", "id": "f4093:m2"} {"signature": "@nifti_outdef abs_img(img):", "body": "bool_img = np.abs(read_img(img).get_data())return bool_img.astype(int)", "docstring": "Return an image with the binarised version of the data of `img`.", "id": "f4093:m10"} {"signature": "@nifti_outdef negative_img(img):", "body": "bool_img = read_img(img).get_data() < return bool_img.astype(int)", "docstring": "Return an image with the negative voxels of the data of `img`.", "id": "f4093:m5"} {"signature": "def get_nii_info(img_file):", "body": "warnings.warn(\"\",DeprecationWarning)return get_img_info(img_file)", "docstring": "See get_img_info", "id": "f4094:m1"} {"signature": "def get_nii_data(nii_file):", "body": "warnings.warn(\"\",DeprecationWarning)return get_img_data(nii_file)", "docstring": "See get_img_data", "id": "f4094:m2"} {"signature": "def new_img_like(ref_niimg, data, affine=None, copy_header=False):", "body": "if not (hasattr(ref_niimg, '')and hasattr(ref_niimg,'')):if isinstance(ref_niimg, _basestring):ref_niimg = nib.load(ref_niimg)elif operator.isSequenceType(ref_niimg):ref_niimg = nib.load(ref_niimg[])else:raise TypeError(('''') % ref_niimg )if affine is None:affine = 
ref_niimg.get_affine()if data.dtype == bool:default_dtype = np.int8if (LooseVersion(nib.__version__) >= LooseVersion('') andisinstance(ref_niimg, nib.freesurfer.mghformat.MGHImage)):default_dtype = np.uint8data = as_ndarray(data, dtype=default_dtype)header = Noneif copy_header:header = copy.copy(ref_niimg.get_header())header[''] = header[''] = header[''] = header[''] = np.max(data) if data.size > else header[''] = np.min(data) if data.size > else return ref_niimg.__class__(data, affine, header=header)", "docstring": "Create a new image of the same class as the reference image\n\n Parameters\n ----------\n ref_niimg: image\n Reference image. The new image will be of the same type.\n\n data: numpy array\n Data to be stored in the image\n\n affine: 4x4 numpy array, optional\n Transformation matrix\n\n copy_header: boolean, optional\n Indicated if the header of the reference image should be used to\n create the new image\n\n Returns\n -------\n new_img: image\n A loaded image with the same type (and header) as the reference image.", "id": "f4094:m9"} {"signature": "def _make_it_3d(img):", "body": "shape = img.shapeif len(shape) == :return imgelif len(shape) == and shape[] == :return img[:, :, :, ]else:raise TypeError(''.format(shape))", "docstring": "Enforce that img is a 3D img-like object, if it is not, raise a TypeError.\n i.e., remove dimensions of size 1.\n\n Parameters\n ----------\n img: numpy.ndarray\n Image data array\n\n Returns\n -------\n 3D numpy ndarray object", "id": "f4096:m1"} {"signature": "def write_mhd_file(filename, data, shape=None, meta_dict=None):", "body": "ext = get_extension(filename)fname = op.basename(filename)if ext != '' or ext != '':mhd_filename = fname + ''raw_filename = fname + ''elif ext == '':mhd_filename = fnameraw_filename = remove_ext(fname) + ''elif ext == '':mhd_filename = remove_ext(fname) + ''raw_filename = fnameelse:raise ValueError(''''.format(ext, filename))if meta_dict is None:meta_dict = {}if shape is None:shape = data.shapemeta_dict[''] = meta_dict.get('', '')meta_dict[''] = meta_dict.get('', '' )meta_dict[''] = meta_dict.get('', '')meta_dict[''] = meta_dict.get('', NUMPY_TO_MHD_TYPE[data.dtype.type])meta_dict[''] = meta_dict.get('', str(len(shape)))meta_dict[''] = meta_dict.get('', ''.join([str(i) for i in shape]))meta_dict[''] = meta_dict.get('', raw_filename)mhd_filename = op.join(op.dirname(filename), mhd_filename)raw_filename = op.join(op.dirname(filename), raw_filename)write_meta_header(mhd_filename, meta_dict)dump_raw_data(raw_filename, data)return mhd_filename, raw_filename", "docstring": "Write the `data` and `meta_dict` in two files with names\n that use `filename` as a prefix.\n\n Parameters\n ----------\n filename: str\n Path to the output file.\n This is going to be used as a preffix.\n Two files will be created, one with a '.mhd' extension\n and another with '.raw'. 
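A typical use of new_img_like as documented above: write a derived data array back out with the geometry (and optionally the header) of a reference image. File names are assumptions:

    import nibabel as nib

    ref = nib.load('anat.nii.gz')
    binary = ref.get_data() > 0                        # some derived boolean array
    out = new_img_like(ref, binary, copy_header=True)  # bool data is cast to int8
    nib.save(out, 'anat_bin.nii.gz')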
If `filename` has any of these already\n they will be taken into account to build the filenames.\n\n data: numpy.ndarray\n n-dimensional image data array.\n\n shape: tuple\n Tuple describing the shape of `data`\n Default: data.shape\n\n meta_dict: dict\n Dictionary with the fields of the metadata .mhd file\n Default: {}\n\n Returns\n -------\n mhd_filename: str\n Path to the .mhd file\n\n raw_filename: str\n Path to the .raw file", "id": "f4098:m2"} {"signature": "def write_meta_header(filename, meta_dict):", "body": "header = ''for tag in MHD_TAGS:if tag in meta_dict.keys():header += ''.format(tag, meta_dict[tag])with open(filename, '') as f:f.write(header)", "docstring": "Write the content of the `meta_dict` into `filename`.\n\n Parameters\n ----------\n filename: str\n Path to the output file\n\n meta_dict: dict\n Dictionary with the fields of the metadata .mhd file", "id": "f4098:m0"} {"signature": "def copy_mhd_and_raw(src, dst):", "body": "if not op.exists(src):raise IOError(''.format(src))ext = get_extension(src)if ext != '':msg = ''.format(src)raise ValueError(msg)meta_src = _read_meta_header(src)src_raw = meta_src['']if not op.isabs(src_raw):src_raw = op.join(op.dirname(src), src_raw)if op.isdir(dst):shutil.copyfile(src, dst)shutil.copyfile(src_raw, dst)return dstdst_raw = op.join(op.dirname(dst), remove_ext(op.basename(dst))) + ''if get_extension(dst) != '':dst += ''log.debug(''.format(src, dst))log.debug(''.format(src_raw, dst_raw))shutil.copyfile(src, dst)shutil.copyfile(src_raw, dst_raw)if op.basename(dst) != op.basename(src):log.debug(''.format(dst, src_raw,op.basename(dst_raw)))meta_dst = _read_meta_header(dst)meta_dst[''] = op.basename(dst_raw)write_meta_header(dst, meta_dst)return dst", "docstring": "Copy .mhd and .raw files to dst.\n\n If dst is a folder, won't change the file, but if dst is another filepath,\n will modify the ElementDataFile field in the .mhd to point to the\n new renamed .raw file.\n\n Parameters\n ----------\n src: str\n Path to the .mhd file to be copied\n\n dst: str\n Path to the destination of the .mhd and .raw files.\n If a new file name is given, the extension will be ignored.\n\n Returns\n -------\n dst: str", "id": "f4098:m3"} {"signature": "def dump_raw_data(filename, data):", "body": "if data.ndim == :data = data.reshape([data.shape[], data.shape[]*data.shape[]])a = array.array('')for o in data:a.fromlist(list(o.flatten()))with open(filename, '') as rawf:a.tofile(rawf)", "docstring": "Write the data into a raw format file. 
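Writing a volume with the MHD helpers above produces a header/raw file pair; a sketch (array contents and paths assumed):

    import numpy as np

    vol = np.random.rand(64, 64, 32).astype(np.float32)
    mhd_path, raw_path = write_mhd_file('/tmp/volume', vol)
    # -> '/tmp/volume.mhd' (text header) and '/tmp/volume.raw' (raw data dump)

    # Copying to a new name rewrites the header's data-file reference:
    copy_mhd_and_raw(mhd_path, '/tmp/volume_copy.mhd')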
Big endian is always used.\n\n Parameters\n ----------\n filename: str\n Path to the output file\n\n data: numpy.ndarray\n n-dimensional image data array.", "id": "f4098:m1"} {"signature": "def load_raw_data_with_mhd(filename):", "body": "meta_dict = _read_meta_header(filename)dim = int(meta_dict[''])assert (meta_dict[''] in MHD_TO_NUMPY_TYPE)arr = [int(i) for i in meta_dict[''].split()]volume = reduce(lambda x, y: x*y, arr[:dim-], )pwd = op.dirname(filename)raw_file = meta_dict['']data_file = op.join(pwd, raw_file)ndtype = MHD_TO_NUMPY_TYPE[meta_dict['']]arrtype = NDARRAY_TO_ARRAY_TYPE[ndtype]with open(data_file, '') as fid:binvalues = array.array(arrtype)binvalues.fromfile(fid, volume*arr[dim-])data = np.array (binvalues, ndtype)data = np.reshape(data, (arr[dim-], volume))if dim >= :dimensions = [int(i) for i in meta_dict[''].split()]data = data.reshape(dimensions)return data, meta_dict", "docstring": "Return a dictionary of meta data from meta header file.\n\n Parameters\n ----------\n filename: str\n Path to a .mhd file\n\n Returns\n -------\n data: numpy.ndarray\n n-dimensional image data array.\n\n meta_dict: dict\n A dictionary with the .mhd header content.", "id": "f4099:m1"} {"signature": "def tabulate(self, tablefmt=''):", "body": "return tabulate(self, tablefmt=tablefmt)", "docstring": ":param tablefmt: string\n Supported table formats are:\n\"plain\"\n\"simple\"\n\"grid\"\n\"pipe\"\n\"orgtbl\"\n\"rst\"\n\"mediawiki\"\n\"latex\"\n\n:return: tabulate\nTabulated content", "id": "f4100:c0:m1"} {"signature": "def tabulate(self, tablefmt=''):", "body": "return tabulate(list(self.items()), tablefmt=tablefmt)", "docstring": ":param tablefmt: string\n Supported table formats are:\n\"plain\"\n\"simple\"\n\"grid\"\n\"pipe\"\n\"orgtbl\"\n\"rst\"\n\"mediawiki\"\n\"latex\"\n\n:return: tabulate\nTabulated content", "id": "f4100:c1:m1"} {"signature": "def apply_smoothing(self, smooth_fwhm):", "body": "if smooth_fwhm <= :returnold_smooth_fwhm = self._smooth_fwhmself._smooth_fwhm = smooth_fwhmtry:data = self.get_data(smoothed=True, masked=True, safe_copy=True)except ValueError as ve:self._smooth_fwhm = old_smooth_fwhmraiseelse:self._smooth_fwhm = smooth_fwhmreturn data", "docstring": "Set self._smooth_fwhm and then smooths the data.\n See boyle.nifti.smooth.smooth_imgs.\n\n Returns\n -------\n the smoothed data deepcopied.", "id": "f4101:c1:m16"} {"signature": "def apply_mask(self, mask_img):", "body": "self.set_mask(mask_img)return self.get_data(masked=True, smoothed=True, safe_copy=True)", "docstring": "First set_mask and the get_masked_data.\n\n Parameters\n ----------\n mask_img: nifti-like image, NeuroImage or str\n 3D mask array: True where a voxel should be used.\n Can either be:\n - a file path to a Nifti image\n - any object with get_data() and get_affine() methods, e.g., nibabel.Nifti1Image.\n If niimg is a string, consider it as a path to Nifti image and\n call nibabel.load on it. 
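Reading the pair back with load_raw_data_with_mhd, per its docstring above ('ElementDataFile' is the standard MetaImage tag name, assumed here since the key literals are elided in this dump):

    data, meta = load_raw_data_with_mhd('/tmp/volume.mhd')
    print(meta['ElementDataFile'], data.shape, data.dtype)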
If it is an object, check if get_data()\n and get_affine() methods are present, raise TypeError otherwise.\n\n Returns\n -------\n The masked data deepcopied", "id": "f4101:c1:m13"} {"signature": "def to_file(self, outpath):", "body": "if not self.has_mask() and not self.is_smoothed():save_niigz(outpath, self.img)else:save_niigz(outpath, self.get_data(masked=True, smoothed=True),self.get_header(), self.get_affine())", "docstring": "Save this object instance in outpath.\n\n Parameters\n ----------\n outpath: str\n Output file path", "id": "f4101:c1:m19"} {"signature": "def open_volume_file(filepath):", "body": "if not op.exists(filepath):raise IOError(''.format(filepath))def open_nifti_file(filepath):return NiftiImage(filepath)def open_mhd_file(filepath):return MedicalImage(filepath)vol_data, hdr_data = load_raw_data_with_mhd(filepath)return vol_data, hdr_datadef open_mha_file(filepath):raise NotImplementedError('')def _load_file(filepath, loader):return loader(filepath)filext_loader = {'': open_nifti_file,'': open_mhd_file,'': open_mha_file,}ext = get_extension(filepath)loader = Nonefor e in filext_loader:if ext in e:loader = filext_loader[e]if loader is None:raise ValueError(''.format(filepath))return _load_file(filepath, loader)", "docstring": "Open a volumetric file using the tools following the file extension.\n\n Parameters\n ----------\n filepath: str\n Path to a volume file\n\n Returns\n -------\n volume_data: np.ndarray\n Volume data\n\n pixdim: 1xN np.ndarray\n Vector with the description of the voxels physical size (usually in mm) for each volume dimension.\n\n Raises\n ------\n IOError\n In case the file is not found.", "id": "f4102:m0"} {"signature": "def compose_err_msg(msg, **kwargs):", "body": "updated_msg = msgfor k, v in sorted(kwargs.items()):if isinstance(v, _basestring): updated_msg += \"\" + k + \"\" + vreturn updated_msg", "docstring": "Append key-value pairs to msg, for display.\n\n Parameters\n ----------\n msg: string\n arbitrary message\n kwargs: dict\n arbitrary dictionary\n\n Returns\n -------\n updated_msg: string\n msg, with \"key: value\" appended. 
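A sketch of the extension-based dispatch in open_volume_file above; the path is an assumption, and per the body shown, .mhd files come back as a (data, header) pair:

    # .nii/.nii.gz, .mhd and .mha are routed to their respective loaders:
    vol_data, hdr_data = open_volume_file('/data/subject01.mhd')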
Only string values are appended.\n\n Example\n -------\n >>> compose_err_msg('Error message with arguments...', arg_num=123, \\\n arg_str='filename.nii', arg_bool=True)\n 'Error message with arguments...\\\\narg_str: filename.nii'\n >>>", "id": "f4103:m0"} {"signature": "def insert_unique(self, table_name, data, unique_fields=None, *, raise_if_found=False):", "body": "return insert_unique(table=self.table(table_name),data=_to_string(data),unique_fields=unique_fields,raise_if_found=raise_if_found)", "docstring": "Insert `data` into `table` ensuring that data has unique values\n in `table` for the fields listed in `unique_fields`.\n\n If `raise_if_found` is True, will raise an NotUniqueItemError if\n another item with the same `unique_fields` values are found\n previously in `table`.\n If False, will return the `eid` from the item found.\n\n Parameters\n ----------\n table_name: str\n\n data: dict\n\n unique_fields: list of str\n Name of fields (keys) from `data` which are going to be used to build\n a sample to look for exactly the same values in the database.\n If None, will use every key in `data`.\n\n raise_if_found: bool\n\n Returns\n -------\n eid: int\n Id of the object inserted or the one found with same `unique_fields`.\n\n Raises\n ------\n MoreThanOneItemError\n Raise even with `raise_with_found` == False if it finds more than one item\n with the same values as the sample.\n\n NotUniqueItemError\n If `raise_if_found` is True and an item with the same `unique_fields`\n values from `data` is found in `table`.", "id": "f4104:c2:m2"} {"signature": "def timestamp_with_tzinfo(dt):", "body": "utc = tzutc()if dt.tzinfo:dt = dt.astimezone(utc).replace(tzinfo=None)return dt.isoformat() + ''", "docstring": "Serialize a date/time value into an ISO8601 text representation\nadjusted (if needed) to UTC timezone.\n\nFor instance:\n>>> serialize_date(datetime(2012, 4, 10, 22, 38, 20, 604391))\n'2012-04-10T22:38:20.604391Z'", "id": "f4104:m0"} {"signature": "def search_unique(table, sample, unique_fields=None):", "body": "if unique_fields is None:unique_fields = list(sample.keys())query = _query_data(sample, field_names=unique_fields, operators='')items = table.search(query)if len(items) == :return items[]if len(items) == :return Noneraise MoreThanOneItemError(''''.format(len(items)))", "docstring": "Search for items in `table` that have the same field sub-set values as in `sample`.\n Expecting it to be unique, otherwise will raise an exception.\n\n Parameters\n ----------\n table: tinydb.table\n sample: dict\n Sample data\n\n Returns\n -------\n search_result: tinydb.database.Element\n Unique item result of the search.\n\n Raises\n ------\n KeyError:\n If the search returns for more than one entry.", "id": "f4104:m5"} {"signature": "def insert_unique(table, data, unique_fields=None, *, raise_if_found=False):", "body": "item = find_unique(table, data, unique_fields)if item is not None:if raise_if_found:raise NotUniqueItemError(''''.format(unique_fields,data,table.get(eid=item),item))else:return itemreturn table.insert(data)", "docstring": "Insert `data` into `table` ensuring that data has unique values\n in `table` for the fields listed in `unique_fields`.\n\n If `raise_if_found` is True, will raise an NotUniqueItemError if\n another item with the same `unique_fields` values are found\n previously in `table`.\n If False, will return the `eid` from the item found.\n\n Parameters\n ----------\n table: tinydb.Table\n\n data: dict\n\n unique_fields: list of str\n Name of fields (keys) from `data` which 
are going to be used to build\n a sample to look for exactly the same values in the database.\n If None, will use every key in `data`.\n\n raise_if_found: bool\n\n Returns\n -------\n eid: int\n Id of the object inserted or the one found with same `unique_fields`.\n\n Raises\n ------\n MoreThanOneItemError\n Raise even with `raise_with_found` == False if it finds more than one item\n with the same values as the sample.\n\n NotUniqueItemError\n If `raise_if_found` is True and an item with the same `unique_fields`\n values from `data` is found in `table`.", "id": "f4104:m3"} {"signature": "def search_sample(self, table_name, sample):", "body": "return search_sample(table=self.table(table_name),sample=sample)", "docstring": "Search for items in `table` that have the same field sub-set values as in `sample`.\n\n Parameters\n ----------\n table_name: str\n\n sample: dict\n Sample data\n\n Returns\n -------\n search_result: list of dict\n List of the items found. The list is empty if no item is found.", "id": "f4104:c2:m4"} {"signature": "def _query_data(data, field_names=None, operators=''):", "body": "if field_names is None:field_names = list(data.keys())if isinstance(field_names, str):field_names = [field_names]sample = OrderedDict([(fn, data[fn]) for fn in field_names])return _query_sample(sample, operators=operators)", "docstring": "Create a tinyDB Query object that looks for items that confirms the correspondent operator\n from `operators` for each `field_names` field values from `data`.\n\n Parameters\n ----------\n data: dict\n The data sample\n\n field_names: str or list of str\n The name of the fields in `data` that will be used for the query.\n\n operators: str or list of str\n A list of comparison operations for each field value in `field_names`.\n If this is a str, will use the same operator for all `field_names`.\n If you want different operators for each field, remember to use an OrderedDict for `data`.\n Check TinyDB.Query class for possible choices.\n\n Returns\n -------\n query: tinydb.database.Query", "id": "f4104:m8"} {"signature": "def get_requirements(*args):", "body": "install_deps = []try:for fpath in args:install_deps.extend([str(d.req or d.url) for d in parse_requirements(fpath)])except:print(''.format(fpath))return [dep for dep in install_deps if dep != '']", "docstring": "Parse all requirements files given and return a list of the dependencies", "id": "f4105:m0"} {"signature": "@baker.command(default=True,shortopts={'': '','': '','': '','': ''})def copy(configfile='', destpath='', overwrite=False, sub_node=''):", "body": "log.info(''.format(os.path.basename(__file__),whoami(),locals()))assert(os.path.isfile(configfile))if os.path.exists(destpath):if os.listdir(destpath):raise FolderAlreadyExists(''''.format(destpath))else:log.info(''.format(destpath))path(destpath).makedirs_p()from boyle.files.file_tree_map import FileTreeMapfile_map = FileTreeMap()try:file_map.from_config_file(configfile)except Exception as e:raise FileTreeMapError(str(e))if sub_node:sub_map = file_map.get_node(sub_node)if not sub_map:raise FileTreeMapError(''''.format(sub_node))file_map._filetree = {}file_map._filetree[sub_node] = sub_maptry:file_map.copy_to(destpath, overwrite=overwrite)except Exception as e:raise FileTreeMapError(str(e))", "docstring": "Copies the files in the built file tree map\n to despath.\n\n :param configfile: string\n Path to the FileTreeMap config file\n\n :param destpath: string\n Path to the files destination\n\n :param overwrite: bool\n Overwrite files if they already 
exist.\n\n :param sub_node: string\n Tree map configuration sub path.\n Will copy only the contents within this sub-node", "id": "f4107:m0"} {"signature": "def print_compare_idsets_one_ref(self, idset1_name, idset2_name):", "body": "try:idset1 = self[idset1_name]idset2 = self[idset2_name]except KeyError as ke:log.error(''.format(idset1_name,idset2_name))import sys, pdbpdb.post_mortem(sys.exc_info()[])raiseassert(isinstance(idset1, idset_with_reference))assert(isinstance(idset2, idset))self._print_general_vs_table(idset1, idset2)self._print_foreign_repetition_table(idset1, idset2)", "docstring": "idset1_name: string\nkey of an idset_with_reference\n\nidset2_name: string\nkey of an idset", "id": "f4108:c2:m7"} {"signature": "@baker.command(name='',params={\"\": \"\",\"\": \"\",\"\": \"\"\"\"\"\",\"\": \"\"\"\"\"\"},shortopts={'': '', '': '', '': '', '': ''})def convert_sav(inputfile, outputfile=None, method='', otype=''):", "body": "assert(os.path.isfile(inputfile))assert(method=='' or method=='')if method == '':df = sav_to_pandas_rpy2(inputfile)elif method == '':df = sav_to_pandas_savreader(inputfile)otype_exts = {'': '', '': '', '': '','': '','': '','': '','': ''}if outputfile is None:outputfile = inputfile.replace(path(inputfile).ext, '')outputfile = add_extension_if_needed(outputfile, otype_exts[otype])if otype == '':df.to_csv(outputfile)elif otype == '':df.to_hdf(outputfile, os.path.basename(outputfile))elif otype == '':df.to_stata(outputfile)elif otype == '':df.to_json(outputfile)elif otype == '':df.to_pickle(outputfile)elif otype == '':df.to_excel(outputfile)elif otype == '':df.to_html(outputfile)else:df.to_csv(outputfile)", "docstring": "Transforms the input .sav SPSS file into other format.\n If you don't specify an outputfile, it will use the\n inputfile and change its extension to .csv", "id": "f4109:m0"} {"signature": "def setDefaultIREncoding(encoding):", "body": "try:b''.decode(encoding)except:raise ValueError('' %(str(encoding), ))global defaultIREncodingdefaultIREncoding = encoding", "docstring": "setDefaultIREncoding - Sets the default encoding used by IndexedRedis.\n This will be the default encoding used for field data. 
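The convert_sav command above fans out to the pandas writers; the core of its otype dispatch is equivalent to this sketch (a subset of the output types, with invented names):

    import pandas as pd

    def export_df(df, outputfile, otype='csv'):
        # Mirror of the otype dispatch in convert_sav (subset shown).
        writers = {
            'csv':    lambda: df.to_csv(outputfile),
            'hdf':    lambda: df.to_hdf(outputfile, 'data'),
            'json':   lambda: df.to_json(outputfile),
            'pickle': lambda: df.to_pickle(outputfile),
        }
        writers.get(otype, writers['csv'])()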
You can override this on a\n per-field basis by using an IRField (such as IRUnicodeField or IRRawField)\n\n@param encoding - An encoding (like utf-8)", "id": "f4140:m0"} {"signature": "def __new__(self, val=''):", "body": "return IrNullBaseType.__new__(self, '')", "docstring": "Don't let this be assigned a value.", "id": "f4144:c0:m0"} {"signature": "def getObj(self):", "body": "if self.obj is None:if not self.pk:return Noneself.obj = self.foreignModel.objects.get(self.pk)return self.obj", "docstring": "getObj - Fetch (if not fetched) and return the obj associated with this data.", "id": "f4146:c1:m3"} {"signature": "def __init__(self, pk=None, foreignModel=None, obj=None):", "body": "self.pk = pkself.obj = objif foreignModel is not None:if issubclass(foreignModel.__class__, weakref.ReferenceType):foreignModel = foreignModel()self._foreignModel = weakref.ref(foreignModel)else:self._foreignModel = None", "docstring": "__init__ - Create a ForeignLinkData object\n\n@param pk - The primary key of the foreign object\n@param obj - The resolved object, or None if not yet resolved", "id": "f4146:c1:m0"} {"signature": "def isMulti(self):", "body": "return False", "docstring": "isMulti - Returns True if this is a MultiLink object (expects lists), otherwise False (expects object)\n\n@return ", "id": "f4146:c4:m8"} {"signature": "def isFetched(self):", "body": "return not bool(self.obj is None)", "docstring": "isFetched - Check if the associated obj has been fetched or not.", "id": "f4146:c1:m7"} {"signature": "def getObjs(self):", "body": "return self.getObj()", "docstring": "getObjs - @see ForeignLinkData.getObjs", "id": "f4146:c2:m4"} {"signature": "def __init__(self, name='', foreignModel=None):", "body": "IRField.__init__(self, name, valueType=int, defaultValue=irNull)if foreignModel:if not isinstance(foreignModel, type):raise ValueError('')if not hasattr(foreignModel, ''):raise ValueError('')self._foreignModel = weakref.ref(foreignModel)", "docstring": "__init__ - Create an IRForeignLinkField. Only takes a name\n\n@param name - Field name\n\nThis field type does not support indexing.", "id": "f4146:c4:m0"} {"signature": "def __init__(self, name='', defaultValue=irNull):", "body": "self.valueType = Noneself.defaultValue = defaultValue", "docstring": "__init__ - Create an IRPickleField\n\n@param name - Field name\n\n@param defaultValue - The default value of this field\n\nBecause even with the same format, python2 and python3 can output different pickle strings for the same object,\n as well as different host configurations may lead to different output, this field type is not indexable.", "id": "f4147:c0:m0"} {"signature": "def __init__(self, name='', decimalPlaces=, defaultValue=irNull):", "body": "self.decimalPlaces = decimalPlacesif isinstance(defaultValue, int):defaultValue = float(defaultValue)elif isinstance(defaultValue, float):defaultValue = round(defaultValue, decimalPlaces)self.defaultValue = defaultValue", "docstring": "__init__ - Create this object.\n\n@param name - Field name (or blank if used in an IRFieldChain)\n\n@param decimalPlaces - The number of decimal places to use (precision). 
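How the IndexedRedis field types documented above might appear on a model; the Order/Person names and field set are invented for illustration, assuming a Person model defined elsewhere:

    class Order(IndexedRedisModel):
        FIELDS = [
            IRField('customer_name'),
            IRFixedPointField('total', decimalPlaces=2, defaultValue=0.0),
            IRForeignLinkField('shipped_by', foreignModel=Person),  # stores the Person pk
        ]
        INDEXED_FIELDS = ['customer_name']
        KEY_NAME = 'orders'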
Values will be rounded to this many places, and always have\n this many digits after the decimal point.\n\n@param defaultValue - The default value for this field\n\nAn IRFixedPointField is indexable, and has no option to hash the index.", "id": "f4148:c0:m0"} {"signature": "def toIndex(self, value):", "body": "if self._isIrNull(value):ret = IR_NULL_STRelse:ret = self._toIndex(value)if self.isIndexHashed is False:return retreturn md5(tobytes(ret)).hexdigest()", "docstring": "toIndex - An optional method which will return the value prepped for index.\n\nBy default, \"toStorage\" will be called. If you provide \"hashIndex=True\" on the constructor,\nthe field will be md5summed for indexing purposes. This is useful for large strings, etc.", "id": "f4149:c0:m8"} {"signature": "def toStorage(self, value):", "body": "if value == irNull or None:return IR_NULL_STRreturn self._toStorage(value)", "docstring": "toStorage - Convert the value to a string representation for storage.\n\n The default implementation will work here for basic types.\n\n@param value - The value of the item to convert\n@return A string value suitable for storing.", "id": "f4149:c0:m1"} {"signature": "def _getReprProperties(self):", "body": "ret = []if getattr(self, '', None) is not None:ret.append('' %(self.valueType.__name__, ))if hasattr(self, ''):ret.append('' %(self.hashIndex, ))return ret", "docstring": "_getReprProperties - Get the properties of this field to display in repr().\n\n These should be in the form of $propertyName=$propertyRepr\n\n The default IRField implementation handles just the \"hashIndex\" property.\n\n defaultValue is part of \"__repr__\" impl. You should just extend this method\n with your object's properties instead of rewriting repr.", "id": "f4149:c0:m18"} {"signature": "def _toStorage(self, value):", "body": "return to_unicode(value)", "docstring": "_toStorage - Convert the value to a string for storage.\n\nThe default implementation works for most valueTypes within IRField, override this for extending types.\n\nYou don't need to handle null\n\n@param value - Value of item to convert\n\n@return - A string value suitable for storing", "id": "f4149:c0:m2"} {"signature": "@propertydef name(self):", "body": "return str(self)", "docstring": "name - Property, return this field's name\n\n@return - Field name", "id": "f4149:c0:m11"} {"signature": "def copy(self):", "body": "return self.__class__(name=self.name, valueType=self.valueType, defaultValue=self.defaultValue, hashIndex=self.hashIndex)", "docstring": "copy - Create a copy of this IRField.\n\n Each subclass should implement this, as you'll need to pass in the args to constructor.\n\n@return - Another IRField that has all the same values as this one.", "id": "f4149:c0:m20"} {"signature": "@propertydef isIndexHashed(self):", "body": "return bool(self.hashIndex)", "docstring": "isIndexHashed - Returns if the index value should be hashed\n\n@return - True if this field should be hashed before indexing / filtering", "id": "f4149:c0:m10"} {"signature": "def fromInput(self, value):", "body": "if value == irNull:return irNullreturn self._fromInput(value)", "docstring": "fromInput - Convert the value from input (like assigning this through constructor or as an item assignment on the object\n\n@param value - Value to convert\n\n@return - Converted value", "id": "f4149:c0:m6"} {"signature": "def _fromStorage(self, value):", "body": "return self.valueType(value)", "docstring": "_fromStorage - Convert the value from storage to the value type.\n\n This default 
impl works fine for most value types, should be implemented by extending types.\n\n @param value - Value to convert\n\n @return - Converted value", "id": "f4149:c0:m4"} {"signature": "def fromStorage(self, value):", "body": "if value in IR_NULL_STRINGS:return irNullreturn self._fromStorage(value)", "docstring": "fromStorage - Convert the value from storage to the value type.\n\n@param value - Value to convert\n\n@return - The converted value", "id": "f4149:c0:m3"} {"signature": "def getEncoding(self):", "body": "if not self.encoding:return getDefaultIREncoding()return self.encoding", "docstring": "getEncoding - Get the encoding codec associated with this field.\n\n If you provided None, this will return the defaultIREncoding\n\n@return - Encoding", "id": "f4152:c0:m1"} {"signature": "def __init__(self, name='', defaultValue=irNull, encoding=None):", "body": "self.valueType = Noneself.defaultValue = defaultValueself.encoding = encoding", "docstring": "__init__ - Create an IRBytesField object\n\n@param name - Field name\n\n@param defaultValue default irNull - Default value for this field\n\n@param encoding - If None, defaultIREncoding will be used when converting to bytes,\n otherwise you can provide an explicit encoding\n\nAn IRBytesField is indexable, and the index is forced to be hashed.", "id": "f4154:c0:m0"} {"signature": "def reload(self):", "body": "if len(self) == :return []ret = []for obj in self:res = Nonetry:res = obj.reload()except Exception as e:res = eret.append(res)return ret", "docstring": "reload - Reload all objects in this list. \n Updates in-place. To just fetch all these objects again, use \"refetch\"\n\n@return - List (same order as current objects) of either exception (KeyError) if operation failed,\n or a dict of fields changed -> (old, new)", "id": "f4155:c0:m5"} {"signature": "@staticmethoddef __validate_model(mdl):", "body": "if not hasattr(mdl, ''):raise ValueError('' %(str(mdl.__class__.__name__),))", "docstring": "__validate_model - Internal function to check that model is of correct type.\n\nUses a class variable that has been defined for IndexedRedisModel s for a long time, not the type itself, to prevent circular imports etc.\n\n@param mdl - type to validate", "id": "f4155:c0:m1"} {"signature": "def refetch(self):", "body": "if len(self) == :return IRQueryableList()mdl = self.getModel()pks = [item._id for item in self if item._id]return mdl.objects.getMultiple(pks)", "docstring": "refetch - Fetch a fresh copy of all items in this list.\n Returns a new list. 
To update in-place, use \"reload\".\n\n@return IRQueryableList - List of fetched items", "id": "f4155:c0:m6"} {"signature": "def delete(self):", "body": "if len(self) == :return mdl = self.getModel()return mdl.deleter.deleteMultiple(self)", "docstring": "delete - Delete all objects in this list.\n\n@return - Number of objects deleted", "id": "f4155:c0:m3"} {"signature": "def _add_id_to_keys(self, pk, conn=None):", "body": "if conn is None:conn = self._get_connection()conn.sadd(self._get_ids_key(), pk)", "docstring": "_add_id_to_keys - Adds primary key to table\ninternal", "id": "f4156:c2:m5"} {"signature": "def diff(firstObj, otherObj, includeMeta=False):", "body": "if not isIndexedRedisModel(firstObj): raise ValueError('' %( type(firstObj).__name__ , ) )if not isIndexedRedisModel(otherObj): raise ValueError('' %( type(otherObj).__name__ , ) )firstObj.validateModel()otherObj.validateModel()if getattr(firstObj, '') != getattr(otherObj, ''):raise ValueError('' %( firstObj.__class__, otherObj.__class__) )diffFields = {}for thisField in firstObj.FIELDS:thisFieldStr = str(thisField)firstVal = object.__getattribute__( firstObj, thisFieldStr )otherVal = object.__getattribute__( otherObj, thisFieldStr )if firstVal != otherVal:diffFields[ thisFieldStr ] = ( (firstVal, otherVal) )if includeMeta:firstPk = firstObj.getPk()otherPk = otherObj.getPk()if firstPk != otherPk:diffFields[''] = ( firstPk, otherPk )return diffFields", "docstring": "diff - Compare the field values on two IndexedRedisModels.\n\n@param firstObj - First object (or self)\n\n@param otherObj - Second object\n\n@param includeMeta - If meta information (like pk) should be in the diff results.\n\n\n@return - Dict of 'field' : ( value_firstObjForField, value_otherObjForField ).\n\n Keys are names of fields with different values.\n Value is a tuple of ( value_firstObjForField, value_otherObjForField )\n\nCan be called statically, like: IndexedRedisModel.diff ( obj1, obj2 )\n\n or in reference to an obj : obj1.diff(obj2)", "id": "f4156:c1:m7"} {"signature": "@classpropertydef saver(cls):", "body": "return IndexedRedisSave(cls)", "docstring": "saver - Get an IndexedRedisSave associated with this model", "id": "f4156:c1:m9"} {"signature": "def saveMultiple(self, objs):", "body": "return self.save(objs)", "docstring": "saveMultiple - Save a list of objects using a pipeline.\n\n@param objs < list > - List of objects to save", "id": "f4156:c4:m1"} {"signature": "def getDefaultRedisConnectionParams():", "body": "global _defaultRedisConnectionParamsreturn copy.copy(_defaultRedisConnectionParams)", "docstring": "getDefaultRedisConnectionParams - Gets A COPY OF the default Redis connection params.\n\n@see setDefaultRedisConnectionParams for more info\n\n@return - copy of default Redis connection parameters", "id": "f4156:m1"} {"signature": "def saveToExternal(self, redisCon):", "body": "if type(redisCon) == dict:conn = redis.Redis(**redisCon)elif hasattr(redisCon, '') and issubclass(redisCon.__class__, redis.Redis):conn = redisConelse:raise ValueError('')saver = self.saverforceID = saver._getNextID(conn) myCopy = self.copy(False)return saver.save(myCopy, usePipeline=True, forceID=forceID, conn=conn)", "docstring": "saveToExternal - Saves this object to a different Redis than that specified by REDIS_CONNECTION_PARAMS on this model.\n\n@param redisCon - Either a dict of connection params, a la REDIS_CONNECTION_PARAMS, or an existing Redis connection.\n If you are doing a lot of bulk copies, it is recommended that you create a Redis connection and pass it in 
rather than establish a new\n connection with each call.\n\n@note - You will generate a new primary key relative to the external Redis environment. If you need to reference a \"shared\" primary key, it is better\n to use an indexed field than the internal pk.", "id": "f4156:c1:m23"} {"signature": "def all(self, cascadeFetch=False):", "body": "matchedKeys = self.getPrimaryKeys()if matchedKeys:return self.getMultiple(matchedKeys, cascadeFetch=cascadeFetch)return IRQueryableList([], mdl=self.mdl)", "docstring": "all - Get the underlying objects which match the filter criteria.\n\nExample: objs = Model.objects.filter(field1='value', field2='value2').all()\n\n@param cascadeFetch Default False, If True, all Foreign objects associated with this model\n will be fetched immediately. If False, foreign objects will be fetched on-access.\n\n@return - Objects of the Model instance associated with this query.", "id": "f4156:c3:m9"} {"signature": "def __setattr__(self, keyName, value):", "body": "oga = object.__getattribute__if keyName not in ('', ''):fields = oga(self, '')try:idx = fields.index(keyName)except:idx = -if idx != -:value = fields[idx].fromInput(value)object.__setattr__(self, keyName, value)", "docstring": "__setattr__ - Will be used to set an attribute on this object.\n\n If the attribute is a field (in self.FIELDS), it will be converted via the field type's #fromInput method.\n\n Otherwise, it will just set the attribute on this object.", "id": "f4156:c1:m1"} {"signature": "def reload(self, cascadeObjects=True):", "body": "_id = self._idif not _id:raise KeyError('')currentData = self.asDict(False, forStorage=False)newDataObj = self.objects.get(_id)if not newDataObj:raise KeyError('' %(_id,))newData = newDataObj.asDict(False, forStorage=False)if currentData == newData and not self.foreignFields:return []updatedFields = {}for thisField, newValue in newData.items():defaultValue = thisField.getDefaultValue()currentValue = currentData.get(thisField, defaultValue)fieldIsUpdated = Falseif currentValue != newValue:fieldIsUpdated = Trueelif cascadeObjects is True and issubclass(thisField.__class__, IRForeignLinkFieldBase):if currentValue.isFetched():oldObjs = currentValue.getObjs()newObjs = newValue.getObjs()if oldObjs != newObjs: fieldIsUpdated = Trueelse:for i in range(len(oldObjs)):if not oldObjs[i].hasSameValues(newObjs[i], cascadeObjects=True):fieldIsUpdated = Truebreakif fieldIsUpdated is True:updatedFields[thisField] = ( currentValue, newValue) setattr(self, thisField, newValue)self._origData[thisField] = newDataObj._origData[thisField]return updatedFields", "docstring": "reload - Reload this object from the database, overriding any local changes and merging in any updates.\n\n\n @param cascadeObjects Default True. If True, foreign-linked objects will be reloaded if their values have changed\n since last save/fetch. If False, only if the pk changed will the foreign linked objects be reloaded.\n\n @raises KeyError - if this object has not been saved (no primary key)\n\n @return - Dict with the keys that were updated. Key is field name that was updated,\n and value is tuple of (old value, new value). 
\n\n NOTE: Currently, this will cause a fetch of all Foreign Link objects, one level", "id": "f4156:c1:m24"} {"signature": "def save(self, cascadeSave=True):", "body": "saver = IndexedRedisSave(self.__class__)return saver.save(self, cascadeSave=cascadeSave)", "docstring": "save - Save this object.\n\nWill perform an \"insert\" if this object had not been saved before,\n otherwise will update JUST the fields changed on THIS INSTANCE of the model.\n\n i.e. If you have two processes fetch the same object and change different fields, they will not overwrite\neach other, but only save the ones each process changed.\n\nIf you want to save multiple objects of type MyModel in a single transaction,\nand you have those objects in a list, myObjs, you can do the following:\n\n MyModel.saver.save(myObjs)\n\n@param cascadeSave Default True - If True, any Foreign models linked as attributes that have been altered\n or created will be saved with this object. If False, only this object (and the reference to an already-saved foreign model) will be saved.\n\n@see #IndexedRedisSave.save\n\n@return - Single element list, id of saved object (if successful)", "id": "f4156:c1:m11"} {"signature": "@deprecated('')@classmethoddef connect(cls, redisConnectionParams):", "body": "return cls.connectAlt(redisConnectionParams)", "docstring": "connect - DEPRECATED NAME - @see connectAlt\n Create a class of this model which will use an alternate connection than the one specified by REDIS_CONNECTION_PARAMS on this model.\n\n@param redisConnectionParams - Dictionary of arguments to redis.Redis, same as REDIS_CONNECTION_PARAMS.\n\n@return - A class that can be used in all the same ways as the existing IndexedRedisModel, but that connects to a different instance.", "id": "f4156:c1:m30"} {"signature": "def random(self, cascadeFetch=False):", "body": "matchedKeys = list(self.getPrimaryKeys())obj = Nonewhile matchedKeys and not obj:key = matchedKeys.pop(random.randint(, len(matchedKeys)-))obj = self.get(key, cascadeFetch=cascadeFetch)return obj", "docstring": "Random - Returns a random record in current filterset.\n\n\n@param cascadeFetch Default False, If True, all Foreign objects associated with this model\n will be fetched immediately. If False, foreign objects will be fetched on-access.\n\n@return - Instance of Model object, or None if no items match current filters", "id": "f4156:c3:m15"} {"signature": "@staticmethoddef _doCascadeFetch(obj):", "body": "obj.validateModel()if not obj.foreignFields:return for foreignField in obj.foreignFields:subObjsData = object.__getattribute__(obj, foreignField)if not subObjsData:setattr(obj, str(foreignField), irNull)continuesubObjs = subObjsData.getObjs()for subObj in subObjs:if isIndexedRedisModel(subObj):IndexedRedisQuery._doCascadeFetch(subObj)", "docstring": "_doCascadeFetch - Takes an object and performs a cascading fetch on all foreign links, and all theirs, and so on.\n\nNOTE: Currently this fetches using one transaction per object. Implementation for actual resolution is in IndexedRedisModel.__getattribute__\n\n@param obj - A fetched model", "id": "f4156:c3:m18"} {"signature": "@classmethoddef copyModel(mdl):", "body": "copyNum = _modelCopyMap[mdl]_modelCopyMap[mdl] += mdlCopy = type(mdl.__name__ + '' + str(copyNum), mdl.__bases__, copy.deepcopy(dict(mdl.__dict__)))mdlCopy.FIELDS = [field.copy() for field in mdl.FIELDS]mdlCopy.INDEXED_FIELDS = [str(idxField) for idxField in mdl.INDEXED_FIELDS] mdlCopy.validateModel()return mdlCopy", "docstring": "copyModel - Copy this model, and return that copy.\n\n The copied model will have all the same data, but will have a fresh instance of the FIELDS array and all members,\n and the INDEXED_FIELDS array.\n\n This is useful for converting, like changing field types or whatever, where you can load from one model and save into the other.\n\n@return - A copy class of this model class with a unique name.", "id": "f4156:c1:m28"} {"signature": "def getMultipleOnlyFields(self, pks, fields, cascadeFetch=False):", "body": "if type(pks) == set:pks = list(pks)if len(pks) == :return IRQueryableList([self.getOnlyFields(pks[], fields, cascadeFetch=cascadeFetch)], mdl=self.mdl)conn = self._get_connection()pipeline = conn.pipeline()for pk in pks:key = self._get_key_for_id(pk)pipeline.hmget(key, fields)res = pipeline.execute()ret = IRQueryableList(mdl=self.mdl)pksLen = len(pks)i = numFields = len(fields)while i < pksLen:objDict = {}anyNotNone = FalsethisRes = res[i]if thisRes is None or type(thisRes) != list:ret.append(None)i += continuej = while j < numFields:objDict[fields[j]] = thisRes[j]if thisRes[j] != None:anyNotNone = Truej += if anyNotNone is False:ret.append(None)i += continueobjDict[''] = pks[i]obj = self._redisResultToObj(objDict)ret.append(obj)i += if cascadeFetch is True:for obj in ret:self._doCascadeFetch(obj)return ret", "docstring": "getMultipleOnlyFields - Gets only certain fields from a list of primary keys. For working on entire filter set, see allOnlyFields\n\n@param pks list - Primary Keys\n\n@param fields list - List of fields\n\n\n@param cascadeFetch Default False, If True, all Foreign objects associated with this model\n will be fetched immediately. 
If False, foreign objects will be fetched on-access.\n\n@return - List of partial objects with only fields applied", "id": "f4156:c3:m21"} {"signature": "def delete(self):", "body": "deleter = IndexedRedisDelete(self.__class__)return deleter.deleteOne(self)", "docstring": "delete - Delete this object", "id": "f4156:c1:m12"} {"signature": "def save(self, obj, usePipeline=True, forceID=False, cascadeSave=True, conn=None):", "body": "if conn is None:conn = self._get_connection()if usePipeline is True:idConn = connelse:idConn = self._get_new_connection()if issubclass(obj.__class__, (list, tuple)):objs = objelse:objs = [obj]if usePipeline is True:pipeline = conn.pipeline()else:pipeline = connoga = object.__getattribute__if cascadeSave is True:foreignPipelines = OrderedDict()foreignSavers = {}for thisObj in objs:if not thisObj.foreignFields:continueforeignFields = thisObj.foreignFieldsfor foreignField in foreignFields:rawObj = oga(thisObj, str(foreignField))if rawObj in (None, irNull) or not rawObj.isFetched():continueforeignObjects = oga(thisObj, str(foreignField)).getObjs()for foreignObject in foreignObjects:doSaveForeign = Falseif getattr(foreignObject, '', None):if foreignObject.hasUnsavedChanges(cascadeObjects=True):doSaveForeign = Trueelse:doSaveForeign = Trueif doSaveForeign is True:if foreignField not in foreignSavers:foreignPipelines[foreignField] = self._get_new_connection().pipeline()foreignSavers[foreignField] = IndexedRedisSave(foreignObject.__class__)", "docstring": "save - Save an object / objects associated with this model. \n\nYou probably want to just do object.save() instead of this, but to save multiple objects at once in a single transaction, \n you can use:\n\n MyModel.saver.save(myObjs)\n\n@param obj - The object to save, or a list of objects to save\n\n@param usePipeline - Use a pipeline for saving. You should always want this, unless you are calling this function from within an existing pipeline.\n\n@param forceID - if not False, force ID to this. If obj is list, this is also list. Forcing IDs also forces insert. Up to you to ensure ID will not clash.\n@param cascadeSave Default True - If True, any Foreign models linked as attributes that have been altered\n or created will be saved with this object. If False, only this object (and the reference to an already-saved foreign model) will be saved.\n\n@param conn - A connection or None\n\n@note - if no ID is specified\n\n@return - List of pks", "id": "f4156:c4:m0"} {"signature": "@classmethoddef connectAlt(cls, redisConnectionParams):", "body": "if not isinstance(redisConnectionParams, dict):raise ValueError('')hashVal = hashDictOneLevel(redisConnectionParams)modelDictCopy = copy.deepcopy(dict(cls.__dict__))modelDictCopy[''] = redisConnectionParamsConnectedIndexedRedisModel = type('' + cls.__name__ + str(hashVal), cls.__bases__, modelDictCopy)return ConnectedIndexedRedisModel", "docstring": "connectAlt - Create a class of this model which will use an alternate connection than the one specified by REDIS_CONNECTION_PARAMS on this model.\n\n@param redisConnectionParams - Dictionary of arguments to redis.Redis, same as REDIS_CONNECTION_PARAMS.\n\n@return - A class that can be used in all the same ways as the existing IndexedRedisModel, but that connects to a different instance.\n\n The fields and key will be the same here, but the connection will be different. 
use #copyModel if you want an independent class for the model", "id": "f4156:c1:m31"} {"signature": "def __eq__(self, other):", "body": "if type(self) != type(other):return Falseif not self.hasSameValues(other):return Falseif getattr(self, '', None) != getattr(other, '', None):return Falsereturn True", "docstring": "__eq__ - Check if two IndexedRedisModels are equal.\n\nThey are equal if they have the same type and same field values (including id).\n\nTo check if two models have the same values (but can have different ids), use #hasSameValues method.", "id": "f4156:c1:m16"} {"signature": "def __repr__(self):", "body": "myDict = self.asDict(True, forStorage=False, strKeys=True)myClassName = self.__class__.__name__ret = [myClassName, '']_id = myDict.pop('', '')if _id:ret += ['', to_unicode(_id), '']key = Nonefor key, value in myDict.items():ret += [key, '', repr(value), '']if key is not None or not _id:ret.pop()ret.append('')return ''.join(ret)", "docstring": "__repr__ - Returns a string of the constructor/params to recreate this object.\n Example: objCopy = eval(repr(obj))\n\n @return - String of python init call to recreate this object", "id": "f4156:c1:m19"} {"signature": "def _rem_id_from_index(self, indexedField, pk, val, conn=None):", "body": "if conn is None:conn = self._get_connection()conn.srem(self._get_key_for_index(indexedField, val), pk)", "docstring": "_rem_id_from_index - Removes an id from an index\ninternal", "id": "f4156:c2:m8"} {"signature": "def filterInline(self, **kwargs):", "body": "return IndexedRedisQuery._filter(self, **kwargs)", "docstring": "filterInline - @see IndexedRedisQuery.filter. This is the same as filter, but works inline on this object instead of creating a copy.\n Use this if you do not need to retain the previous filter object.", "id": "f4156:c3:m4"} {"signature": "def getPk(self):", "body": "return self._id", "docstring": "getPk - Gets the internal primary key associated with this object", "id": "f4156:c1:m13"} {"signature": "def _get_connection(self):", "body": "if self._connection is None:self._connection = self._get_new_connection() return self._connection", "docstring": "_get_connection - Maybe get a new connection, or reuse if passed in.\n Will share a connection with a model\ninternal", "id": "f4156:c2:m3"} {"signature": "def asDict(self, includeMeta=False, forStorage=False, strKeys=False):", "body": "ret = {}for thisField in self.FIELDS:val = object.__getattribute__(self, thisField)if forStorage is True:val = thisField.toStorage(val)if strKeys:ret[str(thisField)] = valelse:ret[thisField] = valif includeMeta is True:ret[''] = getattr(self, '', None)return ret", "docstring": "toDict / asDict - Get a dictionary representation of this model.\n\n@param includeMeta - Include metadata in return. For now, this is only pk stored as \"_id\"\n\n@param convertValueTypes - default True. 
If False, fields with fieldValue defined will be converted to that type.\n Use True when saving, etc, as native type is always either str or bytes.\n\n@param strKeys Default False - If True, just the string value of the field name will be used as the key.\n Otherwise, the IRField itself will be (although represented and indexed by string)\n\n@return - Dictionary representation of this object and all fields", "id": "f4156:c1:m3"} {"signature": "def __setstate__(self, stateDict):", "body": "self.__class__.validateModel()for key, value in stateDict.items():setattr(self, key, value)self._origData = stateDict['']", "docstring": "pickle uses this", "id": "f4156:c1:m27"} {"signature": "def _get_new_connection(self):", "body": "pool = getRedisPool(self.mdl.REDIS_CONNECTION_PARAMS)return redis.Redis(connection_pool=pool)", "docstring": "_get_new_connection - Get a new connection\ninternal", "id": "f4156:c2:m2"} {"signature": "def allByAge(self, cascadeFetch=False):", "body": "matchedKeys = self.getPrimaryKeys(sortByAge=True)if matchedKeys:return self.getMultiple(matchedKeys, cascadeFetch=cascadeFetch)return IRQueryableList([], mdl=self.mdl)", "docstring": "allByAge - Get the underlying objects which match the filter criteria, ordered oldest -> newest\n If you are doing a queue or just need the head/tail, consider .first() and .last() instead.\n\n\n@param cascadeFetch Default False, If True, all Foreign objects associated with this model\n will be fetched immediately. If False, foreign objects will be fetched on-access.\n\n@return - Objects of the Model instance associated with this query, sorted oldest->newest", "id": "f4156:c3:m10"} {"signature": "def __init__(self, mdl):", "body": "mdl.validateModel()self.mdl = mdlself.keyName = mdl.KEY_NAMEfields = mdl.FIELDSself.fields = mdl.FIELDSself.indexedFields = [fields[fieldName] for fieldName in mdl.INDEXED_FIELDS]self._connection = None", "docstring": "Internal constructor\n\n@param mdl - IndexedRedisModel implementer", "id": "f4156:c2:m0"} {"signature": "def hasSameValues(self, other, cascadeObject=True):", "body": "if self.FIELDS != other.FIELDS:return Falseoga = object.__getattribute__for field in self.FIELDS:thisVal = oga(self, field)otherVal = oga(other, field)if thisVal != otherVal:return Falseif cascadeObject is True and issubclass(field.__class__, IRForeignLinkFieldBase):if thisVal and thisVal.isFetched():if otherVal and otherVal.isFetched():theseForeign = thisVal.getObjs()othersForeign = otherVal.getObjs()for i in range(len(theseForeign)):if not theseForeign[i].hasSameValues(othersForeign[i]):return Falseelse:theseForeign = thisVal.getObjs()for i in range(len(theseForeign)):if theseForeign[i].hasUnsavedChanges(cascadeObjects=True):return Falseelse:if otherVal and otherVal.isFetched():othersForeign = otherVal.getObjs()for i in range(len(othersForeign)):if othersForeign[i].hasUnsavedChanges(cascadeObjects=True):return Falsereturn True", "docstring": "hasSameValues - Check if this and another model have the same fields and values.\n\nThis does NOT include id, so the models can have the same values but be different objects in the database.\n\n@param other - Another model\n\n@param cascadeObject default True - If True, foreign link values with changes will be considered a difference.\n Otherwise, only the immediate values are checked.\n\n@return - True if all fields have the same value, otherwise False", "id": "f4156:c1:m15"} {"signature": "def count(self):", "body": "conn = self._get_connection()numFilters = len(self.filters)numNotFilters = 
len(self.notFilters)if numFilters + numNotFilters == :return conn.scard(self._get_ids_key())if numNotFilters == :if numFilters == :(filterFieldName, filterValue) = self.filters[]return conn.scard(self._get_key_for_index(filterFieldName, filterValue))indexKeys = [self._get_key_for_index(filterFieldName, filterValue) for filterFieldName, filterValue in self.filters]return len(conn.sinter(indexKeys))notIndexKeys = [self._get_key_for_index(filterFieldName, filterValue) for filterFieldName, filterValue in self.notFilters]if numFilters == :return len(conn.sdiff(self._get_ids_key(), *notIndexKeys))indexKeys = [self._get_key_for_index(filterFieldName, filterValue) for filterFieldName, filterValue in self.filters]tempKey = self._getTempKey()pipeline = conn.pipeline()pipeline.sinterstore(tempKey, *indexKeys)pipeline.sdiff(tempKey, *notIndexKeys)pipeline.delete(tempKey)pks = pipeline.execute()[] return len(pks)", "docstring": "count - gets the number of records matching the filter criteria\n\nExample:\n theCount = Model.objects.filter(field1='value').count()", "id": "f4156:c3:m6"} {"signature": "def setDefaultRedisConnectionParams( connectionParams ):", "body": "global _defaultRedisConnectionParams_defaultRedisConnectionParams.clear()for key, value in connectionParams.items():_defaultRedisConnectionParams[key] = valueclearRedisPools()", "docstring": "setDefaultRedisConnectionParams - Sets the default parameters used when connecting to Redis.\n\n This should be the args to redis.Redis in dict (kwargs) form.\n\n @param connectionParams - A dict of connection parameters.\n Common keys are:\n\n host - hostname/ip of Redis server (default '127.0.0.1')\n port - Port number\t\t\t(default 6379)\n db - Redis DB number\t\t(default 0)\n\n Omitting any of those keys will ensure the default value listed is used.\n\n This connection info will be used by default for all connections to Redis, unless explicitly set otherwise.\n The common way to override is to define REDIS_CONNECTION_PARAMS on a model, or use AltConnectedModel = MyModel.connectAlt( PARAMS )\n\n Any omitted fields in these connection overrides will inherit the value from the global default.\n\n For example, if your global default connection params define host = 'example.com', port=15000, and db=0, \n and then one of your models has\n\n REDIS_CONNECTION_PARAMS = { 'db' : 1 }\n\n as an attribute, then that model's connection will inherit host='example.com' and port=15000 but override db and use db=1\n\n\n NOTE: Calling this function will clear the connection_pool attribute of all stored managed connections, disconnect all managed connections,\n and close-out the connection pool.\n It may not be safe to call this function while other threads are potentially hitting Redis (not that it would make sense anyway...)\n\n @see clearRedisPools for more info", "id": "f4156:m0"} {"signature": "def cascadeFetch(self):", "body": "IndexedRedisQuery._doCascadeFetch(self)", "docstring": "cascadeFetch - Immediately fetch all foreign links on this field, and all their links, etc.\n\n Normally, this would be done on access of the foreign members, or at .all() time by passing cascadeFetch=True into\n the fetch function\n\n e.g. 
MyModel.objects.filter(...).all(cascadeFetch=True)", "id": "f4156:c1:m25"} {"signature": "def allOnlyFields(self, fields, cascadeFetch=False):", "body": "matchedKeys = self.getPrimaryKeys()if matchedKeys:return self.getMultipleOnlyFields(matchedKeys, fields, cascadeFetch=cascadeFetch)return IRQueryableList([], mdl=self.mdl)", "docstring": "allOnlyFields - Get the objects which match the filter criteria, only fetching given fields.\n\n@param fields - List of fields to fetch\n\n@param cascadeFetch Default False, If True, all Foreign objects associated with this model\n will be fetched immediately. If False, foreign objects will be fetched on-access.\n\n\n@return - Partial objects with only the given fields fetched", "id": "f4156:c3:m11"} {"signature": "@classmethoddef validateModel(model):", "body": "if model == IndexedRedisModel:import reif re.match('', sys.argv[]):returnraise ValueError('')global validatedModelskeyName = model.KEY_NAMEif not keyName:raise InvalidModelException('' %(str(model.__name__), ) )if model in validatedModels:return TruefailedValidationStr = '' %(str(model.__name__), ) fieldSet = set(model.FIELDS)indexedFieldSet = set(model.INDEXED_FIELDS)if not fieldSet:raise InvalidModelException('' %(failedValidationStr,))if hasattr(model, ''):raise InvalidModelException('')if hasattr(model, ''):raise InvalidModelException('')newFields = []updatedFields = []mustUpdateFields = FalseforeignFields = []for thisField in fieldSet:if thisField == '':raise InvalidModelException('' %(failedValidationStr,))try:codecs.ascii_encode(thisField)except UnicodeDecodeError as e:raise InvalidModelException('' %(failedValidationStr, to_unicode(thisField), str(e)))if issubclass(thisField.__class__, IRForeignLinkFieldBase):foreignFields.append(thisField)if issubclass(thisField.__class__, IRField):newFields.append(thisField)else:mustUpdateFields = TruenewField = IRClassicField(thisField)newFields.append(newField)updatedFields.append(thisField)thisField = newFieldif str(thisField) == '':raise InvalidModelException('' %(failedValidationStr, str(type(thisField)), repr(thisField) ) )if thisField in indexedFieldSet and thisField.CAN_INDEX is False:raise InvalidModelException('' %(failedValidationStr, str(thisField.__class__.__name__), repr(thisField)))if hasattr(IndexedRedisModel, thisField) is True:raise InvalidModelException('' %(failedValidationStr, str(thisField)))if mustUpdateFields is True:model.FIELDS = newFieldsdeprecatedMessage('' %(model.__name__, repr(updatedFields)), '' + model.__name__)model.FIELDS = KeyList(model.FIELDS)if bool(indexedFieldSet - fieldSet):raise InvalidModelException('' %(failedValidationStr, str(list(indexedFieldSet - fieldSet)), ) )model.foreignFields = foreignFieldsvalidatedModels.add(model)return True", "docstring": "validateModel - Class method that validates a given model is implemented correctly. 
Will only be validated once, on first model instantiation.\n\n@param model - Implicit of own class\n\n@return - True\n\n@raises - InvalidModelException if there is a problem with the model, and the message contains relevant information.", "id": "f4156:c1:m29"} {"signature": "def last(self, cascadeFetch=False):", "body": "obj = NonematchedKeys = self.getPrimaryKeys(sortByAge=True)if matchedKeys:while matchedKeys and obj is None:obj = self.get(matchedKeys.pop(), cascadeFetch=cascadeFetch)return obj", "docstring": "Last - Returns the newest record (highest primary key) with current filters.\n This makes an efficient queue, as it only fetches a single object.\n\n\n@param cascadeFetch Default False, If True, all Foreign objects associated with this model\n will be fetched immediately. If False, foreign objects will be fetched on-access.\n\n@return - Instance of Model object, or None if no items match current filters", "id": "f4156:c3:m14"} {"signature": "def _rem_id_from_keys(self, pk, conn=None):", "body": "if conn is None:conn = self._get_connection()conn.srem(self._get_ids_key(), pk)", "docstring": "_rem_id_from_keys - Remove primary key from table\ninternal", "id": "f4156:c2:m6"} {"signature": "def getRedisPool(params):", "body": "global RedisPoolsglobal _defaultRedisConnectionParamsglobal _redisManagedConnectionParamsif not params:params = _defaultRedisConnectionParamsisDefaultParams = Trueelse:isDefaultParams = bool(params is _defaultRedisConnectionParams)if '' in params:return params['']hashValue = hashDictOneLevel(params)if hashValue in RedisPools:params[''] = RedisPools[hashValue]return RedisPools[hashValue]if not isDefaultParams:origParams = paramsparams = copy.copy(params)else:origParams = paramscheckAgain = Falseif '' not in params:if not isDefaultParams and '' in _defaultRedisConnectionParams:params[''] = _defaultRedisConnectionParams['']else:params[''] = ''checkAgain = Trueif '' not in params:if not isDefaultParams and '' in _defaultRedisConnectionParams:params[''] = _defaultRedisConnectionParams['']else:params[''] = checkAgain = Trueif '' not in params:if not isDefaultParams and '' in _defaultRedisConnectionParams:params[''] = _defaultRedisConnectionParams['']else:params[''] = checkAgain = Trueif not isDefaultParams:otherGlobalKeys = set(_defaultRedisConnectionParams.keys()) - set(params.keys())for otherKey in otherGlobalKeys:if otherKey == '':continueparams[otherKey] = _defaultRedisConnectionParams[otherKey]checkAgain = Trueif checkAgain:hashValue = hashDictOneLevel(params)if hashValue in RedisPools:params[''] = RedisPools[hashValue]return RedisPools[hashValue]connectionPool = redis.ConnectionPool(**params)origParams[''] = params[''] = connectionPoolRedisPools[hashValue] = connectionPoolorigParamsHash = hashDictOneLevel(origParams)if origParamsHash not in _redisManagedConnectionParams:_redisManagedConnectionParams[origParamsHash] = [origParams]elif origParams not in _redisManagedConnectionParams[origParamsHash]:_redisManagedConnectionParams[origParamsHash].append(origParams)return connectionPool", "docstring": "getRedisPool - Returns and possibly also creates a Redis connection pool\n based on the REDIS_CONNECTION_PARAMS passed in.\n\n The goal of this method is to keep a small connection pool rolling\n to each unique Redis instance, otherwise during network issues etc\n python-redis will leak connections and in short-order can exhaust\n all the ports on a system. 
There's probably also some minor\n performance gain in sharing Pools.\n\n Will modify \"params\", if \"host\" and/or \"port\" are missing, will fill\n them in with defaults, and prior to return will set \"connection_pool\"\n on params, which will allow immediate return on the next call,\n and allow access to the pool directly from the model object.\n\n @param params - REDIS_CONNECTION_PARAMS - kwargs to redis.Redis\n\n @return redis.ConnectionPool corresponding to this unique server.", "id": "f4156:m3"} {"signature": "def getOnlyFields(self, pk, fields, cascadeFetch=False):", "body": "conn = self._get_connection()key = self._get_key_for_id(pk)res = conn.hmget(key, fields)if type(res) != list or not len(res):return NoneobjDict = {}numFields = len(fields)i = anyNotNone = Falsewhile i < numFields:objDict[fields[i]] = res[i]if res[i] != None:anyNotNone = Truei += if anyNotNone is False:return NoneobjDict[''] = pkret = self._redisResultToObj(objDict)if cascadeFetch is True:self._doCascadeFetch(ret)return ret", "docstring": "getOnlyFields - Gets only certain fields from a particular primary key. For working on entire filter set, see allOnlyFields\n\n@param pk - Primary Key\n\n@param fields list - List of fields\n\n@param cascadeFetch Default False, If True, all Foreign objects associated with this model\n will be fetched immediately. If False, foreign objects will be fetched on-access.\n\n\n@return - Partial objects with only fields applied", "id": "f4156:c3:m20"} {"signature": "def pprint(self, stream=None):", "body": "pprint.pprint(self.asDict(includeMeta=True, forStorage=False, strKeys=True), stream=stream)", "docstring": "pprint - Pretty-print a dict representation of this object.\n\n@param stream - Either a stream to output, or None to default to sys.stdout", "id": "f4156:c1:m4"} {"signature": "def _get_key_for_index(self, indexedField, val):", "body": "if hasattr(indexedField, ''):val = indexedField.toIndex(val)else:val = self.fields[indexedField].toIndex(val)return ''.join( [INDEXED_REDIS_PREFIX, self.keyName, '', indexedField, '', val] )", "docstring": "_get_key_for_index - Returns the key name that would hold the indexes on a value\nInternal - does not validate that indexedFields is actually indexed. Trusts you. Don't let it down.\n\n@param indexedField - string of field name\n@param val - Value of field\n\n@return - Key name string, potentially hashed.", "id": "f4156:c2:m9"} {"signature": "def __getstate__(self):", "body": "myData = self.asDict(True, forStorage=False)myData[''] = self._origDatareturn myData", "docstring": "pickle uses this", "id": "f4156:c1:m26"} {"signature": "def deprecatedMessage(msg, key=None, printStack=False):", "body": "if __deprecatedMessagesEnabled is False:returnif not _alreadyWarned:sys.stderr.write('')if key is None:from .compat_str import tobyteskey = md5(tobytes(msg)).hexdigest()if key not in _alreadyWarned:_alreadyWarned[key] = Truesys.stderr.write('' %(msg, ))if printStack:sys.stderr.write('')curStack = traceback.extract_stack()sys.stderr.write('' + ''.join(traceback.format_list(curStack[:-])).replace('', '') + '')", "docstring": "deprecatedMessage - Print a deprecated message (unless they are toggled off). Will print a message only once (based on \"key\")\n\n@param msg - Deprecated message to possibly print\n\n@param key - A key that is specific to this message. 
\n If None is provided (default), one will be generated from the md5 of the message.\n However, better to save cycles and provide a unique key if at all possible.\n The decorator uses the function itself as the key.\n\n@param printStack Default False, if True print a stack trace", "id": "f4157:m1"} {"signature": "def hashDictOneLevel(myDict):", "body": "keys = [str(x) for x in myDict.keys()]keys.sort()lst = []for key in keys:lst.append(str(myDict[key]) + '')return ''.join(lst).__hash__()", "docstring": "A function which can generate a hash of a one-level \n dict containing strings (like REDIS_CONNECTION_PARAMS)\n\n@param myDict - Dict with string keys and values\n\n@return - Hash of myDict", "id": "f4158:m0"} {"signature": "def raw_from_delimited(msgs: DelimitedMsg) -> RawMsgs:", "body": "delim = _rindex(msgs, b'')return tuple(msgs[:delim]), tuple(msgs[delim + :])", "docstring": "\\\n From a message consisting of header frames, delimiter frame, and payload frames, return a tuple `(header, payload)`.\n The payload frames may be returned as sequences of bytes (raw) or as `Message`s.", "id": "f4166:m3"} {"signature": "def parse(self, data: RawMessage) -> Message:", "body": "try:return self.receiver.parse(data)except KeyError as err:raise UnknownCommandError from errexcept DecodeError as err:raise UnknownCommandError(f\"\") from err", "docstring": "\\\n Parses a binary protobuf message into a Message object.", "id": "f4168:c0:m1"} {"signature": "async def get_ltd_product(session, slug=None, url=None):", "body": "if url is None:url = ''.format(slug)async with session.get(url) as response:data = await response.json()return data", "docstring": "Get the product resource (JSON document) from the LSST the Docs API.\n\n Parameters\n ----------\n session : `aiohttp.ClientSession`\n Your application's aiohttp client session.\n See http://aiohttp.readthedocs.io/en/stable/client.html.\n slug : `str`, optional\n Slug identifying the product. This is the same as the subdomain.\n For example, ``'ldm-151'`` is the slug for ``ldm-151.lsst.io``.\n A full product URL can be provided instead, see ``url``.\n url : `str`, optional\n The full LTD Keeper URL for the product resource. For example,\n ``'https://keeper.lsst.codes/products/ldm-151'``. The ``slug``\n can be provided instead.\n\n Returns\n -------\n product : `dict`\n Product dataset. 
See\n https://ltd-keeper.lsst.io/products.html#get--products-(slug)\n for fields.", "id": "f4201:m1"} {"signature": "async def get_ltd_product_urls(session):", "body": "product_url = ''async with session.get(product_url) as response:data = await response.json()return data['']", "docstring": "Get URLs for LSST the Docs (LTD) products from the LTD Keeper API.\n\n Parameters\n ----------\n session : `aiohttp.ClientSession`\n Your application's aiohttp client session.\n See http://aiohttp.readthedocs.io/en/stable/client.html.\n\n Returns\n -------\n product_urls : `list`\n List of product URLs.", "id": "f4201:m0"} {"signature": "def ensure_pandoc(func):", "body": "logger = logging.getLogger(__name__)@functools.wraps(func)def _install_and_run(*args, **kwargs):try:result = func(*args, **kwargs)except OSError:message = \"\"logger.warning(message)pypandoc.download_pandoc(version='')logger.debug(\"\")result = func(*args, **kwargs)return resultreturn _install_and_run", "docstring": "Decorate a function that uses pypandoc to ensure that pandoc is\n installed if necessary.", "id": "f4202:m0"} {"signature": "def __call__(self, tex_source):", "body": "for linker in self._linkers:tex_source = linker(tex_source)return tex_source", "docstring": "r\"\"\"Convert citations in LaTeX source to Hyperref links.\n\n Parameters\n ----------\n tex_source : `str`\n LaTeX document source.\n\n Returns\n -------\n processed_tex : `str`\n LaTeX document source with all citation commands converted to\n ``\\hyperref`` commands.", "id": "f4206:c0:m1"} {"signature": "def remove_comments(tex_source):", "body": "return re.sub(r'', r'', tex_source, flags=re.M)", "docstring": "Delete latex comments from TeX source.\n\n Parameters\n ----------\n tex_source : str\n TeX source content.\n\n Returns\n -------\n tex_source : str\n TeX source without comments.", "id": "f4207:m0"} {"signature": "def remove_trailing_whitespace(tex_source):", "body": "return re.sub(r'', '', tex_source, flags=re.M)", "docstring": "Delete trailing whitespace from TeX source.\n\n Parameters\n ----------\n tex_source : str\n TeX source content.\n\n Returns\n -------\n tex_source : str\n TeX source without trailing whitespace.", "id": "f4207:m1"} {"signature": "def process_inputs(tex_source, root_dir=None):", "body": "logger = logging.getLogger(__name__)def _sub_line(match):\"\"\"\"\"\"fname = match.group('')if not fname.endswith(''):full_fname = \"\".join((fname, ''))else:full_fname = fnamefull_path = os.path.abspath(os.path.join(root_dir, full_fname))try:included_source = read_tex_file(full_path, root_dir=root_dir)except IOError:logger.error(\"\".format(full_path))raiseelse:return included_sourcetex_source = input_include_pattern.sub(_sub_line, tex_source)return tex_source", "docstring": "r\"\"\"Insert referenced TeX file contents (from ``\\input`` and ``\\include``\n commands) into the source.\n\n Parameters\n ----------\n tex_source : `str`\n TeX source where referenced source files will be found and inserted.\n root_dir : `str`, optional\n Name of the directory containing the TeX project's root file. Files\n referenced by TeX ``\\input`` and ``\\include`` commands are relative to\n this directory. 
If not set, the current working directory is assumed.\n\n Returns\n -------\n tex_source : `str`\n TeX source.\n\n See also\n --------\n `read_tex_file`\n Recommended API for reading a root TeX source file and inserting\n referenced files.", "id": "f4207:m3"} {"signature": "def _parse_doc_ref(self):", "body": "command = LatexCommand('',{'': '', '': True, '': ''})try:parsed = next(command.parse(self._tex))except StopIteration:self._logger.warning('')self._handle = Noneself._series = Noneself._serial = Nonereturnself._handle = parsed['']try:self._series, self._serial = self._handle.split('', )except ValueError:self._logger.warning('''', self._handle)self._series = Noneself._serial = None", "docstring": "Parse the document handle.\n\n Sets the ``_series``, ``_serial``, and ``_handle`` attributes.", "id": "f4210:c0:m29"} {"signature": "def _parse_abstract(self):", "body": "command = LatexCommand('',{'': '', '': True, '': ''})try:parsed = next(command.parse(self._tex))except StopIteration:self._logger.warning('')self._abstract = Nonereturntry:content = parsed['']except KeyError:self._logger.warning('')self._abstract = Nonereturncontent = content.strip()self._abstract = content", "docstring": "Parse the abstract from the TeX source.\n\n Sets the ``_abstract`` attribute.", "id": "f4210:c0:m31"} {"signature": "@propertydef title(self):", "body": "if not hasattr(self, ''):self._parse_title()return self._title", "docstring": "LaTeX-formatted document title (`str`).", "id": "f4210:c0:m5"} {"signature": "@propertydef html_abstract(self):", "body": "return self.format_abstract(format='', deparagraph=False,mathjax=False, smart=True)", "docstring": "HTML5-formatted document abstract (`str`).", "id": "f4210:c0:m12"} {"signature": "def _parse_revision_date(self):", "body": "doc_datetime = Noneif not self.is_draft:date_command = LatexCommand('',{'': '', '': True, '': ''})try:parsed = next(date_command.parse(self._tex))command_content = parsed[''].strip()except StopIteration:command_content = Noneself._logger.warning('')if command_content is not None and command_content != r'':try:doc_datetime = datetime.datetime.strptime(command_content,'')project_tz = timezone('')localized_datetime = project_tz.localize(doc_datetime)doc_datetime = localized_datetime.astimezone(pytz.utc)self._revision_datetime_source = ''except ValueError:self._logger.warning('''',command_content)if doc_datetime is None:content_extensions = ('', '', '', '', '')try:doc_datetime = get_content_commit_date(content_extensions,root_dir=self._root_dir)self._revision_datetime_source = ''except RuntimeError:self._logger.warning('''',self._root_dir)if doc_datetime is None:doc_datetime = pytz.utc.localize(datetime.datetime.now())self._revision_datetime_source = ''self._datetime = doc_datetime", "docstring": "r\"\"\"Parse the ``\\date`` command, falling back to getting the\n most recent Git commit date and the current datetime.\n\n Result is available from the `revision_datetime` attribute.", "id": "f4210:c0:m34"} {"signature": "def format_short_title(self, format='', deparagraph=True,mathjax=False, smart=True, extra_args=None):", "body": "if self.short_title is None:return Noneoutput_text = convert_lsstdoc_tex(self.short_title, '',deparagraph=deparagraph,mathjax=mathjax,smart=smart,extra_args=extra_args)return output_text", "docstring": "Get the document short title in the specified markup format.\n\n Parameters\n ----------\n format : `str`, optional\n Output format (such as ``'html5'`` or ``'plain'``).\n deparagraph : `bool`, optional\n Remove the 
paragraph tags from single paragraph content.\n mathjax : `bool`, optional\n Allow pandoc to use MathJax math markup.\n smart : `True`, optional\n Allow pandoc to create \"smart\" unicode punctuation.\n extra_args : `list`, optional\n Additional command line flags to pass to Pandoc. See\n `lsstprojectmeta.pandoc.convert.convert_text`.\n\n Returns\n -------\n output_text : `str`\n Converted content or `None` if the short title is not available in\n the document.", "id": "f4210:c0:m24"} {"signature": "@propertydef plain_title(self):", "body": "return self.format_title(format='', deparagraph=True,mathjax=False, smart=True)", "docstring": "Plain-text-formatted document title (`str`).", "id": "f4210:c0:m4"} {"signature": "def _parse_title(self):", "body": "command = LatexCommand('',{'': '', '': False, '': ''},{'': '', '': True, '': ''})try:parsed = next(command.parse(self._tex))except StopIteration:self._logger.warning('')self._title = Noneself._short_title = Noneself._title = parsed['']try:self._short_title = parsed['']except KeyError:self._logger.warning('')self._short_title = None", "docstring": "Parse the title from TeX source.\n\n Sets these attributes:\n\n - ``_title``\n - ``_short_title``", "id": "f4210:c0:m28"} {"signature": "def format_authors(self, format='', deparagraph=True, mathjax=False,smart=True, extra_args=None):", "body": "formatted_authors = []for latex_author in self.authors:formatted_author = convert_lsstdoc_tex(latex_author, format,deparagraph=deparagraph,mathjax=mathjax,smart=smart,extra_args=extra_args)formatted_author = formatted_author.strip()formatted_authors.append(formatted_author)return formatted_authors", "docstring": "Get the document authors in the specified markup format.\n\n Parameters\n ----------\n format : `str`, optional\n Output format (such as ``'html5'`` or ``'plain'``).\n deparagraph : `bool`, optional\n Remove the paragraph tags from single paragraph content.\n mathjax : `bool`, optional\n Allow pandoc to use MathJax math markup.\n smart : `True`, optional\n Allow pandoc to create \"smart\" unicode punctuation.\n extra_args : `list`, optional\n Additional command line flags to pass to Pandoc. 
See\n `lsstprojectmeta.pandoc.convert.convert_text`.\n\n Returns\n -------\n output_text : `list` of `str`\n Sequence of author names in the specified output markup format.", "id": "f4210:c0:m26"} {"signature": "@propertydef is_draft(self):", "body": "if not hasattr(self, ''):self._parse_documentclass()if '' in self._document_options:return Trueelse:return False", "docstring": "Document is a draft if ``'lsstdoc'`` is included in the\n documentclass options (`bool`).", "id": "f4210:c0:m18"} {"signature": "@propertydef plain_content(self):", "body": "return self.format_content(format='', mathjax=False, smart=True)", "docstring": "Plain-text-formatted document content (`str`).", "id": "f4210:c0:m2"} {"signature": "def format_abstract(self, format='', deparagraph=False, mathjax=False,smart=True, extra_args=None):", "body": "if self.abstract is None:return Noneabstract_latex = self._prep_snippet_for_pandoc(self.abstract)output_text = convert_lsstdoc_tex(abstract_latex, format,deparagraph=deparagraph,mathjax=mathjax,smart=smart,extra_args=extra_args)return output_text", "docstring": "Get the document abstract in the specified markup format.\n\n Parameters\n ----------\n format : `str`, optional\n Output format (such as ``'html5'`` or ``'plain'``).\n deparagraph : `bool`, optional\n Remove the paragraph tags from single paragraph content.\n mathjax : `bool`, optional\n Allow pandoc to use MathJax math markup.\n smart : `True`, optional\n Allow pandoc to create \"smart\" unicode punctuation.\n extra_args : `list`, optional\n Additional command line flags to pass to Pandoc. See\n `lsstprojectmeta.pandoc.convert.convert_text`.\n\n Returns\n -------\n output_text : `str`\n Converted content or `None` if the title is not available in\n the document.", "id": "f4210:c0:m25"} {"signature": "@propertydef plain_short_title(self):", "body": "return self.format_short_title(format='', deparagraph=True,mathjax=False, smart=True)", "docstring": "Plaintext-formatted document short title (`str`).", "id": "f4210:c0:m7"} {"signature": "@propertydef authors(self):", "body": "if not hasattr(self, ''):self._parse_author()return self._authors", "docstring": "LaTeX-formatted authors (`list` of `str`).", "id": "f4210:c0:m11"} {"signature": "def _load_bib_db(self):", "body": "command = LatexCommand('',{'': '', '': True, '': ''})try:parsed = next(command.parse(self._tex))bib_names = [n.strip() for n in parsed[''].split('')]except StopIteration:self._logger.warning('')bib_names = []custom_bib_names = [n for n in bib_namesif n not in KNOWN_LSSTTEXMF_BIB_NAMES]custom_bibs = []for custom_bib_name in custom_bib_names:custom_bib_path = os.path.join(os.path.join(self._root_dir),custom_bib_name + '')if not os.path.exists(custom_bib_path):self._logger.warning('',custom_bib_path)continuewith open(custom_bib_path, '') as file_handle:custom_bibs.append(file_handle.read())if len(custom_bibs) > :custom_bibtex = ''.join(custom_bibs)else:custom_bibtex = Nonedb = get_bibliography(bibtex=custom_bibtex)self._bib_db = db", "docstring": "r\"\"\"Load the BibTeX bibliography referenced by the document.\n\n This method is triggered by the `bib_db` attribute and populates the\n `_bib_db` private attribute.\n\n The ``\\bibliography`` command is parsed to identify the bibliographies\n referenced by the document.", "id": "f4210:c0:m33"} {"signature": "@propertydef abstract(self):", "body": "if not hasattr(self, ''):self._parse_abstract()return self._abstract", "docstring": "LaTeX-formatted abstract (`str`).", "id": "f4210:c0:m14"} {"signature": 
"@staticmethoddef _parse_whitespace_argument(source, name):", "body": "command_pattern = r'' + name + r''command_match = re.search(command_pattern, source)if command_match is not None:source = source[command_match.end():]pattern = r''match = re.search(pattern, source)if match is None:message = ('''')raise CommandParserError(message.format(name))content = match.group('')content.strip()return content", "docstring": "r\"\"\"Attempt to parse a single token on the first line of this source.\n\n This method is used for parsing whitespace-delimited arguments, like\n ``\\input file``. The source should ideally contain `` file`` along\n with a newline character.\n\n >>> source = 'Line 1\\n' r'\\input test.tex' '\\nLine 2'\n >>> LatexCommand._parse_whitespace_argument(source, 'input')\n 'test.tex'\n\n Bracket delimited arguments (``\\input{test.tex}``) are handled in\n the normal logic of `_parse_command`.", "id": "f4211:c0:m4"} {"signature": "def parse(self, source):", "body": "command_regex = self._make_command_regex(self.name)for match in re.finditer(command_regex, source):self._logger.debug(match)start_index = match.start()yield self._parse_command(source, start_index)", "docstring": "Parse command content from the LaTeX source.\n\n Parameters\n ----------\n source : `str`\n The full source of the tex document.\n\n Yields\n ------\n parsed_command : `ParsedCommand`\n Yields parsed commands instances for each occurence of the command\n in the source.", "id": "f4211:c0:m1"} {"signature": "def get_authoryear_from_entry(entry, paren=False):", "body": "def _format_last(person):\"\"\"\"\"\"return ''.join([n.strip('') for n in person.last_names])if len(entry.persons['']) > :persons = entry.persons['']elif len(entry.persons['']) > :persons = entry.persons['']else:raise AuthorYearErrortry:year = entry.fields['']except KeyError:raise AuthorYearErrorif paren and len(persons) == :template = ''return template.format(author=_format_last(persons[]),year=year)elif not paren and len(persons) == :template = ''return template.format(author=_format_last(persons[]),year=year)elif paren and len(persons) == :template = ''return template.format(author1=_format_last(persons[]),author2=_format_last(persons[]),year=year)elif not paren and len(persons) == :template = ''return template.format(author1=_format_last(persons[]),author2=_format_last(persons[]),year=year)elif not paren and len(persons) > :template = ''return template.format(author=_format_last(persons[]),year=year)elif paren and len(persons) > :template = ''return template.format(author=_format_last(persons[]),year=year)", "docstring": "Get and format author-year text from a pybtex entry to emulate\n natbib citations.\n\n Parameters\n ----------\n entry : `pybtex.database.Entry`\n A pybtex bibliography entry.\n parens : `bool`, optional\n Whether to add parentheses around the year. 
Default is `False`.\n\n Returns\n -------\n authoryear : `str`\n The author-year citation text.", "id": "f4212:m5"} {"signature": "def get_lsst_bibtex(bibtex_filenames=None):", "body": "logger = logging.getLogger(__name__)if bibtex_filenames is None:bibtex_names = KNOWN_LSSTTEXMF_BIB_NAMESelse:bibtex_names = []for filename in bibtex_filenames:name = os.path.basename(os.path.splitext(filename)[])if name not in KNOWN_LSSTTEXMF_BIB_NAMES:logger.warning('',name)continuebibtex_names.append(name)uncached_names = [name for name in bibtex_namesif name not in _LSSTTEXMF_BIB_CACHE]if len(uncached_names) > :loop = asyncio.get_event_loop()future = asyncio.ensure_future(_download_lsst_bibtex(uncached_names))loop.run_until_complete(future)for name, text in zip(bibtex_names, future.result()):_LSSTTEXMF_BIB_CACHE[name] = textreturn {name: _LSSTTEXMF_BIB_CACHE[name] for name in bibtex_names}", "docstring": "Get content of lsst-texmf bibliographies.\n\n BibTeX content is downloaded from GitHub (``master`` branch of\n https://github.com/lsst/lsst-texmf) or retrieved from an in-memory cache.\n\n Parameters\n ----------\n bibtex_filenames : sequence of `str`, optional\n List of lsst-texmf BibTeX files to retrieve. These can be the filenames\n of lsst-bibtex files (for example, ``['lsst.bib', 'lsst-dm.bib']``)\n or names without an extension (``['lsst', 'lsst-dm']``). The default\n (recommended) is to get *all* lsst-texmf bibliographies:\n\n .. code-block:: python\n\n ['lsst', 'lsst-dm', 'refs', 'books', 'refs_ads']\n\n Returns\n -------\n bibtex : `dict`\n Dictionary with keys that are bibtex file names (such as ``'lsst'``,\n ``'lsst-dm'``). Values are the corresponding bibtex file content\n (`str`).", "id": "f4212:m2"} {"signature": "def get_installation_token(installation_id, integration_jwt):", "body": "api_root = ''url = ''.format(api_root=api_root,id_=installation_id)headers = {'': ''.format(integration_jwt.decode('')),'': ''}resp = requests.post(url, headers=headers)resp.raise_for_status()return resp.json()", "docstring": "Create a GitHub token for an integration installation.\n\n Parameters\n ----------\n installation_id : `int`\n Installation ID. This is available in the URL of the integration's\n **installation** ID.\n integration_jwt : `bytes`\n The integration's JSON Web Token (JWT). You can create this with\n `create_jwt`.\n\n Returns\n -------\n token_obj : `dict`\n GitHub token object. Includes the fields:\n\n - ``token``: the token string itself.\n - ``expires_at``: date time string when the token expires.\n\n Example\n -------\n The typical workflow for authenticating to an integration installation is:\n\n .. 
{"signature": "def get_installation_token(installation_id, integration_jwt):", "body": "api_root = ''url = ''.format(api_root=api_root,id_=installation_id)headers = {'': ''.format(integration_jwt.decode('')),'': ''}resp = requests.post(url, headers=headers)resp.raise_for_status()return resp.json()", "docstring": "Create a GitHub token for an integration installation.\n\n Parameters\n ----------\n installation_id : `int`\n Installation ID. This is available in the URL of the integration's\n **installation** page.\n integration_jwt : `bytes`\n The integration's JSON Web Token (JWT). You can create this with\n `create_jwt`.\n\n Returns\n -------\n token_obj : `dict`\n GitHub token object. Includes the fields:\n\n - ``token``: the token string itself.\n - ``expires_at``: date time string when the token expires.\n\n Example\n -------\n The typical workflow for authenticating to an integration installation is:\n\n .. code-block:: python\n\n from dochubadapter.github import auth\n jwt = auth.create_jwt(integration_id, private_key_path)\n token_obj = auth.get_installation_token(installation_id, jwt)\n print(token_obj['token'])\n\n Notes\n -----\n See\n https://developer.github.com/early-access/integrations/authentication/#as-an-installation\n for more information", "id": "f4213:m0"} {"signature": "def create_jwt(integration_id, private_key_path):", "body": "integration_id = int(integration_id)with open(private_key_path, '') as f:cert_bytes = f.read()now = datetime.datetime.now()expiration_time = now + datetime.timedelta(minutes=)payload = {'': int(now.timestamp()),'': int(expiration_time.timestamp()),'': integration_id}return jwt.encode(payload, cert_bytes, algorithm='')", "docstring": "Create a JSON Web Token to authenticate a GitHub Integration or\n installation.\n\n Parameters\n ----------\n integration_id : `int`\n Integration ID. This is available from the GitHub integration's\n homepage.\n private_key_path : `str`\n Path to the integration's private key (a ``.pem`` file).\n\n Returns\n -------\n jwt : `bytes`\n JSON Web Token that is good for 9 minutes.\n\n Notes\n -----\n The JWT is encoded with the RS256 algorithm. It includes a payload with\n fields:\n\n - ``'iat'``: The current time, as an `int` timestamp.\n - ``'exp'``: Expiration time, as an `int` timestamp. The expiration\n time is set to 9 minutes in the future (maximum allowance is 10 minutes).\n - ``'iss'``: The integration ID (`int`).\n\n For more information, see\n https://developer.github.com/early-access/integrations/authentication/.", "id": "f4213:m1"} {"signature": "def make_raw_content_url(repo_slug, git_ref, file_path):", "body": "if isinstance(repo_slug, RepoSlug):slug_str = repo_slug.fullelse:slug_str = repo_slugif file_path.startswith(''):file_path = file_path.lstrip('')template = ''return template.format(slug=slug_str,git_ref=git_ref,path=file_path)", "docstring": "Make a raw content (raw.githubusercontent.com) URL to a file.\n\n Parameters\n ----------\n repo_slug : `str` or `RepoSlug`\n The repository slug, formatted as either a `str` (``'owner/name'``)\n or a `RepoSlug` object (created by `parse_repo_slug_from_url`).\n git_ref : `str`\n The git ref: a branch name, commit hash, or tag name.\n file_path : `str`\n The POSIX path of the file in the repository tree.", "id": "f4215:m1"} {"signature": "def normalize_repo_root_url(url):", "body": "if url.endswith(''):url = url[:-]return url", "docstring": "Normalize a GitHub URL into the root repository URL.\n\n Parameters\n ----------\n url : `str`\n A GitHub URL\n\n Returns\n -------\n url : `str`\n Normalized URL of a GitHub repository.\n\n Examples\n --------\n >>> normalize_repo_root_url('https://github.com/lsst/LDM-151.git')\n 'https://github.com/lsst/LDM-151'", "id": "f4215:m2"}
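A hedged sketch of ``make_raw_content_url``: the template string is masked in the record above, so the output shown is an assumption based on the raw.githubusercontent.com pattern named in the docstring.

    # Leading slashes on file_path are stripped before formatting.
    url = make_raw_content_url('lsst/LDM-151', 'master', '/README.rst')
    # expected form:
    # 'https://raw.githubusercontent.com/lsst/LDM-151/master/README.rst'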
{"signature": "async def process_ltd_doc_products(session, product_urls, github_api_token,mongo_collection=None):", "body": "tasks = [asyncio.ensure_future(process_ltd_doc(session, github_api_token,product_url,mongo_collection=mongo_collection))for product_url in product_urls]await asyncio.gather(*tasks)", "docstring": "Run a pipeline to extract, transform, and load metadata for\n multiple LSST the Docs-hosted projects.\n\n Parameters\n ----------\n session : `aiohttp.ClientSession`\n Your application's aiohttp client session.\n See http://aiohttp.readthedocs.io/en/stable/client.html.\n product_urls : `list` of `str`\n List of LSST the Docs product URLs.\n github_api_token : `str`\n A GitHub personal API token. See the `GitHub personal access token\n guide`_.\n mongo_collection : `motor.motor_asyncio.AsyncIOMotorCollection`, optional\n MongoDB collection. This should be the common MongoDB collection for\n LSST projectmeta JSON-LD records.", "id": "f4218:m3"} {"signature": "def main():", "body": "parser = argparse.ArgumentParser(description='''''''')parser.add_argument('',dest='',help='''''')parser.add_argument('',help='')parser.add_argument('',help='''''')parser.add_argument('',default='',help='')parser.add_argument('',default='',help='')args = parser.parse_args()stream_handler = logging.StreamHandler()stream_formatter = logging.Formatter('')stream_handler.setFormatter(stream_formatter)root_logger = logging.getLogger()root_logger.addHandler(stream_handler)root_logger.setLevel(logging.WARNING)app_logger = logging.getLogger('')app_logger.setLevel(logging.DEBUG)if args.mongodb_uri is not None:mongo_client = AsyncIOMotorClient(args.mongodb_uri, ssl=True)collection = mongo_client[args.mongodb_db][args.mongodb_collection]else:collection = Noneloop = asyncio.get_event_loop()if args.ltd_product_url is not None:loop.run_until_complete(run_single_ltd_doc(args.ltd_product_url,args.github_token,collection))else:loop.run_until_complete(run_bulk_etl(args.github_token,collection))", "docstring": "Command line entrypoint to reduce technote metadata.", "id": "f4218:m0"} {"signature": "def _encode_datetime(self, dt):", "body": "if dt.tzinfo is None:dt = dt.replace(tzinfo=datetime.timezone.utc)dt = dt.astimezone(datetime.timezone.utc)return dt.strftime('')", "docstring": "Encode a datetime in the format '%Y-%m-%dT%H:%M:%SZ'.\n\n The datetime can be naive (doesn't have timezone info) or aware\n (it does have a tzinfo attribute set). Regardless, the datetime\n is transformed into UTC.", "id": "f4219:c0:m1"} {"signature": "def decode_jsonld(jsonld_text):", "body": "decoder = json.JSONDecoder(object_pairs_hook=_decode_object_pairs)return decoder.decode(jsonld_text)", "docstring": "Decode a JSON-LD dataset, including decoding datetime\n strings into `datetime.datetime` objects.\n\n Parameters\n ----------\n jsonld_text : `str`\n The JSON-LD dataset encoded as a string.\n\n Returns\n -------\n jsonld_dataset : `dict`\n A JSON-LD dataset.\n\n Examples\n --------\n\n >>> doc = '{\"dt\": \"2018-01-01T12:00:00Z\"}'\n >>> decode_jsonld(doc)\n {'dt': datetime.datetime(2018, 1, 1, 12, 0, tzinfo=datetime.timezone.utc)}", "id": "f4219:m1"} {"signature": "def default(self, obj):", "body": "if isinstance(obj, datetime.datetime):return self._encode_datetime(obj)return json.JSONEncoder.default(self, obj)", "docstring": "Encode values as JSON strings.\n\n This method overrides the default implementation from\n `json.JSONEncoder`.", "id": "f4219:c0:m0"}
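The encoder method above belongs to a ``json.JSONEncoder`` subclass whose class name is masked in this record; ``JSONLDEncoder`` below is a hypothetical stand-in, used only to sketch the round trip with ``decode_jsonld``.

    import datetime
    import json

    naive = datetime.datetime(2018, 1, 1, 12, 0)         # no tzinfo; encoded as UTC
    text = json.dumps({'dt': naive}, cls=JSONLDEncoder)  # '{"dt": "2018-01-01T12:00:00Z"}'
    decode_jsonld(text)['dt'].tzinfo                     # datetime.timezone.utc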
{"signature": "def reduce_technote_metadata(github_url, metadata, github_data,ltd_product_data):", "body": "repo_slug = parse_repo_slug_from_url(github_url)jsonld = {'': [\"\"\"\",\"\"],'': ['', ''],'': github_url}if '' in metadata:url = metadata['']elif '' in ltd_product_data:url = ltd_product_data['']else:raise RuntimeError(''''.format(github_url))jsonld[''] = urljsonld[''] = urlif '' in metadata and '' in metadata:jsonld[''] = ''.format(**metadata)else:raise RuntimeError(''.format(github_url))if '' in metadata:jsonld[''] = metadata['']if '' in metadata:jsonld[''] = metadata['']if '' in metadata:jsonld[''] = [{'': '', '': author_name}for author_name in metadata['']]if '' in metadata:jsonld[''] = datetime.datetime.strptime(metadata[''],'')else:try:_repo_data = github_data['']['']_master_data = _repo_data['']jsonld[''] = datetime.datetime.strptime(_master_data[''][''],'')except KeyError:passtry:_license_data = github_data['']['']['']_spdxId = _license_data['']if _spdxId is not None:_spdx_url = ''.format(_spdxId)jsonld[''] = _spdx_urlexcept KeyError:passtry:_master_data = github_data['']['']['']_files = _master_data['']['']['']for _node in _files:filename = _node['']normalized_filename = filename.lower()if normalized_filename.startswith(''):readme_url = make_raw_content_url(repo_slug, '',filename)jsonld[''] = readme_urlbreakexcept KeyError:passtravis_url = ''.format(repo_slug.full)jsonld[''] = travis_urlreturn jsonld", "docstring": "Reduce a technote project's metadata from multiple sources into a\n single JSON-LD resource.\n\n Parameters\n ----------\n github_url : `str`\n URL of the technote's GitHub repository.\n metadata : `dict`\n The parsed contents of ``metadata.yaml`` found in a technote's\n repository.\n github_data : `dict`\n The contents of the ``technote_repo`` GitHub GraphQL API query.\n ltd_product_data : `dict`\n JSON dataset for the technote corresponding to the\n ``/products/`` of LTD Keeper.\n\n Returns\n -------\n metadata : `dict`\n JSON-LD-formatted dictionary.\n\n .. _GitHub personal access token guide: https://ls.st/41d", "id": "f4221:m1"} {"signature": "async def _upload_to_mongodb(collection, jsonld):", "body": "document = {'': jsonld}query = {'': jsonld['']}await collection.update(query, document, upsert=True, multi=False)", "docstring": "Upsert the technote resource into the projectmeta MongoDB collection.\n\n Parameters\n ----------\n collection : `motor.motor_asyncio.AsyncIOMotorCollection`\n The MongoDB collection.\n jsonld : `dict`\n The JSON-LD document that represents the document resource.", "id": "f4222:m1"} {"signature": "def read_git_commit_timestamp(repo_path=None, repo=None):", "body": "if repo is None:repo = git.repo.base.Repo(path=repo_path,search_parent_directories=True)head_commit = repo.head.commitreturn head_commit.committed_datetime", "docstring": "Obtain the timestamp from the current head commit of a Git repository.\n\n Parameters\n ----------\n repo_path : `str`, optional\n Path to the Git repository. Leave as `None` to use the current working\n directory.\n\n Returns\n -------\n commit_timestamp : `datetime.datetime`\n The datetime of the head commit.", "id": "f4223:m0"}
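A minimal sketch of ``read_git_commit_timestamp``, assuming GitPython is installed and the current working directory is inside a Git repository:

    ts = read_git_commit_timestamp()  # datetime of the HEAD commit
    print(ts.isoformat())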
{"signature": "def read_git_commit_timestamp_for_file(filepath, repo_path=None, repo=None):", "body": "logger = logging.getLogger(__name__)if repo is None:repo = git.repo.base.Repo(path=repo_path,search_parent_directories=True)repo_path = repo.working_tree_dirhead_commit = repo.head.commitlogger.debug('', repo_path)filepath = os.path.relpath(os.path.abspath(filepath),start=repo_path)logger.debug('', filepath)for commit in head_commit.iter_items(repo,head_commit,[filepath],skip=):return commit.committed_datetimeraise IOError(''.format(filepath))", "docstring": "Obtain the timestamp for the most recent commit to a given file in a\n Git repository.\n\n Parameters\n ----------\n filepath : `str`\n Absolute or repository-relative path for a file.\n repo_path : `str`, optional\n Path to the Git repository. Leave as `None` to use the current working\n directory or if a ``repo`` argument is provided.\n repo : `git.Repo`, optional\n A `git.Repo` instance.\n\n Returns\n -------\n commit_timestamp : `datetime.datetime`\n The datetime of the most recent commit to the given file.\n\n Raises\n ------\n IOError\n Raised if the ``filepath`` does not exist in the Git repository.", "id": "f4223:m1"} {"signature": "def __init__(self, path_to_tagger):", "body": "self._path_to_tagger = path_to_taggerself._dir_to_tagger = os.path.dirname(path_to_tagger)self._tagger = subprocess.Popen(''+os.path.basename(path_to_tagger),cwd=self._dir_to_tagger,stdin=subprocess.PIPE, stdout=subprocess.PIPE)", "docstring": "Arguments:\n- `path_to_tagger`: path to the tagger executable, which is started as a\n subprocess with piped stdin and stdout.", "id": "f4225:c0:m0"} {"signature": "def count(s, limit=):", "body": "return _gen(parse(s), limit, count=True)", "docstring": "Counts all matching strings to a given regular expression\n\n :param s: Regular expression\n :type s: str\n :param limit: Range limit\n :type limit: int\n :rtype: int\n :returns: number of matching strings", "id": "f4227:m14"} {"signature": "def _gen(d, limit=, count=False, grouprefs=None):", "body": "if grouprefs is None:grouprefs = {}ret = ['']strings = literal = Falsefor i in d:if i[] == sre_parse.IN:subs = _in(i[])if count:strings = (strings or ) * len(subs)ret = comb(ret, subs)elif i[] == sre_parse.LITERAL:literal = Trueret = mappend(ret, unichr(i[]))elif i[] == sre_parse.CATEGORY:subs = CATEGORIES.get(i[], [''])if count:strings = (strings or ) * len(subs)ret = comb(ret, subs)elif i[] == sre_parse.ANY:subs = CATEGORIES['']if count:strings = (strings or ) * len(subs)ret = comb(ret, subs)elif i[] == sre_parse.MAX_REPEAT or i[] == sre_parse.MIN_REPEAT:items = list(i[][])if i[][] + - i[][] >= limit:r1 = i[][]r2 = i[][] + limitelse:r1 = i[][]r2 = i[][] + ran = range(r1, r2)if count:branch_count = for p in ran:branch_count += pow(_gen(items, limit, True, grouprefs), p)strings = (strings or ) * branch_countret = prods(ret, ran, items, limit, grouprefs)elif i[] == sre_parse.BRANCH:if count:for x in i[][]:strings += _gen(x, limit, True, grouprefs) or ret = concit(ret, i[][], limit, grouprefs)elif i[] == sre_parse.SUBPATTERN or i[] == sre_parse.ASSERT:subexpr = i[][]if IS_PY36_OR_GREATER and i[] == sre_parse.SUBPATTERN:subexpr = i[][]if count:strings = (strings or ) * (sum(ggen([], _gen, subexpr, limit=limit, count=True, grouprefs=grouprefs)) or )ret = ggen(ret, _gen, subexpr, limit=limit, count=False, grouprefs=grouprefs, groupref=i[][])elif i[] == sre_parse.AT:continueelif i[] == sre_parse.NOT_LITERAL:subs = list(CATEGORIES[''])if unichr(i[]) in subs:subs.remove(unichr(i[]))if count:strings = (strings or ) * len(subs)ret = comb(ret, subs)elif i[] == sre_parse.GROUPREF:ret = dappend(ret, grouprefs, i[])elif i[] == sre_parse.ASSERT_NOT:passelse:print('' + repr(i))if count:if strings == and literal:inc = Truefor i in d:if i[] not in (sre_parse.AT, sre_parse.LITERAL):inc = Falseif inc:strings = return stringsreturn ret", "docstring": "Generate all strings matching the parsed pattern ``d``; when ``count`` is\nTrue, return the number of matching strings instead.", "id": "f4227:m8"} {"signature": "def generate(s, limit=):", "body": "return _gen(parse(s), limit)", "docstring": "Creates a generator that generates all matching strings to a given regular expression\n\n :param s: Regular expression\n :type s: str\n :param limit: Range limit\n :type limit: int\n :returns: string generator object", "id": "f4227:m13"}
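A short sketch of the generation API above (``generate`` and ``count``); the expected results assume the default repeat ``limit`` and exrex-style semantics:

    list(generate('[ab]{2}'))  # -> ['aa', 'ab', 'ba', 'bb']
    count('[0-9]')             # -> 10
    count('(foo|bar)baz')      # -> 2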
{"signature": "def sre_to_string(sre_obj, paren=True):", "body": "ret = u''for i in sre_obj:if i[] == sre_parse.IN:prefix = ''if len(i[]) and i[][][] == sre_parse.NEGATE:prefix = ''ret += u''.format(prefix, sre_to_string(i[], paren=paren))elif i[] == sre_parse.LITERAL:u = unichr(i[])ret += u if u not in sre_parse.SPECIAL_CHARS else ''.format(u)elif i[] == sre_parse.CATEGORY:ret += REVERSE_CATEGORIES[i[]]elif i[] == sre_parse.ANY:ret += ''elif i[] == sre_parse.BRANCH:parts = [sre_to_string(x, paren=paren) for x in i[][]]if not any(parts):continueif i[][]:if len(parts) == :paren = Falseprefix = ''else:prefix = ''branch = ''.join(parts)if paren:ret += ''.format(prefix, branch)else:ret += ''.format(branch)elif i[] == sre_parse.SUBPATTERN:subexpr = i[][]if IS_PY36_OR_GREATER and i[] == sre_parse.SUBPATTERN:subexpr = i[][]if i[][]:ret += ''.format(sre_to_string(subexpr, paren=False))else:ret += ''.format(sre_to_string(subexpr, paren=paren))elif i[] == sre_parse.NOT_LITERAL:ret += ''.format(unichr(i[]))elif i[] == sre_parse.MAX_REPEAT:if i[][] == i[][]:range_str = ''.format(i[][])else:if i[][] == and i[][] - i[][] == sre_parse.MAXREPEAT:range_str = ''elif i[][] == and i[][] - i[][] == sre_parse.MAXREPEAT - :range_str = ''else:range_str = ''.format(i[][], i[][])ret += sre_to_string(i[][], paren=paren) + range_strelif i[] == sre_parse.MIN_REPEAT:if i[][] == and i[][] == sre_parse.MAXREPEAT:range_str = ''elif i[][] == and i[][] == sre_parse.MAXREPEAT:range_str = ''elif i[][] == sre_parse.MAXREPEAT:range_str = ''.format(i[][])else:range_str = ''.format(i[][], i[][])ret += sre_to_string(i[][], paren=paren) + range_strelif i[] == sre_parse.GROUPREF:ret += ''.format(i[])elif i[] == sre_parse.AT:if i[] == sre_parse.AT_BEGINNING:ret += ''elif i[] == sre_parse.AT_END:ret += ''elif i[] == sre_parse.NEGATE:passelif i[] == sre_parse.RANGE:ret += ''.format(unichr(i[][]), unichr(i[][]))elif i[] == sre_parse.ASSERT:if i[][]:ret += ''.format(sre_to_string(i[][], paren=False))else:ret += ''.format(sre_to_string(i[][], paren=paren))elif i[] == sre_parse.ASSERT_NOT:passelse:print('' % str(i))return ret", "docstring": "sre_parse object to string\n\n :param sre_obj: Output of sre_parse.parse()\n :type sre_obj: list\n :rtype: str", "id": "f4227:m10"} {"signature": "def _update_settings(self, new_settings, enforce_helpstring=True):", "body": "for raw_setting_name, value in six.iteritems(new_settings):setting_name = raw_setting_name.replace(\"\", \"\")setting_already_exists = setting_name in self._instance_settingsvalue_is_list_len_2 = isinstance(value, list) and len(value) == treat_as_tuple = not setting_already_exists and value_is_list_len_2if isinstance(value, tuple) or treat_as_tuple:self._instance_settings[setting_name] = valueelse:if setting_name not in self._instance_settings:if enforce_helpstring:msg = \"\"raise InternalCashewException(msg % setting_name)else:self._instance_settings[setting_name] = ('', value,)else:orig = self._instance_settings[setting_name]self._instance_settings[setting_name] = (orig[], value,)", "docstring": "This method does the work of updating settings. 
Can be called with\nenforce_helpstring = False, which you may want if allowing end users to\nadd arbitrary metadata via the settings system.\n\nPrefer update_settings (without the leading underscore) in code to do the\nright thing and always have docstrings.", "id": "f4241:c0:m11"} {"signature": "def standardize_alias_or_aliases(cls, alias_or_aliases):", "body": "if isinstance(alias_or_aliases, string_types):return [alias_or_aliases]else:return alias_or_aliases", "docstring": "Make sure we don't attempt to iterate over an alias string thinking\nit's an array.", "id": "f4241:c1:m4"} {"signature": "def settings_and_attributes(self):", "body": "attrs = self.setting_values()attrs.update(self.__dict__)skip = [\"\", \"\"]for a in skip:del attrs[a]return attrs", "docstring": "Return a combined dictionary of setting values and attribute values.", "id": "f4241:c0:m12"} {"signature": "def setting_values(self, skip=None):", "body": "if not skip:skip = []return dict((k, v[])for k, v in six.iteritems(self._instance_settings)if not k in skip)", "docstring": "Returns dict of all setting values (removes the helpstrings).", "id": "f4241:c0:m9"} {"signature": "def check_docstring(cls):", "body": "docstring = inspect.getdoc(cls)if not docstring:breadcrumbs = \"\".join(t.__name__ for t in inspect.getmro(cls)[:-][::-])msg = \"\"args = (cls.__name__, breadcrumbs, cls.__module__)raise InternalCashewException(msg % args)max_line_length = cls._class_settings.get('')if max_line_length:for i, line in enumerate(docstring.splitlines()):if len(line) > max_line_length:msg = \"\" args = (i, cls.__name__, len(line) - max_line_length)raise Exception(msg % args)return docstring", "docstring": "Asserts that the class has a docstring, returning it if successful.", "id": "f4241:c1:m7"} {"signature": "def expect(self, bytes, stream=None):", "body": "if stream is None:stream = self.std_out", "docstring": "Block until given bytes appear in the stream.", "id": "f4246:c1:m6"} {"signature": "@propertydef pid(self):", "body": "return self._process.pid", "docstring": "The process' PID.", "id": "f4246:c1:m4"} {"signature": "def block(self):", "body": "self._status_code = self._process.wait()", "docstring": "Blocks until command finishes. 
Returns Response instance.", "id": "f4246:c1:m8"} {"signature": "def run(command, data=None, timeout=None, kill_timeout=None, env=None, cwd=None):", "body": "command = expand_args(command)history = []for c in command:if len(history):data = history[-].std_out[:*]cmd = Command(c)try:out, err = cmd.run(data, timeout, kill_timeout, env, cwd)status_code = cmd.returncodeexcept OSError as e:out, err = '', u\"\".join([e.strerror, traceback.format_exc()])status_code = r = Response(process=cmd)r.command = cr.std_out = outr.std_err = errr.status_code = status_codehistory.append(r)r = history.pop()r.history = historyreturn r", "docstring": "Executes a given command and returns Response.\n\n Blocks until process is complete, or timeout is reached.", "id": "f4246:m4"} {"signature": "def send(self, str, end=''):", "body": "return self._process.stdin.write(str+end)", "docstring": "Sends a line to std_in.", "id": "f4246:c1:m7"} {"signature": "def user_line(self, frame):", "body": "self.get_stack_data(frame, None, '')", "docstring": "This function is called when we stop or break at this line.", "id": "f4257:c0:m3"} {"signature": "def user_return(self, frame, return_value):", "body": "self.get_stack_data(frame, None, '')", "docstring": "This function is called when a return trap is set here.", "id": "f4257:c0:m4"} {"signature": "def get_type_info(obj):", "body": "if isinstance(obj, primitive_types):return ('', type(obj).__name__)if isinstance(obj, sequence_types):return ('', type(obj).__name__)if isinstance(obj, array_types):return ('', type(obj).__name__)if isinstance(obj, key_value_types):return ('', type(obj).__name__)if isinstance(obj, types.ModuleType):return ('', type(obj).__name__)if isinstance(obj, (types.FunctionType, types.MethodType)):return ('', type(obj).__name__)if isinstance(obj, type):if hasattr(obj, ''):return ('', obj.__name__)if isinstance(type(obj), type):if hasattr(obj, ''):cls_name = type(obj).__name__if cls_name == '':cls_name = obj.__name__return ('', ''.format(cls_name))if cls_name == '':cls_name = obj.__class__.__name__return ('', ''.format(cls_name))return ('', type(obj).__name__)", "docstring": "Get type information for a Python object\n\n Args:\n obj: The Python object\n\n Returns:\n tuple: (object type \"category\", object type name)", "id": "f4259:m3"} {"signature": "@contextmanagerdef redirect_stdout(new_stdout):", "body": "old_stdout, sys.stdout = sys.stdout, new_stdouttry:yield Nonefinally:sys.stdout = old_stdout", "docstring": "Redirect the stdout\n\n Args:\n new_stdout (io.StringIO): New stdout to use instead", "id": "f4259:m1"} {"signature": "def filter_dict(d, exclude):", "body": "ret = {}for key, value in d.items():if key not in exclude:ret.update({key: value})return ret", "docstring": "Return a new dict with specified keys excluded from the original dict\n\n Args:\n d (dict): original dict\n exclude (list): The keys that are excluded", "id": "f4259:m0"} {"signature": "def which(program):", "body": "if os.path.split(program)[]:program_path = find_exe(program)if program_path:return program_pathelse:for path in get_path_list():program_path = find_exe(os.path.join(path, program))if program_path:return program_pathreturn None", "docstring": "Identify the location of an executable file.", "id": "f4266:m0"} {"signature": "def print_unicode(text):", "body": "if sys.version_info[] < :text = text.encode('')print(text)", "docstring": "Print in a portable manner.", "id": "f4269:m3"}
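An envoy-style usage sketch for ``run`` above. The Response attributes follow the record body (``std_out``, ``std_err``, ``status_code``, ``history``); the pipe syntax is an assumption based on ``expand_args`` splitting chained commands, so treat this as illustrative rather than documented:

    r = run('echo hello')  # single command
    r.status_code          # -> 0
    r.std_out              # -> 'hello\n'
    r = run('echo hello | tr a-z A-Z')  # each stage's stdout feeds the next
    r.history              # Response objects for the earlier stages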
"cls._start_server_if_needed()url = urllib.parse.urljoin(cls._url, '')languages = set()for e in cls._get_root(url, num_tries=):languages.add(e.get(''))languages.add(e.get(''))return languages", "docstring": "Get supported languages (by querying the server).", "id": "f4271:c5:m14"} {"signature": "def correct(self, text: str, srctext=None) -> str:", "body": "return correct(text, self.check(text, srctext))", "docstring": "Automatically apply suggestions to the text.", "id": "f4271:c5:m11"} {"signature": "def disable_spellchecking(self):", "body": "self.disabled.update(self._spell_checking_rules)", "docstring": "Disable spell-checking rules.", "id": "f4271:c5:m13"} {"signature": "@classmethoddef _get_attrib(cls):", "body": "cls._start_server_if_needed()params = {'': FAILSAFE_LANGUAGE, '': ''}data = urllib.parse.urlencode(params).encode()root = cls._get_root(cls._url, data, num_tries=)return root.attrib", "docstring": "Get matches element attributes.", "id": "f4271:c5:m15"} {"signature": "def get_locale_language():", "body": "return locale.getlocale()[] or locale.getdefaultlocale()[]", "docstring": "Get the language code for the current locale setting.", "id": "f4271:m12"} {"signature": "def get_languages() -> set:", "body": "try:languages = cache['']except KeyError:languages = LanguageTool._get_languages()cache[''] = languagesreturn languages", "docstring": "Get supported languages.", "id": "f4271:m7"} {"signature": "def _check_api(self, text: str, srctext=None) -> bytes:", "body": "root = self._get_root(self._url, self._encode(text, srctext))return (b'' +ElementTree.tostring(root) + b\"\")", "docstring": "Match text against enabled rules (result in XML format).", "id": "f4271:c5:m9"} {"signature": "def set_directory(path=None):", "body": "old_path = get_directory()terminate_server()cache.clear()if path:cache[''] = pathtry:get_jar_info()except Error:cache[''] = old_pathraise", "docstring": "Set LanguageTool directory.", "id": "f4271:m9"} {"signature": "def which(program, win_allow_cross_arch=True):", "body": "def is_exe(path):return os.path.isfile(path) and os.access(path, os.X_OK)def _get_path_list():return os.environ[''].split(os.pathsep)if os.name == '':def find_exe(program):root, ext = os.path.splitext(program)if ext:if is_exe(program):return programelse:for ext in os.environ[''].split(os.pathsep):program_path = root + ext.lower()if is_exe(program_path):return program_pathreturn Nonedef get_path_list():paths = _get_path_list()if win_allow_cross_arch:alt_sys_path = os.path.expandvars(r\"\")if os.path.isdir(alt_sys_path):paths.insert(, alt_sys_path)else:alt_sys_path = os.path.expandvars(r\"\")if os.path.isdir(alt_sys_path):paths.append(alt_sys_path)return pathselse:def find_exe(program):return program if is_exe(program) else Noneget_path_list = _get_path_listif os.path.split(program)[]:program_path = find_exe(program)if program_path:return program_pathelse:for path in get_path_list():program_path = find_exe(os.path.join(path, program))if program_path:return program_pathreturn None", "docstring": "Identify the location of an executable file.", "id": "f4275:m1"} {"signature": "def default_hook(config):", "body": "if (any(arg.startswith('') for arg in sys.argv) andos.path.isdir(PY2K_DIR) != IS_PY2K and os.path.isdir(LIB_DIR)):shutil.rmtree(LIB_DIR)if IS_PY2K and any(arg.startswith(('', '', '', ''))for arg in sys.argv):generate_py2k(config)packages_root = get_cfg_value(config, '', '')packages_root = os.path.join(PY2K_DIR, packages_root)set_cfg_value(config, '', '', packages_root)", "docstring": 
"Default setup hook.", "id": "f4275:m16"} {"signature": "def split_elements(value):", "body": "items = [v.strip() for v in value.split('')]if len(items) == :items = value.split()return items", "docstring": "Split a string with comma or space-separated elements into a list.", "id": "f4275:m3"} {"signature": "def run_3to2(args=None):", "body": "args = BASE_ARGS_3TO2 if args is None else BASE_ARGS_3TO2 + argstry:proc = subprocess.Popen([''] + args, stderr=subprocess.PIPE)except OSError:for path in glob.glob(''):if os.path.isdir(path) and path not in sys.path:sys.path.append(path)try:from lib3to2.main import main as lib3to2_mainexcept ImportError:raise OSError('')else:if lib3to2_main('', args):raise Exception('')else:num_errors = while proc.poll() is None:line = proc.stderr.readline()sys.stderr.write(line)num_errors += line.count('')if proc.returncode or num_errors:raise Exception('')", "docstring": "Convert Python files using lib3to2.", "id": "f4275:m11"} {"signature": "def split_multiline(value):", "body": "return [element for element in (line.strip() for line in value.split(''))if element]", "docstring": "Split a multiline string into a list, excluding blank lines.", "id": "f4275:m2"} {"signature": "def write_py2k_header(file_list):", "body": "if not isinstance(file_list, list):file_list = [file_list]python_re = re.compile(br\"\")coding_re = re.compile(br\"\")new_line_re = re.compile(br\"\")version_3 = LooseVersion('')for file in file_list:if not os.path.getsize(file):continuerewrite_needed = Falsepython_found = Falsecoding_found = Falselines = []f = open(file, '')try:while len(lines) < :line = f.readline()match = python_re.match(line)if match:python_found = Trueversion = LooseVersion(match.group().decode() or '')try:version_test = version >= version_3except TypeError:version_test = Trueif version_test:line = python_re.sub(br\"\", line)rewrite_needed = Trueelif coding_re.search(line):coding_found = Truelines.append(line)if not coding_found:match = new_line_re.search(lines[])newline = match.group() if match else b\"\"line = b\"\" + newlinelines.insert( if python_found else , line)rewrite_needed = Trueif rewrite_needed:lines += f.readlines()finally:f.close()if rewrite_needed:f = open(file, '')try:f.writelines(lines)finally:f.close()", "docstring": "Write Python 2 shebang and add encoding cookie if needed.", "id": "f4275:m12"} {"signature": "def download_file_insecure(url, target):", "body": "try:from urllib.request import urlopenexcept ImportError:from urllib2 import urlopensrc = dst = Nonetry:src = urlopen(url)data = src.read()dst = open(target, \"\")dst.write(data)finally:if src:src.close()if dst:dst.close()", "docstring": "Use Python to download the file, even though it cannot authenticate the\nconnection.", "id": "f4276:m14"} {"signature": "def download_file_powershell(url, target):", "body": "target = os.path.abspath(target)cmd = ['','',\"\" % vars(),]_clean_check(cmd, target)", "docstring": "Download the file at url to target using Powershell (which will validate\ntrust). 
{"signature": "def download_file_powershell(url, target):", "body": "target = os.path.abspath(target)cmd = ['','',\"\" % vars(),]_clean_check(cmd, target)", "docstring": "Download the file at url to target using PowerShell (which will validate\ntrust). Raise an exception if the command cannot complete.", "id": "f4276:m8"} {"signature": "def main():", "body": "options = _parse_args()archive = download_setuptools(version=options.version,download_base=options.download_base,downloader_factory=options.downloader_factory,)return _install(archive, _build_install_args(options))", "docstring": "Install or upgrade setuptools and EasyInstall", "id": "f4276:m19"} {"signature": "def _parse_args():", "body": "parser = optparse.OptionParser()parser.add_option('', dest='', action='', default=False,help='')parser.add_option('', dest='', metavar=\"\",default=DEFAULT_URL,help='')parser.add_option('', dest='', action='',const=lambda: download_file_insecure, default=get_best_downloader,help='')parser.add_option('', help=\"\",default=DEFAULT_VERSION,)options, args = parser.parse_args()return options", "docstring": "Parse the command line for options", "id": "f4276:m18"} {"signature": "def _build_install_args(options):", "body": "return [''] if options.user_install else []", "docstring": "Build the arguments to 'python setup.py install' on the setuptools package", "id": "f4276:m17"} {"signature": "def make_aware(value, timezone):", "body": "if hasattr(timezone, '') and value not in (datetime.datetime.min, datetime.datetime.max):return timezone.localize(value, is_dst=None)else:return value.replace(tzinfo=timezone)", "docstring": "Makes a naive datetime.datetime in a given time zone aware.", "id": "f4281:m3"} {"signature": "def includes(self, query):", "body": "query = self.to_timezone(query)return any(self.intervals(range_start=query, range_end=query))", "docstring": "Does this schedule include the provided time?\n query should be a datetime (naive or timezone-aware)", "id": "f4285:c0:m6"} {"signature": "def intervals(self, range_start=datetime.datetime.min, range_end=datetime.datetime.max):", "body": "current_period = Nonemax_continuous_days = range_start = self.to_timezone(range_start)range_end = self.to_timezone(range_end)for period in self._daily_periods(range_start.date(), range_end.date()):if period.end < range_start or period.start > range_end:continueif current_period is None:current_period = periodelse:if ( ((period.start < current_period.end)or (period.start - current_period.end) <= datetime.timedelta(minutes=))and (current_period.end - current_period.start) < datetime.timedelta(days=max_continuous_days)):current_period = Period(current_period.start, period.end)else:yield current_periodcurrent_period = periodif current_period:yield current_period", "docstring": "Returns an iterator of Period tuples for continuous stretches of time during\n which this event is in effect, between range_start and range_end.", "id": "f4285:c2:m5"} {"signature": "def includes(self, query_date, query_time=None):", "body": "if self.start_date and query_date < self.start_date:return Falseif self.end_date and query_date > self.end_date:return Falseif query_date.weekday() not in self.weekdays:return Falseif not query_time:return Trueif query_time >= self.period.start and query_time <= self.period.end:return Truereturn False", "docstring": "Does this schedule include the provided time?\n query_date and query_time are date and time objects, interpreted\n in this schedule's timezone", "id": "f4285:c3:m1"}
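A sketch of ``make_aware`` above: the masked ``hasattr`` check is assumed to test for pytz's ``localize`` method, so pytz zones take the DST-checked branch while plain ``tzinfo`` objects fall back to ``replace``:

    import datetime
    import pytz

    tz = pytz.timezone('America/Toronto')
    naive = datetime.datetime(2013, 1, 1, 9, 0)
    make_aware(naive, tz)                     # tz.localize(naive, is_dst=None)
    make_aware(naive, datetime.timezone.utc)  # naive.replace(tzinfo=utc)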
remaining periods.", "id": "f4285:c0:m4"} {"signature": "@property@memoize_methoddef exceptions(self):", "body": "ex = {}for sd in self.root.xpath(''):bits = str(sd.text).split('')date = text_to_date(bits.pop())ex.setdefault(date, []).extend([_time_text_to_period(t)for t in bits])return ex", "docstring": "A dict of dates -> [Period time tuples] representing exceptions\n to the base recurrence pattern.", "id": "f4285:c2:m1"} {"signature": "@staticmethoddef from_element(root, timezone):", "body": "assert root.tag == ''if root.xpath(''):return _ScheduleIntervals(root, timezone)elif root.xpath(''):return _ScheduleRecurring(root, timezone)raise NotImplementedError", "docstring": "Return a Schedule object based on an lxml Element for the \n tag. timezone is a tzinfo object, ideally from pytz.", "id": "f4285:c0:m1"} {"signature": "def to_timezone(self, dt):", "body": "if timezone.is_aware(dt):return dt.astimezone(self.timezone)else:return timezone.make_aware(dt, self.timezone)", "docstring": "Converts a datetime to the timezone of this Schedule.", "id": "f4285:c0:m2"} {"signature": "@property@memoize_methoddef period(self):", "body": "start_time = self.root.findtext('')if start_time:return Period(text_to_time(start_time), text_to_time(self.root.findtext('')))return Period(datetime.time(, ), datetime.time(, ))", "docstring": "A Period tuple representing the daily start and end time.", "id": "f4285:c3:m3"} {"signature": "def _daily_periods(self, range_start, range_end):", "body": "specific = set(self.exceptions.keys())return heapq.merge(self.exception_periods(range_start, range_end), *[sched.daily_periods(range_start=range_start, range_end=range_end, exclude_dates=specific)for sched in self._recurring_schedules])", "docstring": "Returns an iterator of Period tuples for every day this event is in effect, between range_start\n and range_end.", "id": "f4285:c2:m4"} {"signature": "def _tmdd_datetime_to_iso(dt, include_offset=True, include_seconds=True):", "body": "datestring = dt.findtext('')timestring = dt.findtext('')assert len(datestring) == assert len(timestring) >= iso = datestring[:] + '' + datestring[:] + '' + datestring[:] + ''+ timestring[:] + '' + timestring[:]if include_seconds:iso += '' + timestring[:]if include_offset:offset = dt.findtext('')if offset:assert len(offset) == iso += offset[:] + '' + offset[:]else:raise Exception(\"\" % etree.tostring(dt))return iso", "docstring": "dt is an xml Element with ,