Dataset columns (name: type, observed range):

partition: stringclasses (3 values)
func_name: stringlengths (1 to 134)
docstring: stringlengths (1 to 46.9k)
path: stringlengths (4 to 223)
original_string: stringlengths (75 to 104k)
code: stringlengths (75 to 104k)
docstring_tokens: sequencelengths (1 to 1.97k)
repo: stringlengths (7 to 55)
language: stringclasses (1 value)
url: stringlengths (87 to 315)
code_tokens: sequencelengths (19 to 28.4k)
sha: stringlengths (40 to 40)
valid
_DoCopyFile
:param unicode source_filename: The source filename. Schemas: local, ftp, http :param unicode target_filename: Target filename. Schemas: local, ftp :param copy_symlink: @see _CopyFileLocal :raises FileNotFoundError: If source_filename does not exist
zerotk/easyfs/_easyfs.py
def _DoCopyFile(source_filename, target_filename, copy_symlink=True):
    '''
    :param unicode source_filename:
        The source filename.
        Schemas: local, ftp, http

    :param unicode target_filename:
        Target filename.
        Schemas: local, ftp

    :param copy_symlink:
        @see _CopyFileLocal

    :raises FileNotFoundError:
        If source_filename does not exist
    '''
    from six.moves.urllib.parse import urlparse

    source_url = urlparse(source_filename)
    target_url = urlparse(target_filename)

    if _UrlIsLocal(source_url):
        if not Exists(source_filename):
            from ._exceptions import FileNotFoundError
            raise FileNotFoundError(source_filename)

        if _UrlIsLocal(target_url):
            # local to local
            _CopyFileLocal(source_filename, target_filename, copy_symlink=copy_symlink)
        elif target_url.scheme in ['ftp']:
            from ._exceptions import NotImplementedProtocol
            raise NotImplementedProtocol(target_url.scheme)
        else:
            from ._exceptions import NotImplementedProtocol
            raise NotImplementedProtocol(target_url.scheme)

    elif source_url.scheme in ['http', 'https', 'ftp']:
        if _UrlIsLocal(target_url):
            # HTTP/FTP to local
            from ._exceptions import NotImplementedProtocol
            raise NotImplementedProtocol(target_url.scheme)
        else:
            # HTTP/FTP to other ==> NotImplemented
            from ._exceptions import NotImplementedProtocol
            raise NotImplementedProtocol(target_url.scheme)

    else:
        from ._exceptions import NotImplementedProtocol  # @Reimport
        raise NotImplementedProtocol(source_url.scheme)
[ ":", "param", "unicode", "source_filename", ":", "The", "source", "filename", ".", "Schemas", ":", "local", "ftp", "http" ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L302-L349
[ "def", "_DoCopyFile", "(", "source_filename", ",", "target_filename", ",", "copy_symlink", "=", "True", ")", ":", "from", "six", ".", "moves", ".", "urllib", ".", "parse", "import", "urlparse", "source_url", "=", "urlparse", "(", "source_filename", ")", "target_url", "=", "urlparse", "(", "target_filename", ")", "if", "_UrlIsLocal", "(", "source_url", ")", ":", "if", "not", "Exists", "(", "source_filename", ")", ":", "from", ".", "_exceptions", "import", "FileNotFoundError", "raise", "FileNotFoundError", "(", "source_filename", ")", "if", "_UrlIsLocal", "(", "target_url", ")", ":", "# local to local", "_CopyFileLocal", "(", "source_filename", ",", "target_filename", ",", "copy_symlink", "=", "copy_symlink", ")", "elif", "target_url", ".", "scheme", "in", "[", "'ftp'", "]", ":", "from", ".", "_exceptions", "import", "NotImplementedProtocol", "raise", "NotImplementedProtocol", "(", "target_url", ".", "scheme", ")", "else", ":", "from", ".", "_exceptions", "import", "NotImplementedProtocol", "raise", "NotImplementedProtocol", "(", "target_url", ".", "scheme", ")", "elif", "source_url", ".", "scheme", "in", "[", "'http'", ",", "'https'", ",", "'ftp'", "]", ":", "if", "_UrlIsLocal", "(", "target_url", ")", ":", "# HTTP/FTP to local", "from", ".", "_exceptions", "import", "NotImplementedProtocol", "raise", "NotImplementedProtocol", "(", "target_url", ".", "scheme", ")", "else", ":", "# HTTP/FTP to other ==> NotImplemented", "from", ".", "_exceptions", "import", "NotImplementedProtocol", "raise", "NotImplementedProtocol", "(", "target_url", ".", "scheme", ")", "else", ":", "from", ".", "_exceptions", "import", "NotImplementedProtocol", "# @Reimport", "raise", "NotImplementedProtocol", "(", "source_url", ".", "scheme", ")" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
valid
_CopyFileLocal
Copy a file locally to a directory. :param unicode source_filename: The filename to copy from. :param unicode target_filename: The filename to copy to. :param bool copy_symlink: If True and source_filename is a symlink, target_filename will also be created as a symlink. If False, the file being linked will be copied instead.
zerotk/easyfs/_easyfs.py
def _CopyFileLocal(source_filename, target_filename, copy_symlink=True):
    '''
    Copy a file locally to a directory.

    :param unicode source_filename:
        The filename to copy from.

    :param unicode target_filename:
        The filename to copy to.

    :param bool copy_symlink:
        If True and source_filename is a symlink, target_filename will also be created as a
        symlink. If False, the file being linked will be copied instead.
    '''
    import shutil
    try:
        # >>> Create the target_filename directory if necessary
        dir_name = os.path.dirname(target_filename)
        if dir_name and not os.path.isdir(dir_name):
            os.makedirs(dir_name)

        if copy_symlink and IsLink(source_filename):
            # >>> Delete the target_filename if it already exists
            if os.path.isfile(target_filename) or IsLink(target_filename):
                DeleteFile(target_filename)

            # >>> Obtain the relative path from link to source_filename (linkto)
            source_filename = ReadLink(source_filename)
            CreateLink(source_filename, target_filename)
        else:
            # shutil can't copy links in Windows, so we must find the real file manually
            if sys.platform == 'win32':
                while IsLink(source_filename):
                    link = ReadLink(source_filename)
                    if os.path.isabs(link):
                        source_filename = link
                    else:
                        source_filename = os.path.join(os.path.dirname(source_filename), link)

            shutil.copyfile(source_filename, target_filename)
            shutil.copymode(source_filename, target_filename)
    except Exception as e:
        reraise(e, 'While executing _filesystem._CopyFileLocal(%s, %s)' % (source_filename, target_filename))
[ "Copy", "a", "file", "locally", "to", "a", "directory", "." ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L352-L396
[ "def", "_CopyFileLocal", "(", "source_filename", ",", "target_filename", ",", "copy_symlink", "=", "True", ")", ":", "import", "shutil", "try", ":", "# >>> Create the target_filename directory if necessary", "dir_name", "=", "os", ".", "path", ".", "dirname", "(", "target_filename", ")", "if", "dir_name", "and", "not", "os", ".", "path", ".", "isdir", "(", "dir_name", ")", ":", "os", ".", "makedirs", "(", "dir_name", ")", "if", "copy_symlink", "and", "IsLink", "(", "source_filename", ")", ":", "# >>> Delete the target_filename if it already exists", "if", "os", ".", "path", ".", "isfile", "(", "target_filename", ")", "or", "IsLink", "(", "target_filename", ")", ":", "DeleteFile", "(", "target_filename", ")", "# >>> Obtain the relative path from link to source_filename (linkto)", "source_filename", "=", "ReadLink", "(", "source_filename", ")", "CreateLink", "(", "source_filename", ",", "target_filename", ")", "else", ":", "# shutil can't copy links in Windows, so we must find the real file manually", "if", "sys", ".", "platform", "==", "'win32'", ":", "while", "IsLink", "(", "source_filename", ")", ":", "link", "=", "ReadLink", "(", "source_filename", ")", "if", "os", ".", "path", ".", "isabs", "(", "link", ")", ":", "source_filename", "=", "link", "else", ":", "source_filename", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "source_filename", ")", ",", "link", ")", "shutil", ".", "copyfile", "(", "source_filename", ",", "target_filename", ")", "shutil", ".", "copymode", "(", "source_filename", ",", "target_filename", ")", "except", "Exception", "as", "e", ":", "reraise", "(", "e", ",", "'While executiong _filesystem._CopyFileLocal(%s, %s)'", "%", "(", "source_filename", ",", "target_filename", ")", ")" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
valid
CopyFiles
Copy files from the given source to the target. :param unicode source_dir: A filename, URL or a file mask. Ex. x:\coilib50 x:\coilib50\* http://server/directory/file ftp://server/directory/file :param unicode target_dir: A directory or an URL Ex. d:\Temp ftp://server/directory :param bool create_target_dir: If True, creates the target path if it doesn't exists. :param bool md5_check: .. seealso:: CopyFile :raises DirectoryNotFoundError: If target_dir does not exist, and create_target_dir is False .. seealso:: CopyFile for documentation on accepted protocols .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information
zerotk/easyfs/_easyfs.py
def CopyFiles(source_dir, target_dir, create_target_dir=False, md5_check=False):
    '''
    Copy files from the given source to the target.

    :param unicode source_dir:
        A filename, URL or a file mask.
        Ex.
            x:\coilib50
            x:\coilib50\*
            http://server/directory/file
            ftp://server/directory/file

    :param unicode target_dir:
        A directory or an URL
        Ex.
            d:\Temp
            ftp://server/directory

    :param bool create_target_dir:
        If True, creates the target path if it doesn't exists.

    :param bool md5_check:
        .. seealso:: CopyFile

    :raises DirectoryNotFoundError:
        If target_dir does not exist, and create_target_dir is False

    .. seealso:: CopyFile for documentation on accepted protocols

    .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information
    '''
    import fnmatch

    # Check if we were given a directory or a directory with mask
    if IsDir(source_dir):
        # Yes, it's a directory, copy everything from it
        source_mask = '*'
    else:
        # Split directory and mask
        source_dir, source_mask = os.path.split(source_dir)

    # Create directory if necessary
    if not IsDir(target_dir):
        if create_target_dir:
            CreateDirectory(target_dir)
        else:
            from ._exceptions import DirectoryNotFoundError
            raise DirectoryNotFoundError(target_dir)

    # List and match files
    filenames = ListFiles(source_dir)

    # Check if we have a source directory
    if filenames is None:
        return

    # Copy files
    for i_filename in filenames:
        if md5_check and i_filename.endswith('.md5'):
            continue  # md5 files will be copied by CopyFile when copying their associated files

        if fnmatch.fnmatch(i_filename, source_mask):
            source_path = source_dir + '/' + i_filename
            target_path = target_dir + '/' + i_filename

            if IsDir(source_path):
                # If we found a directory, copy it recursively
                CopyFiles(source_path, target_path, create_target_dir=True, md5_check=md5_check)
            else:
                CopyFile(source_path, target_path, md5_check=md5_check)
[ "Copy", "files", "from", "the", "given", "source", "to", "the", "target", "." ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L403-L473
[ "def", "CopyFiles", "(", "source_dir", ",", "target_dir", ",", "create_target_dir", "=", "False", ",", "md5_check", "=", "False", ")", ":", "import", "fnmatch", "# Check if we were given a directory or a directory with mask", "if", "IsDir", "(", "source_dir", ")", ":", "# Yes, it's a directory, copy everything from it", "source_mask", "=", "'*'", "else", ":", "# Split directory and mask", "source_dir", ",", "source_mask", "=", "os", ".", "path", ".", "split", "(", "source_dir", ")", "# Create directory if necessary", "if", "not", "IsDir", "(", "target_dir", ")", ":", "if", "create_target_dir", ":", "CreateDirectory", "(", "target_dir", ")", "else", ":", "from", ".", "_exceptions", "import", "DirectoryNotFoundError", "raise", "DirectoryNotFoundError", "(", "target_dir", ")", "# List and match files", "filenames", "=", "ListFiles", "(", "source_dir", ")", "# Check if we have a source directory", "if", "filenames", "is", "None", ":", "return", "# Copy files", "for", "i_filename", "in", "filenames", ":", "if", "md5_check", "and", "i_filename", ".", "endswith", "(", "'.md5'", ")", ":", "continue", "# md5 files will be copied by CopyFile when copying their associated files", "if", "fnmatch", ".", "fnmatch", "(", "i_filename", ",", "source_mask", ")", ":", "source_path", "=", "source_dir", "+", "'/'", "+", "i_filename", "target_path", "=", "target_dir", "+", "'/'", "+", "i_filename", "if", "IsDir", "(", "source_path", ")", ":", "# If we found a directory, copy it recursively", "CopyFiles", "(", "source_path", ",", "target_path", ",", "create_target_dir", "=", "True", ",", "md5_check", "=", "md5_check", ")", "else", ":", "CopyFile", "(", "source_path", ",", "target_path", ",", "md5_check", "=", "md5_check", ")" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
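A minimal usage sketch for CopyFiles follows; it assumes the helper is re-exported at the zerotk.easyfs package level, and the directory names and mask are hypothetical.

```python
# Hypothetical paths: copy every .txt file from ./reports into ./backup,
# creating ./backup on demand. Matching subdirectories are recursed into.
from zerotk.easyfs import CopyFiles  # assumes the package re-exports this helper

CopyFiles('./reports/*.txt', './backup', create_target_dir=True)
```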
valid
CopyFilesX
Copies files into directories, according to a file mapping :param list(tuple(unicode,unicode)) file_mapping: A list of mappings between the directory in the target and the source. For syntax, @see: ExtendedPathMask :rtype: list(tuple(unicode,unicode)) :returns: List of files copied. (source_filename, target_filename) .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information
zerotk/easyfs/_easyfs.py
def CopyFilesX(file_mapping):
    '''
    Copies files into directories, according to a file mapping

    :param list(tuple(unicode,unicode)) file_mapping:
        A list of mappings between the directory in the target and the source.
        For syntax, @see: ExtendedPathMask

    :rtype: list(tuple(unicode,unicode))
    :returns:
        List of files copied. (source_filename, target_filename)

    .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information
    '''
    # List files that match the mapping
    files = []
    for i_target_path, i_source_path_mask in file_mapping:
        tree_recurse, flat_recurse, dirname, in_filters, out_filters = ExtendedPathMask.Split(i_source_path_mask)

        _AssertIsLocal(dirname)

        filenames = FindFiles(dirname, in_filters, out_filters, tree_recurse)
        for i_source_filename in filenames:
            if os.path.isdir(i_source_filename):
                continue  # Do not copy dirs

            i_target_filename = i_source_filename[len(dirname) + 1:]
            if flat_recurse:
                i_target_filename = os.path.basename(i_target_filename)
            i_target_filename = os.path.join(i_target_path, i_target_filename)

            files.append((
                StandardizePath(i_source_filename),
                StandardizePath(i_target_filename)
            ))

    # Copy files
    for i_source_filename, i_target_filename in files:
        # Create target dir if necessary
        target_dir = os.path.dirname(i_target_filename)
        CreateDirectory(target_dir)

        CopyFile(i_source_filename, i_target_filename)

    return files
[ "Copies", "files", "into", "directories", "according", "to", "a", "file", "mapping" ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L480-L524
[ "def", "CopyFilesX", "(", "file_mapping", ")", ":", "# List files that match the mapping", "files", "=", "[", "]", "for", "i_target_path", ",", "i_source_path_mask", "in", "file_mapping", ":", "tree_recurse", ",", "flat_recurse", ",", "dirname", ",", "in_filters", ",", "out_filters", "=", "ExtendedPathMask", ".", "Split", "(", "i_source_path_mask", ")", "_AssertIsLocal", "(", "dirname", ")", "filenames", "=", "FindFiles", "(", "dirname", ",", "in_filters", ",", "out_filters", ",", "tree_recurse", ")", "for", "i_source_filename", "in", "filenames", ":", "if", "os", ".", "path", ".", "isdir", "(", "i_source_filename", ")", ":", "continue", "# Do not copy dirs", "i_target_filename", "=", "i_source_filename", "[", "len", "(", "dirname", ")", "+", "1", ":", "]", "if", "flat_recurse", ":", "i_target_filename", "=", "os", ".", "path", ".", "basename", "(", "i_target_filename", ")", "i_target_filename", "=", "os", ".", "path", ".", "join", "(", "i_target_path", ",", "i_target_filename", ")", "files", ".", "append", "(", "(", "StandardizePath", "(", "i_source_filename", ")", ",", "StandardizePath", "(", "i_target_filename", ")", ")", ")", "# Copy files", "for", "i_source_filename", ",", "i_target_filename", "in", "files", ":", "# Create target dir if necessary", "target_dir", "=", "os", ".", "path", ".", "dirname", "(", "i_target_filename", ")", "CreateDirectory", "(", "target_dir", ")", "CopyFile", "(", "i_source_filename", ",", "i_target_filename", ")", "return", "files" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
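A sketch of how a file_mapping could be passed to CopyFilesX, assuming the package-level re-export and that plain `dir/*.ext` masks are valid ExtendedPathMask expressions; all paths are hypothetical.

```python
# Each entry maps a target directory to a source path mask.
from zerotk.easyfs import CopyFilesX  # assumed re-export

copied = CopyFilesX([
    ('./build/docs', './docs/*.rst'),      # hypothetical mapping
    ('./build/images', './assets/*.png'),  # hypothetical mapping
])
for source, target in copied:
    print(source, '->', target)
```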
valid
IsFile
:param unicode path: Path to a file (local or ftp) :raises NotImplementedProtocol: If checking for a non-local, non-ftp file :rtype: bool :returns: True if the file exists .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information
zerotk/easyfs/_easyfs.py
def IsFile(path):
    '''
    :param unicode path:
        Path to a file (local or ftp)

    :raises NotImplementedProtocol:
        If checking for a non-local, non-ftp file

    :rtype: bool
    :returns:
        True if the file exists

    .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information
    '''
    from six.moves.urllib.parse import urlparse
    url = urlparse(path)

    if _UrlIsLocal(url):
        if IsLink(path):
            return IsFile(ReadLink(path))
        return os.path.isfile(path)

    elif url.scheme == 'ftp':
        from ._exceptions import NotImplementedProtocol
        raise NotImplementedProtocol(url.scheme)
    else:
        from ._exceptions import NotImplementedProtocol
        raise NotImplementedProtocol(url.scheme)
[ ":", "param", "unicode", "path", ":", "Path", "to", "a", "file", "(", "local", "or", "ftp", ")" ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L531-L558
[ "def", "IsFile", "(", "path", ")", ":", "from", "six", ".", "moves", ".", "urllib", ".", "parse", "import", "urlparse", "url", "=", "urlparse", "(", "path", ")", "if", "_UrlIsLocal", "(", "url", ")", ":", "if", "IsLink", "(", "path", ")", ":", "return", "IsFile", "(", "ReadLink", "(", "path", ")", ")", "return", "os", ".", "path", ".", "isfile", "(", "path", ")", "elif", "url", ".", "scheme", "==", "'ftp'", ":", "from", ".", "_exceptions", "import", "NotImplementedProtocol", "raise", "NotImplementedProtocol", "(", "url", ".", "scheme", ")", "else", ":", "from", ".", "_exceptions", "import", "NotImplementedProtocol", "raise", "NotImplementedProtocol", "(", "url", ".", "scheme", ")" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
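A small sketch of the IsFile check, assuming the package-level re-export; the path is hypothetical.

```python
from zerotk.easyfs import IsFile  # assumed re-export

# Local paths are checked with os.path.isfile (symlinks are followed first);
# any non-local scheme raises NotImplementedProtocol.
if IsFile('./settings.ini'):
    print('settings file is present')
```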
valid
GetDriveType
Determine the type of drive, which can be one of the following values: DRIVE_UNKNOWN = 0 The drive type cannot be determined. DRIVE_NO_ROOT_DIR = 1 The root path is invalid; for example, there is no volume mounted at the specified path. DRIVE_REMOVABLE = 2 The drive has removable media; for example, a floppy drive, thumb drive, or flash card reader. DRIVE_FIXED = 3 The drive has fixed media; for example, a hard disk drive or flash drive. DRIVE_REMOTE = 4 The drive is a remote (network) drive. DRIVE_CDROM = 5 The drive is a CD-ROM drive. DRIVE_RAMDISK = 6 The drive is a RAM disk :note: The implementation is valid only for Windows OS Linux will always return DRIVE_UNKNOWN :param path: Path to a file or directory
zerotk/easyfs/_easyfs.py
def GetDriveType(path):
    '''
    Determine the type of drive, which can be one of the following values:
        DRIVE_UNKNOWN = 0
            The drive type cannot be determined.
        DRIVE_NO_ROOT_DIR = 1
            The root path is invalid; for example, there is no volume mounted at the specified path.
        DRIVE_REMOVABLE = 2
            The drive has removable media; for example, a floppy drive, thumb drive, or flash card reader.
        DRIVE_FIXED = 3
            The drive has fixed media; for example, a hard disk drive or flash drive.
        DRIVE_REMOTE = 4
            The drive is a remote (network) drive.
        DRIVE_CDROM = 5
            The drive is a CD-ROM drive.
        DRIVE_RAMDISK = 6
            The drive is a RAM disk

    :note:
        The implementation is valid only for Windows OS
        Linux will always return DRIVE_UNKNOWN

    :param path:
        Path to a file or directory
    '''
    if sys.platform == 'win32':
        import ctypes
        kdll = ctypes.windll.LoadLibrary("kernel32.dll")
        return kdll.GetDriveType(path + '\\')

        import win32file
        if IsFile(path):
            path = os.path.dirname(path)

        # A trailing backslash is required.
        return win32file.GetDriveType(path + '\\')

    else:
        return DRIVE_UNKNOWN
[ "Determine", "the", "type", "of", "drive", "which", "can", "be", "one", "of", "the", "following", "values", ":", "DRIVE_UNKNOWN", "=", "0", "The", "drive", "type", "cannot", "be", "determined", "." ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L561-L606
[ "def", "GetDriveType", "(", "path", ")", ":", "if", "sys", ".", "platform", "==", "'win32'", ":", "import", "ctypes", "kdll", "=", "ctypes", ".", "windll", ".", "LoadLibrary", "(", "\"kernel32.dll\"", ")", "return", "kdll", ".", "GetDriveType", "(", "path", "+", "'\\\\'", ")", "import", "win32file", "if", "IsFile", "(", "path", ")", ":", "path", "=", "os", ".", "path", ".", "dirname", "(", "path", ")", "# A trailing backslash is required.", "return", "win32file", ".", "GetDriveType", "(", "path", "+", "'\\\\'", ")", "else", ":", "return", "DRIVE_UNKNOWN" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
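A sketch of querying the drive type, assuming the package-level re-export and the DRIVE_* behaviour described in the docstring; the drive path is hypothetical.

```python
import sys
from zerotk.easyfs import GetDriveType  # assumed re-export

# On Windows this delegates to kernel32; elsewhere it always reports DRIVE_UNKNOWN (0).
path = 'C:' if sys.platform == 'win32' else '/'
print('drive type code:', GetDriveType(path))
```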
valid
IsDir
:param unicode directory: A path :rtype: bool :returns: Returns whether the given path points to an existent directory. :raises NotImplementedProtocol: If the path protocol is not local or ftp .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information
zerotk/easyfs/_easyfs.py
def IsDir(directory):
    '''
    :param unicode directory:
        A path

    :rtype: bool
    :returns:
        Returns whether the given path points to an existent directory.

    :raises NotImplementedProtocol:
        If the path protocol is not local or ftp

    .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information
    '''
    from six.moves.urllib.parse import urlparse
    directory_url = urlparse(directory)

    if _UrlIsLocal(directory_url):
        return os.path.isdir(directory)
    elif directory_url.scheme == 'ftp':
        from ._exceptions import NotImplementedProtocol
        raise NotImplementedProtocol(directory_url.scheme)
    else:
        from ._exceptions import NotImplementedProtocol
        raise NotImplementedProtocol(directory_url.scheme)
[ ":", "param", "unicode", "directory", ":", "A", "path" ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L614-L638
[ "def", "IsDir", "(", "directory", ")", ":", "from", "six", ".", "moves", ".", "urllib", ".", "parse", "import", "urlparse", "directory_url", "=", "urlparse", "(", "directory", ")", "if", "_UrlIsLocal", "(", "directory_url", ")", ":", "return", "os", ".", "path", ".", "isdir", "(", "directory", ")", "elif", "directory_url", ".", "scheme", "==", "'ftp'", ":", "from", ".", "_exceptions", "import", "NotImplementedProtocol", "raise", "NotImplementedProtocol", "(", "target_url", ".", "scheme", ")", "else", ":", "from", ".", "_exceptions", "import", "NotImplementedProtocol", "raise", "NotImplementedProtocol", "(", "directory_url", ".", "scheme", ")" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
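A sketch of the IsDir check, assuming the package-level re-export; the directory is hypothetical.

```python
from zerotk.easyfs import IsDir  # assumed re-export

# True only for an existing local directory; ftp and other schemes raise.
print(IsDir('./output'))
```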
valid
Exists
:rtype: bool :returns: True if the path already exists (either a file or a directory) .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information
zerotk/easyfs/_easyfs.py
def Exists(path):
    '''
    :rtype: bool
    :returns:
        True if the path already exists (either a file or a directory)

    .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information
    '''
    from six.moves.urllib.parse import urlparse
    path_url = urlparse(path)

    # Handle local
    if _UrlIsLocal(path_url):
        return IsFile(path) or IsDir(path) or IsLink(path)

    return IsFile(path) or IsDir(path)
[ ":", "rtype", ":", "bool", ":", "returns", ":", "True", "if", "the", "path", "already", "exists", "(", "either", "a", "file", "or", "a", "directory", ")" ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L645-L659
[ "def", "Exists", "(", "path", ")", ":", "from", "six", ".", "moves", ".", "urllib", ".", "parse", "import", "urlparse", "path_url", "=", "urlparse", "(", "path", ")", "# Handle local", "if", "_UrlIsLocal", "(", "path_url", ")", ":", "return", "IsFile", "(", "path", ")", "or", "IsDir", "(", "path", ")", "or", "IsLink", "(", "path", ")", "return", "IsFile", "(", "path", ")", "or", "IsDir", "(", "path", ")" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
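A sketch of Exists, assuming the package-level re-export; the path is hypothetical.

```python
from zerotk.easyfs import Exists  # assumed re-export

# Locally, a file, a directory or a symlink all count as "existing".
if not Exists('./cache'):
    print('nothing at ./cache yet')
```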
valid
CopyDirectory
Recursively copy a directory tree. :param unicode source_dir: Where files will come from :param unicode target_dir: Where files will go to :param bool override: If True and target_dir already exists, it will be deleted before copying. :raises NotImplementedForRemotePathError: If trying to copy to/from remote directories
zerotk/easyfs/_easyfs.py
def CopyDirectory(source_dir, target_dir, override=False):
    '''
    Recursively copy a directory tree.

    :param unicode source_dir:
        Where files will come from

    :param unicode target_dir:
        Where files will go to

    :param bool override:
        If True and target_dir already exists, it will be deleted before copying.

    :raises NotImplementedForRemotePathError:
        If trying to copy to/from remote directories
    '''
    _AssertIsLocal(source_dir)
    _AssertIsLocal(target_dir)

    if override and IsDir(target_dir):
        DeleteDirectory(target_dir, skip_on_error=False)

    import shutil
    shutil.copytree(source_dir, target_dir)
[ "Recursively", "copy", "a", "directory", "tree", "." ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L666-L689
[ "def", "CopyDirectory", "(", "source_dir", ",", "target_dir", ",", "override", "=", "False", ")", ":", "_AssertIsLocal", "(", "source_dir", ")", "_AssertIsLocal", "(", "target_dir", ")", "if", "override", "and", "IsDir", "(", "target_dir", ")", ":", "DeleteDirectory", "(", "target_dir", ",", "skip_on_error", "=", "False", ")", "import", "shutil", "shutil", ".", "copytree", "(", "source_dir", ",", "target_dir", ")" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
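A sketch of a recursive local copy with CopyDirectory, assuming the package-level re-export; both directories are hypothetical.

```python
from zerotk.easyfs import CopyDirectory  # assumed re-export

# override=True removes an existing target tree first, so shutil.copytree
# does not fail on an already-existing destination.
CopyDirectory('./templates', './build/templates', override=True)
```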
valid
DeleteFile
Deletes the given local filename. .. note:: If file doesn't exist this method has no effect. :param unicode target_filename: A local filename :raises NotImplementedForRemotePathError: If trying to delete a non-local path :raises FileOnlyActionError: Raised when filename refers to a directory.
zerotk/easyfs/_easyfs.py
def DeleteFile(target_filename):
    '''
    Deletes the given local filename.

    .. note:: If file doesn't exist this method has no effect.

    :param unicode target_filename:
        A local filename

    :raises NotImplementedForRemotePathError:
        If trying to delete a non-local path

    :raises FileOnlyActionError:
        Raised when filename refers to a directory.
    '''
    _AssertIsLocal(target_filename)

    try:
        if IsLink(target_filename):
            DeleteLink(target_filename)
        elif IsFile(target_filename):
            os.remove(target_filename)
        elif IsDir(target_filename):
            from ._exceptions import FileOnlyActionError
            raise FileOnlyActionError(target_filename)
    except Exception as e:
        reraise(e, 'While executing filesystem.DeleteFile(%s)' % (target_filename))
[ "Deletes", "the", "given", "local", "filename", "." ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L696-L722
[ "def", "DeleteFile", "(", "target_filename", ")", ":", "_AssertIsLocal", "(", "target_filename", ")", "try", ":", "if", "IsLink", "(", "target_filename", ")", ":", "DeleteLink", "(", "target_filename", ")", "elif", "IsFile", "(", "target_filename", ")", ":", "os", ".", "remove", "(", "target_filename", ")", "elif", "IsDir", "(", "target_filename", ")", ":", "from", ".", "_exceptions", "import", "FileOnlyActionError", "raise", "FileOnlyActionError", "(", "target_filename", ")", "except", "Exception", "as", "e", ":", "reraise", "(", "e", ",", "'While executing filesystem.DeleteFile(%s)'", "%", "(", "target_filename", ")", ")" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
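A sketch of DeleteFile, assuming the package-level re-export; the filename is hypothetical.

```python
from zerotk.easyfs import DeleteFile  # assumed re-export

# Deletes a local file or symlink; passing a directory raises FileOnlyActionError.
DeleteFile('./build/stale.log')
```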
valid
AppendToFile
Appends content to a local file. :param unicode filename: :param unicode contents: :type eol_style: EOL_STYLE_XXX constant :param eol_style: Replaces the EOL by the appropriate EOL depending on the eol_style value. Considers that all content is using only "\n" as EOL. :param unicode encoding: Target file's content encoding. Defaults to sys.getfilesystemencoding() :param bool binary: If True, content is appended in binary mode. In this case, `contents` must be `bytes` and not `unicode` :raises NotImplementedForRemotePathError: If trying to modify a non-local path :raises ValueError: If trying to mix unicode `contents` without `encoding`, or `encoding` without unicode `contents`
zerotk/easyfs/_easyfs.py
def AppendToFile(filename, contents, eol_style=EOL_STYLE_NATIVE, encoding=None, binary=False):
    '''
    Appends content to a local file.

    :param unicode filename:

    :param unicode contents:

    :type eol_style: EOL_STYLE_XXX constant
    :param eol_style:
        Replaces the EOL by the appropriate EOL depending on the eol_style value.
        Considers that all content is using only "\n" as EOL.

    :param unicode encoding:
        Target file's content encoding.
        Defaults to sys.getfilesystemencoding()

    :param bool binary:
        If True, content is appended in binary mode. In this case, `contents` must be `bytes`
        and not `unicode`

    :raises NotImplementedForRemotePathError:
        If trying to modify a non-local path

    :raises ValueError:
        If trying to mix unicode `contents` without `encoding`, or `encoding` without
        unicode `contents`
    '''
    _AssertIsLocal(filename)

    assert isinstance(contents, six.text_type) ^ binary, 'Must always receive unicode contents, unless binary=True'

    if not binary:
        # Replaces eol on each line by the given eol_style.
        contents = _HandleContentsEol(contents, eol_style)

        # Handle encoding here, and always write in binary mode. We can't use io.open because it
        # tries to do its own line ending handling.
        contents = contents.encode(encoding or sys.getfilesystemencoding())

    oss = open(filename, 'ab')
    try:
        oss.write(contents)
    finally:
        oss.close()
[ "Appends", "content", "to", "a", "local", "file", "." ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L729-L773
[ "def", "AppendToFile", "(", "filename", ",", "contents", ",", "eol_style", "=", "EOL_STYLE_NATIVE", ",", "encoding", "=", "None", ",", "binary", "=", "False", ")", ":", "_AssertIsLocal", "(", "filename", ")", "assert", "isinstance", "(", "contents", ",", "six", ".", "text_type", ")", "^", "binary", ",", "'Must always receive unicode contents, unless binary=True'", "if", "not", "binary", ":", "# Replaces eol on each line by the given eol_style.", "contents", "=", "_HandleContentsEol", "(", "contents", ",", "eol_style", ")", "# Handle encoding here, and always write in binary mode. We can't use io.open because it", "# tries to do its own line ending handling.", "contents", "=", "contents", ".", "encode", "(", "encoding", "or", "sys", ".", "getfilesystemencoding", "(", ")", ")", "oss", "=", "open", "(", "filename", ",", "'ab'", ")", "try", ":", "oss", ".", "write", "(", "contents", ")", "finally", ":", "oss", ".", "close", "(", ")" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
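A sketch of appending text with AppendToFile, assuming the package-level re-export; the filename and encoding choice are hypothetical.

```python
from zerotk.easyfs import AppendToFile  # assumed re-export

# Text contents must be unicode; they are EOL-normalized and encoded before the
# binary-mode append. Pass binary=True (with bytes) to skip both steps.
AppendToFile('./run.log', u'step finished\n', encoding='utf-8')
```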
valid
MoveFile
Moves a file. :param unicode source_filename: :param unicode target_filename: :raises NotImplementedForRemotePathError: If trying to operate with non-local files.
zerotk/easyfs/_easyfs.py
def MoveFile(source_filename, target_filename):
    '''
    Moves a file.

    :param unicode source_filename:

    :param unicode target_filename:

    :raises NotImplementedForRemotePathError:
        If trying to operate with non-local files.
    '''
    _AssertIsLocal(source_filename)
    _AssertIsLocal(target_filename)

    import shutil
    shutil.move(source_filename, target_filename)
[ "Moves", "a", "file", "." ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L780-L795
[ "def", "MoveFile", "(", "source_filename", ",", "target_filename", ")", ":", "_AssertIsLocal", "(", "source_filename", ")", "_AssertIsLocal", "(", "target_filename", ")", "import", "shutil", "shutil", ".", "move", "(", "source_filename", ",", "target_filename", ")" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
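A sketch of MoveFile, assuming the package-level re-export; the filenames are hypothetical.

```python
from zerotk.easyfs import MoveFile  # assumed re-export

# Thin wrapper over shutil.move, restricted to local paths.
MoveFile('./report.tmp', './report.txt')
```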
valid
MoveDirectory
Moves a directory. :param unicode source_dir: :param unicode target_dir: :raises NotImplementedError: If trying to move anything other than: Local dir -> local dir FTP dir -> FTP dir (same host)
zerotk/easyfs/_easyfs.py
def MoveDirectory(source_dir, target_dir):
    '''
    Moves a directory.

    :param unicode source_dir:

    :param unicode target_dir:

    :raises NotImplementedError:
        If trying to move anything other than:
            Local dir -> local dir
            FTP dir -> FTP dir (same host)
    '''
    if not IsDir(source_dir):
        from ._exceptions import DirectoryNotFoundError
        raise DirectoryNotFoundError(source_dir)

    if Exists(target_dir):
        from ._exceptions import DirectoryAlreadyExistsError
        raise DirectoryAlreadyExistsError(target_dir)

    from six.moves.urllib.parse import urlparse
    source_url = urlparse(source_dir)
    target_url = urlparse(target_dir)

    # Local to local
    if _UrlIsLocal(source_url) and _UrlIsLocal(target_url):
        import shutil
        shutil.move(source_dir, target_dir)

    # FTP to FTP
    elif source_url.scheme == 'ftp' and target_url.scheme == 'ftp':
        from ._exceptions import NotImplementedProtocol
        raise NotImplementedProtocol(target_url.scheme)
    else:
        raise NotImplementedError('Can only move directories local->local or ftp->ftp')
[ "Moves", "a", "directory", "." ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L802-L837
[ "def", "MoveDirectory", "(", "source_dir", ",", "target_dir", ")", ":", "if", "not", "IsDir", "(", "source_dir", ")", ":", "from", ".", "_exceptions", "import", "DirectoryNotFoundError", "raise", "DirectoryNotFoundError", "(", "source_dir", ")", "if", "Exists", "(", "target_dir", ")", ":", "from", ".", "_exceptions", "import", "DirectoryAlreadyExistsError", "raise", "DirectoryAlreadyExistsError", "(", "target_dir", ")", "from", "six", ".", "moves", ".", "urllib", ".", "parse", "import", "urlparse", "source_url", "=", "urlparse", "(", "source_dir", ")", "target_url", "=", "urlparse", "(", "target_dir", ")", "# Local to local", "if", "_UrlIsLocal", "(", "source_url", ")", "and", "_UrlIsLocal", "(", "target_url", ")", ":", "import", "shutil", "shutil", ".", "move", "(", "source_dir", ",", "target_dir", ")", "# FTP to FTP", "elif", "source_url", ".", "scheme", "==", "'ftp'", "and", "target_url", ".", "scheme", "==", "'ftp'", ":", "from", ".", "_exceptions", "import", "NotImplementedProtocol", "raise", "NotImplementedProtocol", "(", "target_url", ".", "scheme", ")", "else", ":", "raise", "NotImplementedError", "(", "'Can only move directories local->local or ftp->ftp'", ")" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
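A sketch of a local directory move, assuming the package-level re-export; the directories are hypothetical.

```python
from zerotk.easyfs import MoveDirectory  # assumed re-export

# The source must exist and the target must not, otherwise
# DirectoryNotFoundError / DirectoryAlreadyExistsError is raised.
MoveDirectory('./staging', './archive/latest')
```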
valid
GetFileContents
Reads a file and returns its contents. Works for both local and remote files. :param unicode filename: :param bool binary: If True returns the file as is, ignore any EOL conversion. :param unicode encoding: File's encoding. If not None, contents obtained from file will be decoded using this `encoding`. :param None|''|'\n'|'\r'|'\r\n' newline: Controls universal newlines. See 'io.open' newline parameter documentation for more details. :returns str|unicode: The file's contents. Returns unicode string when `encoding` is not None. .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information
zerotk/easyfs/_easyfs.py
def GetFileContents(filename, binary=False, encoding=None, newline=None):
    '''
    Reads a file and returns its contents. Works for both local and remote files.

    :param unicode filename:

    :param bool binary:
        If True returns the file as is, ignore any EOL conversion.

    :param unicode encoding:
        File's encoding. If not None, contents obtained from file will be decoded using this
        `encoding`.

    :param None|''|'\n'|'\r'|'\r\n' newline:
        Controls universal newlines.
        See 'io.open' newline parameter documentation for more details.

    :returns str|unicode:
        The file's contents.
        Returns unicode string when `encoding` is not None.

    .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information
    '''
    source_file = OpenFile(filename, binary=binary, encoding=encoding, newline=newline)
    try:
        contents = source_file.read()
    finally:
        source_file.close()

    return contents
[ "Reads", "a", "file", "and", "returns", "its", "contents", ".", "Works", "for", "both", "local", "and", "remote", "files", "." ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L843-L872
[ "def", "GetFileContents", "(", "filename", ",", "binary", "=", "False", ",", "encoding", "=", "None", ",", "newline", "=", "None", ")", ":", "source_file", "=", "OpenFile", "(", "filename", ",", "binary", "=", "binary", ",", "encoding", "=", "encoding", ",", "newline", "=", "newline", ")", "try", ":", "contents", "=", "source_file", ".", "read", "(", ")", "finally", ":", "source_file", ".", "close", "(", ")", "return", "contents" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
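A sketch of reading a file with GetFileContents, assuming the package-level re-export; the filenames are hypothetical.

```python
from zerotk.easyfs import GetFileContents  # assumed re-export

text = GetFileContents('./notes.md', encoding='utf-8')  # decoded unicode text
raw = GetFileContents('./logo.png', binary=True)        # raw bytes, no EOL handling
```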
valid
GetFileLines
Reads a file and returns its contents as a list of lines. Works for both local and remote files. :param unicode filename: :param None|''|'\n'|'\r'|'\r\n' newline: Controls universal newlines. See 'io.open' newline parameter documentation for more details. :param unicode encoding: File's encoding. If not None, contents obtained from file will be decoded using this `encoding`. :returns list(unicode): The file's lines .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information
zerotk/easyfs/_easyfs.py
def GetFileLines(filename, newline=None, encoding=None):
    '''
    Reads a file and returns its contents as a list of lines. Works for both local and remote files.

    :param unicode filename:

    :param None|''|'\n'|'\r'|'\r\n' newline:
        Controls universal newlines.
        See 'io.open' newline parameter documentation for more details.

    :param unicode encoding:
        File's encoding. If not None, contents obtained from file will be decoded using this
        `encoding`.

    :returns list(unicode):
        The file's lines

    .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information
    '''
    return GetFileContents(
        filename,
        binary=False,
        encoding=encoding,
        newline=newline,
    ).split('\n')
[ "Reads", "a", "file", "and", "returns", "its", "contents", "as", "a", "list", "of", "lines", ".", "Works", "for", "both", "local", "and", "remote", "files", "." ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L878-L902
[ "def", "GetFileLines", "(", "filename", ",", "newline", "=", "None", ",", "encoding", "=", "None", ")", ":", "return", "GetFileContents", "(", "filename", ",", "binary", "=", "False", ",", "encoding", "=", "encoding", ",", "newline", "=", "newline", ",", ")", ".", "split", "(", "'\\n'", ")" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
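A sketch of GetFileLines, assuming the package-level re-export; the filename is hypothetical.

```python
from zerotk.easyfs import GetFileLines  # assumed re-export

# The contents are split on '\n', so a trailing newline yields an empty last element.
for line in GetFileLines('./requirements.txt', encoding='utf-8'):
    print(line)
```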
valid
OpenFile
Open a file and returns it. Consider the possibility of a remote file (HTTP, HTTPS, FTP) :param unicode filename: Local or remote filename. :param bool binary: If True returns the file as is, ignore any EOL conversion. If set ignores univeral_newlines parameter. :param None|''|'\n'|'\r'|'\r\n' newline: Controls universal newlines. See 'io.open' newline parameter documentation for more details. :param unicode encoding: File's encoding. If not None, contents obtained from file will be decoded using this `encoding`. :returns file: The open file, it must be closed by the caller @raise: FileNotFoundError When the given filename cannot be found .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information
zerotk/easyfs/_easyfs.py
def OpenFile(filename, binary=False, newline=None, encoding=None):
    '''
    Open a file and returns it.
    Consider the possibility of a remote file (HTTP, HTTPS, FTP)

    :param unicode filename:
        Local or remote filename.

    :param bool binary:
        If True returns the file as is, ignore any EOL conversion.
        If set ignores universal newlines parameter.

    :param None|''|'\n'|'\r'|'\r\n' newline:
        Controls universal newlines.
        See 'io.open' newline parameter documentation for more details.

    :param unicode encoding:
        File's encoding. If not None, contents obtained from file will be decoded using this
        `encoding`.

    :returns file:
        The open file, it must be closed by the caller

    @raise: FileNotFoundError
        When the given filename cannot be found

    .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information
    '''
    from six.moves.urllib.parse import urlparse
    filename_url = urlparse(filename)

    # Check if file is local
    if _UrlIsLocal(filename_url):
        if not os.path.isfile(filename):
            from ._exceptions import FileNotFoundError
            raise FileNotFoundError(filename)

        mode = 'rb' if binary else 'r'
        return io.open(filename, mode, encoding=encoding, newline=newline)

    # Not local
    from ._exceptions import NotImplementedProtocol
    raise NotImplementedProtocol(filename_url.scheme)
[ "Open", "a", "file", "and", "returns", "it", ".", "Consider", "the", "possibility", "of", "a", "remote", "file", "(", "HTTP", "HTTPS", "FTP", ")" ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L905-L947
[ "def", "OpenFile", "(", "filename", ",", "binary", "=", "False", ",", "newline", "=", "None", ",", "encoding", "=", "None", ")", ":", "from", "six", ".", "moves", ".", "urllib", ".", "parse", "import", "urlparse", "filename_url", "=", "urlparse", "(", "filename", ")", "# Check if file is local", "if", "_UrlIsLocal", "(", "filename_url", ")", ":", "if", "not", "os", ".", "path", ".", "isfile", "(", "filename", ")", ":", "from", ".", "_exceptions", "import", "FileNotFoundError", "raise", "FileNotFoundError", "(", "filename", ")", "mode", "=", "'rb'", "if", "binary", "else", "'r'", "return", "io", ".", "open", "(", "filename", ",", "mode", ",", "encoding", "=", "encoding", ",", "newline", "=", "newline", ")", "# Not local", "from", ".", "_exceptions", "import", "NotImplementedProtocol", "raise", "NotImplementedProtocol", "(", "target_url", ".", "scheme", ")" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
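A minimal usage sketch for the OpenFile record above. It assumes the function is importable from zerotk.easyfs._easyfs (the module named in the record's path field); the temporary file and its contents are invented for illustration, and the caller still has to close the returned file object.

import tempfile

# Import path assumed from the record's "path" field.
from zerotk.easyfs._easyfs import OpenFile

# Prepare a throwaway local file so the sketch is self-contained.
with tempfile.NamedTemporaryFile(mode='w', suffix='.txt', delete=False) as tmp:
    tmp.write(u'alpha\nbravo\n')
    local_name = tmp.name

# OpenFile returns a plain file object; closing it is the caller's job.
f = OpenFile(local_name, encoding='utf-8')
try:
    print(f.read())   # -> 'alpha' and 'bravo' on two lines
finally:
    f.close()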
valid
ListFiles
Lists the files in the given directory

:type directory: unicode | unicode
:param directory:
    A directory or URL

:rtype: list(unicode) | list(unicode)
:returns:
    List of filenames/directories found in the given directory.
    Returns None if the given directory does not exist.
    If `directory` is a unicode string, all files returned will also be unicode

:raises NotImplementedProtocol:
    If file protocol is not local or FTP

.. seealso:: FTP LIMITATIONS at this module's doc for performance issues information
zerotk/easyfs/_easyfs.py
def ListFiles(directory):
    '''
    Lists the files in the given directory

    :type directory: unicode | unicode
    :param directory:
        A directory or URL

    :rtype: list(unicode) | list(unicode)
    :returns:
        List of filenames/directories found in the given directory.
        Returns None if the given directory does not exist.
        If `directory` is a unicode string, all files returned will also be unicode

    :raises NotImplementedProtocol:
        If file protocol is not local or FTP

    .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information
    '''
    from six.moves.urllib.parse import urlparse
    directory_url = urlparse(directory)

    # Handle local
    if _UrlIsLocal(directory_url):
        if not os.path.isdir(directory):
            return None
        return os.listdir(directory)

    # Handle FTP
    elif directory_url.scheme == 'ftp':
        from ._exceptions import NotImplementedProtocol
        raise NotImplementedProtocol(directory_url.scheme)
    else:
        from ._exceptions import NotImplementedProtocol
        raise NotImplementedProtocol(directory_url.scheme)
def ListFiles(directory):
    '''
    Lists the files in the given directory

    :type directory: unicode | unicode
    :param directory:
        A directory or URL

    :rtype: list(unicode) | list(unicode)
    :returns:
        List of filenames/directories found in the given directory.
        Returns None if the given directory does not exist.
        If `directory` is a unicode string, all files returned will also be unicode

    :raises NotImplementedProtocol:
        If file protocol is not local or FTP

    .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information
    '''
    from six.moves.urllib.parse import urlparse
    directory_url = urlparse(directory)

    # Handle local
    if _UrlIsLocal(directory_url):
        if not os.path.isdir(directory):
            return None
        return os.listdir(directory)

    # Handle FTP
    elif directory_url.scheme == 'ftp':
        from ._exceptions import NotImplementedProtocol
        raise NotImplementedProtocol(directory_url.scheme)
    else:
        from ._exceptions import NotImplementedProtocol
        raise NotImplementedProtocol(directory_url.scheme)
[ "Lists", "the", "files", "in", "the", "given", "directory" ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L954-L989
[ "def", "ListFiles", "(", "directory", ")", ":", "from", "six", ".", "moves", ".", "urllib", ".", "parse", "import", "urlparse", "directory_url", "=", "urlparse", "(", "directory", ")", "# Handle local", "if", "_UrlIsLocal", "(", "directory_url", ")", ":", "if", "not", "os", ".", "path", ".", "isdir", "(", "directory", ")", ":", "return", "None", "return", "os", ".", "listdir", "(", "directory", ")", "# Handle FTP", "elif", "directory_url", ".", "scheme", "==", "'ftp'", ":", "from", ".", "_exceptions", "import", "NotImplementedProtocol", "raise", "NotImplementedProtocol", "(", "directory_url", ".", "scheme", ")", "else", ":", "from", ".", "_exceptions", "import", "NotImplementedProtocol", "raise", "NotImplementedProtocol", "(", "directory_url", ".", "scheme", ")" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
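A short sketch for the ListFiles record above, again assuming the zerotk.easyfs._easyfs import path from the record; the directory layout is made up for illustration.

import os
import tempfile

# Import path assumed from the record's "path" field.
from zerotk.easyfs._easyfs import ListFiles

base = tempfile.mkdtemp()
open(os.path.join(base, 'a.txt'), 'w').close()
open(os.path.join(base, 'b.txt'), 'w').close()

print(sorted(ListFiles(base)))                   # -> ['a.txt', 'b.txt']
print(ListFiles(os.path.join(base, 'missing')))  # -> None, the directory does not exist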
valid
CreateFile
Create a file with the given contents. :param unicode filename: Filename and path to be created. :param unicode contents: The file contents as a string. :type eol_style: EOL_STYLE_XXX constant :param eol_style: Replaces the EOL by the appropriate EOL depending on the eol_style value. Considers that all content is using only "\n" as EOL. :param bool create_dir: If True, also creates directories needed in filename's path :param unicode encoding: Target file's content encoding. Defaults to sys.getfilesystemencoding() Ignored if `binary` = True :param bool binary: If True, file is created in binary mode. In this case, `contents` must be `bytes` and not `unicode` :return unicode: Returns the name of the file created. :raises NotImplementedProtocol: If file protocol is not local or FTP :raises ValueError: If trying to mix unicode `contents` without `encoding`, or `encoding` without unicode `contents` .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information
zerotk/easyfs/_easyfs.py
def CreateFile(filename, contents, eol_style=EOL_STYLE_NATIVE, create_dir=True, encoding=None, binary=False):
    '''
    Create a file with the given contents.

    :param unicode filename:
        Filename and path to be created.

    :param unicode contents:
        The file contents as a string.

    :type eol_style: EOL_STYLE_XXX constant
    :param eol_style:
        Replaces the EOL by the appropriate EOL depending on the eol_style value.
        Considers that all content is using only "\n" as EOL.

    :param bool create_dir:
        If True, also creates directories needed in filename's path

    :param unicode encoding:
        Target file's content encoding. Defaults to sys.getfilesystemencoding()
        Ignored if `binary` = True

    :param bool binary:
        If True, file is created in binary mode. In this case, `contents` must be `bytes` and not
        `unicode`

    :return unicode:
        Returns the name of the file created.

    :raises NotImplementedProtocol:
        If file protocol is not local or FTP

    :raises ValueError:
        If trying to mix unicode `contents` without `encoding`, or `encoding` without
        unicode `contents`

    .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information
    '''
    # Lots of checks when writing binary files
    if binary:
        if isinstance(contents, six.text_type):
            raise TypeError('contents must be str (bytes) when binary=True')
    else:
        if not isinstance(contents, six.text_type):
            raise TypeError('contents must be unicode when binary=False')

        # Replaces eol on each line by the given eol_style.
        contents = _HandleContentsEol(contents, eol_style)

        # Encode string and pretend we are using binary to prevent 'open' from automatically
        # changing Eols
        encoding = encoding or sys.getfilesystemencoding()
        contents = contents.encode(encoding)
        binary = True

    # If asked, creates directory containing file
    if create_dir:
        dirname = os.path.dirname(filename)
        if dirname:
            CreateDirectory(dirname)

    from six.moves.urllib.parse import urlparse
    filename_url = urlparse(filename)

    # Handle local
    if _UrlIsLocal(filename_url):
        # Always writing as binary (see handling above)
        with open(filename, 'wb') as oss:
            oss.write(contents)

    # Handle FTP
    elif filename_url.scheme == 'ftp':
        # Always writing as binary (see handling above)
        from ._exceptions import NotImplementedProtocol
        raise NotImplementedProtocol(filename_url.scheme)

    else:
        from ._exceptions import NotImplementedProtocol
        raise NotImplementedProtocol(filename_url.scheme)

    return filename
def CreateFile(filename, contents, eol_style=EOL_STYLE_NATIVE, create_dir=True, encoding=None, binary=False):
    '''
    Create a file with the given contents.

    :param unicode filename:
        Filename and path to be created.

    :param unicode contents:
        The file contents as a string.

    :type eol_style: EOL_STYLE_XXX constant
    :param eol_style:
        Replaces the EOL by the appropriate EOL depending on the eol_style value.
        Considers that all content is using only "\n" as EOL.

    :param bool create_dir:
        If True, also creates directories needed in filename's path

    :param unicode encoding:
        Target file's content encoding. Defaults to sys.getfilesystemencoding()
        Ignored if `binary` = True

    :param bool binary:
        If True, file is created in binary mode. In this case, `contents` must be `bytes` and not
        `unicode`

    :return unicode:
        Returns the name of the file created.

    :raises NotImplementedProtocol:
        If file protocol is not local or FTP

    :raises ValueError:
        If trying to mix unicode `contents` without `encoding`, or `encoding` without
        unicode `contents`

    .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information
    '''
    # Lots of checks when writing binary files
    if binary:
        if isinstance(contents, six.text_type):
            raise TypeError('contents must be str (bytes) when binary=True')
    else:
        if not isinstance(contents, six.text_type):
            raise TypeError('contents must be unicode when binary=False')

        # Replaces eol on each line by the given eol_style.
        contents = _HandleContentsEol(contents, eol_style)

        # Encode string and pretend we are using binary to prevent 'open' from automatically
        # changing Eols
        encoding = encoding or sys.getfilesystemencoding()
        contents = contents.encode(encoding)
        binary = True

    # If asked, creates directory containing file
    if create_dir:
        dirname = os.path.dirname(filename)
        if dirname:
            CreateDirectory(dirname)

    from six.moves.urllib.parse import urlparse
    filename_url = urlparse(filename)

    # Handle local
    if _UrlIsLocal(filename_url):
        # Always writing as binary (see handling above)
        with open(filename, 'wb') as oss:
            oss.write(contents)

    # Handle FTP
    elif filename_url.scheme == 'ftp':
        # Always writing as binary (see handling above)
        from ._exceptions import NotImplementedProtocol
        raise NotImplementedProtocol(filename_url.scheme)

    else:
        from ._exceptions import NotImplementedProtocol
        raise NotImplementedProtocol(filename_url.scheme)

    return filename
[ "Create", "a", "file", "with", "the", "given", "contents", "." ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L1034-L1113
[ "def", "CreateFile", "(", "filename", ",", "contents", ",", "eol_style", "=", "EOL_STYLE_NATIVE", ",", "create_dir", "=", "True", ",", "encoding", "=", "None", ",", "binary", "=", "False", ")", ":", "# Lots of checks when writing binary files", "if", "binary", ":", "if", "isinstance", "(", "contents", ",", "six", ".", "text_type", ")", ":", "raise", "TypeError", "(", "'contents must be str (bytes) when binary=True'", ")", "else", ":", "if", "not", "isinstance", "(", "contents", ",", "six", ".", "text_type", ")", ":", "raise", "TypeError", "(", "'contents must be unicode when binary=False'", ")", "# Replaces eol on each line by the given eol_style.", "contents", "=", "_HandleContentsEol", "(", "contents", ",", "eol_style", ")", "# Encode string and pretend we are using binary to prevent 'open' from automatically", "# changing Eols", "encoding", "=", "encoding", "or", "sys", ".", "getfilesystemencoding", "(", ")", "contents", "=", "contents", ".", "encode", "(", "encoding", ")", "binary", "=", "True", "# If asked, creates directory containing file", "if", "create_dir", ":", "dirname", "=", "os", ".", "path", ".", "dirname", "(", "filename", ")", "if", "dirname", ":", "CreateDirectory", "(", "dirname", ")", "from", "six", ".", "moves", ".", "urllib", ".", "parse", "import", "urlparse", "filename_url", "=", "urlparse", "(", "filename", ")", "# Handle local", "if", "_UrlIsLocal", "(", "filename_url", ")", ":", "# Always writing as binary (see handling above)", "with", "open", "(", "filename", ",", "'wb'", ")", "as", "oss", ":", "oss", ".", "write", "(", "contents", ")", "# Handle FTP", "elif", "filename_url", ".", "scheme", "==", "'ftp'", ":", "# Always writing as binary (see handling above)", "from", ".", "_exceptions", "import", "NotImplementedProtocol", "raise", "NotImplementedProtocol", "(", "directory_url", ".", "scheme", ")", "else", ":", "from", ".", "_exceptions", "import", "NotImplementedProtocol", "raise", "NotImplementedProtocol", "(", "filename_url", ".", "scheme", ")", "return", "filename" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
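A usage sketch for the CreateFile record above. The import path is taken from the record's path field; EOL_STYLE_UNIX and GetFileContents are assumed to exist in the same module because other records reference them. The target path is invented.

import os
import tempfile

# Import path and helper names assumed from this module's other records.
from zerotk.easyfs._easyfs import EOL_STYLE_UNIX, CreateFile, GetFileContents

target = os.path.join(tempfile.mkdtemp(), 'nested', 'dir', 'notes.txt')

# create_dir=True (the default) creates 'nested/dir' on demand; contents must be
# unicode text when binary=False and every "\n" is rewritten to the requested EOL.
created = CreateFile(target, u'line 1\nline 2\n', eol_style=EOL_STYLE_UNIX, encoding='utf-8')
assert created == target
print(GetFileContents(target, encoding='utf-8'))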
valid
ReplaceInFile
Replaces all occurrences of "old" by "new" in the given file. :param unicode filename: The name of the file. :param unicode old: The string to search for. :param unicode new: Replacement string. :return unicode: The new contents of the file.
zerotk/easyfs/_easyfs.py
def ReplaceInFile(filename, old, new, encoding=None): ''' Replaces all occurrences of "old" by "new" in the given file. :param unicode filename: The name of the file. :param unicode old: The string to search for. :param unicode new: Replacement string. :return unicode: The new contents of the file. ''' contents = GetFileContents(filename, encoding=encoding) contents = contents.replace(old, new) CreateFile(filename, contents, encoding=encoding) return contents
def ReplaceInFile(filename, old, new, encoding=None): ''' Replaces all occurrences of "old" by "new" in the given file. :param unicode filename: The name of the file. :param unicode old: The string to search for. :param unicode new: Replacement string. :return unicode: The new contents of the file. ''' contents = GetFileContents(filename, encoding=encoding) contents = contents.replace(old, new) CreateFile(filename, contents, encoding=encoding) return contents
[ "Replaces", "all", "occurrences", "of", "old", "by", "new", "in", "the", "given", "file", "." ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L1117-L1136
[ "def", "ReplaceInFile", "(", "filename", ",", "old", ",", "new", ",", "encoding", "=", "None", ")", ":", "contents", "=", "GetFileContents", "(", "filename", ",", "encoding", "=", "encoding", ")", "contents", "=", "contents", ".", "replace", "(", "old", ",", "new", ")", "CreateFile", "(", "filename", ",", "contents", ",", "encoding", "=", "encoding", ")", "return", "contents" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
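A sketch for the ReplaceInFile record above, assuming the usual zerotk.easyfs._easyfs import path; the config file and its contents are invented.

import os
import tempfile

# Import path assumed from the record's "path" field.
from zerotk.easyfs._easyfs import CreateFile, ReplaceInFile

config = os.path.join(tempfile.mkdtemp(), 'settings.ini')
CreateFile(config, u'host=localhost\nport=8080\n', encoding='ascii')

# The file is rewritten in place and the new contents are also returned.
print(ReplaceInFile(config, u'localhost', u'example.org', encoding='ascii'))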
valid
CreateDirectory
Create directory including any missing intermediate directory. :param unicode directory: :return unicode|urlparse.ParseResult: Returns the created directory or url (see urlparse). :raises NotImplementedProtocol: If protocol is not local or FTP. .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information
zerotk/easyfs/_easyfs.py
def CreateDirectory(directory): ''' Create directory including any missing intermediate directory. :param unicode directory: :return unicode|urlparse.ParseResult: Returns the created directory or url (see urlparse). :raises NotImplementedProtocol: If protocol is not local or FTP. .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information ''' from six.moves.urllib.parse import urlparse directory_url = urlparse(directory) # Handle local if _UrlIsLocal(directory_url): if not os.path.exists(directory): os.makedirs(directory) return directory # Handle FTP elif directory_url.scheme == 'ftp': from ._exceptions import NotImplementedProtocol raise NotImplementedProtocol(directory_url.scheme) else: from ._exceptions import NotImplementedProtocol raise NotImplementedProtocol(directory_url.scheme)
def CreateDirectory(directory): ''' Create directory including any missing intermediate directory. :param unicode directory: :return unicode|urlparse.ParseResult: Returns the created directory or url (see urlparse). :raises NotImplementedProtocol: If protocol is not local or FTP. .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information ''' from six.moves.urllib.parse import urlparse directory_url = urlparse(directory) # Handle local if _UrlIsLocal(directory_url): if not os.path.exists(directory): os.makedirs(directory) return directory # Handle FTP elif directory_url.scheme == 'ftp': from ._exceptions import NotImplementedProtocol raise NotImplementedProtocol(directory_url.scheme) else: from ._exceptions import NotImplementedProtocol raise NotImplementedProtocol(directory_url.scheme)
[ "Create", "directory", "including", "any", "missing", "intermediate", "directory", "." ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L1143-L1172
[ "def", "CreateDirectory", "(", "directory", ")", ":", "from", "six", ".", "moves", ".", "urllib", ".", "parse", "import", "urlparse", "directory_url", "=", "urlparse", "(", "directory", ")", "# Handle local", "if", "_UrlIsLocal", "(", "directory_url", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "directory", ")", ":", "os", ".", "makedirs", "(", "directory", ")", "return", "directory", "# Handle FTP", "elif", "directory_url", ".", "scheme", "==", "'ftp'", ":", "from", ".", "_exceptions", "import", "NotImplementedProtocol", "raise", "NotImplementedProtocol", "(", "directory_url", ".", "scheme", ")", "else", ":", "from", ".", "_exceptions", "import", "NotImplementedProtocol", "raise", "NotImplementedProtocol", "(", "directory_url", ".", "scheme", ")" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
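A sketch for the CreateDirectory record above, assuming the usual import path; the nested directory names are invented.

import os
import tempfile

# Import path assumed from the record's "path" field.
from zerotk.easyfs._easyfs import CreateDirectory

root = tempfile.mkdtemp()
created = CreateDirectory(os.path.join(root, 'build', 'artifacts', 'logs'))

# Intermediate directories are created as needed and the path is returned unchanged.
print(created, os.path.isdir(created))   # -> .../build/artifacts/logs True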
valid
DeleteDirectory
Deletes a directory. :param unicode directory: :param bool skip_on_error: If True, ignore any errors when trying to delete directory (for example, directory not found) :raises NotImplementedForRemotePathError: If trying to delete a remote directory.
zerotk/easyfs/_easyfs.py
def DeleteDirectory(directory, skip_on_error=False): ''' Deletes a directory. :param unicode directory: :param bool skip_on_error: If True, ignore any errors when trying to delete directory (for example, directory not found) :raises NotImplementedForRemotePathError: If trying to delete a remote directory. ''' _AssertIsLocal(directory) import shutil def OnError(fn, path, excinfo): ''' Remove the read-only flag and try to remove again. On Windows, rmtree fails when trying to remove a read-only file. This fix it! Another case: Read-only directories return True in os.access test. It seems that read-only directories has it own flag (looking at the property windows on Explorer). ''' if IsLink(path): return if fn is os.remove and os.access(path, os.W_OK): raise # Make the file WRITEABLE and executes the original delete function (osfunc) import stat os.chmod(path, stat.S_IWRITE) fn(path) try: if not os.path.isdir(directory): if skip_on_error: return from ._exceptions import DirectoryNotFoundError raise DirectoryNotFoundError(directory) shutil.rmtree(directory, onerror=OnError) except: if not skip_on_error: raise
def DeleteDirectory(directory, skip_on_error=False): ''' Deletes a directory. :param unicode directory: :param bool skip_on_error: If True, ignore any errors when trying to delete directory (for example, directory not found) :raises NotImplementedForRemotePathError: If trying to delete a remote directory. ''' _AssertIsLocal(directory) import shutil def OnError(fn, path, excinfo): ''' Remove the read-only flag and try to remove again. On Windows, rmtree fails when trying to remove a read-only file. This fix it! Another case: Read-only directories return True in os.access test. It seems that read-only directories has it own flag (looking at the property windows on Explorer). ''' if IsLink(path): return if fn is os.remove and os.access(path, os.W_OK): raise # Make the file WRITEABLE and executes the original delete function (osfunc) import stat os.chmod(path, stat.S_IWRITE) fn(path) try: if not os.path.isdir(directory): if skip_on_error: return from ._exceptions import DirectoryNotFoundError raise DirectoryNotFoundError(directory) shutil.rmtree(directory, onerror=OnError) except: if not skip_on_error: raise
[ "Deletes", "a", "directory", "." ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L1329-L1372
[ "def", "DeleteDirectory", "(", "directory", ",", "skip_on_error", "=", "False", ")", ":", "_AssertIsLocal", "(", "directory", ")", "import", "shutil", "def", "OnError", "(", "fn", ",", "path", ",", "excinfo", ")", ":", "'''\n Remove the read-only flag and try to remove again.\n On Windows, rmtree fails when trying to remove a read-only file. This fix it!\n Another case: Read-only directories return True in os.access test. It seems that read-only\n directories has it own flag (looking at the property windows on Explorer).\n '''", "if", "IsLink", "(", "path", ")", ":", "return", "if", "fn", "is", "os", ".", "remove", "and", "os", ".", "access", "(", "path", ",", "os", ".", "W_OK", ")", ":", "raise", "# Make the file WRITEABLE and executes the original delete function (osfunc)", "import", "stat", "os", ".", "chmod", "(", "path", ",", "stat", ".", "S_IWRITE", ")", "fn", "(", "path", ")", "try", ":", "if", "not", "os", ".", "path", ".", "isdir", "(", "directory", ")", ":", "if", "skip_on_error", ":", "return", "from", ".", "_exceptions", "import", "DirectoryNotFoundError", "raise", "DirectoryNotFoundError", "(", "directory", ")", "shutil", ".", "rmtree", "(", "directory", ",", "onerror", "=", "OnError", ")", "except", ":", "if", "not", "skip_on_error", ":", "raise" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
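A sketch for the DeleteDirectory record above, assuming the usual import path; the scratch directory is invented.

import os
import tempfile

# Import path assumed from the record's "path" field.
from zerotk.easyfs._easyfs import CreateDirectory, DeleteDirectory

victim = os.path.join(tempfile.mkdtemp(), 'scratch')
CreateDirectory(victim)

DeleteDirectory(victim)                      # removes the tree, clearing read-only flags on Windows
DeleteDirectory(victim, skip_on_error=True)  # already gone: silently ignored
# A third call without skip_on_error would raise DirectoryNotFoundError.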
valid
GetMTime
:param unicode path: Path to file or directory :rtype: float :returns: Modification time for path. If this is a directory, the highest mtime from files inside it will be returned. @note: In some Linux distros (such as CentOs, or anything with ext3), mtime will not return a value with resolutions higher than a second. http://stackoverflow.com/questions/2428556/os-path-getmtime-doesnt-return-fraction-of-a-second
zerotk/easyfs/_easyfs.py
def GetMTime(path): ''' :param unicode path: Path to file or directory :rtype: float :returns: Modification time for path. If this is a directory, the highest mtime from files inside it will be returned. @note: In some Linux distros (such as CentOs, or anything with ext3), mtime will not return a value with resolutions higher than a second. http://stackoverflow.com/questions/2428556/os-path-getmtime-doesnt-return-fraction-of-a-second ''' _AssertIsLocal(path) if os.path.isdir(path): files = FindFiles(path) if len(files) > 0: return max(map(os.path.getmtime, files)) return os.path.getmtime(path)
def GetMTime(path): ''' :param unicode path: Path to file or directory :rtype: float :returns: Modification time for path. If this is a directory, the highest mtime from files inside it will be returned. @note: In some Linux distros (such as CentOs, or anything with ext3), mtime will not return a value with resolutions higher than a second. http://stackoverflow.com/questions/2428556/os-path-getmtime-doesnt-return-fraction-of-a-second ''' _AssertIsLocal(path) if os.path.isdir(path): files = FindFiles(path) if len(files) > 0: return max(map(os.path.getmtime, files)) return os.path.getmtime(path)
[ ":", "param", "unicode", "path", ":", "Path", "to", "file", "or", "directory" ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L1379-L1404
[ "def", "GetMTime", "(", "path", ")", ":", "_AssertIsLocal", "(", "path", ")", "if", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "files", "=", "FindFiles", "(", "path", ")", "if", "len", "(", "files", ")", ">", "0", ":", "return", "max", "(", "map", "(", "os", ".", "path", ".", "getmtime", ",", "files", ")", ")", "return", "os", ".", "path", ".", "getmtime", "(", "path", ")" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
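A sketch for the GetMTime record above, assuming the usual import path; the sleep is only there to sidestep the coarse mtime resolution mentioned in the docstring.

import os
import tempfile
import time

# Import path assumed from the record's "path" field.
from zerotk.easyfs._easyfs import GetMTime

base = tempfile.mkdtemp()
open(os.path.join(base, 'old.txt'), 'w').close()
time.sleep(1)   # allow for coarse mtime resolution (see the note in the docstring)
open(os.path.join(base, 'new.txt'), 'w').close()

# For a directory, the highest mtime among the files found inside it is returned.
assert GetMTime(base) == os.path.getmtime(os.path.join(base, 'new.txt'))
print(GetMTime(os.path.join(base, 'old.txt')))   # a plain file returns its own mtime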
valid
ListMappedNetworkDrives
On Windows, returns a list of mapped network drives

:return: tuple(string, string, bool)
    For each mapped network drive, returns a 3-value tuple:
        - the local drive
        - the remote path
        - True if the mapping is enabled (warning: not reliable)
zerotk/easyfs/_easyfs.py
def ListMappedNetworkDrives():
    '''
    On Windows, returns a list of mapped network drives

    :return: tuple(string, string, bool)
        For each mapped network drive, returns a 3-value tuple:
            - the local drive
            - the remote path
            - True if the mapping is enabled (warning: not reliable)
    '''
    if sys.platform != 'win32':
        raise NotImplementedError
    drives_list = []
    netuse = _CallWindowsNetCommand(['use'])
    for line in netuse.split(EOL_STYLE_WINDOWS):
        match = re.match("(\w*)\s+(\w:)\s+(.+)", line.rstrip())
        if match:
            drives_list.append((match.group(2), match.group(3), match.group(1) == 'OK'))
    return drives_list
def ListMappedNetworkDrives():
    '''
    On Windows, returns a list of mapped network drives

    :return: tuple(string, string, bool)
        For each mapped network drive, returns a 3-value tuple:
            - the local drive
            - the remote path
            - True if the mapping is enabled (warning: not reliable)
    '''
    if sys.platform != 'win32':
        raise NotImplementedError
    drives_list = []
    netuse = _CallWindowsNetCommand(['use'])
    for line in netuse.split(EOL_STYLE_WINDOWS):
        match = re.match("(\w*)\s+(\w:)\s+(.+)", line.rstrip())
        if match:
            drives_list.append((match.group(2), match.group(3), match.group(1) == 'OK'))
    return drives_list
[ "On", "Windows", "returns", "a", "list", "of", "mapped", "network", "drives" ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L1411-L1429
[ "def", "ListMappedNetworkDrives", "(", ")", ":", "if", "sys", ".", "platform", "!=", "'win32'", ":", "raise", "NotImplementedError", "drives_list", "=", "[", "]", "netuse", "=", "_CallWindowsNetCommand", "(", "[", "'use'", "]", ")", "for", "line", "in", "netuse", ".", "split", "(", "EOL_STYLE_WINDOWS", ")", ":", "match", "=", "re", ".", "match", "(", "\"(\\w*)\\s+(\\w:)\\s+(.+)\"", ",", "line", ".", "rstrip", "(", ")", ")", "if", "match", ":", "drives_list", ".", "append", "(", "(", "match", ".", "group", "(", "2", ")", ",", "match", ".", "group", "(", "3", ")", ",", "match", ".", "group", "(", "1", ")", "==", "'OK'", ")", ")", "return", "drives_list" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
valid
CreateLink
Create a symbolic link at `link_path` pointing to `target_path`. :param unicode target_path: Link target :param unicode link_path: Fullpath to link name :param bool override: If True and `link_path` already exists as a link, that link is overridden.
zerotk/easyfs/_easyfs.py
def CreateLink(target_path, link_path, override=True): ''' Create a symbolic link at `link_path` pointing to `target_path`. :param unicode target_path: Link target :param unicode link_path: Fullpath to link name :param bool override: If True and `link_path` already exists as a link, that link is overridden. ''' _AssertIsLocal(target_path) _AssertIsLocal(link_path) if override and IsLink(link_path): DeleteLink(link_path) # Create directories leading up to link dirname = os.path.dirname(link_path) if dirname: CreateDirectory(dirname) if sys.platform != 'win32': return os.symlink(target_path, link_path) # @UndefinedVariable else: #import ntfsutils.junction #return ntfsutils.junction.create(target_path, link_path) import jaraco.windows.filesystem return jaraco.windows.filesystem.symlink(target_path, link_path) from ._easyfs_win32 import CreateSymbolicLink try: dw_flags = 0 if target_path and os.path.isdir(target_path): dw_flags = 1 return CreateSymbolicLink(target_path, link_path, dw_flags) except Exception as e: reraise(e, 'Creating link "%(link_path)s" pointing to "%(target_path)s"' % locals())
def CreateLink(target_path, link_path, override=True): ''' Create a symbolic link at `link_path` pointing to `target_path`. :param unicode target_path: Link target :param unicode link_path: Fullpath to link name :param bool override: If True and `link_path` already exists as a link, that link is overridden. ''' _AssertIsLocal(target_path) _AssertIsLocal(link_path) if override and IsLink(link_path): DeleteLink(link_path) # Create directories leading up to link dirname = os.path.dirname(link_path) if dirname: CreateDirectory(dirname) if sys.platform != 'win32': return os.symlink(target_path, link_path) # @UndefinedVariable else: #import ntfsutils.junction #return ntfsutils.junction.create(target_path, link_path) import jaraco.windows.filesystem return jaraco.windows.filesystem.symlink(target_path, link_path) from ._easyfs_win32 import CreateSymbolicLink try: dw_flags = 0 if target_path and os.path.isdir(target_path): dw_flags = 1 return CreateSymbolicLink(target_path, link_path, dw_flags) except Exception as e: reraise(e, 'Creating link "%(link_path)s" pointing to "%(target_path)s"' % locals())
[ "Create", "a", "symbolic", "link", "at", "link_path", "pointing", "to", "target_path", "." ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L1451-L1491
[ "def", "CreateLink", "(", "target_path", ",", "link_path", ",", "override", "=", "True", ")", ":", "_AssertIsLocal", "(", "target_path", ")", "_AssertIsLocal", "(", "link_path", ")", "if", "override", "and", "IsLink", "(", "link_path", ")", ":", "DeleteLink", "(", "link_path", ")", "# Create directories leading up to link", "dirname", "=", "os", ".", "path", ".", "dirname", "(", "link_path", ")", "if", "dirname", ":", "CreateDirectory", "(", "dirname", ")", "if", "sys", ".", "platform", "!=", "'win32'", ":", "return", "os", ".", "symlink", "(", "target_path", ",", "link_path", ")", "# @UndefinedVariable", "else", ":", "#import ntfsutils.junction", "#return ntfsutils.junction.create(target_path, link_path)", "import", "jaraco", ".", "windows", ".", "filesystem", "return", "jaraco", ".", "windows", ".", "filesystem", ".", "symlink", "(", "target_path", ",", "link_path", ")", "from", ".", "_easyfs_win32", "import", "CreateSymbolicLink", "try", ":", "dw_flags", "=", "0", "if", "target_path", "and", "os", ".", "path", ".", "isdir", "(", "target_path", ")", ":", "dw_flags", "=", "1", "return", "CreateSymbolicLink", "(", "target_path", ",", "link_path", ",", "dw_flags", ")", "except", "Exception", "as", "e", ":", "reraise", "(", "e", ",", "'Creating link \"%(link_path)s\" pointing to \"%(target_path)s\"'", "%", "locals", "(", ")", ")" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
valid
IsLink
:param unicode path: Path being tested :returns bool: True if `path` is a link
zerotk/easyfs/_easyfs.py
def IsLink(path): ''' :param unicode path: Path being tested :returns bool: True if `path` is a link ''' _AssertIsLocal(path) if sys.platform != 'win32': return os.path.islink(path) import jaraco.windows.filesystem return jaraco.windows.filesystem.islink(path)
def IsLink(path): ''' :param unicode path: Path being tested :returns bool: True if `path` is a link ''' _AssertIsLocal(path) if sys.platform != 'win32': return os.path.islink(path) import jaraco.windows.filesystem return jaraco.windows.filesystem.islink(path)
[ ":", "param", "unicode", "path", ":", "Path", "being", "tested" ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L1498-L1512
[ "def", "IsLink", "(", "path", ")", ":", "_AssertIsLocal", "(", "path", ")", "if", "sys", ".", "platform", "!=", "'win32'", ":", "return", "os", ".", "path", ".", "islink", "(", "path", ")", "import", "jaraco", ".", "windows", ".", "filesystem", "return", "jaraco", ".", "windows", ".", "filesystem", ".", "islink", "(", "path", ")" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
valid
ReadLink
Read the target of the symbolic link at `path`. :param unicode path: Path to a symbolic link :returns unicode: Target of a symbolic link
zerotk/easyfs/_easyfs.py
def ReadLink(path): ''' Read the target of the symbolic link at `path`. :param unicode path: Path to a symbolic link :returns unicode: Target of a symbolic link ''' _AssertIsLocal(path) if sys.platform != 'win32': return os.readlink(path) # @UndefinedVariable if not IsLink(path): from ._exceptions import FileNotFoundError raise FileNotFoundError(path) import jaraco.windows.filesystem result = jaraco.windows.filesystem.readlink(path) if '\\??\\' in result: result = result.split('\\??\\')[1] return result
def ReadLink(path): ''' Read the target of the symbolic link at `path`. :param unicode path: Path to a symbolic link :returns unicode: Target of a symbolic link ''' _AssertIsLocal(path) if sys.platform != 'win32': return os.readlink(path) # @UndefinedVariable if not IsLink(path): from ._exceptions import FileNotFoundError raise FileNotFoundError(path) import jaraco.windows.filesystem result = jaraco.windows.filesystem.readlink(path) if '\\??\\' in result: result = result.split('\\??\\')[1] return result
[ "Read", "the", "target", "of", "the", "symbolic", "link", "at", "path", "." ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L1519-L1542
[ "def", "ReadLink", "(", "path", ")", ":", "_AssertIsLocal", "(", "path", ")", "if", "sys", ".", "platform", "!=", "'win32'", ":", "return", "os", ".", "readlink", "(", "path", ")", "# @UndefinedVariable", "if", "not", "IsLink", "(", "path", ")", ":", "from", ".", "_exceptions", "import", "FileNotFoundError", "raise", "FileNotFoundError", "(", "path", ")", "import", "jaraco", ".", "windows", ".", "filesystem", "result", "=", "jaraco", ".", "windows", ".", "filesystem", ".", "readlink", "(", "path", ")", "if", "'\\\\??\\\\'", "in", "result", ":", "result", "=", "result", ".", "split", "(", "'\\\\??\\\\'", ")", "[", "1", "]", "return", "result" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
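The CreateLink, IsLink and ReadLink records above combine naturally; this sketch assumes a POSIX system (on Windows the call needs symlink privileges) and the usual zerotk.easyfs._easyfs import path. The release/current layout is invented.

import os
import tempfile

# Import path assumed from the record's "path" field; POSIX behaviour assumed.
from zerotk.easyfs._easyfs import CreateDirectory, CreateLink, IsLink, ReadLink

base = tempfile.mkdtemp()
target = os.path.join(base, 'releases', 'v1.2')
link = os.path.join(base, 'current')
CreateDirectory(target)

CreateLink(target, link)          # delegates to os.symlink outside Windows
print(IsLink(link))               # -> True
print(ReadLink(link) == target)   # -> True

# With override=True (the default) re-pointing an existing link just replaces it.
CreateLink(os.path.join(base, 'releases'), link)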
valid
_AssertIsLocal
Checks if a given path is local, raise an exception if not. This is used in filesystem functions that do not support remote operations yet. :param unicode path: :raises NotImplementedForRemotePathError: If the given path is not local
zerotk/easyfs/_easyfs.py
def _AssertIsLocal(path): ''' Checks if a given path is local, raise an exception if not. This is used in filesystem functions that do not support remote operations yet. :param unicode path: :raises NotImplementedForRemotePathError: If the given path is not local ''' from six.moves.urllib.parse import urlparse if not _UrlIsLocal(urlparse(path)): from ._exceptions import NotImplementedForRemotePathError raise NotImplementedForRemotePathError
def _AssertIsLocal(path): ''' Checks if a given path is local, raise an exception if not. This is used in filesystem functions that do not support remote operations yet. :param unicode path: :raises NotImplementedForRemotePathError: If the given path is not local ''' from six.moves.urllib.parse import urlparse if not _UrlIsLocal(urlparse(path)): from ._exceptions import NotImplementedForRemotePathError raise NotImplementedForRemotePathError
[ "Checks", "if", "a", "given", "path", "is", "local", "raise", "an", "exception", "if", "not", "." ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L1564-L1578
[ "def", "_AssertIsLocal", "(", "path", ")", ":", "from", "six", ".", "moves", ".", "urllib", ".", "parse", "import", "urlparse", "if", "not", "_UrlIsLocal", "(", "urlparse", "(", "path", ")", ")", ":", "from", ".", "_exceptions", "import", "NotImplementedForRemotePathError", "raise", "NotImplementedForRemotePathError" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
valid
_HandleContentsEol
Replaces eol on each line by the given eol_style. :param unicode contents: :type eol_style: EOL_STYLE_XXX constant :param eol_style:
zerotk/easyfs/_easyfs.py
def _HandleContentsEol(contents, eol_style): ''' Replaces eol on each line by the given eol_style. :param unicode contents: :type eol_style: EOL_STYLE_XXX constant :param eol_style: ''' if eol_style == EOL_STYLE_NONE: return contents if eol_style == EOL_STYLE_UNIX: return contents.replace('\r\n', eol_style).replace('\r', eol_style) if eol_style == EOL_STYLE_MAC: return contents.replace('\r\n', eol_style).replace('\n', eol_style) if eol_style == EOL_STYLE_WINDOWS: return contents.replace('\r\n', '\n').replace('\r', '\n').replace('\n', EOL_STYLE_WINDOWS) raise ValueError('Unexpected eol style: %r' % (eol_style,))
def _HandleContentsEol(contents, eol_style): ''' Replaces eol on each line by the given eol_style. :param unicode contents: :type eol_style: EOL_STYLE_XXX constant :param eol_style: ''' if eol_style == EOL_STYLE_NONE: return contents if eol_style == EOL_STYLE_UNIX: return contents.replace('\r\n', eol_style).replace('\r', eol_style) if eol_style == EOL_STYLE_MAC: return contents.replace('\r\n', eol_style).replace('\n', eol_style) if eol_style == EOL_STYLE_WINDOWS: return contents.replace('\r\n', '\n').replace('\r', '\n').replace('\n', EOL_STYLE_WINDOWS) raise ValueError('Unexpected eol style: %r' % (eol_style,))
[ "Replaces", "eol", "on", "each", "line", "by", "the", "given", "eol_style", "." ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L1581-L1601
[ "def", "_HandleContentsEol", "(", "contents", ",", "eol_style", ")", ":", "if", "eol_style", "==", "EOL_STYLE_NONE", ":", "return", "contents", "if", "eol_style", "==", "EOL_STYLE_UNIX", ":", "return", "contents", ".", "replace", "(", "'\\r\\n'", ",", "eol_style", ")", ".", "replace", "(", "'\\r'", ",", "eol_style", ")", "if", "eol_style", "==", "EOL_STYLE_MAC", ":", "return", "contents", ".", "replace", "(", "'\\r\\n'", ",", "eol_style", ")", ".", "replace", "(", "'\\n'", ",", "eol_style", ")", "if", "eol_style", "==", "EOL_STYLE_WINDOWS", ":", "return", "contents", ".", "replace", "(", "'\\r\\n'", ",", "'\\n'", ")", ".", "replace", "(", "'\\r'", ",", "'\\n'", ")", ".", "replace", "(", "'\\n'", ",", "EOL_STYLE_WINDOWS", ")", "raise", "ValueError", "(", "'Unexpected eol style: %r'", "%", "(", "eol_style", ",", ")", ")" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
valid
_CallWindowsNetCommand
Call Windows NET command, used to acquire/configure network services settings. :param parameters: list of command line parameters :return: command output
zerotk/easyfs/_easyfs.py
def _CallWindowsNetCommand(parameters): ''' Call Windows NET command, used to acquire/configure network services settings. :param parameters: list of command line parameters :return: command output ''' import subprocess popen = subprocess.Popen(["net"] + parameters, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) stdoutdata, stderrdata = popen.communicate() if stderrdata: raise OSError("Failed on call net.exe: %s" % stderrdata) return stdoutdata
def _CallWindowsNetCommand(parameters): ''' Call Windows NET command, used to acquire/configure network services settings. :param parameters: list of command line parameters :return: command output ''' import subprocess popen = subprocess.Popen(["net"] + parameters, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) stdoutdata, stderrdata = popen.communicate() if stderrdata: raise OSError("Failed on call net.exe: %s" % stderrdata) return stdoutdata
[ "Call", "Windows", "NET", "command", "used", "to", "acquire", "/", "configure", "network", "services", "settings", "." ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L1604-L1617
[ "def", "_CallWindowsNetCommand", "(", "parameters", ")", ":", "import", "subprocess", "popen", "=", "subprocess", ".", "Popen", "(", "[", "\"net\"", "]", "+", "parameters", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "STDOUT", ")", "stdoutdata", ",", "stderrdata", "=", "popen", ".", "communicate", "(", ")", "if", "stderrdata", ":", "raise", "OSError", "(", "\"Failed on call net.exe: %s\"", "%", "stderrdata", ")", "return", "stdoutdata" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
valid
CheckForUpdate
Checks if the given target filename should be re-generated because the source has changed. :param source: the source filename. :param target: the target filename. :return bool: True if the target is out-dated, False otherwise.
zerotk/easyfs/_easyfs.py
def CheckForUpdate(source, target): ''' Checks if the given target filename should be re-generated because the source has changed. :param source: the source filename. :param target: the target filename. :return bool: True if the target is out-dated, False otherwise. ''' return \ not os.path.isfile(target) or \ os.path.getmtime(source) > os.path.getmtime(target)
def CheckForUpdate(source, target): ''' Checks if the given target filename should be re-generated because the source has changed. :param source: the source filename. :param target: the target filename. :return bool: True if the target is out-dated, False otherwise. ''' return \ not os.path.isfile(target) or \ os.path.getmtime(source) > os.path.getmtime(target)
[ "Checks", "if", "the", "given", "target", "filename", "should", "be", "re", "-", "generated", "because", "the", "source", "has", "changed", ".", ":", "param", "source", ":", "the", "source", "filename", ".", ":", "param", "target", ":", "the", "target", "filename", ".", ":", "return", "bool", ":", "True", "if", "the", "target", "is", "out", "-", "dated", "False", "otherwise", "." ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L1689-L1699
[ "def", "CheckForUpdate", "(", "source", ",", "target", ")", ":", "return", "not", "os", ".", "path", ".", "isfile", "(", "target", ")", "or", "os", ".", "path", ".", "getmtime", "(", "source", ")", ">", "os", ".", "path", ".", "getmtime", "(", "target", ")" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
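A sketch for the CheckForUpdate record above, assuming the usual import path; the source/target pair mimics a tiny build step and is invented.

import os
import tempfile

# Import path assumed from the record's "path" field.
from zerotk.easyfs._easyfs import CheckForUpdate, CreateFile

base = tempfile.mkdtemp()
source = os.path.join(base, 'page.md')
target = os.path.join(base, 'page.html')

CreateFile(source, u'# hello\n', encoding='ascii')
print(CheckForUpdate(source, target))   # -> True, the target does not exist yet

CreateFile(target, u'<h1>hello</h1>\n', encoding='ascii')
print(CheckForUpdate(source, target))   # -> False, the target is at least as new as the source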
valid
MatchMasks
Verifies if a filename matches the given patterns.

:param str filename:
    The filename to match.

:param list(str) masks:
    The patterns to search in the filename.

:return bool:
    True if the filename matches at least one pattern, False otherwise.
zerotk/easyfs/_easyfs.py
def MatchMasks(filename, masks):
    '''
    Verifies if a filename matches the given patterns.

    :param str filename:
        The filename to match.

    :param list(str) masks:
        The patterns to search in the filename.

    :return bool:
        True if the filename matches at least one pattern, False otherwise.
    '''
    import fnmatch
    if not isinstance(masks, (list, tuple)):
        masks = [masks]

    for i_mask in masks:
        if fnmatch.fnmatch(filename, i_mask):
            return True

    return False
def MatchMasks(filename, masks):
    '''
    Verifies if a filename matches the given patterns.

    :param str filename:
        The filename to match.

    :param list(str) masks:
        The patterns to search in the filename.

    :return bool:
        True if the filename matches at least one pattern, False otherwise.
    '''
    import fnmatch
    if not isinstance(masks, (list, tuple)):
        masks = [masks]

    for i_mask in masks:
        if fnmatch.fnmatch(filename, i_mask):
            return True

    return False
[ "Verifies", "if", "a", "filename", "match", "with", "given", "patterns", "." ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L1706-L1723
[ "def", "MatchMasks", "(", "filename", ",", "masks", ")", ":", "import", "fnmatch", "if", "not", "isinstance", "(", "masks", ",", "(", "list", ",", "tuple", ")", ")", ":", "masks", "=", "[", "masks", "]", "for", "i_mask", "in", "masks", ":", "if", "fnmatch", ".", "fnmatch", "(", "filename", ",", "i_mask", ")", ":", "return", "True", "return", "False" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
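A sketch for the MatchMasks record above, assuming the usual import path; the filenames and masks are invented.

# Import path assumed from the record's "path" field.
from zerotk.easyfs._easyfs import MatchMasks

print(MatchMasks('report_2021.csv', ['*.csv', '*.tsv']))   # -> True
print(MatchMasks('report_2021.csv', '*.xml'))              # -> False; a single mask is accepted too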
valid
FindFiles
Searches for files in a given directory that match with the given patterns. :param str dir_: the directory root, to search the files. :param list(str) in_filters: a list with patterns to match (default = all). E.g.: ['*.py'] :param list(str) out_filters: a list with patterns to ignore (default = none). E.g.: ['*.py'] :param bool recursive: if True search in subdirectories, otherwise, just in the root. :param bool include_root_dir: if True, includes the directory being searched in the returned paths :param bool standard_paths: if True, always uses unix path separators "/" :return list(str): A list of strings with the files that matched (with the full path in the filesystem).
zerotk/easyfs/_easyfs.py
def FindFiles(dir_, in_filters=None, out_filters=None, recursive=True, include_root_dir=True, standard_paths=False): ''' Searches for files in a given directory that match with the given patterns. :param str dir_: the directory root, to search the files. :param list(str) in_filters: a list with patterns to match (default = all). E.g.: ['*.py'] :param list(str) out_filters: a list with patterns to ignore (default = none). E.g.: ['*.py'] :param bool recursive: if True search in subdirectories, otherwise, just in the root. :param bool include_root_dir: if True, includes the directory being searched in the returned paths :param bool standard_paths: if True, always uses unix path separators "/" :return list(str): A list of strings with the files that matched (with the full path in the filesystem). ''' # all files if in_filters is None: in_filters = ['*'] if out_filters is None: out_filters = [] result = [] # maintain just files that don't have a pattern that match with out_filters # walk through all directories based on dir for dir_root, directories, filenames in os.walk(dir_): for i_directory in directories[:]: if MatchMasks(i_directory, out_filters): directories.remove(i_directory) for filename in directories + filenames: if MatchMasks(filename, in_filters) and not MatchMasks(filename, out_filters): result.append(os.path.join(dir_root, filename)) if not recursive: break if not include_root_dir: # Remove root dir from all paths dir_prefix = len(dir_) + 1 result = [file[dir_prefix:] for file in result] if standard_paths: result = map(StandardizePath, result) return result
def FindFiles(dir_, in_filters=None, out_filters=None, recursive=True, include_root_dir=True, standard_paths=False): ''' Searches for files in a given directory that match with the given patterns. :param str dir_: the directory root, to search the files. :param list(str) in_filters: a list with patterns to match (default = all). E.g.: ['*.py'] :param list(str) out_filters: a list with patterns to ignore (default = none). E.g.: ['*.py'] :param bool recursive: if True search in subdirectories, otherwise, just in the root. :param bool include_root_dir: if True, includes the directory being searched in the returned paths :param bool standard_paths: if True, always uses unix path separators "/" :return list(str): A list of strings with the files that matched (with the full path in the filesystem). ''' # all files if in_filters is None: in_filters = ['*'] if out_filters is None: out_filters = [] result = [] # maintain just files that don't have a pattern that match with out_filters # walk through all directories based on dir for dir_root, directories, filenames in os.walk(dir_): for i_directory in directories[:]: if MatchMasks(i_directory, out_filters): directories.remove(i_directory) for filename in directories + filenames: if MatchMasks(filename, in_filters) and not MatchMasks(filename, out_filters): result.append(os.path.join(dir_root, filename)) if not recursive: break if not include_root_dir: # Remove root dir from all paths dir_prefix = len(dir_) + 1 result = [file[dir_prefix:] for file in result] if standard_paths: result = map(StandardizePath, result) return result
[ "Searches", "for", "files", "in", "a", "given", "directory", "that", "match", "with", "the", "given", "patterns", "." ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L1730-L1775
[ "def", "FindFiles", "(", "dir_", ",", "in_filters", "=", "None", ",", "out_filters", "=", "None", ",", "recursive", "=", "True", ",", "include_root_dir", "=", "True", ",", "standard_paths", "=", "False", ")", ":", "# all files", "if", "in_filters", "is", "None", ":", "in_filters", "=", "[", "'*'", "]", "if", "out_filters", "is", "None", ":", "out_filters", "=", "[", "]", "result", "=", "[", "]", "# maintain just files that don't have a pattern that match with out_filters", "# walk through all directories based on dir", "for", "dir_root", ",", "directories", ",", "filenames", "in", "os", ".", "walk", "(", "dir_", ")", ":", "for", "i_directory", "in", "directories", "[", ":", "]", ":", "if", "MatchMasks", "(", "i_directory", ",", "out_filters", ")", ":", "directories", ".", "remove", "(", "i_directory", ")", "for", "filename", "in", "directories", "+", "filenames", ":", "if", "MatchMasks", "(", "filename", ",", "in_filters", ")", "and", "not", "MatchMasks", "(", "filename", ",", "out_filters", ")", ":", "result", ".", "append", "(", "os", ".", "path", ".", "join", "(", "dir_root", ",", "filename", ")", ")", "if", "not", "recursive", ":", "break", "if", "not", "include_root_dir", ":", "# Remove root dir from all paths", "dir_prefix", "=", "len", "(", "dir_", ")", "+", "1", "result", "=", "[", "file", "[", "dir_prefix", ":", "]", "for", "file", "in", "result", "]", "if", "standard_paths", ":", "result", "=", "map", "(", "StandardizePath", ",", "result", ")", "return", "result" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
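A sketch for the FindFiles record above, assuming the usual import path; the directory tree and filters are invented.

import os
import tempfile

# Import path assumed from the record's "path" field.
from zerotk.easyfs._easyfs import FindFiles

base = tempfile.mkdtemp()
os.makedirs(os.path.join(base, 'src'))
os.makedirs(os.path.join(base, 'build'))
for name in ('src/app.py', 'src/readme.txt', 'build/out.txt'):
    open(os.path.join(base, name), 'w').close()

# Keep *.py files and prune anything matching the out-filters, including whole directories.
hits = FindFiles(base, in_filters=['*.py'], out_filters=['build'],
                 include_root_dir=False, standard_paths=True)
print(sorted(hits))   # -> ['src/app.py']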
valid
ExpandUser
os.path.expanduser wrapper, necessary because it cannot handle unicode strings properly. This is not necessary in Python 3. :param path: .. seealso:: os.path.expanduser
zerotk/easyfs/_easyfs.py
def ExpandUser(path): ''' os.path.expanduser wrapper, necessary because it cannot handle unicode strings properly. This is not necessary in Python 3. :param path: .. seealso:: os.path.expanduser ''' if six.PY2: encoding = sys.getfilesystemencoding() path = path.encode(encoding) result = os.path.expanduser(path) if six.PY2: result = result.decode(encoding) return result
def ExpandUser(path): ''' os.path.expanduser wrapper, necessary because it cannot handle unicode strings properly. This is not necessary in Python 3. :param path: .. seealso:: os.path.expanduser ''' if six.PY2: encoding = sys.getfilesystemencoding() path = path.encode(encoding) result = os.path.expanduser(path) if six.PY2: result = result.decode(encoding) return result
[ "os", ".", "path", ".", "expanduser", "wrapper", "necessary", "because", "it", "cannot", "handle", "unicode", "strings", "properly", "." ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L1782-L1797
[ "def", "ExpandUser", "(", "path", ")", ":", "if", "six", ".", "PY2", ":", "encoding", "=", "sys", ".", "getfilesystemencoding", "(", ")", "path", "=", "path", ".", "encode", "(", "encoding", ")", "result", "=", "os", ".", "path", ".", "expanduser", "(", "path", ")", "if", "six", ".", "PY2", ":", "result", "=", "result", ".", "decode", "(", "encoding", ")", "return", "result" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
valid
DumpDirHashToStringIO
Helper to iterate over the files in a directory putting those in the passed StringIO in ini format. :param unicode directory: The directory for which the hash should be done. :param StringIO stringio: The string to which the dump should be put. :param unicode base: If provided should be added (along with a '/') before the name=hash of file. :param unicode exclude: Pattern to match files to exclude from the hashing. E.g.: *.gz :param unicode include: Pattern to match files to include in the hashing. E.g.: *.zip
zerotk/easyfs/_easyfs.py
def DumpDirHashToStringIO(directory, stringio, base='', exclude=None, include=None): ''' Helper to iterate over the files in a directory putting those in the passed StringIO in ini format. :param unicode directory: The directory for which the hash should be done. :param StringIO stringio: The string to which the dump should be put. :param unicode base: If provided should be added (along with a '/') before the name=hash of file. :param unicode exclude: Pattern to match files to exclude from the hashing. E.g.: *.gz :param unicode include: Pattern to match files to include in the hashing. E.g.: *.zip ''' import fnmatch import os files = [(os.path.join(directory, i), i) for i in os.listdir(directory)] files = [i for i in files if os.path.isfile(i[0])] for fullname, filename in files: if include is not None: if not fnmatch.fnmatch(fullname, include): continue if exclude is not None: if fnmatch.fnmatch(fullname, exclude): continue md5 = Md5Hex(fullname) if base: stringio.write('%s/%s=%s\n' % (base, filename, md5)) else: stringio.write('%s=%s\n' % (filename, md5))
def DumpDirHashToStringIO(directory, stringio, base='', exclude=None, include=None): ''' Helper to iterate over the files in a directory putting those in the passed StringIO in ini format. :param unicode directory: The directory for which the hash should be done. :param StringIO stringio: The string to which the dump should be put. :param unicode base: If provided should be added (along with a '/') before the name=hash of file. :param unicode exclude: Pattern to match files to exclude from the hashing. E.g.: *.gz :param unicode include: Pattern to match files to include in the hashing. E.g.: *.zip ''' import fnmatch import os files = [(os.path.join(directory, i), i) for i in os.listdir(directory)] files = [i for i in files if os.path.isfile(i[0])] for fullname, filename in files: if include is not None: if not fnmatch.fnmatch(fullname, include): continue if exclude is not None: if fnmatch.fnmatch(fullname, exclude): continue md5 = Md5Hex(fullname) if base: stringio.write('%s/%s=%s\n' % (base, filename, md5)) else: stringio.write('%s=%s\n' % (filename, md5))
[ "Helper", "to", "iterate", "over", "the", "files", "in", "a", "directory", "putting", "those", "in", "the", "passed", "StringIO", "in", "ini", "format", "." ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L1804-L1842
[ "def", "DumpDirHashToStringIO", "(", "directory", ",", "stringio", ",", "base", "=", "''", ",", "exclude", "=", "None", ",", "include", "=", "None", ")", ":", "import", "fnmatch", "import", "os", "files", "=", "[", "(", "os", ".", "path", ".", "join", "(", "directory", ",", "i", ")", ",", "i", ")", "for", "i", "in", "os", ".", "listdir", "(", "directory", ")", "]", "files", "=", "[", "i", "for", "i", "in", "files", "if", "os", ".", "path", ".", "isfile", "(", "i", "[", "0", "]", ")", "]", "for", "fullname", ",", "filename", "in", "files", ":", "if", "include", "is", "not", "None", ":", "if", "not", "fnmatch", ".", "fnmatch", "(", "fullname", ",", "include", ")", ":", "continue", "if", "exclude", "is", "not", "None", ":", "if", "fnmatch", ".", "fnmatch", "(", "fullname", ",", "exclude", ")", ":", "continue", "md5", "=", "Md5Hex", "(", "fullname", ")", "if", "base", ":", "stringio", ".", "write", "(", "'%s/%s=%s\\n'", "%", "(", "base", ",", "filename", ",", "md5", ")", ")", "else", ":", "stringio", ".", "write", "(", "'%s=%s\\n'", "%", "(", "filename", ",", "md5", ")", ")" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
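A sketch for the DumpDirHashToStringIO record above, assuming Python 3 (where io.StringIO accepts the str the function writes) and the usual import path; the file names and the 'bundle' base label are invented.

import io
import os
import tempfile

# Import path assumed from the record's "path" field; Python 3 string semantics assumed.
from zerotk.easyfs._easyfs import CreateFile, DumpDirHashToStringIO

base = tempfile.mkdtemp()
CreateFile(os.path.join(base, 'a.txt'), u'alpha', encoding='ascii')
CreateFile(os.path.join(base, 'b.gz'), u'compressed-ish', encoding='ascii')

buf = io.StringIO()
# Only top-level files are hashed; exclude/include are fnmatch patterns on the full path.
DumpDirHashToStringIO(base, buf, base='bundle', exclude='*.gz')
print(buf.getvalue())   # -> bundle/a.txt=<md5 hex digest>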
valid
Md5Hex
:param unicode filename: The file from which the md5 should be calculated. If the filename is given, the contents should NOT be given. :param unicode contents: The contents for which the md5 should be calculated. If the contents are given, the filename should NOT be given. :rtype: unicode :returns: Returns a string with the hex digest of the stream.
zerotk/easyfs/_easyfs.py
def Md5Hex(filename=None, contents=None): ''' :param unicode filename: The file from which the md5 should be calculated. If the filename is given, the contents should NOT be given. :param unicode contents: The contents for which the md5 should be calculated. If the contents are given, the filename should NOT be given. :rtype: unicode :returns: Returns a string with the hex digest of the stream. ''' import io import hashlib md5 = hashlib.md5() if filename: stream = io.open(filename, 'rb') try: while True: data = stream.read(md5.block_size * 128) if not data: break md5.update(data) finally: stream.close() else: md5.update(contents) return six.text_type(md5.hexdigest())
def Md5Hex(filename=None, contents=None): ''' :param unicode filename: The file from which the md5 should be calculated. If the filename is given, the contents should NOT be given. :param unicode contents: The contents for which the md5 should be calculated. If the contents are given, the filename should NOT be given. :rtype: unicode :returns: Returns a string with the hex digest of the stream. ''' import io import hashlib md5 = hashlib.md5() if filename: stream = io.open(filename, 'rb') try: while True: data = stream.read(md5.block_size * 128) if not data: break md5.update(data) finally: stream.close() else: md5.update(contents) return six.text_type(md5.hexdigest())
[ ":", "param", "unicode", "filename", ":", "The", "file", "from", "which", "the", "md5", "should", "be", "calculated", ".", "If", "the", "filename", "is", "given", "the", "contents", "should", "NOT", "be", "given", "." ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L1849-L1881
[ "def", "Md5Hex", "(", "filename", "=", "None", ",", "contents", "=", "None", ")", ":", "import", "io", "import", "hashlib", "md5", "=", "hashlib", ".", "md5", "(", ")", "if", "filename", ":", "stream", "=", "io", ".", "open", "(", "filename", ",", "'rb'", ")", "try", ":", "while", "True", ":", "data", "=", "stream", ".", "read", "(", "md5", ".", "block_size", "*", "128", ")", "if", "not", "data", ":", "break", "md5", ".", "update", "(", "data", ")", "finally", ":", "stream", ".", "close", "(", ")", "else", ":", "md5", ".", "update", "(", "contents", ")", "return", "six", ".", "text_type", "(", "md5", ".", "hexdigest", "(", ")", ")" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
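A sketch for the Md5Hex record above, assuming the usual import path; the sample payload is invented.

import tempfile

# Import path assumed from the record's "path" field.
from zerotk.easyfs._easyfs import Md5Hex

# Hash in-memory contents; bytes are expected because they go straight to hashlib.
print(Md5Hex(contents=b'spam and eggs'))

# Hash a file on disk; it is read in chunks, so large files are fine.
with tempfile.NamedTemporaryFile(delete=False) as tmp:
    tmp.write(b'spam and eggs')
print(Md5Hex(filename=tmp.name))   # same digest as above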
valid
IterHashes
Iterator for random hexadecimal hashes :param iterator_size: Amount of hashes returned before this iterator stops. Goes on forever if `iterator_size` is negative. :param int hash_length: Size of each hash returned. :return generator(unicode):
zerotk/easyfs/_easyfs.py
def IterHashes(iterator_size, hash_length=7): ''' Iterator for random hexadecimal hashes :param iterator_size: Amount of hashes return before this iterator stops. Goes on forever if `iterator_size` is negative. :param int hash_length: Size of each hash returned. :return generator(unicode): ''' if not isinstance(iterator_size, int): raise TypeError('iterator_size must be integer.') count = 0 while count != iterator_size: count += 1 yield GetRandomHash(hash_length)
def IterHashes(iterator_size, hash_length=7): ''' Iterator for random hexadecimal hashes :param iterator_size: Amount of hashes return before this iterator stops. Goes on forever if `iterator_size` is negative. :param int hash_length: Size of each hash returned. :return generator(unicode): ''' if not isinstance(iterator_size, int): raise TypeError('iterator_size must be integer.') count = 0 while count != iterator_size: count += 1 yield GetRandomHash(hash_length)
[ "Iterator", "for", "random", "hexadecimal", "hashes" ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L1904-L1923
[ "def", "IterHashes", "(", "iterator_size", ",", "hash_length", "=", "7", ")", ":", "if", "not", "isinstance", "(", "iterator_size", ",", "int", ")", ":", "raise", "TypeError", "(", "'iterator_size must be integer.'", ")", "count", "=", "0", "while", "count", "!=", "iterator_size", ":", "count", "+=", "1", "yield", "GetRandomHash", "(", "hash_length", ")" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
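IterHashes delegates to a GetRandomHash helper that is not shown in this record. A standalone sketch of the same iterator contract (a negative size means run forever), with secrets.token_hex standing in for the missing helper:

import secrets

def iter_hashes(iterator_size, hash_length=7):
    """Yield random hexadecimal strings; never stop when iterator_size is negative."""
    if not isinstance(iterator_size, int):
        raise TypeError('iterator_size must be integer.')
    count = 0
    while count != iterator_size:
        count += 1
        # token_hex(n) returns 2*n hex characters, so over-generate and trim.
        yield secrets.token_hex(hash_length)[:hash_length]

print(list(iter_hashes(3)))   # three 7-character hashes; iter_hashes(-1) never terminates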
valid
PushPopItem
A context manager to replace and restore a value using a getter and setter. :param object obj: The object to replace/restore. :param object key: The key to replace/restore in the object. :param object value: The value to replace. Example:: with PushPop2(sys.modules, 'alpha', None): pytest.raises(ImportError): import alpha
zerotk/easyfs/_easyfs.py
def PushPopItem(obj, key, value): ''' A context manager to replace and restore a value using a getter and setter. :param object obj: The object to replace/restore. :param object key: The key to replace/restore in the object. :param object value: The value to replace. Example:: with PushPop2(sys.modules, 'alpha', None): pytest.raises(ImportError): import alpha ''' if key in obj: old_value = obj[key] obj[key] = value yield value obj[key] = old_value else: obj[key] = value yield value del obj[key]
def PushPopItem(obj, key, value): ''' A context manager to replace and restore a value using a getter and setter. :param object obj: The object to replace/restore. :param object key: The key to replace/restore in the object. :param object value: The value to replace. Example:: with PushPop2(sys.modules, 'alpha', None): pytest.raises(ImportError): import alpha ''' if key in obj: old_value = obj[key] obj[key] = value yield value obj[key] = old_value else: obj[key] = value yield value del obj[key]
[ "A", "context", "manager", "to", "replace", "and", "restore", "a", "value", "using", "a", "getter", "and", "setter", "." ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L1930-L1953
[ "def", "PushPopItem", "(", "obj", ",", "key", ",", "value", ")", ":", "if", "key", "in", "obj", ":", "old_value", "=", "obj", "[", "key", "]", "obj", "[", "key", "]", "=", "value", "yield", "value", "obj", "[", "key", "]", "=", "old_value", "else", ":", "obj", "[", "key", "]", "=", "value", "yield", "value", "del", "obj", "[", "key", "]" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
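PushPopItem is written as a generator, so it presumably carries a contextlib.contextmanager decorator elsewhere in the module; note also that its docstring example refers to a PushPop2 name and drops the `with` before pytest.raises. A self-contained sketch of the intended usage pattern:

import contextlib

@contextlib.contextmanager
def push_pop_item(obj, key, value):
    """Temporarily set obj[key] = value, restoring (or deleting) the key afterwards."""
    if key in obj:
        old_value = obj[key]
        obj[key] = value
        yield value
        obj[key] = old_value
    else:
        obj[key] = value
        yield value
        del obj[key]

config = {'mode': 'prod'}
with push_pop_item(config, 'mode', 'test'):
    assert config['mode'] == 'test'
assert config['mode'] == 'prod'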
valid
ExtendedPathMask.Split
Splits the given path into its components: recursive, dirname, in_filters and out_filters :param str extended_path_mask: The "extended path mask" to split :rtype: tuple(bool,bool,str,list(str),list(str)) :returns: Returns the extended path 5 components: - The tree-recurse flag - The flat-recurse flag - The actual path - A list of masks to include - A list of masks to exclude
zerotk/easyfs/_easyfs.py
def Split(cls, extended_path_mask): ''' Splits the given path into their components: recursive, dirname, in_filters and out_filters :param str: extended_path_mask: The "extended path mask" to split :rtype: tuple(bool,bool,str,list(str),list(str)) :returns: Returns the extended path 5 components: - The tree-recurse flag - The flat-recurse flag - The actual path - A list of masks to include - A list of masks to exclude ''' import os.path r_tree_recurse = extended_path_mask[0] in '+-' r_flat_recurse = extended_path_mask[0] in '-' r_dirname, r_filters = os.path.split(extended_path_mask) if r_tree_recurse: r_dirname = r_dirname[1:] filters = r_filters.split(';') r_in_filters = [i for i in filters if not i.startswith('!')] r_out_filters = [i[1:] for i in filters if i.startswith('!')] return r_tree_recurse, r_flat_recurse, r_dirname, r_in_filters, r_out_filters
def Split(cls, extended_path_mask): ''' Splits the given path into their components: recursive, dirname, in_filters and out_filters :param str: extended_path_mask: The "extended path mask" to split :rtype: tuple(bool,bool,str,list(str),list(str)) :returns: Returns the extended path 5 components: - The tree-recurse flag - The flat-recurse flag - The actual path - A list of masks to include - A list of masks to exclude ''' import os.path r_tree_recurse = extended_path_mask[0] in '+-' r_flat_recurse = extended_path_mask[0] in '-' r_dirname, r_filters = os.path.split(extended_path_mask) if r_tree_recurse: r_dirname = r_dirname[1:] filters = r_filters.split(';') r_in_filters = [i for i in filters if not i.startswith('!')] r_out_filters = [i[1:] for i in filters if i.startswith('!')] return r_tree_recurse, r_flat_recurse, r_dirname, r_in_filters, r_out_filters
[ "Splits", "the", "given", "path", "into", "their", "components", ":", "recursive", "dirname", "in_filters", "and", "out_filters" ]
zerotk/easyfs
python
https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L1654-L1682
[ "def", "Split", "(", "cls", ",", "extended_path_mask", ")", ":", "import", "os", ".", "path", "r_tree_recurse", "=", "extended_path_mask", "[", "0", "]", "in", "'+-'", "r_flat_recurse", "=", "extended_path_mask", "[", "0", "]", "in", "'-'", "r_dirname", ",", "r_filters", "=", "os", ".", "path", ".", "split", "(", "extended_path_mask", ")", "if", "r_tree_recurse", ":", "r_dirname", "=", "r_dirname", "[", "1", ":", "]", "filters", "=", "r_filters", ".", "split", "(", "';'", ")", "r_in_filters", "=", "[", "i", "for", "i", "in", "filters", "if", "not", "i", ".", "startswith", "(", "'!'", ")", "]", "r_out_filters", "=", "[", "i", "[", "1", ":", "]", "for", "i", "in", "filters", "if", "i", ".", "startswith", "(", "'!'", ")", "]", "return", "r_tree_recurse", ",", "r_flat_recurse", ",", "r_dirname", ",", "r_in_filters", ",", "r_out_filters" ]
140923db51fb91d5a5847ad17412e8bce51ba3da
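The quickest way to see the five components is to run the same splitting logic on a sample mask; the mask string below is invented for illustration:

import os.path

def split_mask(extended_path_mask):
    """Mirror of ExtendedPathMask.Split from the record above."""
    tree_recurse = extended_path_mask[0] in '+-'
    flat_recurse = extended_path_mask[0] in '-'
    dirname, raw_filters = os.path.split(extended_path_mask)
    if tree_recurse:
        dirname = dirname[1:]
    filters = raw_filters.split(';')
    in_filters = [f for f in filters if not f.startswith('!')]
    out_filters = [f[1:] for f in filters if f.startswith('!')]
    return tree_recurse, flat_recurse, dirname, in_filters, out_filters

# A leading '+' requests tree recursion; '!' marks masks to exclude.
print(split_mask('+/tmp/project/*.py;!*_test.py'))
# -> (True, False, '/tmp/project', ['*.py'], ['*_test.py'])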
valid
MessageFactory.GetMessages
Gets all the messages from a specified file. This will find and resolve dependencies, failing if the descriptor pool cannot satisfy them. Args: files: The file names to extract messages from. Returns: A dictionary mapping proto names to the message classes. This will include any dependent messages as well as any messages defined in the same file as a specified message.
typy/google/protobuf/message_factory.py
def GetMessages(self, files): """Gets all the messages from a specified file. This will find and resolve dependencies, failing if the descriptor pool cannot satisfy them. Args: files: The file names to extract messages from. Returns: A dictionary mapping proto names to the message classes. This will include any dependent messages as well as any messages defined in the same file as a specified message. """ result = {} for file_name in files: file_desc = self.pool.FindFileByName(file_name) for name, msg in file_desc.message_types_by_name.items(): if file_desc.package: full_name = '.'.join([file_desc.package, name]) else: full_name = msg.name result[full_name] = self.GetPrototype( self.pool.FindMessageTypeByName(full_name)) # While the extension FieldDescriptors are created by the descriptor pool, # the python classes created in the factory need them to be registered # explicitly, which is done below. # # The call to RegisterExtension will specifically check if the # extension was already registered on the object and either # ignore the registration if the original was the same, or raise # an error if they were different. for name, extension in file_desc.extensions_by_name.items(): if extension.containing_type.full_name not in self._classes: self.GetPrototype(extension.containing_type) extended_class = self._classes[extension.containing_type.full_name] extended_class.RegisterExtension(extension) return result
def GetMessages(self, files): """Gets all the messages from a specified file. This will find and resolve dependencies, failing if the descriptor pool cannot satisfy them. Args: files: The file names to extract messages from. Returns: A dictionary mapping proto names to the message classes. This will include any dependent messages as well as any messages defined in the same file as a specified message. """ result = {} for file_name in files: file_desc = self.pool.FindFileByName(file_name) for name, msg in file_desc.message_types_by_name.items(): if file_desc.package: full_name = '.'.join([file_desc.package, name]) else: full_name = msg.name result[full_name] = self.GetPrototype( self.pool.FindMessageTypeByName(full_name)) # While the extension FieldDescriptors are created by the descriptor pool, # the python classes created in the factory need them to be registered # explicitly, which is done below. # # The call to RegisterExtension will specifically check if the # extension was already registered on the object and either # ignore the registration if the original was the same, or raise # an error if they were different. for name, extension in file_desc.extensions_by_name.items(): if extension.containing_type.full_name not in self._classes: self.GetPrototype(extension.containing_type) extended_class = self._classes[extension.containing_type.full_name] extended_class.RegisterExtension(extension) return result
[ "Gets", "all", "the", "messages", "from", "a", "specified", "file", "." ]
ibelie/typy
python
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/message_factory.py#L89-L128
[ "def", "GetMessages", "(", "self", ",", "files", ")", ":", "result", "=", "{", "}", "for", "file_name", "in", "files", ":", "file_desc", "=", "self", ".", "pool", ".", "FindFileByName", "(", "file_name", ")", "for", "name", ",", "msg", "in", "file_desc", ".", "message_types_by_name", ".", "items", "(", ")", ":", "if", "file_desc", ".", "package", ":", "full_name", "=", "'.'", ".", "join", "(", "[", "file_desc", ".", "package", ",", "name", "]", ")", "else", ":", "full_name", "=", "msg", ".", "name", "result", "[", "full_name", "]", "=", "self", ".", "GetPrototype", "(", "self", ".", "pool", ".", "FindMessageTypeByName", "(", "full_name", ")", ")", "# While the extension FieldDescriptors are created by the descriptor pool,", "# the python classes created in the factory need them to be registered", "# explicitly, which is done below.", "#", "# The call to RegisterExtension will specifically check if the", "# extension was already registered on the object and either", "# ignore the registration if the original was the same, or raise", "# an error if they were different.", "for", "name", ",", "extension", "in", "file_desc", ".", "extensions_by_name", ".", "items", "(", ")", ":", "if", "extension", ".", "containing_type", ".", "full_name", "not", "in", "self", ".", "_classes", ":", "self", ".", "GetPrototype", "(", "extension", ".", "containing_type", ")", "extended_class", "=", "self", ".", "_classes", "[", "extension", ".", "containing_type", ".", "full_name", "]", "extended_class", ".", "RegisterExtension", "(", "extension", ")", "return", "result" ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
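This record is typy's vendored copy of protobuf's message factory; the stock google.protobuf distribution ships the same GetMessages entry point (older releases expose it on MessageFactory instances, newer ones favour module-level helpers such as GetMessageClassesForFiles). A sketch that builds a tiny file descriptor in memory and asks the factory for the generated class; the 'demo.Greeting' message is invented for the example:

from google.protobuf import descriptor_pb2, descriptor_pool, message_factory

pool = descriptor_pool.DescriptorPool()
fdp = descriptor_pb2.FileDescriptorProto(name='example.proto', package='demo')
msg = fdp.message_type.add(name='Greeting')
msg.field.add(name='text', number=1,
              type=descriptor_pb2.FieldDescriptorProto.TYPE_STRING,
              label=descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL)
pool.Add(fdp)

factory = message_factory.MessageFactory(pool)
classes = factory.GetMessages(['example.proto'])   # {'demo.Greeting': <message class>}
greeting = classes['demo.Greeting'](text='hello')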
valid
multinest
**MultiNest Nested Sampling** via `PyMultiNest <http://johannesbuchner.github.com/PyMultiNest/index.html>`_. :param parameter_names: name of parameters; not directly used here, but for multinest_marginal.py plotting tool.
jbopt/mn.py
def multinest(parameter_names, transform, loglikelihood, output_basename, **problem): """ **MultiNest Nested Sampling** via `PyMultiNest <http://johannesbuchner.github.com/PyMultiNest/index.html>`_. :param parameter_names: name of parameters; not directly used here, but for multinest_marginal.py plotting tool. """ import numpy from numpy import log, exp import pymultinest # n observations # number of dimensions our problem has parameters = parameter_names n_params = len(parameters) def myprior(cube, ndim, nparams): params = transform([cube[i] for i in range(ndim)]) for i in range(ndim): cube[i] = params[i] def myloglike(cube, ndim, nparams): l = loglikelihood([cube[i] for i in range(ndim)]) return l # run MultiNest mn_args = dict( importance_nested_sampling = False, outputfiles_basename = output_basename, resume = problem.get('resume', False), verbose = True, n_live_points = problem.get('n_live_points', 400), const_efficiency_mode = False) if 'seed' in problem: mn_args['seed'] = problem['seed'] pymultinest.run(myloglike, myprior, n_params, **mn_args) import json # store name of parameters, always useful with file('%sparams.json' % output_basename, 'w') as f: json.dump(parameters, f, indent=2) # analyse a = pymultinest.Analyzer(n_params = n_params, outputfiles_basename = output_basename) s = a.get_stats() with open('%sstats.json' % a.outputfiles_basename, mode='w') as f: json.dump(s, f, indent=2) chain = a.get_equal_weighted_posterior()[:,:-1] lower = [m['1sigma'][0] for m in s['marginals']] upper = [m['1sigma'][1] for m in s['marginals']] stdev = (numpy.array(upper) - numpy.array(lower)) / 2 center = [m['median'] for m in s['marginals']] #final = a.get_best_fit()['parameters'] # is already transformed data = numpy.loadtxt('%slive.points' % output_basename) i = data[:,-1].argmax() final = data[i,:-1] # untransformed return dict(start=final, chain=chain, stdev=stdev, upper=upper, lower=lower, method='MultiNest')
def multinest(parameter_names, transform, loglikelihood, output_basename, **problem): """ **MultiNest Nested Sampling** via `PyMultiNest <http://johannesbuchner.github.com/PyMultiNest/index.html>`_. :param parameter_names: name of parameters; not directly used here, but for multinest_marginal.py plotting tool. """ import numpy from numpy import log, exp import pymultinest # n observations # number of dimensions our problem has parameters = parameter_names n_params = len(parameters) def myprior(cube, ndim, nparams): params = transform([cube[i] for i in range(ndim)]) for i in range(ndim): cube[i] = params[i] def myloglike(cube, ndim, nparams): l = loglikelihood([cube[i] for i in range(ndim)]) return l # run MultiNest mn_args = dict( importance_nested_sampling = False, outputfiles_basename = output_basename, resume = problem.get('resume', False), verbose = True, n_live_points = problem.get('n_live_points', 400), const_efficiency_mode = False) if 'seed' in problem: mn_args['seed'] = problem['seed'] pymultinest.run(myloglike, myprior, n_params, **mn_args) import json # store name of parameters, always useful with file('%sparams.json' % output_basename, 'w') as f: json.dump(parameters, f, indent=2) # analyse a = pymultinest.Analyzer(n_params = n_params, outputfiles_basename = output_basename) s = a.get_stats() with open('%sstats.json' % a.outputfiles_basename, mode='w') as f: json.dump(s, f, indent=2) chain = a.get_equal_weighted_posterior()[:,:-1] lower = [m['1sigma'][0] for m in s['marginals']] upper = [m['1sigma'][1] for m in s['marginals']] stdev = (numpy.array(upper) - numpy.array(lower)) / 2 center = [m['median'] for m in s['marginals']] #final = a.get_best_fit()['parameters'] # is already transformed data = numpy.loadtxt('%slive.points' % output_basename) i = data[:,-1].argmax() final = data[i,:-1] # untransformed return dict(start=final, chain=chain, stdev=stdev, upper=upper, lower=lower, method='MultiNest')
[ "**", "MultiNest", "Nested", "Sampling", "**", "via", "PyMultiNest", "<http", ":", "//", "johannesbuchner", ".", "github", ".", "com", "/", "PyMultiNest", "/", "index", ".", "html", ">", "_", ".", ":", "param", "parameter_names", ":", "name", "of", "parameters", ";", "not", "directly", "used", "here", "but", "for", "multinest_marginal", ".", "py", "plotting", "tool", "." ]
JohannesBuchner/jbopt
python
https://github.com/JohannesBuchner/jbopt/blob/11b721ea001625ad7820f71ff684723c71216646/jbopt/mn.py#L5-L69
[ "def", "multinest", "(", "parameter_names", ",", "transform", ",", "loglikelihood", ",", "output_basename", ",", "*", "*", "problem", ")", ":", "import", "numpy", "from", "numpy", "import", "log", ",", "exp", "import", "pymultinest", "# n observations", "# number of dimensions our problem has", "parameters", "=", "parameter_names", "n_params", "=", "len", "(", "parameters", ")", "def", "myprior", "(", "cube", ",", "ndim", ",", "nparams", ")", ":", "params", "=", "transform", "(", "[", "cube", "[", "i", "]", "for", "i", "in", "range", "(", "ndim", ")", "]", ")", "for", "i", "in", "range", "(", "ndim", ")", ":", "cube", "[", "i", "]", "=", "params", "[", "i", "]", "def", "myloglike", "(", "cube", ",", "ndim", ",", "nparams", ")", ":", "l", "=", "loglikelihood", "(", "[", "cube", "[", "i", "]", "for", "i", "in", "range", "(", "ndim", ")", "]", ")", "return", "l", "# run MultiNest", "mn_args", "=", "dict", "(", "importance_nested_sampling", "=", "False", ",", "outputfiles_basename", "=", "output_basename", ",", "resume", "=", "problem", ".", "get", "(", "'resume'", ",", "False", ")", ",", "verbose", "=", "True", ",", "n_live_points", "=", "problem", ".", "get", "(", "'n_live_points'", ",", "400", ")", ",", "const_efficiency_mode", "=", "False", ")", "if", "'seed'", "in", "problem", ":", "mn_args", "[", "'seed'", "]", "=", "problem", "[", "'seed'", "]", "pymultinest", ".", "run", "(", "myloglike", ",", "myprior", ",", "n_params", ",", "*", "*", "mn_args", ")", "import", "json", "# store name of parameters, always useful", "with", "file", "(", "'%sparams.json'", "%", "output_basename", ",", "'w'", ")", "as", "f", ":", "json", ".", "dump", "(", "parameters", ",", "f", ",", "indent", "=", "2", ")", "# analyse", "a", "=", "pymultinest", ".", "Analyzer", "(", "n_params", "=", "n_params", ",", "outputfiles_basename", "=", "output_basename", ")", "s", "=", "a", ".", "get_stats", "(", ")", "with", "open", "(", "'%sstats.json'", "%", "a", ".", "outputfiles_basename", ",", "mode", "=", "'w'", ")", "as", "f", ":", "json", ".", "dump", "(", "s", ",", "f", ",", "indent", "=", "2", ")", "chain", "=", "a", ".", "get_equal_weighted_posterior", "(", ")", "[", ":", ",", ":", "-", "1", "]", "lower", "=", "[", "m", "[", "'1sigma'", "]", "[", "0", "]", "for", "m", "in", "s", "[", "'marginals'", "]", "]", "upper", "=", "[", "m", "[", "'1sigma'", "]", "[", "1", "]", "for", "m", "in", "s", "[", "'marginals'", "]", "]", "stdev", "=", "(", "numpy", ".", "array", "(", "upper", ")", "-", "numpy", ".", "array", "(", "lower", ")", ")", "/", "2", "center", "=", "[", "m", "[", "'median'", "]", "for", "m", "in", "s", "[", "'marginals'", "]", "]", "#final = a.get_best_fit()['parameters'] # is already transformed", "data", "=", "numpy", ".", "loadtxt", "(", "'%slive.points'", "%", "output_basename", ")", "i", "=", "data", "[", ":", ",", "-", "1", "]", ".", "argmax", "(", ")", "final", "=", "data", "[", "i", ",", ":", "-", "1", "]", "# untransformed", "return", "dict", "(", "start", "=", "final", ",", "chain", "=", "chain", ",", "stdev", "=", "stdev", ",", "upper", "=", "upper", ",", "lower", "=", "lower", ",", "method", "=", "'MultiNest'", ")" ]
11b721ea001625ad7820f71ff684723c71216646
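The wrapper needs a prior transform from the unit hypercube plus a log-likelihood. Below is a sketch of the two callables for a toy Gaussian fit; the data, the prior ranges, and the jbopt import path are assumptions, and PyMultiNest must be installed before the commented-out call can run. Note also that the record's own code writes params.json via the Python 2 `file()` builtin, so it would need `open()` under Python 3.

import numpy

data = numpy.random.normal(2.0, 0.5, size=200)   # toy observations
parameter_names = ['mu', 'sigma']

def transform(cube):
    # Map the unit hypercube onto physical ranges: mu in [-10, 10], sigma in (0, 5].
    mu = -10.0 + 20.0 * cube[0]
    sigma = 5.0 * cube[1] + 1e-6
    return [mu, sigma]

def loglikelihood(params):
    mu, sigma = params
    return float(-0.5 * numpy.sum(((data - mu) / sigma) ** 2)
                 - len(data) * numpy.log(sigma))

# from jbopt.mn import multinest   # module path taken from the record
# result = multinest(parameter_names, transform, loglikelihood,
#                    output_basename='chains/gauss_', n_live_points=400)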
valid
specifier_to_db
Return the database string for a database specifier. The database specifier takes a custom format for specifying local and remote databases. A local database is specified by the following format: local:<db_name> For example, a database called 'sessions' would be specified by the string ``'local:sessions'``. Remote databases are specified like this: remote:<host>:<port_num>:<db_name> For example, a database called 'log' on the server 'dev.example.com' at port number 5984 would be specified by ``'remote:dev.example.com:5984:log'``. These specifiers are translated into strings acceptable to CouchDB; local specs are turned into the database name alone, and remote specs are turned into ``'http://host:port/db_name'`` URLs.
relax/couchdb/__init__.py
def specifier_to_db(db_spec): """ Return the database string for a database specifier. The database specifier takes a custom format for specifying local and remote databases. A local database is specified by the following format: local:<db_name> For example, a database called 'sessions' would be specified by the string ``'local:sessions'``. Remote databases are specified like this: remote:<host>:<port_num>:<db_name> For example, a database called 'log' on the server 'dev.example.com' at port number 5984 would be specified by ``'remote:dev.example.com:5984:log'``. These specifiers are translated into strings acceptable to CouchDB; local specs are turned into the database name alone, and remote specs are turned into ``'http://host:port/db_name'`` URLs. """ local_match = LOCAL_RE.match(db_spec) remote_match = REMOTE_RE.match(db_spec) plain_match = PLAIN_RE.match(db_spec) # If this looks like a local specifier: if local_match: return local_match.groupdict()['database'] # If this looks like a remote specifier: elif remote_match: # A fancy 'unpacking'... hostname, portnum, database = map(remote_match.groupdict().get, ('hostname', 'portnum', 'database')) local_url = settings._('COUCHDB_SERVER', 'http://127.0.0.1:5984/') localhost, localport = urlparse.urlparse(local_url)[1].split(':') # If it's local, return a local DB string. if (localhost == hostname) and (localport == portnum): return database # Otherwise, get a remote URL. return 'http://%s:%s/%s' % (hostname, portnum, database) # If this looks like a plain database name, return it. elif plain_match: return plain_match.groupdict()['database'] # Throw a wobbly. raise ValueError('Invalid database spec: %r' % (db_spec,))
def specifier_to_db(db_spec): """ Return the database string for a database specifier. The database specifier takes a custom format for specifying local and remote databases. A local database is specified by the following format: local:<db_name> For example, a database called 'sessions' would be specified by the string ``'local:sessions'``. Remote databases are specified like this: remote:<host>:<port_num>:<db_name> For example, a database called 'log' on the server 'dev.example.com' at port number 5984 would be specified by ``'remote:dev.example.com:5984:log'``. These specifiers are translated into strings acceptable to CouchDB; local specs are turned into the database name alone, and remote specs are turned into ``'http://host:port/db_name'`` URLs. """ local_match = LOCAL_RE.match(db_spec) remote_match = REMOTE_RE.match(db_spec) plain_match = PLAIN_RE.match(db_spec) # If this looks like a local specifier: if local_match: return local_match.groupdict()['database'] # If this looks like a remote specifier: elif remote_match: # A fancy 'unpacking'... hostname, portnum, database = map(remote_match.groupdict().get, ('hostname', 'portnum', 'database')) local_url = settings._('COUCHDB_SERVER', 'http://127.0.0.1:5984/') localhost, localport = urlparse.urlparse(local_url)[1].split(':') # If it's local, return a local DB string. if (localhost == hostname) and (localport == portnum): return database # Otherwise, get a remote URL. return 'http://%s:%s/%s' % (hostname, portnum, database) # If this looks like a plain database name, return it. elif plain_match: return plain_match.groupdict()['database'] # Throw a wobbly. raise ValueError('Invalid database spec: %r' % (db_spec,))
[ "Return", "the", "database", "string", "for", "a", "database", "specifier", ".", "The", "database", "specifier", "takes", "a", "custom", "format", "for", "specifying", "local", "and", "remote", "databases", ".", "A", "local", "database", "is", "specified", "by", "the", "following", "format", ":", "local", ":", "<db_name", ">", "For", "example", "a", "database", "called", "sessions", "would", "be", "specified", "by", "the", "string", "local", ":", "sessions", ".", "Remote", "databases", "are", "specified", "like", "this", ":", "remote", ":", "<host", ">", ":", "<port_num", ">", ":", "<db_name", ">", "For", "example", "a", "database", "called", "log", "on", "the", "server", "dev", ".", "example", ".", "com", "at", "port", "number", "5984", "would", "be", "specified", "by", "remote", ":", "dev", ".", "example", ".", "com", ":", "5984", ":", "log", ".", "These", "specifiers", "are", "translated", "into", "strings", "acceptable", "to", "CouchDB", ";", "local", "specs", "are", "turned", "into", "the", "database", "name", "alone", "and", "remote", "specs", "are", "turned", "into", "http", ":", "//", "host", ":", "port", "/", "db_name", "URLs", "." ]
zvoase/django-relax
python
https://github.com/zvoase/django-relax/blob/10bb37bf3a512b290816856a6877c17fa37e930f/relax/couchdb/__init__.py#L25-L68
[ "def", "specifier_to_db", "(", "db_spec", ")", ":", "local_match", "=", "LOCAL_RE", ".", "match", "(", "db_spec", ")", "remote_match", "=", "REMOTE_RE", ".", "match", "(", "db_spec", ")", "plain_match", "=", "PLAIN_RE", ".", "match", "(", "db_spec", ")", "# If this looks like a local specifier:", "if", "local_match", ":", "return", "local_match", ".", "groupdict", "(", ")", "[", "'database'", "]", "# If this looks like a remote specifier:", "elif", "remote_match", ":", "# A fancy 'unpacking'...", "hostname", ",", "portnum", ",", "database", "=", "map", "(", "remote_match", ".", "groupdict", "(", ")", ".", "get", ",", "(", "'hostname'", ",", "'portnum'", ",", "'database'", ")", ")", "local_url", "=", "settings", ".", "_", "(", "'COUCHDB_SERVER'", ",", "'http://127.0.0.1:5984/'", ")", "localhost", ",", "localport", "=", "urlparse", ".", "urlparse", "(", "local_url", ")", "[", "1", "]", ".", "split", "(", "':'", ")", "# If it's local, return a local DB string.", "if", "(", "localhost", "==", "hostname", ")", "and", "(", "localport", "==", "portnum", ")", ":", "return", "database", "# Otherwise, get a remote URL.", "return", "'http://%s:%s/%s'", "%", "(", "hostname", ",", "portnum", ",", "database", ")", "# If this looks like a plain database name, return it.", "elif", "plain_match", ":", "return", "plain_match", ".", "groupdict", "(", ")", "[", "'database'", "]", "# Throw a wobbly.", "raise", "ValueError", "(", "'Invalid database spec: %r'", "%", "(", "db_spec", ",", ")", ")" ]
10bb37bf3a512b290816856a6877c17fa37e930f
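The LOCAL_RE/REMOTE_RE/PLAIN_RE patterns live elsewhere in relax.couchdb, so the record is not self-contained. A rough standalone reading of the two specifier formats described in the docstring; the regular expressions here are guesses, not the module's own:

import re

LOCAL_RE = re.compile(r'^local:(?P<database>[^:]+)$')
REMOTE_RE = re.compile(r'^remote:(?P<hostname>[^:]+):(?P<portnum>\d+):(?P<database>[^:]+)$')

def specifier_to_db(db_spec):
    """Translate 'local:name' / 'remote:host:port:name' into a CouchDB target."""
    local = LOCAL_RE.match(db_spec)
    if local:
        return local.group('database')
    remote = REMOTE_RE.match(db_spec)
    if remote:
        host, port, database = remote.group('hostname', 'portnum', 'database')
        return 'http://%s:%s/%s' % (host, port, database)
    raise ValueError('Invalid database spec: %r' % (db_spec,))

assert specifier_to_db('local:sessions') == 'sessions'
assert specifier_to_db('remote:dev.example.com:5984:log') == 'http://dev.example.com:5984/log'

The real function also accepts bare database names and collapses remote specs that point at the configured local server into plain names.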
valid
db_to_specifier
Return the database specifier for a database string. This accepts a database name or URL, and returns a database specifier in the format accepted by ``specifier_to_db``. It is recommended that you consult the documentation for that function for an explanation of the format.
relax/couchdb/__init__.py
def db_to_specifier(db_string): """ Return the database specifier for a database string. This accepts a database name or URL, and returns a database specifier in the format accepted by ``specifier_to_db``. It is recommended that you consult the documentation for that function for an explanation of the format. """ local_match = PLAIN_RE.match(db_string) remote_match = URL_RE.match(db_string) # If this looks like a local specifier: if local_match: return 'local:' + local_match.groupdict()['database'] # If this looks like a remote specifier: elif remote_match: # Just a fancy way of getting 3 variables in 2 lines... hostname, portnum, database = map(remote_match.groupdict().get, ('hostname', 'portnum', 'database')) local_url = settings._('COUCHDB_SERVER', 'http://127.0.0.1:5984/') localhost, localport = urlparse.urlparse(local_url)[1].split(':') # If it's the local server, then return a local specifier. if (localhost == hostname) and (localport == portnum): return 'local:' + database # Otherwise, prepare and return the remote specifier. return 'remote:%s:%s:%s' % (hostname, portnum, database) # Throw a wobbly. raise ValueError('Invalid database string: %r' % (db_string,))
def db_to_specifier(db_string): """ Return the database specifier for a database string. This accepts a database name or URL, and returns a database specifier in the format accepted by ``specifier_to_db``. It is recommended that you consult the documentation for that function for an explanation of the format. """ local_match = PLAIN_RE.match(db_string) remote_match = URL_RE.match(db_string) # If this looks like a local specifier: if local_match: return 'local:' + local_match.groupdict()['database'] # If this looks like a remote specifier: elif remote_match: # Just a fancy way of getting 3 variables in 2 lines... hostname, portnum, database = map(remote_match.groupdict().get, ('hostname', 'portnum', 'database')) local_url = settings._('COUCHDB_SERVER', 'http://127.0.0.1:5984/') localhost, localport = urlparse.urlparse(local_url)[1].split(':') # If it's the local server, then return a local specifier. if (localhost == hostname) and (localport == portnum): return 'local:' + database # Otherwise, prepare and return the remote specifier. return 'remote:%s:%s:%s' % (hostname, portnum, database) # Throw a wobbly. raise ValueError('Invalid database string: %r' % (db_string,))
[ "Return", "the", "database", "specifier", "for", "a", "database", "string", ".", "This", "accepts", "a", "database", "name", "or", "URL", "and", "returns", "a", "database", "specifier", "in", "the", "format", "accepted", "by", "specifier_to_db", ".", "It", "is", "recommended", "that", "you", "consult", "the", "documentation", "for", "that", "function", "for", "an", "explanation", "of", "the", "format", "." ]
zvoase/django-relax
python
https://github.com/zvoase/django-relax/blob/10bb37bf3a512b290816856a6877c17fa37e930f/relax/couchdb/__init__.py#L71-L97
[ "def", "db_to_specifier", "(", "db_string", ")", ":", "local_match", "=", "PLAIN_RE", ".", "match", "(", "db_string", ")", "remote_match", "=", "URL_RE", ".", "match", "(", "db_string", ")", "# If this looks like a local specifier:", "if", "local_match", ":", "return", "'local:'", "+", "local_match", ".", "groupdict", "(", ")", "[", "'database'", "]", "# If this looks like a remote specifier:", "elif", "remote_match", ":", "# Just a fancy way of getting 3 variables in 2 lines...", "hostname", ",", "portnum", ",", "database", "=", "map", "(", "remote_match", ".", "groupdict", "(", ")", ".", "get", ",", "(", "'hostname'", ",", "'portnum'", ",", "'database'", ")", ")", "local_url", "=", "settings", ".", "_", "(", "'COUCHDB_SERVER'", ",", "'http://127.0.0.1:5984/'", ")", "localhost", ",", "localport", "=", "urlparse", ".", "urlparse", "(", "local_url", ")", "[", "1", "]", ".", "split", "(", "':'", ")", "# If it's the local server, then return a local specifier.", "if", "(", "localhost", "==", "hostname", ")", "and", "(", "localport", "==", "portnum", ")", ":", "return", "'local:'", "+", "database", "# Otherwise, prepare and return the remote specifier.", "return", "'remote:%s:%s:%s'", "%", "(", "hostname", ",", "portnum", ",", "database", ")", "# Throw a wobbly.", "raise", "ValueError", "(", "'Invalid database string: %r'", "%", "(", "db_string", ",", ")", ")" ]
10bb37bf3a512b290816856a6877c17fa37e930f
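Going the other way is symmetric; a matching sketch (again with guessed patterns) that round-trips with the helper sketched above. The real function additionally collapses URLs pointing at the configured COUCHDB_SERVER into 'local:' specifiers.

import re

PLAIN_RE = re.compile(r'^(?P<database>[^:/]+)$')
URL_RE = re.compile(r'^http://(?P<hostname>[^:/]+):(?P<portnum>\d+)/(?P<database>[^/]+)/?$')

def db_to_specifier(db_string):
    """Translate a database name or URL back into a database specifier."""
    if PLAIN_RE.match(db_string):
        return 'local:' + db_string
    remote = URL_RE.match(db_string)
    if remote:
        return 'remote:%(hostname)s:%(portnum)s:%(database)s' % remote.groupdict()
    raise ValueError('Invalid database string: %r' % (db_string,))

assert db_to_specifier('sessions') == 'local:sessions'
assert db_to_specifier('http://dev.example.com:5984/log') == 'remote:dev.example.com:5984:log'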
valid
get_db_from_db
Return a CouchDB database instance from a database string.
relax/couchdb/__init__.py
def get_db_from_db(db_string): """Return a CouchDB database instance from a database string.""" server = get_server_from_db(db_string) local_match = PLAIN_RE.match(db_string) remote_match = URL_RE.match(db_string) # If this looks like a local specifier: if local_match: return server[local_match.groupdict()['database']] elif remote_match: return server[remote_match.groupdict()['database']] raise ValueError('Invalid database string: %r' % (db_string,))
def get_db_from_db(db_string): """Return a CouchDB database instance from a database string.""" server = get_server_from_db(db_string) local_match = PLAIN_RE.match(db_string) remote_match = URL_RE.match(db_string) # If this looks like a local specifier: if local_match: return server[local_match.groupdict()['database']] elif remote_match: return server[remote_match.groupdict()['database']] raise ValueError('Invalid database string: %r' % (db_string,))
[ "Return", "a", "CouchDB", "database", "instance", "from", "a", "database", "string", "." ]
zvoase/django-relax
python
https://github.com/zvoase/django-relax/blob/10bb37bf3a512b290816856a6877c17fa37e930f/relax/couchdb/__init__.py#L122-L132
[ "def", "get_db_from_db", "(", "db_string", ")", ":", "server", "=", "get_server_from_db", "(", "db_string", ")", "local_match", "=", "PLAIN_RE", ".", "match", "(", "db_string", ")", "remote_match", "=", "URL_RE", ".", "match", "(", "db_string", ")", "# If this looks like a local specifier:", "if", "local_match", ":", "return", "server", "[", "local_match", ".", "groupdict", "(", ")", "[", "'database'", "]", "]", "elif", "remote_match", ":", "return", "server", "[", "remote_match", ".", "groupdict", "(", ")", "[", "'database'", "]", "]", "raise", "ValueError", "(", "'Invalid database string: %r'", "%", "(", "db_string", ",", ")", ")" ]
10bb37bf3a512b290816856a6877c17fa37e930f
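get_server_from_db comes from the same module; assuming it wraps the couchdb-python client (which the surrounding code suggests), the return value is an ordinary couchdb.Database handle. A hedged sketch of the equivalent direct calls (server URL and database name are examples, and a CouchDB instance must be reachable):

import couchdb   # couchdb-python client, assumed installed

server = couchdb.Server('http://dev.example.com:5984/')
db = server['log']   # roughly what get_db_from_db('http://dev.example.com:5984/log') returns
doc_id, doc_rev = db.save({'event': 'login'})   # the handle behaves like any couchdb.Database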
valid
ensure_specifier_exists
Make sure a DB specifier exists, creating it if necessary.
relax/couchdb/__init__.py
def ensure_specifier_exists(db_spec): """Make sure a DB specifier exists, creating it if necessary.""" local_match = LOCAL_RE.match(db_spec) remote_match = REMOTE_RE.match(db_spec) plain_match = PLAIN_RE.match(db_spec) if local_match: db_name = local_match.groupdict().get('database') server = shortcuts.get_server() if db_name not in server: server.create(db_name) return True elif remote_match: hostname, portnum, database = map(remote_match.groupdict().get, ('hostname', 'portnum', 'database')) server = shortcuts.get_server( server_url=('http://%s:%s' % (hostname, portnum))) if database not in server: server.create(database) return True elif plain_match: db_name = plain_match.groupdict().get('database') server = shortcuts.get_server() if db_name not in server: server.create(db_name) return True return False
def ensure_specifier_exists(db_spec): """Make sure a DB specifier exists, creating it if necessary.""" local_match = LOCAL_RE.match(db_spec) remote_match = REMOTE_RE.match(db_spec) plain_match = PLAIN_RE.match(db_spec) if local_match: db_name = local_match.groupdict().get('database') server = shortcuts.get_server() if db_name not in server: server.create(db_name) return True elif remote_match: hostname, portnum, database = map(remote_match.groupdict().get, ('hostname', 'portnum', 'database')) server = shortcuts.get_server( server_url=('http://%s:%s' % (hostname, portnum))) if database not in server: server.create(database) return True elif plain_match: db_name = plain_match.groupdict().get('database') server = shortcuts.get_server() if db_name not in server: server.create(db_name) return True return False
[ "Make", "sure", "a", "DB", "specifier", "exists", "creating", "it", "if", "necessary", "." ]
zvoase/django-relax
python
https://github.com/zvoase/django-relax/blob/10bb37bf3a512b290816856a6877c17fa37e930f/relax/couchdb/__init__.py#L138-L163
[ "def", "ensure_specifier_exists", "(", "db_spec", ")", ":", "local_match", "=", "LOCAL_RE", ".", "match", "(", "db_spec", ")", "remote_match", "=", "REMOTE_RE", ".", "match", "(", "db_spec", ")", "plain_match", "=", "PLAIN_RE", ".", "match", "(", "db_spec", ")", "if", "local_match", ":", "db_name", "=", "local_match", ".", "groupdict", "(", ")", ".", "get", "(", "'database'", ")", "server", "=", "shortcuts", ".", "get_server", "(", ")", "if", "db_name", "not", "in", "server", ":", "server", ".", "create", "(", "db_name", ")", "return", "True", "elif", "remote_match", ":", "hostname", ",", "portnum", ",", "database", "=", "map", "(", "remote_match", ".", "groupdict", "(", ")", ".", "get", ",", "(", "'hostname'", ",", "'portnum'", ",", "'database'", ")", ")", "server", "=", "shortcuts", ".", "get_server", "(", "server_url", "=", "(", "'http://%s:%s'", "%", "(", "hostname", ",", "portnum", ")", ")", ")", "if", "database", "not", "in", "server", ":", "server", ".", "create", "(", "database", ")", "return", "True", "elif", "plain_match", ":", "db_name", "=", "plain_match", ".", "groupdict", "(", ")", ".", "get", "(", "'database'", ")", "server", "=", "shortcuts", ".", "get_server", "(", ")", "if", "db_name", "not", "in", "server", ":", "server", ".", "create", "(", "db_name", ")", "return", "True", "return", "False" ]
10bb37bf3a512b290816856a6877c17fa37e930f
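A short usage sketch; the import path is taken from the record, and both a running CouchDB server and the django-relax settings are assumed:

from relax.couchdb import ensure_specifier_exists   # path taken from the record

for spec in ('local:sessions', 'remote:dev.example.com:5984:log'):
    handled = ensure_specifier_exists(spec)
    print(spec, '->', handled)   # True for any recognized spec once the database exists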
valid
coerce
Exclude NoSet objects .. code-block:: >>> coerce(NoSet, 'value') 'value'
src/custom_settings/utils.py
def coerce(value1, value2, default=None): """Exclude NoSet objec .. code-block:: >>> coerce(NoSet, 'value') 'value' """ if value1 is not NoSet: return value1 elif value2 is not NoSet: return value2 else: return default
def coerce(value1, value2, default=None): """Exclude NoSet objec .. code-block:: >>> coerce(NoSet, 'value') 'value' """ if value1 is not NoSet: return value1 elif value2 is not NoSet: return value2 else: return default
[ "Exclude", "NoSet", "objec" ]
TakesxiSximada/custom_settings
python
https://github.com/TakesxiSximada/custom_settings/blob/0e478ea2b5d7ad46eb1ece705b649e5651cd20ad/src/custom_settings/utils.py#L8-L22
[ "def", "coerce", "(", "value1", ",", "value2", ",", "default", "=", "None", ")", ":", "if", "value1", "is", "not", "NoSet", ":", "return", "value1", "elif", "value2", "is", "not", "NoSet", ":", "return", "value2", "else", ":", "return", "default" ]
0e478ea2b5d7ad46eb1ece705b649e5651cd20ad
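The point of the NoSet sentinel is that an explicit None still wins over the fallback. A compact demonstration; the sentinel below is a stand-in for custom_settings' own NoSet object:

class _NoSetType(object):
    """Stand-in for the NoSet sentinel used by custom_settings."""

NoSet = _NoSetType()

def coerce(value1, value2, default=None):
    if value1 is not NoSet:
        return value1
    if value2 is not NoSet:
        return value2
    return default

assert coerce(NoSet, 'value') == 'value'      # the docstring's own example
assert coerce(None, 'value') is None          # explicit None is preserved
assert coerce(NoSet, NoSet, default=3) == 3   # nothing set -> default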
valid
DposNode.get_events_vote_cluster
Returns all transactions and forged blocks by voters clustered around a single delegate_address
dpostools/dbtools.py
def get_events_vote_cluster(self, delegate_address): ''' Returns all transactions and forged blocks by voters clustered around a single delegate_address''' delegate_pubkey = self.account_details(address=delegate_address)['public_key'] plusvote = '+{delegate_pubkey}'.format(delegate_pubkey=delegate_pubkey) resultset = self._cursor.execute_and_fetchall(""" SELECT * FROM ( SELECT trs."{transactions[id]}" AS a, 'transaction' AS b, trs."{transactions[amount]}" AS c, trs."{transactions[timestamp]}" AS d, trs."{transactions[recipient_id]}" AS e, trs."{transactions[sender_id]}" AS f, trs."{transactions[rawasset]}" AS g, trs."{transactions[type]}" AS h, trs."{transactions[fee]}" AS i, trs."{transactions[block_id]}" AS j, blocks."{blocks[height]}" AS k FROM {transactions[table]} AS trs INNER JOIN {blocks[table]} AS blocks ON (blocks."{blocks[id]}" = trs."{transactions[block_id]}") WHERE trs."{transactions[sender_id]}" IN (SELECT trs."{transactions[sender_id]}" FROM {transactions[table]} AS trs, {votes[table]} AS votes WHERE trs."{transactions[id]}" = votes."{votes[transaction_id]}" AND votes."{votes[votes]}" = '{plusvote}') OR trs."{transactions[recipient_id]}" IN (SELECT trs."{transactions[sender_id]}" FROM {transactions[table]} AS trs, {votes[table]} AS votes WHERE trs."{transactions[id]}" = votes."{votes[transaction_id]}" AND votes."{votes[votes]}" = '{plusvote}') UNION SELECT blocks."{blocks[id]}" AS a, 'block' AS b, blocks."{blocks[reward]}"as c, blocks."{blocks[total_fee]}" AS d, ENCODE(mem."{mem_accounts[public_key]}"::BYTEA, 'hex') AS e, mem."{mem_accounts[address]}" AS f, mem."{mem_accounts[username]}" AS g, NULL AS h, blocks."{blocks[timestamp]}" AS i, NULL AS j, blocks."{blocks[height]}" AS k FROM blocks INNER JOIN {mem_accounts[table]} AS mem ON (mem."{mem_accounts[public_key]}" = blocks."{blocks[generator_public_key]}") WHERE blocks."{blocks[generator_public_key]}" IN ( SELECT mem2."{mem_accounts[public_key]}" FROM {mem_accounts[table]} mem2 WHERE mem2."{mem_accounts[address]}" IN (SELECT trs."{transactions[sender_id]}" FROM {transactions[table]} AS trs, {votes[table]} AS votes WHERE trs."{transactions[id]}" = votes."{votes[transaction_id]}" AND votes."{votes[votes]}" = '{plusvote}') )) total ORDER BY total.k ASC;""".format( address=delegate_address, transactions=self.scheme['transactions'], blocks=self.scheme['blocks'], mem_accounts=self.scheme['mem_accounts'], mem_accounts2delegates=self.scheme['mem_accounts2delegates'], votes=self.scheme['votes'], plusvote=plusvote)) res = {} for i in resultset: if i[1] == 'transaction': res.update({i[0]: { 'tx_id': i[0], 'event_type': i[1], 'amount': i[2], 'timestamp': i[3], 'recipient_id': i[4], 'sender_id': i[5], 'rawasset': i[6], 'type': i[7], 'fee': i[8], 'block_id': i[9], 'height': i[10] }}) elif i[1] == 'block': res.update({i[0]: { 'block_id': i[0], 'event_type': i[1], 'reward': i[2], 'total_fee': i[3], 'timestamp': i[8], 'address': i[5], 'username': i[6], 'public_key': i[4], 'height': i[10] }}) return res
def get_events_vote_cluster(self, delegate_address): ''' Returns all transactions and forged blocks by voters clustered around a single delegate_address''' delegate_pubkey = self.account_details(address=delegate_address)['public_key'] plusvote = '+{delegate_pubkey}'.format(delegate_pubkey=delegate_pubkey) resultset = self._cursor.execute_and_fetchall(""" SELECT * FROM ( SELECT trs."{transactions[id]}" AS a, 'transaction' AS b, trs."{transactions[amount]}" AS c, trs."{transactions[timestamp]}" AS d, trs."{transactions[recipient_id]}" AS e, trs."{transactions[sender_id]}" AS f, trs."{transactions[rawasset]}" AS g, trs."{transactions[type]}" AS h, trs."{transactions[fee]}" AS i, trs."{transactions[block_id]}" AS j, blocks."{blocks[height]}" AS k FROM {transactions[table]} AS trs INNER JOIN {blocks[table]} AS blocks ON (blocks."{blocks[id]}" = trs."{transactions[block_id]}") WHERE trs."{transactions[sender_id]}" IN (SELECT trs."{transactions[sender_id]}" FROM {transactions[table]} AS trs, {votes[table]} AS votes WHERE trs."{transactions[id]}" = votes."{votes[transaction_id]}" AND votes."{votes[votes]}" = '{plusvote}') OR trs."{transactions[recipient_id]}" IN (SELECT trs."{transactions[sender_id]}" FROM {transactions[table]} AS trs, {votes[table]} AS votes WHERE trs."{transactions[id]}" = votes."{votes[transaction_id]}" AND votes."{votes[votes]}" = '{plusvote}') UNION SELECT blocks."{blocks[id]}" AS a, 'block' AS b, blocks."{blocks[reward]}"as c, blocks."{blocks[total_fee]}" AS d, ENCODE(mem."{mem_accounts[public_key]}"::BYTEA, 'hex') AS e, mem."{mem_accounts[address]}" AS f, mem."{mem_accounts[username]}" AS g, NULL AS h, blocks."{blocks[timestamp]}" AS i, NULL AS j, blocks."{blocks[height]}" AS k FROM blocks INNER JOIN {mem_accounts[table]} AS mem ON (mem."{mem_accounts[public_key]}" = blocks."{blocks[generator_public_key]}") WHERE blocks."{blocks[generator_public_key]}" IN ( SELECT mem2."{mem_accounts[public_key]}" FROM {mem_accounts[table]} mem2 WHERE mem2."{mem_accounts[address]}" IN (SELECT trs."{transactions[sender_id]}" FROM {transactions[table]} AS trs, {votes[table]} AS votes WHERE trs."{transactions[id]}" = votes."{votes[transaction_id]}" AND votes."{votes[votes]}" = '{plusvote}') )) total ORDER BY total.k ASC;""".format( address=delegate_address, transactions=self.scheme['transactions'], blocks=self.scheme['blocks'], mem_accounts=self.scheme['mem_accounts'], mem_accounts2delegates=self.scheme['mem_accounts2delegates'], votes=self.scheme['votes'], plusvote=plusvote)) res = {} for i in resultset: if i[1] == 'transaction': res.update({i[0]: { 'tx_id': i[0], 'event_type': i[1], 'amount': i[2], 'timestamp': i[3], 'recipient_id': i[4], 'sender_id': i[5], 'rawasset': i[6], 'type': i[7], 'fee': i[8], 'block_id': i[9], 'height': i[10] }}) elif i[1] == 'block': res.update({i[0]: { 'block_id': i[0], 'event_type': i[1], 'reward': i[2], 'total_fee': i[3], 'timestamp': i[8], 'address': i[5], 'username': i[6], 'public_key': i[4], 'height': i[10] }}) return res
[ "Returns", "all", "transactions", "and", "forged", "blocks", "by", "voters", "clustered", "around", "a", "single", "delegate_address" ]
BlockHub/blockhubdpostools
python
https://github.com/BlockHub/blockhubdpostools/blob/27712cd97cd3658ee54a4330ff3135b51a01d7d1/dpostools/dbtools.py#L416-L517
[ "def", "get_events_vote_cluster", "(", "self", ",", "delegate_address", ")", ":", "delegate_pubkey", "=", "self", ".", "account_details", "(", "address", "=", "delegate_address", ")", "[", "'public_key'", "]", "plusvote", "=", "'+{delegate_pubkey}'", ".", "format", "(", "delegate_pubkey", "=", "delegate_pubkey", ")", "resultset", "=", "self", ".", "_cursor", ".", "execute_and_fetchall", "(", "\"\"\"\n SELECT *\n FROM (\n SELECT \n trs.\"{transactions[id]}\" AS a,\n 'transaction' AS b, \n trs.\"{transactions[amount]}\" AS c,\n trs.\"{transactions[timestamp]}\" AS d, \n trs.\"{transactions[recipient_id]}\" AS e,\n trs.\"{transactions[sender_id]}\" AS f, \n trs.\"{transactions[rawasset]}\" AS g,\n trs.\"{transactions[type]}\" AS h, \n trs.\"{transactions[fee]}\" AS i, \n trs.\"{transactions[block_id]}\" AS j,\n blocks.\"{blocks[height]}\" AS k\n FROM {transactions[table]} AS trs\n INNER JOIN {blocks[table]} AS blocks\n ON (blocks.\"{blocks[id]}\" = trs.\"{transactions[block_id]}\")\n WHERE trs.\"{transactions[sender_id]}\" IN\n (SELECT trs.\"{transactions[sender_id]}\"\n FROM {transactions[table]} AS trs, {votes[table]} AS votes\n WHERE trs.\"{transactions[id]}\" = votes.\"{votes[transaction_id]}\"\n AND votes.\"{votes[votes]}\" = '{plusvote}') \n OR trs.\"{transactions[recipient_id]}\" IN\n (SELECT trs.\"{transactions[sender_id]}\"\n FROM {transactions[table]} AS trs, {votes[table]} AS votes\n WHERE trs.\"{transactions[id]}\" = votes.\"{votes[transaction_id]}\"\n AND votes.\"{votes[votes]}\" = '{plusvote}') \n UNION\n SELECT \n blocks.\"{blocks[id]}\" AS a, \n 'block' AS b, \n blocks.\"{blocks[reward]}\"as c, \n blocks.\"{blocks[total_fee]}\" AS d,\n ENCODE(mem.\"{mem_accounts[public_key]}\"::BYTEA, 'hex') AS e,\n mem.\"{mem_accounts[address]}\" AS f,\n mem.\"{mem_accounts[username]}\" AS g,\n NULL AS h,\n blocks.\"{blocks[timestamp]}\" AS i,\n NULL AS j,\n blocks.\"{blocks[height]}\" AS k\n FROM blocks\n INNER JOIN {mem_accounts[table]} AS mem\n ON (mem.\"{mem_accounts[public_key]}\" = blocks.\"{blocks[generator_public_key]}\") \n WHERE\n blocks.\"{blocks[generator_public_key]}\" IN (\n SELECT mem2.\"{mem_accounts[public_key]}\"\n FROM {mem_accounts[table]} mem2\n WHERE mem2.\"{mem_accounts[address]}\" IN \n (SELECT trs.\"{transactions[sender_id]}\"\n FROM {transactions[table]} AS trs, {votes[table]} AS votes\n WHERE trs.\"{transactions[id]}\" = votes.\"{votes[transaction_id]}\"\n AND votes.\"{votes[votes]}\" = '{plusvote}') \n )) total\n \n ORDER BY total.k ASC;\"\"\"", ".", "format", "(", "address", "=", "delegate_address", ",", "transactions", "=", "self", ".", "scheme", "[", "'transactions'", "]", ",", "blocks", "=", "self", ".", "scheme", "[", "'blocks'", "]", ",", "mem_accounts", "=", "self", ".", "scheme", "[", "'mem_accounts'", "]", ",", "mem_accounts2delegates", "=", "self", ".", "scheme", "[", "'mem_accounts2delegates'", "]", ",", "votes", "=", "self", ".", "scheme", "[", "'votes'", "]", ",", "plusvote", "=", "plusvote", ")", ")", "res", "=", "{", "}", "for", "i", "in", "resultset", ":", "if", "i", "[", "1", "]", "==", "'transaction'", ":", "res", ".", "update", "(", "{", "i", "[", "0", "]", ":", "{", "'tx_id'", ":", "i", "[", "0", "]", ",", "'event_type'", ":", "i", "[", "1", "]", ",", "'amount'", ":", "i", "[", "2", "]", ",", "'timestamp'", ":", "i", "[", "3", "]", ",", "'recipient_id'", ":", "i", "[", "4", "]", ",", "'sender_id'", ":", "i", "[", "5", "]", ",", "'rawasset'", ":", "i", "[", "6", "]", ",", "'type'", ":", "i", "[", "7", "]", ",", "'fee'", ":", "i", "[", "8", "]", 
",", "'block_id'", ":", "i", "[", "9", "]", ",", "'height'", ":", "i", "[", "10", "]", "}", "}", ")", "elif", "i", "[", "1", "]", "==", "'block'", ":", "res", ".", "update", "(", "{", "i", "[", "0", "]", ":", "{", "'block_id'", ":", "i", "[", "0", "]", ",", "'event_type'", ":", "i", "[", "1", "]", ",", "'reward'", ":", "i", "[", "2", "]", ",", "'total_fee'", ":", "i", "[", "3", "]", ",", "'timestamp'", ":", "i", "[", "8", "]", ",", "'address'", ":", "i", "[", "5", "]", ",", "'username'", ":", "i", "[", "6", "]", ",", "'public_key'", ":", "i", "[", "4", "]", ",", "'height'", ":", "i", "[", "10", "]", "}", "}", ")", "return", "res" ]
27712cd97cd3658ee54a4330ff3135b51a01d7d1
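A hedged sketch of consuming the returned mapping; instantiating DposNode needs database credentials that are not part of this record, and the delegate address below is made up:

# from dpostools.dbtools import DposNode    # module path taken from the record
# node = DposNode(...)                      # connection details depend on the node setup
# events = node.get_events_vote_cluster('D_EXAMPLE_DELEGATE_ADDRESS')

def summarize(events):
    """Split the cluster events into transferred amounts and forged rewards."""
    moved, forged = 0, 0
    for item in events.values():
        if item['event_type'] == 'transaction':
            moved += item['amount']
        else:                                  # 'block' entries carry reward + total_fee
            forged += item['reward'] + item['total_fee']
    return moved, forged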
valid
DposNode.tbw
This function doesn't work yet. Instead use legacy.trueshare() for a functional tbw script
dpostools/dbtools.py
def tbw(self, delegate_address, blacklist=None, share_fees=False, compound_interest=False): """This function doesn't work yet. Instead use legacy.trueshare() for a functional tbw script""" if not blacklist: blacklist = [] delegate_public_key = self.account_details(address=delegate_address)['public_key'] height_at_calculation = self.node_height_details()['height'] # string format of the rawasset minvote = '{{"votes":["-{0}"]}}'.format(delegate_public_key) plusvote = '{{"votes":["+{0}"]}}'.format(delegate_public_key) events = self.get_events_vote_cluster(delegate_address) votes = self.get_historic_voters(delegate_address) blocks = self.get_blocks(delegate_address) # create a map of voters voter_dict = {} for voter in votes: voter_dict.update({voter: { 'balance': 0.0, 'status': False, 'last_payout': votes[voter]['height'], 'share': 0.0, 'vote_height': votes[voter]['height'], 'blocks_forged': []} }) for blacklisted_address in blacklist: voter_dict.pop(blacklisted_address, None) last_payout = self.get_last_out_transactions(delegate_address) # not all voters have had a payout, thus a KeyError is thrown for payout in last_payout: try: voter_dict[payout]['last_payout'] = last_payout[payout]['height'] except KeyError: pass # the change in the previous state of the voter_dict. This is added to the voterdict if # no state change occurs in the blockchain. delta_state = {} no_state_change = False block_keys = sorted(list(blocks.keys())) block_nr = 0 try: for id in events: # calculating poolbalances and updating shares if events[id]['height'] > blocks[block_keys[block_nr]]['height']: # if the state is the same for the votepool, the previous calculation can be reused. block_nr += 1 if no_state_change: for x in delta_state: voter_dict[x]['share'] += delta_state[x] continue # update pool balances poolbalance = 0 delta_state = {} for i in voter_dict: # here we update the poolbalance if compound_interest: balance = voter_dict[i]['balance'] + voter_dict[i]['share'] else: balance = voter_dict[i]['balance'] if voter_dict[i]['status']: # if not voter_dict[i]['balance'] < 0: poolbalance += balance # else: # raise exceptions.NegativeBalanceError('balance lower than zero for: {0}. 
balance: {1}'.format(i, voter_dict[i]['balance'])) # here we calculate the share per voter for i in voter_dict: if compound_interest: balance = voter_dict[i]['balance'] + voter_dict[i]['share'] else: balance = voter_dict[i]['balance'] if voter_dict[i]['status'] and voter_dict[i]['last_payout'] < blocks[block_keys[block_nr]]['height']: if share_fees: share = (balance / poolbalance) * (blocks[block_keys[block_nr]]['reward'] + blocks[block_keys[block_nr]]['totalFee']) else: share = (balance / poolbalance) * blocks[block_keys[block_nr]]['reward'] voter_dict[i]['share'] += share delta_state.update({i: share}) no_state_change = True continue # parsing an event no_state_change = False if events[id]['event_type'] == 'transaction': if events[id]['recipient_id'] == 'Acw2vAVA48TcV8EnoBmZKJdV8bxnW6Y4E9': print(events[id]['amount']) # parsing a transaction if events[id]['event_type'] == 'transaction': if events[id]['recipient_id'] in voter_dict: voter_dict[events[id]['recipient_id']]['balance'] += events[id]['amount'] if events[id]['sender_id'] in voter_dict: voter_dict[events[id]['sender_id']]['balance'] -= (events[id]['amount'] + events[id]['fee']) if events[id]['sender_id'] in voter_dict and events[id]['type'] == 3 and plusvote in events[id]['rawasset']: voter_dict[events[id]['sender_id']]['status'] = True if events[id]['sender_id'] in voter_dict and events[id]['type'] == 3 and minvote in events[id]['rawasset']: voter_dict[events[id]['sender_id']]['status'] = False # parsing a forged block (if forged by a voter) if events[id]['event_type'] == 'block': voter_dict[events[id]['address']]['balance'] += (events[id]['reward'] + events[id]['total_fee']) # the transaction for loop ends with the final transaction. However more blocks may be forged. This copies # the final delta share and adds it to the share x the amount of blocks left. remaining_blocks = len(block_keys) - block_nr - 1 for i in range(remaining_blocks): for x in delta_state: voter_dict[x]['share'] += delta_state[x] # and indexerror indicates that we have ran out of forged blocks, thus the calculation is done (blocks[block_nr] # throw the error) except IndexError: raise return voter_dict, height_at_calculation
def tbw(self, delegate_address, blacklist=None, share_fees=False, compound_interest=False): """This function doesn't work yet. Instead use legacy.trueshare() for a functional tbw script""" if not blacklist: blacklist = [] delegate_public_key = self.account_details(address=delegate_address)['public_key'] height_at_calculation = self.node_height_details()['height'] # string format of the rawasset minvote = '{{"votes":["-{0}"]}}'.format(delegate_public_key) plusvote = '{{"votes":["+{0}"]}}'.format(delegate_public_key) events = self.get_events_vote_cluster(delegate_address) votes = self.get_historic_voters(delegate_address) blocks = self.get_blocks(delegate_address) # create a map of voters voter_dict = {} for voter in votes: voter_dict.update({voter: { 'balance': 0.0, 'status': False, 'last_payout': votes[voter]['height'], 'share': 0.0, 'vote_height': votes[voter]['height'], 'blocks_forged': []} }) for blacklisted_address in blacklist: voter_dict.pop(blacklisted_address, None) last_payout = self.get_last_out_transactions(delegate_address) # not all voters have had a payout, thus a KeyError is thrown for payout in last_payout: try: voter_dict[payout]['last_payout'] = last_payout[payout]['height'] except KeyError: pass # the change in the previous state of the voter_dict. This is added to the voterdict if # no state change occurs in the blockchain. delta_state = {} no_state_change = False block_keys = sorted(list(blocks.keys())) block_nr = 0 try: for id in events: # calculating poolbalances and updating shares if events[id]['height'] > blocks[block_keys[block_nr]]['height']: # if the state is the same for the votepool, the previous calculation can be reused. block_nr += 1 if no_state_change: for x in delta_state: voter_dict[x]['share'] += delta_state[x] continue # update pool balances poolbalance = 0 delta_state = {} for i in voter_dict: # here we update the poolbalance if compound_interest: balance = voter_dict[i]['balance'] + voter_dict[i]['share'] else: balance = voter_dict[i]['balance'] if voter_dict[i]['status']: # if not voter_dict[i]['balance'] < 0: poolbalance += balance # else: # raise exceptions.NegativeBalanceError('balance lower than zero for: {0}. 
balance: {1}'.format(i, voter_dict[i]['balance'])) # here we calculate the share per voter for i in voter_dict: if compound_interest: balance = voter_dict[i]['balance'] + voter_dict[i]['share'] else: balance = voter_dict[i]['balance'] if voter_dict[i]['status'] and voter_dict[i]['last_payout'] < blocks[block_keys[block_nr]]['height']: if share_fees: share = (balance / poolbalance) * (blocks[block_keys[block_nr]]['reward'] + blocks[block_keys[block_nr]]['totalFee']) else: share = (balance / poolbalance) * blocks[block_keys[block_nr]]['reward'] voter_dict[i]['share'] += share delta_state.update({i: share}) no_state_change = True continue # parsing an event no_state_change = False if events[id]['event_type'] == 'transaction': if events[id]['recipient_id'] == 'Acw2vAVA48TcV8EnoBmZKJdV8bxnW6Y4E9': print(events[id]['amount']) # parsing a transaction if events[id]['event_type'] == 'transaction': if events[id]['recipient_id'] in voter_dict: voter_dict[events[id]['recipient_id']]['balance'] += events[id]['amount'] if events[id]['sender_id'] in voter_dict: voter_dict[events[id]['sender_id']]['balance'] -= (events[id]['amount'] + events[id]['fee']) if events[id]['sender_id'] in voter_dict and events[id]['type'] == 3 and plusvote in events[id]['rawasset']: voter_dict[events[id]['sender_id']]['status'] = True if events[id]['sender_id'] in voter_dict and events[id]['type'] == 3 and minvote in events[id]['rawasset']: voter_dict[events[id]['sender_id']]['status'] = False # parsing a forged block (if forged by a voter) if events[id]['event_type'] == 'block': voter_dict[events[id]['address']]['balance'] += (events[id]['reward'] + events[id]['total_fee']) # the transaction for loop ends with the final transaction. However more blocks may be forged. This copies # the final delta share and adds it to the share x the amount of blocks left. remaining_blocks = len(block_keys) - block_nr - 1 for i in range(remaining_blocks): for x in delta_state: voter_dict[x]['share'] += delta_state[x] # and indexerror indicates that we have ran out of forged blocks, thus the calculation is done (blocks[block_nr] # throw the error) except IndexError: raise return voter_dict, height_at_calculation
[ "This", "function", "doesn", "t", "work", "yet", ".", "Instead", "use", "legacy", ".", "trueshare", "()", "for", "a", "functional", "tbw", "script" ]
BlockHub/blockhubdpostools
python
https://github.com/BlockHub/blockhubdpostools/blob/27712cd97cd3658ee54a4330ff3135b51a01d7d1/dpostools/dbtools.py#L519-L654
[ "def", "tbw", "(", "self", ",", "delegate_address", ",", "blacklist", "=", "None", ",", "share_fees", "=", "False", ",", "compound_interest", "=", "False", ")", ":", "if", "not", "blacklist", ":", "blacklist", "=", "[", "]", "delegate_public_key", "=", "self", ".", "account_details", "(", "address", "=", "delegate_address", ")", "[", "'public_key'", "]", "height_at_calculation", "=", "self", ".", "node_height_details", "(", ")", "[", "'height'", "]", "# string format of the rawasset", "minvote", "=", "'{{\"votes\":[\"-{0}\"]}}'", ".", "format", "(", "delegate_public_key", ")", "plusvote", "=", "'{{\"votes\":[\"+{0}\"]}}'", ".", "format", "(", "delegate_public_key", ")", "events", "=", "self", ".", "get_events_vote_cluster", "(", "delegate_address", ")", "votes", "=", "self", ".", "get_historic_voters", "(", "delegate_address", ")", "blocks", "=", "self", ".", "get_blocks", "(", "delegate_address", ")", "# create a map of voters", "voter_dict", "=", "{", "}", "for", "voter", "in", "votes", ":", "voter_dict", ".", "update", "(", "{", "voter", ":", "{", "'balance'", ":", "0.0", ",", "'status'", ":", "False", ",", "'last_payout'", ":", "votes", "[", "voter", "]", "[", "'height'", "]", ",", "'share'", ":", "0.0", ",", "'vote_height'", ":", "votes", "[", "voter", "]", "[", "'height'", "]", ",", "'blocks_forged'", ":", "[", "]", "}", "}", ")", "for", "blacklisted_address", "in", "blacklist", ":", "voter_dict", ".", "pop", "(", "blacklisted_address", ",", "None", ")", "last_payout", "=", "self", ".", "get_last_out_transactions", "(", "delegate_address", ")", "# not all voters have had a payout, thus a KeyError is thrown", "for", "payout", "in", "last_payout", ":", "try", ":", "voter_dict", "[", "payout", "]", "[", "'last_payout'", "]", "=", "last_payout", "[", "payout", "]", "[", "'height'", "]", "except", "KeyError", ":", "pass", "# the change in the previous state of the voter_dict. This is added to the voterdict if", "# no state change occurs in the blockchain.", "delta_state", "=", "{", "}", "no_state_change", "=", "False", "block_keys", "=", "sorted", "(", "list", "(", "blocks", ".", "keys", "(", ")", ")", ")", "block_nr", "=", "0", "try", ":", "for", "id", "in", "events", ":", "# calculating poolbalances and updating shares", "if", "events", "[", "id", "]", "[", "'height'", "]", ">", "blocks", "[", "block_keys", "[", "block_nr", "]", "]", "[", "'height'", "]", ":", "# if the state is the same for the votepool, the previous calculation can be reused.", "block_nr", "+=", "1", "if", "no_state_change", ":", "for", "x", "in", "delta_state", ":", "voter_dict", "[", "x", "]", "[", "'share'", "]", "+=", "delta_state", "[", "x", "]", "continue", "# update pool balances", "poolbalance", "=", "0", "delta_state", "=", "{", "}", "for", "i", "in", "voter_dict", ":", "# here we update the poolbalance", "if", "compound_interest", ":", "balance", "=", "voter_dict", "[", "i", "]", "[", "'balance'", "]", "+", "voter_dict", "[", "i", "]", "[", "'share'", "]", "else", ":", "balance", "=", "voter_dict", "[", "i", "]", "[", "'balance'", "]", "if", "voter_dict", "[", "i", "]", "[", "'status'", "]", ":", "# if not voter_dict[i]['balance'] < 0:", "poolbalance", "+=", "balance", "# else:", "# raise exceptions.NegativeBalanceError('balance lower than zero for: {0}. 
balance: {1}'.format(i, voter_dict[i]['balance']))", "# here we calculate the share per voter", "for", "i", "in", "voter_dict", ":", "if", "compound_interest", ":", "balance", "=", "voter_dict", "[", "i", "]", "[", "'balance'", "]", "+", "voter_dict", "[", "i", "]", "[", "'share'", "]", "else", ":", "balance", "=", "voter_dict", "[", "i", "]", "[", "'balance'", "]", "if", "voter_dict", "[", "i", "]", "[", "'status'", "]", "and", "voter_dict", "[", "i", "]", "[", "'last_payout'", "]", "<", "blocks", "[", "block_keys", "[", "block_nr", "]", "]", "[", "'height'", "]", ":", "if", "share_fees", ":", "share", "=", "(", "balance", "/", "poolbalance", ")", "*", "(", "blocks", "[", "block_keys", "[", "block_nr", "]", "]", "[", "'reward'", "]", "+", "blocks", "[", "block_keys", "[", "block_nr", "]", "]", "[", "'totalFee'", "]", ")", "else", ":", "share", "=", "(", "balance", "/", "poolbalance", ")", "*", "blocks", "[", "block_keys", "[", "block_nr", "]", "]", "[", "'reward'", "]", "voter_dict", "[", "i", "]", "[", "'share'", "]", "+=", "share", "delta_state", ".", "update", "(", "{", "i", ":", "share", "}", ")", "no_state_change", "=", "True", "continue", "# parsing an event", "no_state_change", "=", "False", "if", "events", "[", "id", "]", "[", "'event_type'", "]", "==", "'transaction'", ":", "if", "events", "[", "id", "]", "[", "'recipient_id'", "]", "==", "'Acw2vAVA48TcV8EnoBmZKJdV8bxnW6Y4E9'", ":", "print", "(", "events", "[", "id", "]", "[", "'amount'", "]", ")", "# parsing a transaction", "if", "events", "[", "id", "]", "[", "'event_type'", "]", "==", "'transaction'", ":", "if", "events", "[", "id", "]", "[", "'recipient_id'", "]", "in", "voter_dict", ":", "voter_dict", "[", "events", "[", "id", "]", "[", "'recipient_id'", "]", "]", "[", "'balance'", "]", "+=", "events", "[", "id", "]", "[", "'amount'", "]", "if", "events", "[", "id", "]", "[", "'sender_id'", "]", "in", "voter_dict", ":", "voter_dict", "[", "events", "[", "id", "]", "[", "'sender_id'", "]", "]", "[", "'balance'", "]", "-=", "(", "events", "[", "id", "]", "[", "'amount'", "]", "+", "events", "[", "id", "]", "[", "'fee'", "]", ")", "if", "events", "[", "id", "]", "[", "'sender_id'", "]", "in", "voter_dict", "and", "events", "[", "id", "]", "[", "'type'", "]", "==", "3", "and", "plusvote", "in", "events", "[", "id", "]", "[", "'rawasset'", "]", ":", "voter_dict", "[", "events", "[", "id", "]", "[", "'sender_id'", "]", "]", "[", "'status'", "]", "=", "True", "if", "events", "[", "id", "]", "[", "'sender_id'", "]", "in", "voter_dict", "and", "events", "[", "id", "]", "[", "'type'", "]", "==", "3", "and", "minvote", "in", "events", "[", "id", "]", "[", "'rawasset'", "]", ":", "voter_dict", "[", "events", "[", "id", "]", "[", "'sender_id'", "]", "]", "[", "'status'", "]", "=", "False", "# parsing a forged block (if forged by a voter)", "if", "events", "[", "id", "]", "[", "'event_type'", "]", "==", "'block'", ":", "voter_dict", "[", "events", "[", "id", "]", "[", "'address'", "]", "]", "[", "'balance'", "]", "+=", "(", "events", "[", "id", "]", "[", "'reward'", "]", "+", "events", "[", "id", "]", "[", "'total_fee'", "]", ")", "# the transaction for loop ends with the final transaction. However more blocks may be forged. 
This copies", "# the final delta share and adds it to the share x the amount of blocks left.", "remaining_blocks", "=", "len", "(", "block_keys", ")", "-", "block_nr", "-", "1", "for", "i", "in", "range", "(", "remaining_blocks", ")", ":", "for", "x", "in", "delta_state", ":", "voter_dict", "[", "x", "]", "[", "'share'", "]", "+=", "delta_state", "[", "x", "]", "# and indexerror indicates that we have ran out of forged blocks, thus the calculation is done (blocks[block_nr]", "# throw the error)", "except", "IndexError", ":", "raise", "return", "voter_dict", ",", "height_at_calculation" ]
27712cd97cd3658ee54a4330ff3135b51a01d7d1
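A hedged usage sketch for the `tbw` record above. Its own docstring flags the function as not yet functional and points at `legacy.trueshare()`, so this only exercises the documented signature and return shape; `pool`, the delegate address and the blacklist entry are placeholders, not values from the repository.

# `pool` is assumed to be an already-constructed instance of the dpostools.dbtools
# class that defines tbw(); every address below is a placeholder.
voter_dict, height = pool.tbw(
    delegate_address='D_delegate_address_placeholder',
    blacklist=['D_excluded_voter_placeholder'],   # voters dropped from the share map
    share_fees=True,             # distribute block fees as well as block rewards
    compound_interest=False,     # earned shares are not folded back into voter balances
)

# Each entry carries the balance/status/share bookkeeping built inside tbw().
for address, info in voter_dict.items():
    print(address, info['share'], info['last_payout'])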
valid
classical
**Classic optimization methods** :param start: start position vector (before transform) :param ftol: accuracy required to stop at optimum :param disp: verbosity :param nsteps: number of steps :param method: string neldermead, cobyla (via `scipy.optimize <http://docs.scipy.org/doc/scipy/reference/tutorial/optimize.html>`_) bobyqa, ralg, algencan, ipopt, mma, auglag and many others from the OpenOpt framework (via `openopt.NLP <http://openopt.org/NLP>`_) minuit (via `PyMinuit <https://code.google.com/p/pyminuit/>`_)
jbopt/classic.py
def classical(transform, loglikelihood, parameter_names, prior, start = 0.5, ftol=0.1, disp=0, nsteps=40000, method='neldermead', **args): """ **Classic optimization methods** :param start: start position vector (before transform) :param ftol: accuracy required to stop at optimum :param disp: verbosity :param nsteps: number of steps :param method: string neldermead, cobyla (via `scipy.optimize <http://docs.scipy.org/doc/scipy/reference/tutorial/optimize.html>`_) bobyqa, ralg, algencan, ipopt, mma, auglag and many others from the OpenOpt framework (via `openopt.NLP <http://openopt.org/NLP>`_) minuit (via `PyMinuit <https://code.google.com/p/pyminuit/>`_) """ import scipy.optimize n_params = len(parameter_names) def minfunc(params): l = loglikelihood(params) p = prior(params) if numpy.isinf(p) and p < 0: print ' prior rejection' return -1e300 if numpy.isnan(l): return -1e300 return -l - p def minfunc_cube(cube): cube = numpy.array(cube) if (cube <= 1e-10).any() or (cube >= 1-1e-10).any(): return 1e100 params = transform(cube) l = loglikelihood(params) p = prior(params) if numpy.isinf(p) and p < 0: print ' prior rejection' return -1e300 if numpy.isnan(l): return -1e300 return -l - p start = start + numpy.zeros(n_params) ret = {} if method == 'neldermead': final, value, _niter, neval, warnflag = scipy.optimize.fmin(minfunc_cube, start, ftol=ftol, disp=disp, maxfun=nsteps, full_output=True) elif method == 'cobyla': cons = [lambda params: params[i] for i in range(n_params)] cons += [lambda params: 1 - params[i] for i in range(n_params)] final = scipy.optimize.fmin_cobyla(minfunc_cube, start, cons, rhoend=ftol / 10, disp=disp, maxfun=nsteps) neval = nsteps elif method == 'minuit' or method == 'hesse': """ We use eval here, and it is a dangerous thing to do. But Minuit probes the objective function for parameter names, and there is no way to create the objective function dynamically with an unknown number of parameters other than through eval. """ s = ', '.join(parameter_names) s = """lambda %s: minfunc([%s])""" % (s, s) if method == 'hesse': f = eval(s, dict(minfunc=minfunc, numpy=numpy)) start = transform(start) else: f = eval(s, dict(minfunc=minfunc_cube, numpy=numpy)) import minuit m = minuit.Minuit(f) for i, p in enumerate(parameter_names): m.values[p] = start[i] if method == 'minuit': m.limits[p] = (1e-10, 1 - 1e-10) m.up = 0.5 m.tol = ftol * 100 m.printMode = disp if method == 'minuit': m.migrad() elif method == 'hesse': m.hesse() final = [m.values[p] for p in parameter_names] neval = m.ncalls errors = [m.errors[p] for p in parameter_names] if method == 'minuit': c0 = final p0 = transform(c0) stdev = numpy.zeros(n_params) lower = numpy.zeros(n_params) upper = numpy.zeros(n_params) for i, w in enumerate(errors): c1 = numpy.copy(c0) c1[i] -= w c2 = numpy.copy(c0) c2[i] += w p1 = transform(c1) p2 = transform(c2) stdev[i] = numpy.abs(p2[i] - p1[i]) / 2 lower[i] = min(p2[i], p1[i]) upper[i] = max(p2[i], p1[i]) ret['stdev'] = stdev ret['upper'] = upper ret['lower'] = lower elif method == 'hesse': ret['stdev'] = errors ret['cov'] = numpy.matrix([[m.covariance[(a, b)] for b in parameter_names] for a in parameter_names]) else: from openopt import NLP lo = [1e-10] * n_params hi = [1-1e-10] * n_params iprint = 0 if disp == 0 else 10 if disp == 1 else 1 p = NLP(f=minfunc_cube, x0=start, lb=lo, ub=hi, maxFunEvals=nsteps, ftol=ftol, iprint=iprint) r = p.solve(method) final = r.xf neval = r.evals['f'] ret.update(dict(start=final, maximum=transform(final), method=method, neval=neval)) return ret
def classical(transform, loglikelihood, parameter_names, prior, start = 0.5, ftol=0.1, disp=0, nsteps=40000, method='neldermead', **args): """ **Classic optimization methods** :param start: start position vector (before transform) :param ftol: accuracy required to stop at optimum :param disp: verbosity :param nsteps: number of steps :param method: string neldermead, cobyla (via `scipy.optimize <http://docs.scipy.org/doc/scipy/reference/tutorial/optimize.html>`_) bobyqa, ralg, algencan, ipopt, mma, auglag and many others from the OpenOpt framework (via `openopt.NLP <http://openopt.org/NLP>`_) minuit (via `PyMinuit <https://code.google.com/p/pyminuit/>`_) """ import scipy.optimize n_params = len(parameter_names) def minfunc(params): l = loglikelihood(params) p = prior(params) if numpy.isinf(p) and p < 0: print ' prior rejection' return -1e300 if numpy.isnan(l): return -1e300 return -l - p def minfunc_cube(cube): cube = numpy.array(cube) if (cube <= 1e-10).any() or (cube >= 1-1e-10).any(): return 1e100 params = transform(cube) l = loglikelihood(params) p = prior(params) if numpy.isinf(p) and p < 0: print ' prior rejection' return -1e300 if numpy.isnan(l): return -1e300 return -l - p start = start + numpy.zeros(n_params) ret = {} if method == 'neldermead': final, value, _niter, neval, warnflag = scipy.optimize.fmin(minfunc_cube, start, ftol=ftol, disp=disp, maxfun=nsteps, full_output=True) elif method == 'cobyla': cons = [lambda params: params[i] for i in range(n_params)] cons += [lambda params: 1 - params[i] for i in range(n_params)] final = scipy.optimize.fmin_cobyla(minfunc_cube, start, cons, rhoend=ftol / 10, disp=disp, maxfun=nsteps) neval = nsteps elif method == 'minuit' or method == 'hesse': """ We use eval here, and it is a dangerous thing to do. But Minuit probes the objective function for parameter names, and there is no way to create the objective function dynamically with an unknown number of parameters other than through eval. """ s = ', '.join(parameter_names) s = """lambda %s: minfunc([%s])""" % (s, s) if method == 'hesse': f = eval(s, dict(minfunc=minfunc, numpy=numpy)) start = transform(start) else: f = eval(s, dict(minfunc=minfunc_cube, numpy=numpy)) import minuit m = minuit.Minuit(f) for i, p in enumerate(parameter_names): m.values[p] = start[i] if method == 'minuit': m.limits[p] = (1e-10, 1 - 1e-10) m.up = 0.5 m.tol = ftol * 100 m.printMode = disp if method == 'minuit': m.migrad() elif method == 'hesse': m.hesse() final = [m.values[p] for p in parameter_names] neval = m.ncalls errors = [m.errors[p] for p in parameter_names] if method == 'minuit': c0 = final p0 = transform(c0) stdev = numpy.zeros(n_params) lower = numpy.zeros(n_params) upper = numpy.zeros(n_params) for i, w in enumerate(errors): c1 = numpy.copy(c0) c1[i] -= w c2 = numpy.copy(c0) c2[i] += w p1 = transform(c1) p2 = transform(c2) stdev[i] = numpy.abs(p2[i] - p1[i]) / 2 lower[i] = min(p2[i], p1[i]) upper[i] = max(p2[i], p1[i]) ret['stdev'] = stdev ret['upper'] = upper ret['lower'] = lower elif method == 'hesse': ret['stdev'] = errors ret['cov'] = numpy.matrix([[m.covariance[(a, b)] for b in parameter_names] for a in parameter_names]) else: from openopt import NLP lo = [1e-10] * n_params hi = [1-1e-10] * n_params iprint = 0 if disp == 0 else 10 if disp == 1 else 1 p = NLP(f=minfunc_cube, x0=start, lb=lo, ub=hi, maxFunEvals=nsteps, ftol=ftol, iprint=iprint) r = p.solve(method) final = r.xf neval = r.evals['f'] ret.update(dict(start=final, maximum=transform(final), method=method, neval=neval)) return ret
[ "**", "Classic", "optimization", "methods", "**" ]
JohannesBuchner/jbopt
python
https://github.com/JohannesBuchner/jbopt/blob/11b721ea001625ad7820f71ff684723c71216646/jbopt/classic.py#L7-L125
[ "def", "classical", "(", "transform", ",", "loglikelihood", ",", "parameter_names", ",", "prior", ",", "start", "=", "0.5", ",", "ftol", "=", "0.1", ",", "disp", "=", "0", ",", "nsteps", "=", "40000", ",", "method", "=", "'neldermead'", ",", "*", "*", "args", ")", ":", "import", "scipy", ".", "optimize", "n_params", "=", "len", "(", "parameter_names", ")", "def", "minfunc", "(", "params", ")", ":", "l", "=", "loglikelihood", "(", "params", ")", "p", "=", "prior", "(", "params", ")", "if", "numpy", ".", "isinf", "(", "p", ")", "and", "p", "<", "0", ":", "print", "' prior rejection'", "return", "-", "1e300", "if", "numpy", ".", "isnan", "(", "l", ")", ":", "return", "-", "1e300", "return", "-", "l", "-", "p", "def", "minfunc_cube", "(", "cube", ")", ":", "cube", "=", "numpy", ".", "array", "(", "cube", ")", "if", "(", "cube", "<=", "1e-10", ")", ".", "any", "(", ")", "or", "(", "cube", ">=", "1", "-", "1e-10", ")", ".", "any", "(", ")", ":", "return", "1e100", "params", "=", "transform", "(", "cube", ")", "l", "=", "loglikelihood", "(", "params", ")", "p", "=", "prior", "(", "params", ")", "if", "numpy", ".", "isinf", "(", "p", ")", "and", "p", "<", "0", ":", "print", "' prior rejection'", "return", "-", "1e300", "if", "numpy", ".", "isnan", "(", "l", ")", ":", "return", "-", "1e300", "return", "-", "l", "-", "p", "start", "=", "start", "+", "numpy", ".", "zeros", "(", "n_params", ")", "ret", "=", "{", "}", "if", "method", "==", "'neldermead'", ":", "final", ",", "value", ",", "_niter", ",", "neval", ",", "warnflag", "=", "scipy", ".", "optimize", ".", "fmin", "(", "minfunc_cube", ",", "start", ",", "ftol", "=", "ftol", ",", "disp", "=", "disp", ",", "maxfun", "=", "nsteps", ",", "full_output", "=", "True", ")", "elif", "method", "==", "'cobyla'", ":", "cons", "=", "[", "lambda", "params", ":", "params", "[", "i", "]", "for", "i", "in", "range", "(", "n_params", ")", "]", "cons", "+=", "[", "lambda", "params", ":", "1", "-", "params", "[", "i", "]", "for", "i", "in", "range", "(", "n_params", ")", "]", "final", "=", "scipy", ".", "optimize", ".", "fmin_cobyla", "(", "minfunc_cube", ",", "start", ",", "cons", ",", "rhoend", "=", "ftol", "/", "10", ",", "disp", "=", "disp", ",", "maxfun", "=", "nsteps", ")", "neval", "=", "nsteps", "elif", "method", "==", "'minuit'", "or", "method", "==", "'hesse'", ":", "\"\"\"\n\t\tWe use eval here, and it is a dangerous thing to do.\n\t\tBut Minuit probes the objective function for parameter names,\n\t\tand there is no way to create the objective function \n\t\tdynamically with an unknown number of parameters other than\n\t\tthrough eval.\n\t\t\"\"\"", "s", "=", "', '", ".", "join", "(", "parameter_names", ")", "s", "=", "\"\"\"lambda %s: minfunc([%s])\"\"\"", "%", "(", "s", ",", "s", ")", "if", "method", "==", "'hesse'", ":", "f", "=", "eval", "(", "s", ",", "dict", "(", "minfunc", "=", "minfunc", ",", "numpy", "=", "numpy", ")", ")", "start", "=", "transform", "(", "start", ")", "else", ":", "f", "=", "eval", "(", "s", ",", "dict", "(", "minfunc", "=", "minfunc_cube", ",", "numpy", "=", "numpy", ")", ")", "import", "minuit", "m", "=", "minuit", ".", "Minuit", "(", "f", ")", "for", "i", ",", "p", "in", "enumerate", "(", "parameter_names", ")", ":", "m", ".", "values", "[", "p", "]", "=", "start", "[", "i", "]", "if", "method", "==", "'minuit'", ":", "m", ".", "limits", "[", "p", "]", "=", "(", "1e-10", ",", "1", "-", "1e-10", ")", "m", ".", "up", "=", "0.5", "m", ".", "tol", "=", "ftol", "*", "100", "m", ".", "printMode", "=", "disp", "if", "method", 
"==", "'minuit'", ":", "m", ".", "migrad", "(", ")", "elif", "method", "==", "'hesse'", ":", "m", ".", "hesse", "(", ")", "final", "=", "[", "m", ".", "values", "[", "p", "]", "for", "p", "in", "parameter_names", "]", "neval", "=", "m", ".", "ncalls", "errors", "=", "[", "m", ".", "errors", "[", "p", "]", "for", "p", "in", "parameter_names", "]", "if", "method", "==", "'minuit'", ":", "c0", "=", "final", "p0", "=", "transform", "(", "c0", ")", "stdev", "=", "numpy", ".", "zeros", "(", "n_params", ")", "lower", "=", "numpy", ".", "zeros", "(", "n_params", ")", "upper", "=", "numpy", ".", "zeros", "(", "n_params", ")", "for", "i", ",", "w", "in", "enumerate", "(", "errors", ")", ":", "c1", "=", "numpy", ".", "copy", "(", "c0", ")", "c1", "[", "i", "]", "-=", "w", "c2", "=", "numpy", ".", "copy", "(", "c0", ")", "c2", "[", "i", "]", "+=", "w", "p1", "=", "transform", "(", "c1", ")", "p2", "=", "transform", "(", "c2", ")", "stdev", "[", "i", "]", "=", "numpy", ".", "abs", "(", "p2", "[", "i", "]", "-", "p1", "[", "i", "]", ")", "/", "2", "lower", "[", "i", "]", "=", "min", "(", "p2", "[", "i", "]", ",", "p1", "[", "i", "]", ")", "upper", "[", "i", "]", "=", "max", "(", "p2", "[", "i", "]", ",", "p1", "[", "i", "]", ")", "ret", "[", "'stdev'", "]", "=", "stdev", "ret", "[", "'upper'", "]", "=", "upper", "ret", "[", "'lower'", "]", "=", "lower", "elif", "method", "==", "'hesse'", ":", "ret", "[", "'stdev'", "]", "=", "errors", "ret", "[", "'cov'", "]", "=", "numpy", ".", "matrix", "(", "[", "[", "m", ".", "covariance", "[", "(", "a", ",", "b", ")", "]", "for", "b", "in", "parameter_names", "]", "for", "a", "in", "parameter_names", "]", ")", "else", ":", "from", "openopt", "import", "NLP", "lo", "=", "[", "1e-10", "]", "*", "n_params", "hi", "=", "[", "1", "-", "1e-10", "]", "*", "n_params", "iprint", "=", "0", "if", "disp", "==", "0", "else", "10", "if", "disp", "==", "1", "else", "1", "p", "=", "NLP", "(", "f", "=", "minfunc_cube", ",", "x0", "=", "start", ",", "lb", "=", "lo", ",", "ub", "=", "hi", ",", "maxFunEvals", "=", "nsteps", ",", "ftol", "=", "ftol", ",", "iprint", "=", "iprint", ")", "r", "=", "p", ".", "solve", "(", "method", ")", "final", "=", "r", ".", "xf", "neval", "=", "r", ".", "evals", "[", "'f'", "]", "ret", ".", "update", "(", "dict", "(", "start", "=", "final", ",", "maximum", "=", "transform", "(", "final", ")", ",", "method", "=", "method", ",", "neval", "=", "neval", ")", ")", "return", "ret" ]
11b721ea001625ad7820f71ff684723c71216646
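The `classical` record above exposes several optimizers behind one transform/loglikelihood/prior interface. Below is a minimal Python sketch wiring it to a toy two-parameter Gaussian model; the model, data and parameter ranges are invented for illustration, and since the library code in the record uses Python 2 print statements the import assumes a matching interpreter.

import numpy
from jbopt.classic import classical   # module path taken from the record above

parameter_names = ['mu', 'sigma']

def transform(cube):
    # map the unit cube onto illustrative parameter ranges
    return [cube[0] * 10 - 5, cube[1] * 2 + 0.1]

def loglikelihood(params):
    mu, sigma = params
    data = numpy.array([0.4, 1.1, 1.6])   # toy data, made up for this sketch
    return -0.5 * (((data - mu) / sigma) ** 2).sum() - len(data) * numpy.log(sigma)

def prior(params):
    return 0.0    # flat prior; classical() only rejects priors that are -inf

result = classical(transform, loglikelihood, parameter_names, prior,
                   method='neldermead', ftol=0.1, disp=0)
print(result['maximum'], result['neval'])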
valid
onebyone
**Convex optimization based on Brent's method** A strict assumption of one optimum between the parameter limits is used. The bounds are narrowed until it is found, i.e. the likelihood function is flat within the bounds. * If optimum outside bracket, expands bracket until contained. * Thus guaranteed to return local optimum. * Supports parallelization (multiple parameters are treated independently) * Supports finding ML uncertainties (Delta-Chi^2=1) Very useful for 1-3d problems. Otherwise useful, reproducible/deterministic algorithm for finding the minimum in well-behaved likelihoods, where the parameters are weakly independent, or to find a good starting point. Optimizes each parameter in order, assuming they are largely independent. For 1-dimensional algorithm used, see :func:`jbopt.opt_grid` :param ftol: difference in values at which the function can be considered flat :param compute_errors: compute standard deviation of gaussian around optimum
jbopt/classic.py
def onebyone(transform, loglikelihood, parameter_names, prior, start = 0.5, ftol=0.1, disp=0, nsteps=40000, parallel=False, find_uncertainties=False, **args): """ **Convex optimization based on Brent's method** A strict assumption of one optimum between the parameter limits is used. The bounds are narrowed until it is found, i.e. the likelihood function is flat within the bounds. * If optimum outside bracket, expands bracket until contained. * Thus guaranteed to return local optimum. * Supports parallelization (multiple parameters are treated independently) * Supports finding ML uncertainties (Delta-Chi^2=1) Very useful for 1-3d problems. Otherwise useful, reproducible/deterministic algorithm for finding the minimum in well-behaved likelihoods, where the parameters are weakly independent, or to find a good starting point. Optimizes each parameter in order, assuming they are largely independent. For 1-dimensional algorithm used, see :func:`jbopt.opt_grid` :param ftol: difference in values at which the function can be considered flat :param compute_errors: compute standard deviation of gaussian around optimum """ def minfunc(cube): cube = numpy.array(cube) if (cube <= 1e-10).any() or (cube >= 1-1e-10).any(): return 1e100 params = transform(cube) l = loglikelihood(params) p = prior(params) if numpy.isinf(p) and p < 0: print ' prior rejection' return -1e300 if numpy.isnan(l): return -1e300 return -l - p if parallel: func = opt_grid_parallel else: func = opt_grid n_params = len(parameter_names) start = start + numpy.zeros(n_params) ret = func(start, minfunc, [(1e-10, 1-1e-10)] * n_params, ftol=ftol, disp=disp, compute_errors=find_uncertainties) if find_uncertainties: c0 = ret[0] p0 = transform(c0) stdev = numpy.zeros(n_params) lower = numpy.zeros(n_params) upper = numpy.zeros(n_params) for i, (lo, hi) in enumerate(ret[1]): c1 = numpy.copy(c0) c1[i] = lo c2 = numpy.copy(c0) c2[i] = hi p1 = transform(c1) p2 = transform(c2) stdev[i] = numpy.abs(p2[i] - p1[i]) / 2 lower[i] = min(p2[i], p1[i]) upper[i] = max(p2[i], p1[i]) return dict(start=ret[0], maximum=p0, stdev=stdev, upper=upper, lower=lower, method='opt_grid') else: return dict(start=ret, maximum=transform(ret), method='opt_grid')
def onebyone(transform, loglikelihood, parameter_names, prior, start = 0.5, ftol=0.1, disp=0, nsteps=40000, parallel=False, find_uncertainties=False, **args): """ **Convex optimization based on Brent's method** A strict assumption of one optimum between the parameter limits is used. The bounds are narrowed until it is found, i.e. the likelihood function is flat within the bounds. * If optimum outside bracket, expands bracket until contained. * Thus guaranteed to return local optimum. * Supports parallelization (multiple parameters are treated independently) * Supports finding ML uncertainties (Delta-Chi^2=1) Very useful for 1-3d problems. Otherwise useful, reproducible/deterministic algorithm for finding the minimum in well-behaved likelihoods, where the parameters are weakly independent, or to find a good starting point. Optimizes each parameter in order, assuming they are largely independent. For 1-dimensional algorithm used, see :func:`jbopt.opt_grid` :param ftol: difference in values at which the function can be considered flat :param compute_errors: compute standard deviation of gaussian around optimum """ def minfunc(cube): cube = numpy.array(cube) if (cube <= 1e-10).any() or (cube >= 1-1e-10).any(): return 1e100 params = transform(cube) l = loglikelihood(params) p = prior(params) if numpy.isinf(p) and p < 0: print ' prior rejection' return -1e300 if numpy.isnan(l): return -1e300 return -l - p if parallel: func = opt_grid_parallel else: func = opt_grid n_params = len(parameter_names) start = start + numpy.zeros(n_params) ret = func(start, minfunc, [(1e-10, 1-1e-10)] * n_params, ftol=ftol, disp=disp, compute_errors=find_uncertainties) if find_uncertainties: c0 = ret[0] p0 = transform(c0) stdev = numpy.zeros(n_params) lower = numpy.zeros(n_params) upper = numpy.zeros(n_params) for i, (lo, hi) in enumerate(ret[1]): c1 = numpy.copy(c0) c1[i] = lo c2 = numpy.copy(c0) c2[i] = hi p1 = transform(c1) p2 = transform(c2) stdev[i] = numpy.abs(p2[i] - p1[i]) / 2 lower[i] = min(p2[i], p1[i]) upper[i] = max(p2[i], p1[i]) return dict(start=ret[0], maximum=p0, stdev=stdev, upper=upper, lower=lower, method='opt_grid') else: return dict(start=ret, maximum=transform(ret), method='opt_grid')
[ "**", "Convex", "optimization", "based", "on", "Brent", "s", "method", "**", "A", "strict", "assumption", "of", "one", "optimum", "between", "the", "parameter", "limits", "is", "used", ".", "The", "bounds", "are", "narrowed", "until", "it", "is", "found", "i", ".", "e", ".", "the", "likelihood", "function", "is", "flat", "within", "the", "bounds", ".", "*", "If", "optimum", "outside", "bracket", "expands", "bracket", "until", "contained", ".", "*", "Thus", "guaranteed", "to", "return", "local", "optimum", ".", "*", "Supports", "parallelization", "(", "multiple", "parameters", "are", "treated", "independently", ")", "*", "Supports", "finding", "ML", "uncertainties", "(", "Delta", "-", "Chi^2", "=", "1", ")" ]
JohannesBuchner/jbopt
python
https://github.com/JohannesBuchner/jbopt/blob/11b721ea001625ad7820f71ff684723c71216646/jbopt/classic.py#L128-L196
[ "def", "onebyone", "(", "transform", ",", "loglikelihood", ",", "parameter_names", ",", "prior", ",", "start", "=", "0.5", ",", "ftol", "=", "0.1", ",", "disp", "=", "0", ",", "nsteps", "=", "40000", ",", "parallel", "=", "False", ",", "find_uncertainties", "=", "False", ",", "*", "*", "args", ")", ":", "def", "minfunc", "(", "cube", ")", ":", "cube", "=", "numpy", ".", "array", "(", "cube", ")", "if", "(", "cube", "<=", "1e-10", ")", ".", "any", "(", ")", "or", "(", "cube", ">=", "1", "-", "1e-10", ")", ".", "any", "(", ")", ":", "return", "1e100", "params", "=", "transform", "(", "cube", ")", "l", "=", "loglikelihood", "(", "params", ")", "p", "=", "prior", "(", "params", ")", "if", "numpy", ".", "isinf", "(", "p", ")", "and", "p", "<", "0", ":", "print", "' prior rejection'", "return", "-", "1e300", "if", "numpy", ".", "isnan", "(", "l", ")", ":", "return", "-", "1e300", "return", "-", "l", "-", "p", "if", "parallel", ":", "func", "=", "opt_grid_parallel", "else", ":", "func", "=", "opt_grid", "n_params", "=", "len", "(", "parameter_names", ")", "start", "=", "start", "+", "numpy", ".", "zeros", "(", "n_params", ")", "ret", "=", "func", "(", "start", ",", "minfunc", ",", "[", "(", "1e-10", ",", "1", "-", "1e-10", ")", "]", "*", "n_params", ",", "ftol", "=", "ftol", ",", "disp", "=", "disp", ",", "compute_errors", "=", "find_uncertainties", ")", "if", "find_uncertainties", ":", "c0", "=", "ret", "[", "0", "]", "p0", "=", "transform", "(", "c0", ")", "stdev", "=", "numpy", ".", "zeros", "(", "n_params", ")", "lower", "=", "numpy", ".", "zeros", "(", "n_params", ")", "upper", "=", "numpy", ".", "zeros", "(", "n_params", ")", "for", "i", ",", "(", "lo", ",", "hi", ")", "in", "enumerate", "(", "ret", "[", "1", "]", ")", ":", "c1", "=", "numpy", ".", "copy", "(", "c0", ")", "c1", "[", "i", "]", "=", "lo", "c2", "=", "numpy", ".", "copy", "(", "c0", ")", "c2", "[", "i", "]", "=", "hi", "p1", "=", "transform", "(", "c1", ")", "p2", "=", "transform", "(", "c2", ")", "stdev", "[", "i", "]", "=", "numpy", ".", "abs", "(", "p2", "[", "i", "]", "-", "p1", "[", "i", "]", ")", "/", "2", "lower", "[", "i", "]", "=", "min", "(", "p2", "[", "i", "]", ",", "p1", "[", "i", "]", ")", "upper", "[", "i", "]", "=", "max", "(", "p2", "[", "i", "]", ",", "p1", "[", "i", "]", ")", "return", "dict", "(", "start", "=", "ret", "[", "0", "]", ",", "maximum", "=", "p0", ",", "stdev", "=", "stdev", ",", "upper", "=", "upper", ",", "lower", "=", "lower", ",", "method", "=", "'opt_grid'", ")", "else", ":", "return", "dict", "(", "start", "=", "ret", ",", "maximum", "=", "transform", "(", "ret", ")", ",", "method", "=", "'opt_grid'", ")" ]
11b721ea001625ad7820f71ff684723c71216646
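`onebyone` shares the interface sketched for `classical` above; the switches exercised below are `parallel` and `find_uncertainties`, which control per-parameter parallelisation and Delta-Chi^2 = 1 error estimation. The transform, loglikelihood, prior and parameter_names objects are assumed to be the toy ones from the previous sketch.

from jbopt.classic import onebyone

result = onebyone(transform, loglikelihood, parameter_names, prior,
                  ftol=0.1, disp=0,
                  parallel=False,             # True treats parameters independently in parallel
                  find_uncertainties=True)    # also return stdev/lower/upper estimates

print(result['maximum'])
if 'stdev' in result:                         # only present when find_uncertainties=True
    print(result['lower'], result['upper'])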
valid
parse_hub_key
Parse a hub key into a dictionary of component parts :param key: str, a hub key :returns: dict, hub key split into parts :raises: ValueError
bass/hubkey.py
def parse_hub_key(key): """Parse a hub key into a dictionary of component parts :param key: str, a hub key :returns: dict, hub key split into parts :raises: ValueError """ if key is None: raise ValueError('Not a valid key') match = re.match(PATTERN, key) if not match: match = re.match(PATTERN_S0, key) if not match: raise ValueError('Not a valid key') return dict(map(normalise_part, zip([p for p in PARTS_S0.keys()], match.groups()))) return dict(zip(PARTS.keys(), match.groups()))
def parse_hub_key(key): """Parse a hub key into a dictionary of component parts :param key: str, a hub key :returns: dict, hub key split into parts :raises: ValueError """ if key is None: raise ValueError('Not a valid key') match = re.match(PATTERN, key) if not match: match = re.match(PATTERN_S0, key) if not match: raise ValueError('Not a valid key') return dict(map(normalise_part, zip([p for p in PARTS_S0.keys()], match.groups()))) return dict(zip(PARTS.keys(), match.groups()))
[ "Parse", "a", "hub", "key", "into", "a", "dictionary", "of", "component", "parts" ]
openpermissions/bass
python
https://github.com/openpermissions/bass/blob/fb606d3804e1f86b90253b25363bdfa8758ccf39/bass/hubkey.py#L81-L99
[ "def", "parse_hub_key", "(", "key", ")", ":", "if", "key", "is", "None", ":", "raise", "ValueError", "(", "'Not a valid key'", ")", "match", "=", "re", ".", "match", "(", "PATTERN", ",", "key", ")", "if", "not", "match", ":", "match", "=", "re", ".", "match", "(", "PATTERN_S0", ",", "key", ")", "if", "not", "match", ":", "raise", "ValueError", "(", "'Not a valid key'", ")", "return", "dict", "(", "map", "(", "normalise_part", ",", "zip", "(", "[", "p", "for", "p", "in", "PARTS_S0", ".", "keys", "(", ")", "]", ",", "match", ".", "groups", "(", ")", ")", ")", ")", "return", "dict", "(", "zip", "(", "PARTS", ".", "keys", "(", ")", ",", "match", ".", "groups", "(", ")", ")", ")" ]
fb606d3804e1f86b90253b25363bdfa8758ccf39
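A short Python sketch for the `parse_hub_key` record above. The key string is invented: the separator, schema token and per-part formats are guesses at values that would satisfy the module's PATTERN regexes, so the call is wrapped in the documented ValueError path.

from bass.hubkey import parse_hub_key

# Invented key shaped like resolver/schema/hub/repository/entity_type/entity_id.
key = 'https://openpermissions.org/s1/hub1/10e4b9612337f237118e1678ec001fa6/asset/e1a2b3c4d5e6f7a8b9c0d1e2f3a4b5c6'

try:
    parts = parse_hub_key(key)        # dict keyed by the names in PARTS
    print(parts)
except ValueError:
    print('not a valid hub key')      # raised for None or anything the regexes reject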
valid
match_part
Raise an exception if string doesn't match a part's regex :param string: str :param part: a key in the PARTS dict :raises: ValueError, TypeError
bass/hubkey.py
def match_part(string, part): """Raise an exception if string doesn't match a part's regex :param string: str :param part: a key in the PARTS dict :raises: ValueError, TypeError """ if not string or not re.match('^(' + PARTS[part] + ')$', string): raise ValueError('{} should match {}'.format(part, PARTS[part]))
def match_part(string, part): """Raise an exception if string doesn't match a part's regex :param string: str :param part: a key in the PARTS dict :raises: ValueError, TypeError """ if not string or not re.match('^(' + PARTS[part] + ')$', string): raise ValueError('{} should match {}'.format(part, PARTS[part]))
[ "Raise", "an", "exception", "if", "string", "doesn", "t", "match", "a", "part", "s", "regex" ]
openpermissions/bass
python
https://github.com/openpermissions/bass/blob/fb606d3804e1f86b90253b25363bdfa8758ccf39/bass/hubkey.py#L114-L122
[ "def", "match_part", "(", "string", ",", "part", ")", ":", "if", "not", "string", "or", "not", "re", ".", "match", "(", "'^('", "+", "PARTS", "[", "part", "]", "+", "')$'", ",", "string", ")", ":", "raise", "ValueError", "(", "'{} should match {}'", ".", "format", "(", "part", ",", "PARTS", "[", "part", "]", ")", ")" ]
fb606d3804e1f86b90253b25363bdfa8758ccf39
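`match_part` is the validation helper the other hub-key functions rely on: it returns nothing on success and raises ValueError when the string fails the regex stored under the given PARTS key. In the Python sketch below, 'asset' is a guess at a conforming entity_type value, while the empty-string case is guaranteed to fail because of the `not string` guard.

from bass.hubkey import match_part

try:
    match_part('asset', 'entity_type')   # silent when the string matches PARTS['entity_type']
    print('entity_type accepted')
except ValueError as err:
    print(err)                           # "<part> should match <regex>"

try:
    match_part('', 'hub_id')             # empty strings are always rejected
except ValueError as err:
    print(err)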
valid
generate_hub_key
Create and return an array of hub keys :param resolver_id: the service that can resolve this key :param hub_id: the unique id of the hub :param repository_id: the type of id that the provider recognises :param entity_type: the type of the entity to which the key refers. :param entity_id: ID of entity (UUID) :returns: a hub key :raises: :AttributeError: if a parameter has a bad value :TypeError: if a parameter has a bad value :ValueError: if a parameter has a bad value
bass/hubkey.py
def generate_hub_key(resolver_id, hub_id, repository_id, entity_type, entity_id=None): """Create and return an array of hub keys :param resolver_id: the service that can resolve this key :param hub_id: the unique id of the hub :param repository_id: the type of id that the provider recognises :param entity_type: the type of the entity to which the key refers. :param entity_id: ID of entity (UUID) :returns: a hub key :raises: :AttributeError: if a parameter has a bad value :TypeError: if a parameter has a bad value :ValueError: if a parameter has a bad value """ parsed = urlparse(resolver_id) if not parsed.scheme: parsed = parsed._replace(scheme=PROTOCOL, netloc=idna_encode(parsed.path.lower()), path=u'') else: parsed = parsed._replace(netloc=idna_encode(parsed.netloc.lower())) resolver_id = urlunparse(parsed) hub_id = url_quote(hub_id.lower()) if not entity_id: entity_id = str(uuid.uuid4()).replace('-', '') else: match_part(entity_id, 'entity_id') # If any of these checks fail a ValueError exception is raised match_part(resolver_id, 'resolver_id') match_part(hub_id, 'hub_id') match_part(repository_id, 'repository_id') match_part(entity_type, 'entity_type') hub_key = SEPARATOR.join( [resolver_id, SCHEMA, hub_id, repository_id, entity_type, entity_id]) return hub_key
def generate_hub_key(resolver_id, hub_id, repository_id, entity_type, entity_id=None): """Create and return an array of hub keys :param resolver_id: the service that can resolve this key :param hub_id: the unique id of the hub :param repository_id: the type of id that the provider recognises :param entity_type: the type of the entity to which the key refers. :param entity_id: ID of entity (UUID) :returns: a hub key :raises: :AttributeError: if a parameter has a bad value :TypeError: if a parameter has a bad value :ValueError: if a parameter has a bad value """ parsed = urlparse(resolver_id) if not parsed.scheme: parsed = parsed._replace(scheme=PROTOCOL, netloc=idna_encode(parsed.path.lower()), path=u'') else: parsed = parsed._replace(netloc=idna_encode(parsed.netloc.lower())) resolver_id = urlunparse(parsed) hub_id = url_quote(hub_id.lower()) if not entity_id: entity_id = str(uuid.uuid4()).replace('-', '') else: match_part(entity_id, 'entity_id') # If any of these checks fail a ValueError exception is raised match_part(resolver_id, 'resolver_id') match_part(hub_id, 'hub_id') match_part(repository_id, 'repository_id') match_part(entity_type, 'entity_type') hub_key = SEPARATOR.join( [resolver_id, SCHEMA, hub_id, repository_id, entity_type, entity_id]) return hub_key
[ "Create", "and", "return", "an", "array", "of", "hub", "keys", ":", "param", "resolver_id", ":", "the", "service", "that", "can", "resolve", "this", "key", ":", "param", "hub_id", ":", "the", "unique", "id", "of", "the", "hub", ":", "param", "repository_id", ":", "the", "type", "of", "id", "that", "the", "provider", "recognises", ":", "param", "entity_type", ":", "the", "type", "of", "the", "entity", "to", "which", "the", "key", "refers", ".", ":", "param", "entity_id", ":", "ID", "of", "entity", "(", "UUID", ")", ":", "returns", ":", "a", "hub", "key", ":", "raises", ":", ":", "AttributeError", ":", "if", "a", "parameter", "has", "a", "bad", "value", ":", "TypeError", ":", "if", "a", "parameter", "has", "a", "bad", "value", ":", "ValueError", ":", "if", "a", "parameter", "has", "a", "bad", "value" ]
openpermissions/bass
python
https://github.com/openpermissions/bass/blob/fb606d3804e1f86b90253b25363bdfa8758ccf39/bass/hubkey.py#L145-L181
[ "def", "generate_hub_key", "(", "resolver_id", ",", "hub_id", ",", "repository_id", ",", "entity_type", ",", "entity_id", "=", "None", ")", ":", "parsed", "=", "urlparse", "(", "resolver_id", ")", "if", "not", "parsed", ".", "scheme", ":", "parsed", "=", "parsed", ".", "_replace", "(", "scheme", "=", "PROTOCOL", ",", "netloc", "=", "idna_encode", "(", "parsed", ".", "path", ".", "lower", "(", ")", ")", ",", "path", "=", "u''", ")", "else", ":", "parsed", "=", "parsed", ".", "_replace", "(", "netloc", "=", "idna_encode", "(", "parsed", ".", "netloc", ".", "lower", "(", ")", ")", ")", "resolver_id", "=", "urlunparse", "(", "parsed", ")", "hub_id", "=", "url_quote", "(", "hub_id", ".", "lower", "(", ")", ")", "if", "not", "entity_id", ":", "entity_id", "=", "str", "(", "uuid", ".", "uuid4", "(", ")", ")", ".", "replace", "(", "'-'", ",", "''", ")", "else", ":", "match_part", "(", "entity_id", ",", "'entity_id'", ")", "# If any of these checks fail a ValueError exception is raised", "match_part", "(", "resolver_id", ",", "'resolver_id'", ")", "match_part", "(", "hub_id", ",", "'hub_id'", ")", "match_part", "(", "repository_id", ",", "'repository_id'", ")", "match_part", "(", "entity_type", ",", "'entity_type'", ")", "hub_key", "=", "SEPARATOR", ".", "join", "(", "[", "resolver_id", ",", "SCHEMA", ",", "hub_id", ",", "repository_id", ",", "entity_type", ",", "entity_id", "]", ")", "return", "hub_key" ]
fb606d3804e1f86b90253b25363bdfa8758ccf39
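The `generate_hub_key` record above normalises the resolver (adding the PROTOCOL scheme and IDNA-encoding the host), generates a dashless uuid4 when no entity_id is supplied, validates every part with `match_part`, and joins the pieces with SEPARATOR. The argument values in this Python sketch are guesses at strings that satisfy the unshown PARTS regexes, hence the ValueError handler.

from bass.hubkey import generate_hub_key

try:
    key = generate_hub_key(
        resolver_id='openpermissions.org',                   # scheme is added automatically
        hub_id='hub1',
        repository_id='10e4b9612337f237118e1678ec001fa6',    # placeholder repository id
        entity_type='asset',
    )                                    # entity_id omitted, so a uuid4 hex is generated
    print(key)
except ValueError as err:
    print('a part failed validation:', err)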
valid
compact
Compact a CouchDB database with optional synchronicity. The ``compact`` function will compact a CouchDB database stored on an running CouchDB server. By default, this process occurs *asynchronously*, meaning that the compaction will occur in the background. Often, you'll want to know when the process has completed; for this reason, ``compact`` will return a function which, when called, will return the state of the compaction. If it has completed, ``True`` will be returned; otherwise, ``False``. This may be called multiple times. Alternatively, you may opt to run ``compact`` in synchronous mode, for debugging or profiling purposes. If this is the case, an optional keyword argument ``poll_interval`` is accepted, which should be a number (in seconds) representing the time to take between polls. A sensible default may be around 0.5 (seconds). Because this function operates on database specifiers, you can choose to operate on the local server or any remote server.
relax/couchdb/compact.py
def compact(db_spec, poll_interval=0): """ Compact a CouchDB database with optional synchronicity. The ``compact`` function will compact a CouchDB database stored on an running CouchDB server. By default, this process occurs *asynchronously*, meaning that the compaction will occur in the background. Often, you'll want to know when the process has completed; for this reason, ``compact`` will return a function which, when called, will return the state of the compaction. If it has completed, ``True`` will be returned; otherwise, ``False``. This may be called multiple times. Alternatively, you may opt to run ``compact`` in synchronous mode, for debugging or profiling purposes. If this is the case, an optional keyword argument ``poll_interval`` is accepted, which should be a number (in seconds) representing the time to take between polls. A sensible default may be around 0.5 (seconds). Because this function operates on database specifiers, you can choose to operate on the local server or any remote server. """ server = get_server_from_specifier(db_spec) db = get_db_from_specifier(db_spec) # Get logger logger = logging.getLogger('relax.couchdb.compact') logger.info('Pre-compact size of %r: %s' % (db_spec, repr_bytes(db.info()['disk_size']),)) logger.debug('POST ' + urlparse.urljoin(db.resource.uri + '/', '_compact')) # Start compaction process by issuing a POST to '/<db_name>/_compact'. resp_headers, resp_body = db.resource.post('/_compact') # Asynchronous compaction if not poll_interval: if not (resp_body.get('ok', False) and resp_headers['status'] == '202'): err = CompactionError('Compaction of %r failed.') # Give the exception some useful information. err.response = (resp_headers, resp_body) raise err # Return a function which, when called, will return whether or not the # compaction process is still running. def check_completed(): logger.debug( 'Polling database to check if compaction has completed') logger.debug('GET ' + db.resource.uri + '/') db_info = db.info() completed = not db_info.get('compact_running', False) if completed and db_info.get('disk_size', None): logger.info('Post-compact size of %r: %s' % (db_spec, repr_bytes(db_info['disk_size']))) return completed return check_completed # Synchronous compaction elif poll_interval > 0: logger.debug( 'Polling database to check if compaction has completed') logger.debug('GET ' + db.resource.uri + '/') # Shows whether compaction is running or not. running = db.info().get('compact_running', False) # Poll the running state of the compaction. while running: time.sleep(poll_interval) logger.debug( 'Polling database to check if compaction has completed') logger.debug('GET ' + db.resource.uri + '/') running = db.info().get('compact_running', False) size_after = db.info().get('disk_size', None) if size_after: logger.info('Post-compact size of %r: %s' % (db_spec, repr_bytes(size_after))) return True else: raise ValueError('Poll interval must be greater than zero.')
def compact(db_spec, poll_interval=0): """ Compact a CouchDB database with optional synchronicity. The ``compact`` function will compact a CouchDB database stored on an running CouchDB server. By default, this process occurs *asynchronously*, meaning that the compaction will occur in the background. Often, you'll want to know when the process has completed; for this reason, ``compact`` will return a function which, when called, will return the state of the compaction. If it has completed, ``True`` will be returned; otherwise, ``False``. This may be called multiple times. Alternatively, you may opt to run ``compact`` in synchronous mode, for debugging or profiling purposes. If this is the case, an optional keyword argument ``poll_interval`` is accepted, which should be a number (in seconds) representing the time to take between polls. A sensible default may be around 0.5 (seconds). Because this function operates on database specifiers, you can choose to operate on the local server or any remote server. """ server = get_server_from_specifier(db_spec) db = get_db_from_specifier(db_spec) # Get logger logger = logging.getLogger('relax.couchdb.compact') logger.info('Pre-compact size of %r: %s' % (db_spec, repr_bytes(db.info()['disk_size']),)) logger.debug('POST ' + urlparse.urljoin(db.resource.uri + '/', '_compact')) # Start compaction process by issuing a POST to '/<db_name>/_compact'. resp_headers, resp_body = db.resource.post('/_compact') # Asynchronous compaction if not poll_interval: if not (resp_body.get('ok', False) and resp_headers['status'] == '202'): err = CompactionError('Compaction of %r failed.') # Give the exception some useful information. err.response = (resp_headers, resp_body) raise err # Return a function which, when called, will return whether or not the # compaction process is still running. def check_completed(): logger.debug( 'Polling database to check if compaction has completed') logger.debug('GET ' + db.resource.uri + '/') db_info = db.info() completed = not db_info.get('compact_running', False) if completed and db_info.get('disk_size', None): logger.info('Post-compact size of %r: %s' % (db_spec, repr_bytes(db_info['disk_size']))) return completed return check_completed # Synchronous compaction elif poll_interval > 0: logger.debug( 'Polling database to check if compaction has completed') logger.debug('GET ' + db.resource.uri + '/') # Shows whether compaction is running or not. running = db.info().get('compact_running', False) # Poll the running state of the compaction. while running: time.sleep(poll_interval) logger.debug( 'Polling database to check if compaction has completed') logger.debug('GET ' + db.resource.uri + '/') running = db.info().get('compact_running', False) size_after = db.info().get('disk_size', None) if size_after: logger.info('Post-compact size of %r: %s' % (db_spec, repr_bytes(size_after))) return True else: raise ValueError('Poll interval must be greater than zero.')
[ "Compact", "a", "CouchDB", "database", "with", "optional", "synchronicity", ".", "The", "compact", "function", "will", "compact", "a", "CouchDB", "database", "stored", "on", "an", "running", "CouchDB", "server", ".", "By", "default", "this", "process", "occurs", "*", "asynchronously", "*", "meaning", "that", "the", "compaction", "will", "occur", "in", "the", "background", ".", "Often", "you", "ll", "want", "to", "know", "when", "the", "process", "has", "completed", ";", "for", "this", "reason", "compact", "will", "return", "a", "function", "which", "when", "called", "will", "return", "the", "state", "of", "the", "compaction", ".", "If", "it", "has", "completed", "True", "will", "be", "returned", ";", "otherwise", "False", ".", "This", "may", "be", "called", "multiple", "times", ".", "Alternatively", "you", "may", "opt", "to", "run", "compact", "in", "synchronous", "mode", "for", "debugging", "or", "profiling", "purposes", ".", "If", "this", "is", "the", "case", "an", "optional", "keyword", "argument", "poll_interval", "is", "accepted", "which", "should", "be", "a", "number", "(", "in", "seconds", ")", "representing", "the", "time", "to", "take", "between", "polls", ".", "A", "sensible", "default", "may", "be", "around", "0", ".", "5", "(", "seconds", ")", ".", "Because", "this", "function", "operates", "on", "database", "specifiers", "you", "can", "choose", "to", "operate", "on", "the", "local", "server", "or", "any", "remote", "server", "." ]
zvoase/django-relax
python
https://github.com/zvoase/django-relax/blob/10bb37bf3a512b290816856a6877c17fa37e930f/relax/couchdb/compact.py#L18-L91
[ "def", "compact", "(", "db_spec", ",", "poll_interval", "=", "0", ")", ":", "server", "=", "get_server_from_specifier", "(", "db_spec", ")", "db", "=", "get_db_from_specifier", "(", "db_spec", ")", "# Get logger", "logger", "=", "logging", ".", "getLogger", "(", "'relax.couchdb.compact'", ")", "logger", ".", "info", "(", "'Pre-compact size of %r: %s'", "%", "(", "db_spec", ",", "repr_bytes", "(", "db", ".", "info", "(", ")", "[", "'disk_size'", "]", ")", ",", ")", ")", "logger", ".", "debug", "(", "'POST '", "+", "urlparse", ".", "urljoin", "(", "db", ".", "resource", ".", "uri", "+", "'/'", ",", "'_compact'", ")", ")", "# Start compaction process by issuing a POST to '/<db_name>/_compact'.", "resp_headers", ",", "resp_body", "=", "db", ".", "resource", ".", "post", "(", "'/_compact'", ")", "# Asynchronous compaction", "if", "not", "poll_interval", ":", "if", "not", "(", "resp_body", ".", "get", "(", "'ok'", ",", "False", ")", "and", "resp_headers", "[", "'status'", "]", "==", "'202'", ")", ":", "err", "=", "CompactionError", "(", "'Compaction of %r failed.'", ")", "# Give the exception some useful information.", "err", ".", "response", "=", "(", "resp_headers", ",", "resp_body", ")", "raise", "err", "# Return a function which, when called, will return whether or not the", "# compaction process is still running.", "def", "check_completed", "(", ")", ":", "logger", ".", "debug", "(", "'Polling database to check if compaction has completed'", ")", "logger", ".", "debug", "(", "'GET '", "+", "db", ".", "resource", ".", "uri", "+", "'/'", ")", "db_info", "=", "db", ".", "info", "(", ")", "completed", "=", "not", "db_info", ".", "get", "(", "'compact_running'", ",", "False", ")", "if", "completed", "and", "db_info", ".", "get", "(", "'disk_size'", ",", "None", ")", ":", "logger", ".", "info", "(", "'Post-compact size of %r: %s'", "%", "(", "db_spec", ",", "repr_bytes", "(", "db_info", "[", "'disk_size'", "]", ")", ")", ")", "return", "completed", "return", "check_completed", "# Synchronous compaction", "elif", "poll_interval", ">", "0", ":", "logger", ".", "debug", "(", "'Polling database to check if compaction has completed'", ")", "logger", ".", "debug", "(", "'GET '", "+", "db", ".", "resource", ".", "uri", "+", "'/'", ")", "# Shows whether compaction is running or not.", "running", "=", "db", ".", "info", "(", ")", ".", "get", "(", "'compact_running'", ",", "False", ")", "# Poll the running state of the compaction.", "while", "running", ":", "time", ".", "sleep", "(", "poll_interval", ")", "logger", ".", "debug", "(", "'Polling database to check if compaction has completed'", ")", "logger", ".", "debug", "(", "'GET '", "+", "db", ".", "resource", ".", "uri", "+", "'/'", ")", "running", "=", "db", ".", "info", "(", ")", ".", "get", "(", "'compact_running'", ",", "False", ")", "size_after", "=", "db", ".", "info", "(", ")", ".", "get", "(", "'disk_size'", ",", "None", ")", "if", "size_after", ":", "logger", ".", "info", "(", "'Post-compact size of %r: %s'", "%", "(", "db_spec", ",", "repr_bytes", "(", "size_after", ")", ")", ")", "return", "True", "else", ":", "raise", "ValueError", "(", "'Poll interval must be greater than zero.'", ")" ]
10bb37bf3a512b290816856a6877c17fa37e930f
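The `compact` record above supports two calling modes: asynchronous (the default), which starts the compaction and returns a zero-argument poller, and synchronous, which blocks and polls every `poll_interval` seconds. Both are sketched below in Python; the database specifier string is a placeholder whose real format depends on the relax specifier helpers, and the library code in the record is Python 2 style.

import time
from relax.couchdb.compact import compact   # module path taken from the record above

db_spec = 'local:some_database'    # placeholder specifier, format defined by relax helpers

# Asynchronous mode: returns a callable reporting whether compaction has finished.
check_completed = compact(db_spec)
while not check_completed():
    time.sleep(0.5)

# Synchronous mode: blocks, polling every half second, and returns True when done.
# compact(db_spec, poll_interval=0.5)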
valid
Clifier.apply_defaults
apply default settings to commands not static, shadow "self" in eval
clifier/clifier.py
def apply_defaults(self, commands): """ apply default settings to commands not static, shadow "self" in eval """ for command in commands: if 'action' in command and "()" in command['action']: command['action'] = eval("self.{}".format(command['action'])) if command['keys'][0].startswith('-'): if 'required' not in command: command['required'] = False
def apply_defaults(self, commands): """ apply default settings to commands not static, shadow "self" in eval """ for command in commands: if 'action' in command and "()" in command['action']: command['action'] = eval("self.{}".format(command['action'])) if command['keys'][0].startswith('-'): if 'required' not in command: command['required'] = False
[ "apply", "default", "settings", "to", "commands", "not", "static", "shadow", "self", "in", "eval" ]
xnuinside/clifier
python
https://github.com/xnuinside/clifier/blob/3d704a30dc985bea3b876216accc53c19dc8b0df/clifier/clifier.py#L48-L57
[ "def", "apply_defaults", "(", "self", ",", "commands", ")", ":", "for", "command", "in", "commands", ":", "if", "'action'", "in", "command", "and", "\"()\"", "in", "command", "[", "'action'", "]", ":", "command", "[", "'action'", "]", "=", "eval", "(", "\"self.{}\"", ".", "format", "(", "command", "[", "'action'", "]", ")", ")", "if", "command", "[", "'keys'", "]", "[", "0", "]", ".", "startswith", "(", "'-'", ")", ":", "if", "'required'", "not", "in", "command", ":", "command", "[", "'required'", "]", "=", "False" ]
3d704a30dc985bea3b876216accc53c19dc8b0df
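`apply_defaults` mutates the command dicts in place: any `action` string containing "()" is eval'd against the Clifier instance and replaced by the result, and optional flags (first key starting with '-') get `required=False` when that key is missing. Below is a Python sketch of the data shape it expects; the Clifier constructor is not shown in these records, so `cli` is assumed to be an existing instance.

commands = [
    {'keys': ['-v', '--version'], 'action': 'show_version()', 'help': 'print version'},
    {'keys': ['target'], 'help': 'path to operate on'},
]

# cli: an already-constructed Clifier instance (constructor not shown in these records)
cli.apply_defaults(commands)

# Afterwards: commands[0]['action'] is the Action class returned by cli.show_version(),
# and commands[0]['required'] is False; the positional 'target' entry is left untouched.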
valid
Clifier.create_commands
add commands to parser
clifier/clifier.py
def create_commands(self, commands, parser): """ add commands to parser """ self.apply_defaults(commands) def create_single_command(command): keys = command['keys'] del command['keys'] kwargs = {} for item in command: kwargs[item] = command[item] parser.add_argument(*keys, **kwargs) if len(commands) > 1: for command in commands: create_single_command(command) else: create_single_command(commands[0])
def create_commands(self, commands, parser): """ add commands to parser """ self.apply_defaults(commands) def create_single_command(command): keys = command['keys'] del command['keys'] kwargs = {} for item in command: kwargs[item] = command[item] parser.add_argument(*keys, **kwargs) if len(commands) > 1: for command in commands: create_single_command(command) else: create_single_command(commands[0])
[ "add", "commands", "to", "parser" ]
xnuinside/clifier
python
https://github.com/xnuinside/clifier/blob/3d704a30dc985bea3b876216accc53c19dc8b0df/clifier/clifier.py#L59-L74
[ "def", "create_commands", "(", "self", ",", "commands", ",", "parser", ")", ":", "self", ".", "apply_defaults", "(", "commands", ")", "def", "create_single_command", "(", "command", ")", ":", "keys", "=", "command", "[", "'keys'", "]", "del", "command", "[", "'keys'", "]", "kwargs", "=", "{", "}", "for", "item", "in", "command", ":", "kwargs", "[", "item", "]", "=", "command", "[", "item", "]", "parser", ".", "add_argument", "(", "*", "keys", ",", "*", "*", "kwargs", ")", "if", "len", "(", "commands", ")", ">", "1", ":", "for", "command", "in", "commands", ":", "create_single_command", "(", "command", ")", "else", ":", "create_single_command", "(", "commands", "[", "0", "]", ")" ]
3d704a30dc985bea3b876216accc53c19dc8b0df
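`create_commands` first runs `apply_defaults`, then pops the `keys` list out of each dict and passes every remaining entry as an `add_argument` keyword. A Python sketch against a plain ArgumentParser; `cli` is again an assumed Clifier instance.

import argparse

parser = argparse.ArgumentParser(prog='demo')

commands = [
    {'keys': ['-n', '--name'], 'help': 'name to greet', 'default': 'world'},
    {'keys': ['--verbose'], 'help': 'noisy output', 'action': 'store_true'},
]

cli.create_commands(commands, parser)   # every non-'keys' entry becomes an add_argument kwarg
args = parser.parse_args(['--name', 'clifier', '--verbose'])
print(args.name, args.verbose)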
valid
Clifier.create_subparsers
get config for subparser and create commands
clifier/clifier.py
def create_subparsers(self, parser): """ get config for subparser and create commands""" subparsers = parser.add_subparsers() for name in self.config['subparsers']: subparser = subparsers.add_parser(name) self.create_commands(self.config['subparsers'][name], subparser)
def create_subparsers(self, parser): """ get config for subparser and create commands""" subparsers = parser.add_subparsers() for name in self.config['subparsers']: subparser = subparsers.add_parser(name) self.create_commands(self.config['subparsers'][name], subparser)
[ "get", "config", "for", "subparser", "and", "create", "commands" ]
xnuinside/clifier
python
https://github.com/xnuinside/clifier/blob/3d704a30dc985bea3b876216accc53c19dc8b0df/clifier/clifier.py#L77-L82
[ "def", "create_subparsers", "(", "self", ",", "parser", ")", ":", "subparsers", "=", "parser", ".", "add_subparsers", "(", ")", "for", "name", "in", "self", ".", "config", "[", "'subparsers'", "]", ":", "subparser", "=", "subparsers", ".", "add_parser", "(", "name", ")", "self", ".", "create_commands", "(", "self", ".", "config", "[", "'subparsers'", "]", "[", "name", "]", ",", "subparser", ")" ]
3d704a30dc985bea3b876216accc53c19dc8b0df
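`create_subparsers` walks `self.config['subparsers']`, registering one argparse subparser per name and filling it through `create_commands`. Below is a sketch of the config shape it appears to expect; the values are illustrative, and the call itself is commented out because the Clifier constructor is not part of these records.

import argparse

config = {
    'subparsers': {
        'init': [{'keys': ['target'], 'help': 'directory to initialise'}],
        'run':  [{'keys': ['--dry-run'], 'help': 'parse only', 'action': 'store_true'}],
    },
}

parser = argparse.ArgumentParser(prog='demo')
# With cli.config set to a dict like the one above, this would register the
# 'init' and 'run' sub-commands on the parser:
# cli.create_subparsers(parser)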
valid
Clifier.show_version
custom command line action to show version
clifier/clifier.py
def show_version(self): """ custom command line action to show version """ class ShowVersionAction(argparse.Action): def __init__(inner_self, nargs=0, **kw): super(ShowVersionAction, inner_self).__init__(nargs=nargs, **kw) def __call__(inner_self, parser, args, value, option_string=None): print("{parser_name} version: {version}".format( parser_name=self.config.get( "parser", {}).get("prog"), version=self.prog_version)) return ShowVersionAction
def show_version(self): """ custom command line action to show version """ class ShowVersionAction(argparse.Action): def __init__(inner_self, nargs=0, **kw): super(ShowVersionAction, inner_self).__init__(nargs=nargs, **kw) def __call__(inner_self, parser, args, value, option_string=None): print("{parser_name} version: {version}".format( parser_name=self.config.get( "parser", {}).get("prog"), version=self.prog_version)) return ShowVersionAction
[ "custom", "command", "line", "action", "to", "show", "version" ]
xnuinside/clifier
python
https://github.com/xnuinside/clifier/blob/3d704a30dc985bea3b876216accc53c19dc8b0df/clifier/clifier.py#L97-L108
[ "def", "show_version", "(", "self", ")", ":", "class", "ShowVersionAction", "(", "argparse", ".", "Action", ")", ":", "def", "__init__", "(", "inner_self", ",", "nargs", "=", "0", ",", "*", "*", "kw", ")", ":", "super", "(", "ShowVersionAction", ",", "inner_self", ")", ".", "__init__", "(", "nargs", "=", "nargs", ",", "*", "*", "kw", ")", "def", "__call__", "(", "inner_self", ",", "parser", ",", "args", ",", "value", ",", "option_string", "=", "None", ")", ":", "print", "(", "\"{parser_name} version: {version}\"", ".", "format", "(", "parser_name", "=", "self", ".", "config", ".", "get", "(", "\"parser\"", ",", "{", "}", ")", ".", "get", "(", "\"prog\"", ")", ",", "version", "=", "self", ".", "prog_version", ")", ")", "return", "ShowVersionAction" ]
3d704a30dc985bea3b876216accc53c19dc8b0df
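show_version() returns an argparse.Action subclass, so it is presumably passed as action= when a --version flag is registered; since that wiring is not part of this record, the sketch below uses a simplified stand-in action with a hard-coded program name and version.

import argparse

# Stand-in for the class show_version() builds: a zero-argument custom
# Action whose __call__ fires when the flag appears on the command line.
class DemoShowVersionAction(argparse.Action):
    def __init__(self, nargs=0, **kw):
        super(DemoShowVersionAction, self).__init__(nargs=nargs, **kw)

    def __call__(self, parser, args, value, option_string=None):
        print("demo version: 1.0.0")   # hard-coded; clifier reads self.config

parser = argparse.ArgumentParser(prog="demo")
parser.add_argument("--version", action=DemoShowVersionAction,
                    help="show program version")
parser.parse_args(["--version"])       # prints "demo version: 1.0.0"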
valid
Clifier.check_path_action
custom command line action to check file exist
clifier/clifier.py
def check_path_action(self):
        """ custom command line action to check file exist """
        class CheckPathAction(argparse.Action):
            def __call__(self, parser, args, value, option_string=None):
                if type(value) is list:
                    value = value[0]
                user_value = value
                if option_string == 'None':
                    if not os.path.isdir(value):
                        _current_user = os.path.expanduser("~")
                        if not value.startswith(_current_user) \
                                and not value.startswith(os.getcwd()):
                            if os.path.isdir(os.path.join(_current_user, value)):
                                value = os.path.join(_current_user, value)
                            elif os.path.isdir(os.path.join(os.getcwd(), value)):
                                value = os.path.join(os.getcwd(), value)
                            else:
                                value = None
                        else:
                            value = None
                elif option_string == '--template-name':
                    if not os.path.isdir(value):
                        if not os.path.isdir(os.path.join(args.target, value)):
                            value = None
                if not value:
                    logger.error("Could not to find path %s. Please provide "
                                 "correct path to %s option",
                                 user_value, option_string)
                    exit(1)
                setattr(args, self.dest, value)

        return CheckPathAction
def check_path_action(self):
        """ custom command line action to check file exist """
        class CheckPathAction(argparse.Action):
            def __call__(self, parser, args, value, option_string=None):
                if type(value) is list:
                    value = value[0]
                user_value = value
                if option_string == 'None':
                    if not os.path.isdir(value):
                        _current_user = os.path.expanduser("~")
                        if not value.startswith(_current_user) \
                                and not value.startswith(os.getcwd()):
                            if os.path.isdir(os.path.join(_current_user, value)):
                                value = os.path.join(_current_user, value)
                            elif os.path.isdir(os.path.join(os.getcwd(), value)):
                                value = os.path.join(os.getcwd(), value)
                            else:
                                value = None
                        else:
                            value = None
                elif option_string == '--template-name':
                    if not os.path.isdir(value):
                        if not os.path.isdir(os.path.join(args.target, value)):
                            value = None
                if not value:
                    logger.error("Could not to find path %s. Please provide "
                                 "correct path to %s option",
                                 user_value, option_string)
                    exit(1)
                setattr(args, self.dest, value)

        return CheckPathAction
[ "custom", "command", "line", "action", "to", "check", "file", "exist" ]
xnuinside/clifier
python
https://github.com/xnuinside/clifier/blob/3d704a30dc985bea3b876216accc53c19dc8b0df/clifier/clifier.py#L110-L141
[ "def", "check_path_action", "(", "self", ")", ":", "class", "CheckPathAction", "(", "argparse", ".", "Action", ")", ":", "def", "__call__", "(", "self", ",", "parser", ",", "args", ",", "value", ",", "option_string", "=", "None", ")", ":", "if", "type", "(", "value", ")", "is", "list", ":", "value", "=", "value", "[", "0", "]", "user_value", "=", "value", "if", "option_string", "==", "'None'", ":", "if", "not", "os", ".", "path", ".", "isdir", "(", "value", ")", ":", "_current_user", "=", "os", ".", "path", ".", "expanduser", "(", "\"~\"", ")", "if", "not", "value", ".", "startswith", "(", "_current_user", ")", "and", "not", "value", ".", "startswith", "(", "os", ".", "getcwd", "(", ")", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "join", "(", "_current_user", ",", "value", ")", ")", ":", "value", "=", "os", ".", "path", ".", "join", "(", "_current_user", ",", "value", ")", "elif", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "join", "(", "os", ".", "getcwd", "(", ")", ",", "value", ")", ")", ":", "value", "=", "os", ".", "path", ".", "join", "(", "os", ".", "getcwd", "(", ")", ",", "value", ")", "else", ":", "value", "=", "None", "else", ":", "value", "=", "None", "elif", "option_string", "==", "'--template-name'", ":", "if", "not", "os", ".", "path", ".", "isdir", "(", "value", ")", ":", "if", "not", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "join", "(", "args", ".", "target", ",", "value", ")", ")", ":", "value", "=", "None", "if", "not", "value", ":", "logger", ".", "error", "(", "\"Could not to find path %s. Please provide \"", "\"correct path to %s option\"", ",", "user_value", ",", "option_string", ")", "exit", "(", "1", ")", "setattr", "(", "args", ",", "self", ".", "dest", ",", "value", ")", "return", "CheckPathAction" ]
3d704a30dc985bea3b876216accc53c19dc8b0df
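The CheckPathAction above relies on a module-level logger, an args.target attribute, and hard-coded option strings, none of which appear in this record; the sketch below shows the same attach-a-validating-Action pattern in a simplified, self-contained form (the search order and error handling are assumptions, not clifier's exact behaviour).

import argparse
import os

# Simplified path-checking action: try the value as given, then relative to
# the home directory, then relative to the current working directory.
class DemoCheckPathAction(argparse.Action):
    def __call__(self, parser, args, value, option_string=None):
        if isinstance(value, list):
            value = value[0]
        candidates = (value,
                      os.path.join(os.path.expanduser("~"), value),
                      os.path.join(os.getcwd(), value))
        for candidate in candidates:
            if os.path.isdir(candidate):
                setattr(args, self.dest, candidate)
                return
        parser.error("could not find path %s for option %s" % (value, option_string))

parser = argparse.ArgumentParser(prog="demo")
parser.add_argument("--target", action=DemoCheckPathAction, default=os.getcwd())
print(parser.parse_args(["--target", os.getcwd()]).target)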
valid
new_user
Return the consumer and oauth tokens with three-legged OAuth process and save in a yaml file in the user's home directory.
interactive_console.py
def new_user(yaml_path):
    '''
    Return the consumer and oauth tokens with three-legged OAuth process and
    save in a yaml file in the user's home directory.
    '''
    print 'Retrieve API Key from https://www.shirts.io/accounts/api_console/'
    api_key = raw_input('Shirts.io API Key: ')

    tokens = {
        'api_key': api_key,
    }

    yaml_file = open(yaml_path, 'w+')
    yaml.dump(tokens, yaml_file, indent=2)
    yaml_file.close()

    return tokens
def new_user(yaml_path):
    '''
    Return the consumer and oauth tokens with three-legged OAuth process and
    save in a yaml file in the user's home directory.
    '''
    print 'Retrieve API Key from https://www.shirts.io/accounts/api_console/'
    api_key = raw_input('Shirts.io API Key: ')

    tokens = {
        'api_key': api_key,
    }

    yaml_file = open(yaml_path, 'w+')
    yaml.dump(tokens, yaml_file, indent=2)
    yaml_file.close()

    return tokens
[ "Return", "the", "consumer", "and", "oauth", "tokens", "with", "three", "-", "legged", "OAuth", "process", "and", "save", "in", "a", "yaml", "file", "in", "the", "user", "s", "home", "directory", "." ]
tklovett/PyShirtsIO
python
https://github.com/tklovett/PyShirtsIO/blob/ff2f2d3b5e4ab2813abbce8545b27319c6af0def/interactive_console.py#L8-L25
[ "def", "new_user", "(", "yaml_path", ")", ":", "print", "'Retrieve API Key from https://www.shirts.io/accounts/api_console/'", "api_key", "=", "raw_input", "(", "'Shirts.io API Key: '", ")", "tokens", "=", "{", "'api_key'", ":", "api_key", ",", "}", "yaml_file", "=", "open", "(", "yaml_path", ",", "'w+'", ")", "yaml", ".", "dump", "(", "tokens", ",", "yaml_file", ",", "indent", "=", "2", ")", "yaml_file", ".", "close", "(", ")", "return", "tokens" ]
ff2f2d3b5e4ab2813abbce8545b27319c6af0def
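new_user() is Python 2 code (print statement, raw_input) and stores only an api_key in a YAML file; a hedged companion sketch for reading those tokens back, written in Python 3 syntax, with the file location invented for illustration rather than taken from PyShirtsIO.

import os
import yaml

# Hypothetical token path; the real default is not shown in this record.
yaml_path = os.path.expanduser("~/.pyshirtsio.yaml")

if os.path.exists(yaml_path):
    with open(yaml_path) as yaml_file:
        tokens = yaml.safe_load(yaml_file)   # expected layout: {'api_key': '...'}
    print("API key loaded: %s..." % tokens["api_key"][:4])
else:
    print("No saved tokens; run new_user(%r) first." % yaml_path)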
valid
_AddPropertiesForExtensions
Adds properties for all fields in this protocol message type.
typy/google/protobuf/internal/python_message.py
def _AddPropertiesForExtensions(descriptor, cls):
  """Adds properties for all fields in this protocol message type."""
  extension_dict = descriptor.extensions_by_name
  for extension_name, extension_field in extension_dict.items():
    constant_name = extension_name.upper() + "_FIELD_NUMBER"
    setattr(cls, constant_name, extension_field.number)
def _AddPropertiesForExtensions(descriptor, cls):
  """Adds properties for all fields in this protocol message type."""
  extension_dict = descriptor.extensions_by_name
  for extension_name, extension_field in extension_dict.items():
    constant_name = extension_name.upper() + "_FIELD_NUMBER"
    setattr(cls, constant_name, extension_field.number)
[ "Adds", "properties", "for", "all", "fields", "in", "this", "protocol", "message", "type", "." ]
ibelie/typy
python
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/internal/python_message.py#L743-L748
[ "def", "_AddPropertiesForExtensions", "(", "descriptor", ",", "cls", ")", ":", "extension_dict", "=", "descriptor", ".", "extensions_by_name", "for", "extension_name", ",", "extension_field", "in", "extension_dict", ".", "items", "(", ")", ":", "constant_name", "=", "extension_name", ".", "upper", "(", ")", "+", "\"_FIELD_NUMBER\"", "setattr", "(", "cls", ",", "constant_name", ",", "extension_field", ".", "number", ")" ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
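Despite its name, _AddPropertiesForExtensions() only defines <NAME>_FIELD_NUMBER class constants; a tiny illustration of that setattr pattern with stand-in objects (no real protobuf descriptors are involved, all names below are invented).

# Stand-ins mimicking descriptor.extensions_by_name and a message class.
class _FakeExtension(object):
    def __init__(self, number):
        self.number = number

class _FakeDescriptor(object):
    extensions_by_name = {"my_extension": _FakeExtension(1001)}

class FakeMessage(object):
    pass

for name, ext in _FakeDescriptor.extensions_by_name.items():
    setattr(FakeMessage, name.upper() + "_FIELD_NUMBER", ext.number)

print(FakeMessage.MY_EXTENSION_FIELD_NUMBER)   # 1001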
valid
_InternalUnpackAny
Unpacks Any message and returns the unpacked message. This internal method is different from public Any Unpack method which takes the target message as argument. _InternalUnpackAny method does not have target message type and need to find the message type in descriptor pool. Args: msg: An Any message to be unpacked. Returns: The unpacked message.
typy/google/protobuf/internal/python_message.py
def _InternalUnpackAny(msg):
  """Unpacks Any message and returns the unpacked message.

  This internal method is different from public Any Unpack method which
  takes the target message as argument. _InternalUnpackAny method does not
  have target message type and need to find the message type in descriptor
  pool.

  Args:
    msg: An Any message to be unpacked.

  Returns:
    The unpacked message.
  """
  type_url = msg.type_url
  db = symbol_database.Default()

  if not type_url:
    return None

  # TODO(haberman): For now we just strip the hostname. Better logic will be
  # required.
  type_name = type_url.split("/")[-1]
  descriptor = db.pool.FindMessageTypeByName(type_name)

  if descriptor is None:
    return None

  message_class = db.GetPrototype(descriptor)
  message = message_class()

  message.ParseFromString(msg.value)
  return message
def _InternalUnpackAny(msg):
  """Unpacks Any message and returns the unpacked message.

  This internal method is different from public Any Unpack method which
  takes the target message as argument. _InternalUnpackAny method does not
  have target message type and need to find the message type in descriptor
  pool.

  Args:
    msg: An Any message to be unpacked.

  Returns:
    The unpacked message.
  """
  type_url = msg.type_url
  db = symbol_database.Default()

  if not type_url:
    return None

  # TODO(haberman): For now we just strip the hostname. Better logic will be
  # required.
  type_name = type_url.split("/")[-1]
  descriptor = db.pool.FindMessageTypeByName(type_name)

  if descriptor is None:
    return None

  message_class = db.GetPrototype(descriptor)
  message = message_class()

  message.ParseFromString(msg.value)
  return message
[ "Unpacks", "Any", "message", "and", "returns", "the", "unpacked", "message", "." ]
ibelie/typy
python
https://github.com/ibelie/typy/blob/3616845fb91459aacd8df6bf82c5d91f4542bee7/typy/google/protobuf/internal/python_message.py#L916-L947
[ "def", "_InternalUnpackAny", "(", "msg", ")", ":", "type_url", "=", "msg", ".", "type_url", "db", "=", "symbol_database", ".", "Default", "(", ")", "if", "not", "type_url", ":", "return", "None", "# TODO(haberman): For now we just strip the hostname. Better logic will be", "# required.", "type_name", "=", "type_url", ".", "split", "(", "\"/\"", ")", "[", "-", "1", "]", "descriptor", "=", "db", ".", "pool", ".", "FindMessageTypeByName", "(", "type_name", ")", "if", "descriptor", "is", "None", ":", "return", "None", "message_class", "=", "db", ".", "GetPrototype", "(", "descriptor", ")", "message", "=", "message_class", "(", ")", "message", ".", "ParseFromString", "(", "msg", ".", "value", ")", "return", "message" ]
3616845fb91459aacd8df6bf82c5d91f4542bee7
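_InternalUnpackAny() is the descriptor-pool counterpart of the public Any API; the sketch below round-trips a well-known type through google.protobuf's documented Any.Pack()/Any.Unpack() instead of calling this internal helper, and assumes the protobuf runtime is installed.

from google.protobuf import any_pb2
from google.protobuf import struct_pb2

# Pack a message into Any (writes type_url + serialized bytes), then unpack
# it into a typed target -- the public path this internal helper parallels.
value = struct_pb2.Value(string_value="hello")

any_msg = any_pb2.Any()
any_msg.Pack(value)
print(any_msg.type_url)            # e.g. type.googleapis.com/google.protobuf.Value

unpacked = struct_pb2.Value()
if any_msg.Is(struct_pb2.Value.DESCRIPTOR):
    any_msg.Unpack(unpacked)
print(unpacked.string_value)       # hello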