Dataset schema:

| Column | Dtype | Range |
| --- | --- | --- |
| repository_name | string | lengths 7–54 |
| func_path_in_repository | string | lengths 4–175 |
| func_name | string | lengths 1–129 |
| whole_func_string | string | lengths 91–50.9k |
| language | string | 1 class |
| func_code_string | string | lengths 91–50.9k |
| func_code_tokens | sequence | — |
| func_documentation_string | string | lengths 1–31.6k |
| func_documentation_tokens | sequence | — |
| split_name | string | 1 class |
| func_code_url | string | lengths 89–268 |
| score | float64 | 0–0.09 |
tjcsl/ion
intranet/apps/polls/models.py
PollManager.visible_to_user
```python
def visible_to_user(self, user):
    """Get a list of visible polls for a given user (usually request.user).

    These visible polls will be those that either have no groups assigned to them
    (and are therefore public) or those in which the user is a member.

    """
    return Poll.objects.filter(Q(groups__in=user.groups.all()) | Q(groups__isnull=True))
```
[ "def", "visible_to_user", "(", "self", ",", "user", ")", ":", "return", "Poll", ".", "objects", ".", "filter", "(", "Q", "(", "groups__in", "=", "user", ".", "groups", ".", "all", "(", ")", ")", "|", "Q", "(", "groups__isnull", "=", "True", ")", ")" ]
Get a list of visible polls for a given user (usually request.user). These visible polls will be those that either have no groups assigned to them (and are therefore public) or those in which the user is a member.
[ "Get", "a", "list", "of", "visible", "polls", "for", "a", "given", "user", "(", "usually", "request", ".", "user", ")", "." ]
train
https://github.com/tjcsl/ion/blob/5d722b0725d572039bb0929fd5715a4070c82c72/intranet/apps/polls/models.py#L28-L37
0.007712
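The Q-object union in the record above (member groups OR no groups at all) is easiest to see outside the ORM. Here is a minimal pure-Python sketch of the same visibility rule, using hypothetical dict-shaped stand-ins for the model objects:

```python
# Pure-Python analogue of Q(groups__in=...) | Q(groups__isnull=True):
# a poll is visible if it has no groups (public) or shares one with the user.
def visible_polls(polls, user_groups):
    return [p for p in polls
            if not p["groups"] or set(p["groups"]) & set(user_groups)]

polls = [
    {"name": "lunch", "groups": []},          # public poll
    {"name": "seniors", "groups": ["2024"]},  # group-restricted poll
    {"name": "staff", "groups": ["faculty"]},
]
print(visible_polls(polls, ["2024"]))  # picks up "lunch" and "seniors"
```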
saltstack/salt
salt/modules/qemu_img.py
make_image
```python
def make_image(location, size, fmt):
    '''
    Create a blank virtual machine image file of the specified size in
    megabytes. The image can be created in any format supported by qemu

    CLI Example:

    .. code-block:: bash

        salt '*' qemu_img.make_image /tmp/image.qcow 2048 qcow2
        salt '*' qemu_img.make_image /tmp/image.raw 10240 raw
    '''
    if not os.path.isabs(location):
        return ''
    if not os.path.isdir(os.path.dirname(location)):
        return ''
    if not __salt__['cmd.retcode'](
            'qemu-img create -f {0} {1} {2}M'.format(fmt, location, size),
            python_shell=False):
        return location
    return ''
```
[ "def", "make_image", "(", "location", ",", "size", ",", "fmt", ")", ":", "if", "not", "os", ".", "path", ".", "isabs", "(", "location", ")", ":", "return", "''", "if", "not", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "dirname", "(", "location", ")", ")", ":", "return", "''", "if", "not", "__salt__", "[", "'cmd.retcode'", "]", "(", "'qemu-img create -f {0} {1} {2}M'", ".", "format", "(", "fmt", ",", "location", ",", "size", ")", ",", "python_shell", "=", "False", ")", ":", "return", "location", "return", "''" ]
Create a blank virtual machine image file of the specified size in megabytes. The image can be created in any format supported by qemu CLI Example: .. code-block:: bash salt '*' qemu_img.make_image /tmp/image.qcow 2048 qcow2 salt '*' qemu_img.make_image /tmp/image.raw 10240 raw
[ "Create", "a", "blank", "virtual", "machine", "image", "file", "of", "the", "specified", "size", "in", "megabytes", ".", "The", "image", "can", "be", "created", "in", "any", "format", "supported", "by", "qemu" ]
train
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/qemu_img.py#L28-L51
0.002755
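The success test above relies on Salt's ``cmd.retcode`` returning the process exit code, so ``not retcode`` is true exactly when ``qemu-img`` exits with 0. A hedged stand-alone equivalent using ``subprocess`` (it assumes ``qemu-img`` is on the PATH and the paths are illustrative):

```python
import subprocess

# subprocess.call, like Salt's cmd.retcode, returns the exit code;
# 0 (falsy) signals that image creation succeeded.
fmt, location, size = 'qcow2', '/tmp/image.qcow', 2048
retcode = subprocess.call(
    ['qemu-img', 'create', '-f', fmt, location, '{0}M'.format(size)])
print(location if not retcode else '')
```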
fabioz/PyDev.Debugger
pydev_ipython/inputhook.py
InputHookManager.enable_gtk
```python
def enable_gtk(self, app=None):
    """Enable event loop integration with PyGTK.

    Parameters
    ----------
    app : ignored
        Ignored, it's only a placeholder to keep the call signature of all
        gui activation methods consistent, which simplifies the logic of
        supporting magics.

    Notes
    -----
    This method sets the PyOS_InputHook for PyGTK, which allows
    PyGTK to integrate with terminal based applications like
    IPython.
    """
    from pydev_ipython.inputhookgtk import create_inputhook_gtk
    self.set_inputhook(create_inputhook_gtk(self._stdin_file))
    self._current_gui = GUI_GTK
```
[ "def", "enable_gtk", "(", "self", ",", "app", "=", "None", ")", ":", "from", "pydev_ipython", ".", "inputhookgtk", "import", "create_inputhook_gtk", "self", ".", "set_inputhook", "(", "create_inputhook_gtk", "(", "self", ".", "_stdin_file", ")", ")", "self", ".", "_current_gui", "=", "GUI_GTK" ]
Enable event loop integration with PyGTK. Parameters ---------- app : ignored Ignored, it's only a placeholder to keep the call signature of all gui activation methods consistent, which simplifies the logic of supporting magics. Notes ----- This methods sets the PyOS_InputHook for PyGTK, which allows the PyGTK to integrate with terminal based applications like IPython.
[ "Enable", "event", "loop", "integration", "with", "PyGTK", "." ]
train
https://github.com/fabioz/PyDev.Debugger/blob/ed9c4307662a5593b8a7f1f3389ecd0e79b8c503/pydev_ipython/inputhook.py#L235-L253
0.002874
osrg/ryu
ryu/services/protocols/bgp/net_ctrl.py
_validate_rpc_port
```python
def _validate_rpc_port(port):
    """Validates the given port for use as the rpc server port.
    """
    if not port:
        raise NetworkControllerError(desc='Invalid rpc port number.')
    if isinstance(port, str):
        port = int(port)

    if port <= 0:
        raise NetworkControllerError(desc='Invalid rpc port number %s' % port)
    return port
```
[ "def", "_validate_rpc_port", "(", "port", ")", ":", "if", "not", "port", ":", "raise", "NetworkControllerError", "(", "desc", "=", "'Invalid rpc port number.'", ")", "if", "isinstance", "(", "port", ",", "str", ")", ":", "port", "=", "int", "(", "port", ")", "if", "port", "<=", "0", ":", "raise", "NetworkControllerError", "(", "desc", "=", "'Invalid rpc port number %s'", "%", "port", ")", "return", "port" ]
Validates give port for use as rpc server port.
[ "Validates", "give", "port", "for", "use", "as", "rpc", "server", "port", "." ]
train
https://github.com/osrg/ryu/blob/6f906e72c92e10bd0264c9b91a2f7bb85b97780c/ryu/services/protocols/bgp/net_ctrl.py#L323-L333
0.002874
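A quick usage sketch of the validator above, assuming it is imported from ``ryu.services.protocols.bgp.net_ctrl``:

```python
print(_validate_rpc_port('8080'))  # -> 8080, coerced from str to int
print(_validate_rpc_port(50051))   # -> 50051, returned unchanged
# _validate_rpc_port(0), _validate_rpc_port(None) and
# _validate_rpc_port('-1') all raise NetworkControllerError:
# the first two fail the truthiness check, the last the <= 0 check.
```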
saltstack/salt
salt/states/boto_datapipeline.py
_cleaned
```python
def _cleaned(_pipeline_objects):
    """Return standardized pipeline objects to be used for comparing

    Remove year, month, and day components of the startDateTime so that
    data pipelines with the same time of day but different days are
    considered equal.
    """
    pipeline_objects = copy.deepcopy(_pipeline_objects)
    for pipeline_object in pipeline_objects:
        if pipeline_object['id'] == 'DefaultSchedule':
            for field_object in pipeline_object['fields']:
                if field_object['key'] == 'startDateTime':
                    start_date_time_string = field_object['stringValue']
                    start_date_time = datetime.datetime.strptime(
                        start_date_time_string, "%Y-%m-%dT%H:%M:%S")
                    field_object['stringValue'] = start_date_time.strftime("%H:%M:%S")
    return pipeline_objects
```
[ "def", "_cleaned", "(", "_pipeline_objects", ")", ":", "pipeline_objects", "=", "copy", ".", "deepcopy", "(", "_pipeline_objects", ")", "for", "pipeline_object", "in", "pipeline_objects", ":", "if", "pipeline_object", "[", "'id'", "]", "==", "'DefaultSchedule'", ":", "for", "field_object", "in", "pipeline_object", "[", "'fields'", "]", ":", "if", "field_object", "[", "'key'", "]", "==", "'startDateTime'", ":", "start_date_time_string", "=", "field_object", "[", "'stringValue'", "]", "start_date_time", "=", "datetime", ".", "datetime", ".", "strptime", "(", "start_date_time_string", ",", "\"%Y-%m-%dT%H:%M:%S\"", ")", "field_object", "[", "'stringValue'", "]", "=", "start_date_time", ".", "strftime", "(", "\"%H:%M:%S\"", ")", "return", "pipeline_objects" ]
Return standardized pipeline objects to be used for comparing Remove year, month, and day components of the startDateTime so that data pipelines with the same time of day but different days are considered equal.
[ "Return", "standardized", "pipeline", "objects", "to", "be", "used", "for", "comparing" ]
train
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/states/boto_datapipeline.py#L334-L350
0.0044
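The date-stripping trick above is just ``strptime`` followed by a time-only ``strftime``; a self-contained demonstration:

```python
import datetime

# Two schedules that differ only in date compare equal once the
# year/month/day components are discarded.
a = datetime.datetime.strptime("2021-01-05T14:30:00", "%Y-%m-%dT%H:%M:%S")
b = datetime.datetime.strptime("2023-11-20T14:30:00", "%Y-%m-%dT%H:%M:%S")
assert a.strftime("%H:%M:%S") == b.strftime("%H:%M:%S") == "14:30:00"
```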
pkgw/pwkit
pwkit/environments/casa/dftphotom.py
dftphotom_cli
```python
def dftphotom_cli(argv):
    """Command-line access to the :func:`dftphotom` algorithm.

    This function implements the behavior of the command-line ``casatask
    dftphotom`` tool, wrapped up into a single callable function. The argument
    *argv* is a list of command-line arguments, in Unix style where the zeroth
    item is the name of the command.

    """
    check_usage(dftphotom_doc, argv, usageifnoargs=True)
    cfg = Config().parse(argv[1:])
    util.logger(cfg.loglevel)
    dftphotom(cfg)
```
[ "def", "dftphotom_cli", "(", "argv", ")", ":", "check_usage", "(", "dftphotom_doc", ",", "argv", ",", "usageifnoargs", "=", "True", ")", "cfg", "=", "Config", "(", ")", ".", "parse", "(", "argv", "[", "1", ":", "]", ")", "util", ".", "logger", "(", "cfg", ".", "loglevel", ")", "dftphotom", "(", "cfg", ")" ]
Command-line access to the :func:`dftphotom` algorithm. This function implements the behavior of the command-line ``casatask dftphotom`` tool, wrapped up into a single callable function. The argument *argv* is a list of command-line arguments, in Unix style where the zeroth item is the name of the command.
[ "Command", "-", "line", "access", "to", "the", ":", "func", ":", "dftphotom", "algorithm", "." ]
train
https://github.com/pkgw/pwkit/blob/d40957a1c3d2ea34e7ceac2267ee9635135f2793/pwkit/environments/casa/dftphotom.py#L384-L396
0.001976
amperser/proselint
proselint/checks/mixed_metaphors/misc.py
check_misc
```python
def check_misc(text):
    """Avoid mixing metaphors.

    source:     Garner's Modern American Usage
    source_url: http://bit.ly/1T4alrY
    """
    err = "mixed_metaphors.misc.misc"
    msg = u"Mixed metaphor. Try '{}'."

    preferences = [
        ["cream rises to the top",    ["cream rises to the crop"]],
        ["fasten your seatbelts",     ["button your seatbelts"]],
        ["a minute to decompress",    ["a minute to decompose"]],
        ["sharpest tool in the shed", ["sharpest marble in the (shed|box)"]],
        ["not rocket science",        ["not rocket surgery"]],
    ]

    return preferred_forms_check(text, preferences, err, msg)
```
[ "def", "check_misc", "(", "text", ")", ":", "err", "=", "\"mixed_metaphors.misc.misc\"", "msg", "=", "u\"Mixed metaphor. Try '{}'.\"", "preferences", "=", "[", "[", "\"cream rises to the top\"", ",", "[", "\"cream rises to the crop\"", "]", "]", ",", "[", "\"fasten your seatbelts\"", ",", "[", "\"button your seatbelts\"", "]", "]", ",", "[", "\"a minute to decompress\"", ",", "[", "\"a minute to decompose\"", "]", "]", ",", "[", "\"sharpest tool in the shed\"", ",", "[", "\"sharpest marble in the (shed|box)\"", "]", "]", ",", "[", "\"not rocket science\"", ",", "[", "\"not rocket surgery\"", "]", "]", ",", "]", "return", "preferred_forms_check", "(", "text", ",", "preferences", ",", "err", ",", "msg", ")" ]
Avoid mixing metaphors. source: Garner's Modern American Usage source_url: http://bit.ly/1T4alrY
[ "Avoid", "mixing", "metaphors", "." ]
train
https://github.com/amperser/proselint/blob/cb619ee4023cc7856f5fb96aec2a33a2c9f1a2e2/proselint/checks/mixed_metaphors/misc.py#L31-L49
0.001527
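Each entry above pairs a preferred form with regex variants to flag. A hedged sketch of how one pair might be matched — proselint's actual ``preferred_forms_check`` also reports offsets and error codes:

```python
import re

preferred = "sharpest tool in the shed"
variants = [r"sharpest marble in the (shed|box)"]
text = "He is not the sharpest marble in the box."
for pattern in variants:
    if re.search(pattern, text, flags=re.IGNORECASE):
        print(u"Mixed metaphor. Try '{}'.".format(preferred))
```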
yougov/vr.runners
vr/runners/image.py
ImageRunner.ensure_image
```python
def ensure_image(self):
    """
    Ensure that config.image_url has been downloaded and unpacked.
    """
    image_folder = self.get_image_folder()
    if os.path.exists(image_folder):
        print(
            'OS image directory {} exists...not overwriting'
            .format(image_folder))
        return

    ensure_image(
        self.config.image_name,
        self.config.image_url,
        IMAGES_ROOT,
        getattr(self.config, 'image_md5', None),
        self.get_image_folder()
    )
```
[ "def", "ensure_image", "(", "self", ")", ":", "image_folder", "=", "self", ".", "get_image_folder", "(", ")", "if", "os", ".", "path", ".", "exists", "(", "image_folder", ")", ":", "print", "(", "'OS image directory {} exists...not overwriting'", ".", "format", "(", "image_folder", ")", ")", "return", "ensure_image", "(", "self", ".", "config", ".", "image_name", ",", "self", ".", "config", ".", "image_url", ",", "IMAGES_ROOT", ",", "getattr", "(", "self", ".", "config", ",", "'image_md5'", ",", "None", ")", ",", "self", ".", "get_image_folder", "(", ")", ")" ]
Ensure that config.image_url has been downloaded and unpacked.
[ "Ensure", "that", "config", ".", "image_url", "has", "been", "downloaded", "and", "unpacked", "." ]
train
https://github.com/yougov/vr.runners/blob/f43ba50a64b17ee4f07596fe225bcb38ca6652ad/vr/runners/image.py#L79-L96
0.003503
GetmeUK/MongoFrames
snippets/publishing.py
PublisherFrame.published_context
```python
def published_context(cls):
    """Set the context to published"""
    previous_state = g.get('draft')
    try:
        g.draft = False
        yield
    finally:
        g.draft = previous_state
```
[ "def", "published_context", "(", "cls", ")", ":", "previous_state", "=", "g", ".", "get", "(", "'draft'", ")", "try", ":", "g", ".", "draft", "=", "False", "yield", "finally", ":", "g", ".", "draft", "=", "previous_state" ]
Set the context to published
[ "Set", "the", "context", "to", "published" ]
train
https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/snippets/publishing.py#L198-L205
0.008969
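The try/finally save-and-restore shape above is the standard way to build a reversible context manager. A self-contained version with a plain dict standing in for ``flask.g``:

```python
from contextlib import contextmanager

state = {"draft": True}  # stand-in for flask.g

@contextmanager
def published_context():
    previous = state.get("draft")
    try:
        state["draft"] = False  # force "published" for the block
        yield
    finally:
        state["draft"] = previous  # restore even if the block raised

with published_context():
    assert state["draft"] is False
assert state["draft"] is True
```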
borntyping/python-dice
dice/elements.py
Element.evaluate_object
```python
def evaluate_object(obj, cls=None, cache=False, **kwargs):
    """Evaluates elements, and coerces objects to a class if needed"""
    old_obj = obj
    if isinstance(obj, Element):
        if cache:
            obj = obj.evaluate_cached(**kwargs)
        else:
            obj = obj.evaluate(cache=cache, **kwargs)
    if cls is not None and type(obj) != cls:
        obj = cls(obj)
    for attr in ('string', 'location', 'tokens'):
        if hasattr(old_obj, attr):
            setattr(obj, attr, getattr(old_obj, attr))
    return obj
```
[ "def", "evaluate_object", "(", "obj", ",", "cls", "=", "None", ",", "cache", "=", "False", ",", "*", "*", "kwargs", ")", ":", "old_obj", "=", "obj", "if", "isinstance", "(", "obj", ",", "Element", ")", ":", "if", "cache", ":", "obj", "=", "obj", ".", "evaluate_cached", "(", "*", "*", "kwargs", ")", "else", ":", "obj", "=", "obj", ".", "evaluate", "(", "cache", "=", "cache", ",", "*", "*", "kwargs", ")", "if", "cls", "is", "not", "None", "and", "type", "(", "obj", ")", "!=", "cls", ":", "obj", "=", "cls", "(", "obj", ")", "for", "attr", "in", "(", "'string'", ",", "'location'", ",", "'tokens'", ")", ":", "if", "hasattr", "(", "old_obj", ",", "attr", ")", ":", "setattr", "(", "obj", ",", "attr", ",", "getattr", "(", "old_obj", ",", "attr", ")", ")", "return", "obj" ]
Evaluates elements, and coerces objects to a class if needed
[ "Evaluates", "elements", "and", "coerces", "objects", "to", "a", "class", "if", "needed" ]
train
https://github.com/borntyping/python-dice/blob/88398c77534ebec19f1f18478e475d0b7a5bc717/dice/elements.py#L44-L60
0.003378
saltstack/salt
salt/modules/win_dsc.py
get_config
```python
def get_config():
    '''
    Get the current DSC Configuration

    Returns:
        dict: A dictionary representing the DSC Configuration on the machine

    Raises:
        CommandExecutionError: On failure

    CLI Example:

    .. code-block:: bash

        salt '*' dsc.get_config
    '''
    cmd = 'Get-DscConfiguration | Select-Object * -ExcludeProperty Cim*'

    try:
        raw_config = _pshell(cmd, ignore_retcode=True)
    except CommandExecutionError as exc:
        if 'Current configuration does not exist' in exc.info['stderr']:
            raise CommandExecutionError('Not Configured')
        raise

    config = dict()
    if raw_config:
        # Get DSC Configuration Name
        if 'ConfigurationName' in raw_config[0]:
            config[raw_config[0]['ConfigurationName']] = {}
        # Add all DSC Configurations by ResourceId
        for item in raw_config:
            config[item['ConfigurationName']][item['ResourceId']] = {}
            for key in item:
                if key not in ['ConfigurationName', 'ResourceId']:
                    config[item['ConfigurationName']][item['ResourceId']][key] = item[key]

    return config
```
[ "def", "get_config", "(", ")", ":", "cmd", "=", "'Get-DscConfiguration | Select-Object * -ExcludeProperty Cim*'", "try", ":", "raw_config", "=", "_pshell", "(", "cmd", ",", "ignore_retcode", "=", "True", ")", "except", "CommandExecutionError", "as", "exc", ":", "if", "'Current configuration does not exist'", "in", "exc", ".", "info", "[", "'stderr'", "]", ":", "raise", "CommandExecutionError", "(", "'Not Configured'", ")", "raise", "config", "=", "dict", "(", ")", "if", "raw_config", ":", "# Get DSC Configuration Name", "if", "'ConfigurationName'", "in", "raw_config", "[", "0", "]", ":", "config", "[", "raw_config", "[", "0", "]", "[", "'ConfigurationName'", "]", "]", "=", "{", "}", "# Add all DSC Configurations by ResourceId", "for", "item", "in", "raw_config", ":", "config", "[", "item", "[", "'ConfigurationName'", "]", "]", "[", "item", "[", "'ResourceId'", "]", "]", "=", "{", "}", "for", "key", "in", "item", ":", "if", "key", "not", "in", "[", "'ConfigurationName'", ",", "'ResourceId'", "]", ":", "config", "[", "item", "[", "'ConfigurationName'", "]", "]", "[", "item", "[", "'ResourceId'", "]", "]", "[", "key", "]", "=", "item", "[", "key", "]", "return", "config" ]
Get the current DSC Configuration Returns: dict: A dictionary representing the DSC Configuration on the machine Raises: CommandExecutionError: On failure CLI Example: .. code-block:: bash salt '*' dsc.get_config
[ "Get", "the", "current", "DSC", "Configuration" ]
train
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/win_dsc.py#L411-L448
0.001718
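The loop above reshapes flat records into a two-level dict keyed by configuration name and then resource id. An illustration with made-up records in place of the parsed PowerShell output:

```python
raw_config = [
    {'ConfigurationName': 'WebServer', 'ResourceId': '[File]Site',
     'Ensure': 'Present'},
    {'ConfigurationName': 'WebServer', 'ResourceId': '[Service]W3SVC',
     'State': 'Running'},
]
config = {raw_config[0]['ConfigurationName']: {}}
for item in raw_config:
    config[item['ConfigurationName']][item['ResourceId']] = {
        k: v for k, v in item.items()
        if k not in ('ConfigurationName', 'ResourceId')}
print(config)
# {'WebServer': {'[File]Site': {'Ensure': 'Present'},
#                '[Service]W3SVC': {'State': 'Running'}}}
```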
marcinmiklitz/pywindow
pywindow/utilities.py
create_supercell
```python
def create_supercell(system, supercell=[[-1, 1], [-1, 1], [-1, 1]]):
    """Create a supercell."""
    if 'lattice' not in system.keys():
        matrix = unit_cell_to_lattice_array(system['unit_cell'])
    else:
        matrix = system['lattice']
    coordinates = deepcopy(system['coordinates'])
    multiplication_matrices = []
    for a_ in range(supercell[0][0], supercell[0][1] + 1):
        for b_ in range(supercell[1][0], supercell[1][1] + 1):
            for c_ in range(supercell[2][0], supercell[2][1] + 1):
                mult_matrix = np.array([[a_, b_, c_]])
                mult_matrix = np.repeat(
                    mult_matrix, coordinates.shape[0], axis=0)
                multiplication_matrices.append(mult_matrix)
    frac_coordinates = cart2frac_all(coordinates, matrix)
    updated_coordinates = []
    for mat in multiplication_matrices:
        updated_coor = frac_coordinates + mat
        updated_coordinates.append(updated_coor)
    supercell_frac_coordinates = np.concatenate(updated_coordinates, axis=0)
    supercell_coordinates = frac2cart_all(supercell_frac_coordinates, matrix)
    # Now for each new cell in the supercell we need to repeat the
    # elements array so that it matches
    new_elements = deepcopy(system['elements'])
    new_ids = deepcopy(system['atom_ids'])
    for i in range(len(updated_coordinates) - 1):
        new_elements = np.concatenate((new_elements, system['elements']))
        new_ids = np.concatenate((new_ids, system['atom_ids']))
    cryst = lattice_array_to_unit_cell(matrix)
    supercell_system = {
        'elements': new_elements,
        'atom_ids': new_ids,
        'coordinates': supercell_coordinates,
        'unit_cell': cryst,
        'lattice': matrix,
    }
    return supercell_system
```
[ "def", "create_supercell", "(", "system", ",", "supercell", "=", "[", "[", "-", "1", ",", "1", "]", ",", "[", "-", "1", ",", "1", "]", ",", "[", "-", "1", ",", "1", "]", "]", ")", ":", "if", "'lattice'", "not", "in", "system", ".", "keys", "(", ")", ":", "matrix", "=", "unit_cell_to_lattice_array", "(", "system", "[", "'unit_cell'", "]", ")", "else", ":", "matrix", "=", "system", "[", "'lattice'", "]", "coordinates", "=", "deepcopy", "(", "system", "[", "'coordinates'", "]", ")", "multiplication_matrices", "=", "[", "]", "for", "a_", "in", "range", "(", "supercell", "[", "0", "]", "[", "0", "]", ",", "supercell", "[", "0", "]", "[", "1", "]", "+", "1", ")", ":", "for", "b_", "in", "range", "(", "supercell", "[", "1", "]", "[", "0", "]", ",", "supercell", "[", "1", "]", "[", "1", "]", "+", "1", ")", ":", "for", "c_", "in", "range", "(", "supercell", "[", "2", "]", "[", "0", "]", ",", "supercell", "[", "2", "]", "[", "1", "]", "+", "1", ")", ":", "mult_matrix", "=", "np", ".", "array", "(", "[", "[", "a_", ",", "b_", ",", "c_", "]", "]", ")", "mult_matrix", "=", "np", ".", "repeat", "(", "mult_matrix", ",", "coordinates", ".", "shape", "[", "0", "]", ",", "axis", "=", "0", ")", "multiplication_matrices", ".", "append", "(", "mult_matrix", ")", "frac_coordinates", "=", "cart2frac_all", "(", "coordinates", ",", "matrix", ")", "updated_coordinates", "=", "[", "]", "for", "mat", "in", "multiplication_matrices", ":", "updated_coor", "=", "frac_coordinates", "+", "mat", "updated_coordinates", ".", "append", "(", "updated_coor", ")", "supercell_frac_coordinates", "=", "np", ".", "concatenate", "(", "updated_coordinates", ",", "axis", "=", "0", ")", "supercell_coordinates", "=", "frac2cart_all", "(", "supercell_frac_coordinates", ",", "matrix", ")", "# Now for each new cell in the supercell we need to repeat the", "# elements array so that it maches", "new_elements", "=", "deepcopy", "(", "system", "[", "'elements'", "]", ")", "new_ids", "=", "deepcopy", "(", "system", "[", "'atom_ids'", "]", ")", "for", "i", "in", "range", "(", "len", "(", "updated_coordinates", ")", "-", "1", ")", ":", "new_elements", "=", "np", ".", "concatenate", "(", "(", "new_elements", ",", "system", "[", "'elements'", "]", ")", ")", "new_ids", "=", "np", ".", "concatenate", "(", "(", "new_ids", ",", "system", "[", "'atom_ids'", "]", ")", ")", "cryst", "=", "lattice_array_to_unit_cell", "(", "matrix", ")", "supercell_system", "=", "{", "'elements'", ":", "new_elements", ",", "'atom_ids'", ":", "new_ids", ",", "'coordinates'", ":", "supercell_coordinates", ",", "'unit_cell'", ":", "cryst", ",", "'lattice'", ":", "matrix", ",", "}", "return", "supercell_system" ]
Create a supercell.
[ "Create", "a", "supercell", "." ]
train
https://github.com/marcinmiklitz/pywindow/blob/e5264812157224f22a691741ca2e0aefdc9bd2eb/pywindow/utilities.py#L777-L814
0.000565
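The core of the function above is shifting fractional coordinates by every integer cell offset and mapping back to Cartesian space. A minimal NumPy sketch of that step, assuming a cubic cell for simplicity:

```python
import numpy as np

lattice = np.diag([10.0, 10.0, 10.0])  # assumed 10 Å cubic cell
frac = np.array([[0.25, 0.25, 0.25]])  # one atom, fractional coords

images = []
for a in range(-1, 2):
    for b in range(-1, 2):
        for c in range(-1, 2):
            images.append(frac + np.array([a, b, c]))

frac_super = np.concatenate(images, axis=0)  # 27 periodic images
cart_super = frac_super @ lattice            # fractional -> Cartesian
print(cart_super.shape)                      # (27, 3)
```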
JNRowe/jnrbase
jnrbase/cmdline.py
get_default
```python
def get_default(__func: Callable, __arg: str) -> str:
    """Fetch default value for a function argument

    Args:
        __func: Function to inspect
        __arg: Argument to extract default value for
    """
    return signature(__func).parameters[__arg].default
```
[ "def", "get_default", "(", "__func", ":", "Callable", ",", "__arg", ":", "str", ")", "->", "str", ":", "return", "signature", "(", "__func", ")", ".", "parameters", "[", "__arg", "]", ".", "default" ]
Fetch default value for a function argument Args: __func: Function to inspect __arg: Argument to extract default value for
[ "Fetch", "default", "value", "for", "a", "function", "argument" ]
train
https://github.com/JNRowe/jnrbase/blob/ae505ef69a9feb739b5f4e62c5a8e6533104d3ea/jnrbase/cmdline.py#L38-L45
0.003745
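A runnable demo of the one-liner above: ``inspect.signature`` exposes each parameter's declared default through ``.parameters[name].default``.

```python
from inspect import signature

def greet(name, punctuation='!'):
    return name + punctuation

print(signature(greet).parameters['punctuation'].default)  # -> '!'
```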
syrusakbary/promise
promise/dataloader.py
DataLoader.prime
```python
def prime(self, key, value):
    # type: (Hashable, Any) -> DataLoader
    """
    Adds the provided key and value to the cache. If the key already exists, no
    change is made. Returns itself for method chaining.
    """
    cache_key = self.get_cache_key(key)

    # Only add the key if it does not already exist.
    if cache_key not in self._promise_cache:
        # Cache a rejected promise if the value is an Error, in order to match
        # the behavior of load(key).
        if isinstance(value, Exception):
            promise = Promise.reject(value)
        else:
            promise = Promise.resolve(value)

        self._promise_cache[cache_key] = promise

    return self
```
[ "def", "prime", "(", "self", ",", "key", ",", "value", ")", ":", "# type: (Hashable, Any) -> DataLoader", "cache_key", "=", "self", ".", "get_cache_key", "(", "key", ")", "# Only add the key if it does not already exist.", "if", "cache_key", "not", "in", "self", ".", "_promise_cache", ":", "# Cache a rejected promise if the value is an Error, in order to match", "# the behavior of load(key).", "if", "isinstance", "(", "value", ",", "Exception", ")", ":", "promise", "=", "Promise", ".", "reject", "(", "value", ")", "else", ":", "promise", "=", "Promise", ".", "resolve", "(", "value", ")", "self", ".", "_promise_cache", "[", "cache_key", "]", "=", "promise", "return", "self" ]
Adds the provied key and value to the cache. If the key already exists, no change is made. Returns itself for method chaining.
[ "Adds", "the", "provied", "key", "and", "value", "to", "the", "cache", ".", "If", "the", "key", "already", "exists", "no", "change", "is", "made", ".", "Returns", "itself", "for", "method", "chaining", "." ]
train
https://github.com/syrusakbary/promise/blob/d80d791fcc86c89713dac57b55e56c0a9024f153/promise/dataloader.py#L170-L189
0.006658
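A minimal stand-in for the priming logic above, using a plain dict and tagged tuples in place of the Promise cache; it only mimics the never-overwrite and reject-on-Exception behavior:

```python
cache = {}

def prime(key, value):
    if key not in cache:  # an existing entry is never overwritten
        if isinstance(value, Exception):
            cache[key] = ('rejected', value)  # mirrors load(key) behavior
        else:
            cache[key] = ('resolved', value)

prime('user:1', {'name': 'Ada'})
prime('user:1', {'name': 'Bob'})  # no-op: key already primed
assert cache['user:1'] == ('resolved', {'name': 'Ada'})
```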
hfaran/slack-export-viewer
slackviewer/reader.py
Reader.compile_mpim_users
```python
def compile_mpim_users(self):
    """
    Gets the info for the members within the multiple person instant message

    Returns a list of all dms with the members that have ever existed

    :rtype: [object]
    {
        name: <name>
        users: [<user_id>]
    }

    """
    mpim_data = self._read_from_json("mpims.json")
    mpims = [c for c in mpim_data.values()]
    all_mpim_users = []
    for mpim in mpims:
        mpim_members = {"name": mpim["name"],
                        "users": [self.__USER_DATA[m] for m in mpim["members"]]}
        all_mpim_users.append(mpim_members)
    return all_mpim_users
```
[ "def", "compile_mpim_users", "(", "self", ")", ":", "mpim_data", "=", "self", ".", "_read_from_json", "(", "\"mpims.json\"", ")", "mpims", "=", "[", "c", "for", "c", "in", "mpim_data", ".", "values", "(", ")", "]", "all_mpim_users", "=", "[", "]", "for", "mpim", "in", "mpims", ":", "mpim_members", "=", "{", "\"name\"", ":", "mpim", "[", "\"name\"", "]", ",", "\"users\"", ":", "[", "self", ".", "__USER_DATA", "[", "m", "]", "for", "m", "in", "mpim", "[", "\"members\"", "]", "]", "}", "all_mpim_users", ".", "append", "(", "mpim_members", ")", "return", "all_mpim_users" ]
Gets the info for the members within the multiple person instant message Returns a list of all dms with the members that have ever existed :rtype: [object] { name: <name> users: [<user_id>] }
[ "Gets", "the", "info", "for", "the", "members", "within", "the", "multiple", "person", "instant", "message" ]
train
https://github.com/hfaran/slack-export-viewer/blob/bbe97f5cd9f72a0cc41c7395cef23860b44918f8/slackviewer/reader.py#L89-L111
0.006079
hazelcast/hazelcast-python-client
hazelcast/transaction.py
Transaction.rollback
```python
def rollback(self):
    """
    Rollback of this current transaction.
    """
    self._check_thread()
    if self.state not in (_STATE_ACTIVE, _STATE_PARTIAL_COMMIT):
        raise TransactionError("Transaction is not active.")
    try:
        if self.state != _STATE_PARTIAL_COMMIT:
            request = transaction_rollback_codec.encode_request(self.id, self.thread_id)
            self.client.invoker.invoke_on_connection(request, self.connection).result()
        self.state = _STATE_ROLLED_BACK
    finally:
        self._locals.transaction_exists = False
```
[ "def", "rollback", "(", "self", ")", ":", "self", ".", "_check_thread", "(", ")", "if", "self", ".", "state", "not", "in", "(", "_STATE_ACTIVE", ",", "_STATE_PARTIAL_COMMIT", ")", ":", "raise", "TransactionError", "(", "\"Transaction is not active.\"", ")", "try", ":", "if", "self", ".", "state", "!=", "_STATE_PARTIAL_COMMIT", ":", "request", "=", "transaction_rollback_codec", ".", "encode_request", "(", "self", ".", "id", ",", "self", ".", "thread_id", ")", "self", ".", "client", ".", "invoker", ".", "invoke_on_connection", "(", "request", ",", "self", ".", "connection", ")", ".", "result", "(", ")", "self", ".", "state", "=", "_STATE_ROLLED_BACK", "finally", ":", "self", ".", "_locals", ".", "transaction_exists", "=", "False" ]
Rollback of this current transaction.
[ "Rollback", "of", "this", "current", "transaction", "." ]
train
https://github.com/hazelcast/hazelcast-python-client/blob/3f6639443c23d6d036aa343f8e094f052250d2c1/hazelcast/transaction.py#L137-L150
0.006504
DEIB-GECO/PyGMQL
gmql/dataset/GMQLDataset.py
GMQLDataset.merge
```python
def merge(self, groupBy=None):
    """
    *Wrapper of* ``MERGE``

    The MERGE operator builds a new dataset consisting of a single sample having

        * as regions all the regions of all the input samples, with the same
          attributes and values
        * as metadata the union of all the metadata attribute-values of the
          input samples.

    A groupby clause can be specified on metadata: the samples are then
    partitioned in groups, each with a distinct value of the grouping metadata
    attributes, and the MERGE operation is applied to each group separately,
    yielding one sample in the result dataset for each group. Samples without
    the grouping metadata attributes are disregarded

    :param groupBy: list of metadata attributes
    :return: a new GMQLDataset

    Example of usage::

        import gmql as gl

        d1 = gl.get_example_dataset("Example_Dataset_1")
        result = d1.merge(['antibody'])

    """
    if isinstance(groupBy, list) and \
            all([isinstance(x, str) for x in groupBy]):
        groupBy = Some(groupBy)
    elif groupBy is None:
        groupBy = none()
    else:
        raise TypeError("groupBy must be a list of strings. "
                        "{} was provided".format(type(groupBy)))

    new_index = self.opmng.merge(self.__index, groupBy)
    return GMQLDataset(index=new_index, location=self.location,
                       local_sources=self._local_sources,
                       remote_sources=self._remote_sources,
                       meta_profile=self.meta_profile)
```
[ "def", "merge", "(", "self", ",", "groupBy", "=", "None", ")", ":", "if", "isinstance", "(", "groupBy", ",", "list", ")", "and", "all", "(", "[", "isinstance", "(", "x", ",", "str", ")", "for", "x", "in", "groupBy", "]", ")", ":", "groupBy", "=", "Some", "(", "groupBy", ")", "elif", "groupBy", "is", "None", ":", "groupBy", "=", "none", "(", ")", "else", ":", "raise", "TypeError", "(", "\"groupBy must be a list of strings. \"", "\"{} was provided\"", ".", "format", "(", "type", "(", "groupBy", ")", ")", ")", "new_index", "=", "self", ".", "opmng", ".", "merge", "(", "self", ".", "__index", ",", "groupBy", ")", "return", "GMQLDataset", "(", "index", "=", "new_index", ",", "location", "=", "self", ".", "location", ",", "local_sources", "=", "self", ".", "_local_sources", ",", "remote_sources", "=", "self", ".", "_remote_sources", ",", "meta_profile", "=", "self", ".", "meta_profile", ")" ]
*Wrapper of* ``MERGE`` The MERGE operator builds a new dataset consisting of a single sample having * as regions all the regions of all the input samples, with the same attributes and values * as metadata the union of all the metadata attribute-values of the input samples. A groupby clause can be specified on metadata: the samples are then partitioned in groups, each with a distinct value of the grouping metadata attributes, and the MERGE operation is applied to each group separately, yielding to one sample in the result dataset for each group. Samples without the grouping metadata attributes are disregarded :param groupBy: list of metadata attributes :return: a new GMQLDataset Example of usage:: import gmql as gl d1 = gl.get_example_dataset("Example_Dataset_1") result = d1.merge(['antibody'])
[ "*", "Wrapper", "of", "*", "MERGE" ]
train
https://github.com/DEIB-GECO/PyGMQL/blob/e58b2f9402a86056dcda484a32e3de0bb06ed991/gmql/dataset/GMQLDataset.py#L1184-L1225
0.007523
NYUCCL/psiTurk
psiturk/amt_services.py
MTurkServices.setup_mturk_connection
```python
def setup_mturk_connection(self):
    ''' Connect to turk '''
    if ((self.aws_access_key_id == 'YourAccessKeyId') or
            (self.aws_secret_access_key == 'YourSecretAccessKey')):
        print "AWS access key not set in ~/.psiturkconfig; please enter a valid access key."
        assert False
    if self.is_sandbox:
        endpoint_url = 'https://mturk-requester-sandbox.us-east-1.amazonaws.com'
    else:
        endpoint_url = 'https://mturk-requester.us-east-1.amazonaws.com'
    self.mtc = boto3.client('mturk',
                            region_name='us-east-1',
                            aws_access_key_id=self.aws_access_key_id,
                            aws_secret_access_key=self.aws_secret_access_key,
                            endpoint_url=endpoint_url)
    return True
```
[ "def", "setup_mturk_connection", "(", "self", ")", ":", "if", "(", "(", "self", ".", "aws_access_key_id", "==", "'YourAccessKeyId'", ")", "or", "(", "self", ".", "aws_secret_access_key", "==", "'YourSecretAccessKey'", ")", ")", ":", "print", "\"AWS access key not set in ~/.psiturkconfig; please enter a valid access key.\"", "assert", "False", "if", "self", ".", "is_sandbox", ":", "endpoint_url", "=", "'https://mturk-requester-sandbox.us-east-1.amazonaws.com'", "else", ":", "endpoint_url", "=", "'https://mturk-requester.us-east-1.amazonaws.com'", "self", ".", "mtc", "=", "boto3", ".", "client", "(", "'mturk'", ",", "region_name", "=", "'us-east-1'", ",", "aws_access_key_id", "=", "self", ".", "aws_access_key_id", ",", "aws_secret_access_key", "=", "self", ".", "aws_secret_access_key", ",", "endpoint_url", "=", "endpoint_url", ")", "return", "True" ]
Connect to turk
[ "Connect", "to", "turk" ]
train
https://github.com/NYUCCL/psiTurk/blob/7170b992a0b5f56c165929cf87b3d3a1f3336c36/psiturk/amt_services.py#L436-L453
0.010063
pip-services3-python/pip-services3-commons-python
pip_services3_commons/validate/Schema.py
Schema._perform_type_validation
```python
def _perform_type_validation(self, path, typ, value, results):
    """
    Validates a given value to match specified type.
    The type can be defined as a Schema, type, a type name or [[TypeCode]].
    When type is a Schema, it executes validation recursively against that Schema.

    :param path: a dot notation path to the value.
    :param typ: a type to match the value type
    :param value: a value to be validated.
    :param results: a list with validation results to add new results.
    """
    # If type is not defined then skip
    if typ == None:
        return

    # Perform validation against schema
    if isinstance(typ, Schema):
        schema = typ
        schema._perform_validation(path, value, results)
        return

    # If value is null then skip
    value = ObjectReader.get_value(value)
    if value == None:
        return

    name = path if path != None else "value"
    value_type = type(value)

    # Match types
    if TypeMatcher.match_type(typ, value_type):
        return

    # Generate type mismatch error
    results.append(
        ValidationResult(
            path,
            ValidationResultType.Error,
            "TYPE_MISMATCH",
            name + " type must be " + self._type_to_string(typ)
            + " but found " + self._type_to_string(value_type),
            typ,
            value_type
        )
    )
```
[ "def", "_perform_type_validation", "(", "self", ",", "path", ",", "typ", ",", "value", ",", "results", ")", ":", "# If type it not defined then skip", "if", "typ", "==", "None", ":", "return", "# Perform validation against schema", "if", "isinstance", "(", "typ", ",", "Schema", ")", ":", "schema", "=", "typ", "schema", ".", "_perform_validation", "(", "path", ",", "value", ",", "results", ")", "return", "# If value is null then skip", "value", "=", "ObjectReader", ".", "get_value", "(", "value", ")", "if", "value", "==", "None", ":", "return", "name", "=", "path", "if", "path", "!=", "None", "else", "\"value\"", "value_type", "=", "type", "(", "value", ")", "# Match types", "if", "TypeMatcher", ".", "match_type", "(", "typ", ",", "value_type", ")", ":", "return", "# Generate type mismatch error", "results", ".", "append", "(", "ValidationResult", "(", "path", ",", "ValidationResultType", ".", "Error", ",", "\"TYPE_MISMATCH\"", ",", "name", "+", "\" type must be \"", "+", "self", ".", "_type_to_string", "(", "typ", ")", "+", "\" but found \"", "+", "self", ".", "_type_to_string", "(", "value_type", ")", ",", "typ", ",", "value_type", ")", ")" ]
Validates a given value to match specified type. The type can be defined as a Schema, type, a type name or [[TypeCode]]. When type is a Schema, it executes validation recursively against that Schema. :param path: a dot notation path to the value. :param typ: a type to match the value type :param value: a value to be validated. :param results: a list with validation results to add new results.
[ "Validates", "a", "given", "value", "to", "match", "specified", "type", ".", "The", "type", "can", "be", "defined", "as", "a", "Schema", "type", "a", "type", "name", "or", "[[", "TypeCode", "]]", ".", "When", "type", "is", "a", "Schema", "it", "executes", "validation", "recursively", "against", "that", "Schema", "." ]
train
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/validate/Schema.py#L119-L165
0.005288
O365/python-o365
O365/message.py
Message.to_api_data
```python
def to_api_data(self, restrict_keys=None):
    """ Returns a dict representation of this message prepared to be sent
    to the cloud

    :param restrict_keys: a set of keys to restrict the returned data to
    :type restrict_keys: dict or set
    :return: converted to cloud based keys
    :rtype: dict
    """
    cc = self._cc  # alias to shorten the code

    message = {
        cc('subject'): self.subject,
        cc('body'): {
            cc('contentType'): self.body_type,
            cc('content'): self.body},
        cc('importance'): cc(self.importance.value),
        cc('flag'): self.flag.to_api_data(),
        cc('isReadReceiptRequested'): self.is_read_receipt_requested,
        cc('isDeliveryReceiptRequested'): self.is_delivery_receipt_requested,
    }

    if self.to:
        message[cc('toRecipients')] = [self._recipient_to_cloud(recipient)
                                       for recipient in self.to]
    if self.cc:
        message[cc('ccRecipients')] = [self._recipient_to_cloud(recipient)
                                       for recipient in self.cc]
    if self.bcc:
        message[cc('bccRecipients')] = [self._recipient_to_cloud(recipient)
                                        for recipient in self.bcc]
    if self.reply_to:
        message[cc('replyTo')] = [self._recipient_to_cloud(recipient)
                                  for recipient in self.reply_to]
    if self.attachments:
        message[cc('attachments')] = self.attachments.to_api_data()
    if self.sender and self.sender.address:
        message[cc('from')] = self._recipient_to_cloud(self.sender)
    if self.categories or 'categories' in (restrict_keys or {}):
        message[cc('categories')] = self.categories

    if self.object_id and not self.__is_draft:
        # return the whole signature of this message
        message[cc('id')] = self.object_id
        if self.created:
            message[cc('createdDateTime')] = self.created.astimezone(
                pytz.utc).isoformat()
        if self.received:
            message[cc('receivedDateTime')] = self.received.astimezone(
                pytz.utc).isoformat()
        if self.sent:
            message[cc('sentDateTime')] = self.sent.astimezone(
                pytz.utc).isoformat()
        message[cc('hasAttachments')] = bool(self.attachments)
        message[cc('isRead')] = self.is_read
        message[cc('isDraft')] = self.__is_draft
        message[cc('conversationId')] = self.conversation_id
        # this property does not form part of the message itself
        message[cc('parentFolderId')] = self.folder_id

    if restrict_keys:
        for key in list(message.keys()):
            if key not in restrict_keys:
                del message[key]

    return message
```
[ "def", "to_api_data", "(", "self", ",", "restrict_keys", "=", "None", ")", ":", "cc", "=", "self", ".", "_cc", "# alias to shorten the code", "message", "=", "{", "cc", "(", "'subject'", ")", ":", "self", ".", "subject", ",", "cc", "(", "'body'", ")", ":", "{", "cc", "(", "'contentType'", ")", ":", "self", ".", "body_type", ",", "cc", "(", "'content'", ")", ":", "self", ".", "body", "}", ",", "cc", "(", "'importance'", ")", ":", "cc", "(", "self", ".", "importance", ".", "value", ")", ",", "cc", "(", "'flag'", ")", ":", "self", ".", "flag", ".", "to_api_data", "(", ")", ",", "cc", "(", "'isReadReceiptRequested'", ")", ":", "self", ".", "is_read_receipt_requested", ",", "cc", "(", "'isDeliveryReceiptRequested'", ")", ":", "self", ".", "is_delivery_receipt_requested", ",", "}", "if", "self", ".", "to", ":", "message", "[", "cc", "(", "'toRecipients'", ")", "]", "=", "[", "self", ".", "_recipient_to_cloud", "(", "recipient", ")", "for", "recipient", "in", "self", ".", "to", "]", "if", "self", ".", "cc", ":", "message", "[", "cc", "(", "'ccRecipients'", ")", "]", "=", "[", "self", ".", "_recipient_to_cloud", "(", "recipient", ")", "for", "recipient", "in", "self", ".", "cc", "]", "if", "self", ".", "bcc", ":", "message", "[", "cc", "(", "'bccRecipients'", ")", "]", "=", "[", "self", ".", "_recipient_to_cloud", "(", "recipient", ")", "for", "recipient", "in", "self", ".", "bcc", "]", "if", "self", ".", "reply_to", ":", "message", "[", "cc", "(", "'replyTo'", ")", "]", "=", "[", "self", ".", "_recipient_to_cloud", "(", "recipient", ")", "for", "recipient", "in", "self", ".", "reply_to", "]", "if", "self", ".", "attachments", ":", "message", "[", "cc", "(", "'attachments'", ")", "]", "=", "self", ".", "attachments", ".", "to_api_data", "(", ")", "if", "self", ".", "sender", "and", "self", ".", "sender", ".", "address", ":", "message", "[", "cc", "(", "'from'", ")", "]", "=", "self", ".", "_recipient_to_cloud", "(", "self", ".", "sender", ")", "if", "self", ".", "categories", "or", "'categories'", "in", "(", "restrict_keys", "or", "{", "}", ")", ":", "message", "[", "cc", "(", "'categories'", ")", "]", "=", "self", ".", "categories", "if", "self", ".", "object_id", "and", "not", "self", ".", "__is_draft", ":", "# return the whole signature of this message", "message", "[", "cc", "(", "'id'", ")", "]", "=", "self", ".", "object_id", "if", "self", ".", "created", ":", "message", "[", "cc", "(", "'createdDateTime'", ")", "]", "=", "self", ".", "created", ".", "astimezone", "(", "pytz", ".", "utc", ")", ".", "isoformat", "(", ")", "if", "self", ".", "received", ":", "message", "[", "cc", "(", "'receivedDateTime'", ")", "]", "=", "self", ".", "received", ".", "astimezone", "(", "pytz", ".", "utc", ")", ".", "isoformat", "(", ")", "if", "self", ".", "sent", ":", "message", "[", "cc", "(", "'sentDateTime'", ")", "]", "=", "self", ".", "sent", ".", "astimezone", "(", "pytz", ".", "utc", ")", ".", "isoformat", "(", ")", "message", "[", "cc", "(", "'hasAttachments'", ")", "]", "=", "bool", "(", "self", ".", "attachments", ")", "message", "[", "cc", "(", "'isRead'", ")", "]", "=", "self", ".", "is_read", "message", "[", "cc", "(", "'isDraft'", ")", "]", "=", "self", ".", "__is_draft", "message", "[", "cc", "(", "'conversationId'", ")", "]", "=", "self", ".", "conversation_id", "# this property does not form part of the message itself", "message", "[", "cc", "(", "'parentFolderId'", ")", "]", "=", "self", ".", "folder_id", "if", "restrict_keys", ":", "for", "key", "in", "list", "(", "message", ".", "keys", 
"(", ")", ")", ":", "if", "key", "not", "in", "restrict_keys", ":", "del", "message", "[", "key", "]", "return", "message" ]
Returns a dict representation of this message prepared to be send to the cloud :param restrict_keys: a set of keys to restrict the returned data to :type restrict_keys: dict or set :return: converted to cloud based keys :rtype: dict
[ "Returns", "a", "dict", "representation", "of", "this", "message", "prepared", "to", "be", "send", "to", "the", "cloud" ]
train
https://github.com/O365/python-o365/blob/02a71cf3775cc6a3c042e003365d6a07c8c75a73/O365/message.py#L504-L573
0.000998
Asana/python-asana
asana/session.py
AsanaOAuth2Session.fetch_token
```python
def fetch_token(self, **kwargs):
    """Exchange a code (and 'state' token) for a bearer token"""
    return super(AsanaOAuth2Session, self).fetch_token(
        self.token_url, client_secret=self.client_secret, **kwargs)
```
[ "def", "fetch_token", "(", "self", ",", "*", "*", "kwargs", ")", ":", "return", "super", "(", "AsanaOAuth2Session", ",", "self", ")", ".", "fetch_token", "(", "self", ".", "token_url", ",", "client_secret", "=", "self", ".", "client_secret", ",", "*", "*", "kwargs", ")" ]
Exchange a code (and 'state' token) for a bearer token
[ "Exchange", "a", "code", "(", "and", "state", "token", ")", "for", "a", "bearer", "token" ]
train
https://github.com/Asana/python-asana/blob/6deb7a34495db23f44858e53b6bb2c9eccff7872/asana/session.py#L18-L20
0.013636
ClimateImpactLab/DataFS
datafs/config/constructor.py
APIConstructor._generate_service
def _generate_service(service_config): ''' Generate a service from a service_config dictionary Parameters ---------- service_config : dict Configuration with keys service, args, and kwargs used to generate a new fs service object Returns ------- service : object fs service object initialized with *args, **kwargs Examples -------- Generate a temporary filesystem (no arguments required): .. code-block:: python >>> tmp = APIConstructor._generate_service( ... {'service': 'TempFS'}) ... >>> from fs.tempfs import TempFS >>> assert isinstance(tmp, TempFS) >>> import os >>> assert os.path.isdir(tmp.getsyspath('/')) >>> tmp.close() Generate a system filesystem in a temporary directory: .. code-block:: python >>> import tempfile >>> tempdir = tempfile.mkdtemp() >>> local = APIConstructor._generate_service( ... { ... 'service': 'OSFS', ... 'args': [tempdir] ... }) ... >>> from fs.osfs import OSFS >>> assert isinstance(local, OSFS) >>> import os >>> assert os.path.isdir(local.getsyspath('/')) >>> local.close() >>> import shutil >>> shutil.rmtree(tempdir) Mock an S3 filesystem with moto: .. code-block:: python >>> import moto >>> m = moto.mock_s3() >>> m.start() >>> s3 = APIConstructor._generate_service( ... { ... 'service': 'S3FS', ... 'args': ['bucket-name'], ... 'kwargs': { ... 'aws_access_key':'MY_KEY', ... 'aws_secret_key':'MY_SECRET_KEY' ... } ... }) ... >>> from fs.s3fs import S3FS >>> assert isinstance(s3, S3FS) >>> m.stop() ''' filesystems = [] for _, modname, _ in pkgutil.iter_modules(fs.__path__): if modname.endswith('fs'): filesystems.append(modname) service_mod_name = service_config['service'].lower() assert_msg = 'Filesystem "{}" not found in pyFilesystem {}'.format( service_mod_name, fs.__version__) assert service_mod_name in filesystems, assert_msg svc_module = importlib.import_module('fs.{}'.format(service_mod_name)) svc_class = svc_module.__dict__[service_config['service']] service = svc_class(*service_config.get('args', []), **service_config.get('kwargs', {})) return service
python
def _generate_service(service_config): ''' Generate a service from a service_config dictionary Parameters ---------- service_config : dict Configuration with keys service, args, and kwargs used to generate a new fs service object Returns ------- service : object fs service object initialized with *args, **kwargs Examples -------- Generate a temporary filesystem (no arguments required): .. code-block:: python >>> tmp = APIConstructor._generate_service( ... {'service': 'TempFS'}) ... >>> from fs.tempfs import TempFS >>> assert isinstance(tmp, TempFS) >>> import os >>> assert os.path.isdir(tmp.getsyspath('/')) >>> tmp.close() Generate a system filesystem in a temporary directory: .. code-block:: python >>> import tempfile >>> tempdir = tempfile.mkdtemp() >>> local = APIConstructor._generate_service( ... { ... 'service': 'OSFS', ... 'args': [tempdir] ... }) ... >>> from fs.osfs import OSFS >>> assert isinstance(local, OSFS) >>> import os >>> assert os.path.isdir(local.getsyspath('/')) >>> local.close() >>> import shutil >>> shutil.rmtree(tempdir) Mock an S3 filesystem with moto: .. code-block:: python >>> import moto >>> m = moto.mock_s3() >>> m.start() >>> s3 = APIConstructor._generate_service( ... { ... 'service': 'S3FS', ... 'args': ['bucket-name'], ... 'kwargs': { ... 'aws_access_key':'MY_KEY', ... 'aws_secret_key':'MY_SECRET_KEY' ... } ... }) ... >>> from fs.s3fs import S3FS >>> assert isinstance(s3, S3FS) >>> m.stop() ''' filesystems = [] for _, modname, _ in pkgutil.iter_modules(fs.__path__): if modname.endswith('fs'): filesystems.append(modname) service_mod_name = service_config['service'].lower() assert_msg = 'Filesystem "{}" not found in pyFilesystem {}'.format( service_mod_name, fs.__version__) assert service_mod_name in filesystems, assert_msg svc_module = importlib.import_module('fs.{}'.format(service_mod_name)) svc_class = svc_module.__dict__[service_config['service']] service = svc_class(*service_config.get('args', []), **service_config.get('kwargs', {})) return service
[ "def", "_generate_service", "(", "service_config", ")", ":", "filesystems", "=", "[", "]", "for", "_", ",", "modname", ",", "_", "in", "pkgutil", ".", "iter_modules", "(", "fs", ".", "__path__", ")", ":", "if", "modname", ".", "endswith", "(", "'fs'", ")", ":", "filesystems", ".", "append", "(", "modname", ")", "service_mod_name", "=", "service_config", "[", "'service'", "]", ".", "lower", "(", ")", "assert_msg", "=", "'Filesystem \"{}\" not found in pyFilesystem {}'", ".", "format", "(", "service_mod_name", ",", "fs", ".", "__version__", ")", "assert", "service_mod_name", "in", "filesystems", ",", "assert_msg", "svc_module", "=", "importlib", ".", "import_module", "(", "'fs.{}'", ".", "format", "(", "service_mod_name", ")", ")", "svc_class", "=", "svc_module", ".", "__dict__", "[", "service_config", "[", "'service'", "]", "]", "service", "=", "svc_class", "(", "*", "service_config", ".", "get", "(", "'args'", ",", "[", "]", ")", ",", "*", "*", "service_config", ".", "get", "(", "'kwargs'", ",", "{", "}", ")", ")", "return", "service" ]
Generate a service from a service_config dictionary Parameters ---------- service_config : dict Configuration with keys service, args, and kwargs used to generate a new fs service object Returns ------- service : object fs service object initialized with *args, **kwargs Examples -------- Generate a temporary filesystem (no arguments required): .. code-block:: python >>> tmp = APIConstructor._generate_service( ... {'service': 'TempFS'}) ... >>> from fs.tempfs import TempFS >>> assert isinstance(tmp, TempFS) >>> import os >>> assert os.path.isdir(tmp.getsyspath('/')) >>> tmp.close() Generate a system filesystem in a temporary directory: .. code-block:: python >>> import tempfile >>> tempdir = tempfile.mkdtemp() >>> local = APIConstructor._generate_service( ... { ... 'service': 'OSFS', ... 'args': [tempdir] ... }) ... >>> from fs.osfs import OSFS >>> assert isinstance(local, OSFS) >>> import os >>> assert os.path.isdir(local.getsyspath('/')) >>> local.close() >>> import shutil >>> shutil.rmtree(tempdir) Mock an S3 filesystem with moto: .. code-block:: python >>> import moto >>> m = moto.mock_s3() >>> m.start() >>> s3 = APIConstructor._generate_service( ... { ... 'service': 'S3FS', ... 'args': ['bucket-name'], ... 'kwargs': { ... 'aws_access_key':'MY_KEY', ... 'aws_secret_key':'MY_SECRET_KEY' ... } ... }) ... >>> from fs.s3fs import S3FS >>> assert isinstance(s3, S3FS) >>> m.stop()
[ "Generate", "a", "service", "from", "a", "service_config", "dictionary" ]
train
https://github.com/ClimateImpactLab/DataFS/blob/0d32c2b4e18d300a11b748a552f6adbc3dd8f59d/datafs/config/constructor.py#L125-L224
0.000678
jameslyons/pycipher
pycipher/columnartransposition.py
ColTrans.encipher
def encipher(self,string): """Encipher string using Columnar Transposition cipher according to initialised key. Punctuation and whitespace are removed from the input. Example:: ciphertext = ColTrans('GERMAN').encipher(plaintext) :param string: The string to encipher. :returns: The enciphered string. """ string = self.remove_punctuation(string) ret = '' ind = self.sortind(self.keyword) for i in range(len(self.keyword)): ret += string[ind.index(i)::len(self.keyword)] return ret
python
def encipher(self,string): """Encipher string using Columnar Transposition cipher according to initialised key. Punctuation and whitespace are removed from the input. Example:: ciphertext = ColTrans('GERMAN').encipher(plaintext) :param string: The string to encipher. :returns: The enciphered string. """ string = self.remove_punctuation(string) ret = '' ind = self.sortind(self.keyword) for i in range(len(self.keyword)): ret += string[ind.index(i)::len(self.keyword)] return ret
[ "def", "encipher", "(", "self", ",", "string", ")", ":", "string", "=", "self", ".", "remove_punctuation", "(", "string", ")", "ret", "=", "''", "ind", "=", "self", ".", "sortind", "(", "self", ".", "keyword", ")", "for", "i", "in", "range", "(", "len", "(", "self", ".", "keyword", ")", ")", ":", "ret", "+=", "string", "[", "ind", ".", "index", "(", "i", ")", ":", ":", "len", "(", "self", ".", "keyword", ")", "]", "return", "ret" ]
Encipher string using Columnar Transposition cipher according to initialised key. Punctuation and whitespace are removed from the input. Example:: ciphertext = ColTrans('GERMAN').encipher(plaintext) :param string: The string to encipher. :returns: The enciphered string.
[ "Encipher", "string", "using", "Columnar", "Transposition", "cipher", "according", "to", "initialised", "key", ".", "Punctuation", "and", "whitespace", "are", "removed", "from", "the", "input", "." ]
train
https://github.com/jameslyons/pycipher/blob/8f1d7cf3cba4e12171e27d9ce723ad890194de19/pycipher/columnartransposition.py#L32-L48
0.012678
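The encipher method above is the standard columnar transposition read-out: the keyword's letters are ranked alphabetically, and the cleaned plaintext is emitted column by column in that rank order. A minimal usage sketch, assuming pycipher is installed (the keyword and plaintext are illustrative):

    from pycipher import ColTrans

    cipher = ColTrans('GERMAN')
    ct = cipher.encipher('defend the east wall')  # punctuation and spaces are stripped first
    pt = cipher.decipher(ct)                      # should round-trip to the cleaned plaintext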
jobovy/galpy
galpy/df/streamgapdf.py
streamgapdf._density_par
def _density_par(self,dangle,tdisrupt=None,approx=True, higherorder=None): """The raw density as a function of parallel angle, approx= use faster method that directly integrates the spline representation""" if higherorder is None: higherorder= self._higherorderTrack if tdisrupt is None: tdisrupt= self._tdisrupt if approx: return self._density_par_approx(dangle,tdisrupt, higherorder=higherorder) else: return integrate.quad(lambda T: numpy.sqrt(self._sortedSigOEig[2])\ *(1+T*T)/(1-T*T)**2.\ *self.pOparapar(T/(1-T*T)\ *numpy.sqrt(self._sortedSigOEig[2])\ +self._meandO,dangle), -1.,1.)[0]
python
def _density_par(self,dangle,tdisrupt=None,approx=True, higherorder=None): """The raw density as a function of parallel angle, approx= use faster method that directly integrates the spline representation""" if higherorder is None: higherorder= self._higherorderTrack if tdisrupt is None: tdisrupt= self._tdisrupt if approx: return self._density_par_approx(dangle,tdisrupt, higherorder=higherorder) else: return integrate.quad(lambda T: numpy.sqrt(self._sortedSigOEig[2])\ *(1+T*T)/(1-T*T)**2.\ *self.pOparapar(T/(1-T*T)\ *numpy.sqrt(self._sortedSigOEig[2])\ +self._meandO,dangle), -1.,1.)[0]
[ "def", "_density_par", "(", "self", ",", "dangle", ",", "tdisrupt", "=", "None", ",", "approx", "=", "True", ",", "higherorder", "=", "None", ")", ":", "if", "higherorder", "is", "None", ":", "higherorder", "=", "self", ".", "_higherorderTrack", "if", "tdisrupt", "is", "None", ":", "tdisrupt", "=", "self", ".", "_tdisrupt", "if", "approx", ":", "return", "self", ".", "_density_par_approx", "(", "dangle", ",", "tdisrupt", ",", "higherorder", "=", "higherorder", ")", "else", ":", "return", "integrate", ".", "quad", "(", "lambda", "T", ":", "numpy", ".", "sqrt", "(", "self", ".", "_sortedSigOEig", "[", "2", "]", ")", "*", "(", "1", "+", "T", "*", "T", ")", "/", "(", "1", "-", "T", "*", "T", ")", "**", "2.", "*", "self", ".", "pOparapar", "(", "T", "/", "(", "1", "-", "T", "*", "T", ")", "*", "numpy", ".", "sqrt", "(", "self", ".", "_sortedSigOEig", "[", "2", "]", ")", "+", "self", ".", "_meandO", ",", "dangle", ")", ",", "-", "1.", ",", "1.", ")", "[", "0", "]" ]
The raw density as a function of parallel angle, approx= use faster method that directly integrates the spline representation
[ "The", "raw", "density", "as", "a", "function", "of", "parallel", "angle", "approx", "=", "use", "faster", "method", "that", "directly", "integrates", "the", "spline", "representation" ]
train
https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/galpy/df/streamgapdf.py#L225-L241
0.027027
Azure/azure-uamqp-python
uamqp/message.py
BatchMessage._create_batch_message
def _create_batch_message(self): """Create a ~uamqp.message.Message for a value supplied by the data generator. Applies all properties and annotations to the message. :rtype: ~uamqp.message.Message """ return Message(body=[], properties=self.properties, annotations=self.annotations, msg_format=self.batch_format, header=self.header, encoding=self._encoding)
python
def _create_batch_message(self): """Create a ~uamqp.message.Message for a value supplied by the data generator. Applies all properties and annotations to the message. :rtype: ~uamqp.message.Message """ return Message(body=[], properties=self.properties, annotations=self.annotations, msg_format=self.batch_format, header=self.header, encoding=self._encoding)
[ "def", "_create_batch_message", "(", "self", ")", ":", "return", "Message", "(", "body", "=", "[", "]", ",", "properties", "=", "self", ".", "properties", ",", "annotations", "=", "self", ".", "annotations", ",", "msg_format", "=", "self", ".", "batch_format", ",", "header", "=", "self", ".", "header", ",", "encoding", "=", "self", ".", "_encoding", ")" ]
Create a ~uamqp.message.Message for a value supplied by the data generator. Applies all properties and annotations to the message. :rtype: ~uamqp.message.Message
[ "Create", "a", "~uamqp", ".", "message", ".", "Message", "for", "a", "value", "supplied", "by", "the", "data", "generator", ".", "Applies", "all", "properties", "and", "annotations", "to", "the", "message", "." ]
train
https://github.com/Azure/azure-uamqp-python/blob/b67e4fcaf2e8a337636947523570239c10a58ae2/uamqp/message.py#L439-L450
0.003891
Galarzaa90/tibia.py
tibiapy/guild.py
Guild._parse_guild_members
def _parse_guild_members(self, parsed_content): """ Parses the guild's member and invited list. Parameters ---------- parsed_content: :class:`bs4.Tag` The parsed content of the guild's page """ member_rows = parsed_content.find_all("tr", {'bgcolor': ["#D4C0A1", "#F1E0C6"]}) previous_rank = {} for row in member_rows: columns = row.find_all('td') values = tuple(c.text.replace("\u00a0", " ") for c in columns) if len(columns) == COLS_GUILD_MEMBER: self._parse_current_member(previous_rank, values) if len(columns) == COLS_INVITED_MEMBER: self._parse_invited_member(values)
python
def _parse_guild_members(self, parsed_content): """ Parses the guild's member and invited list. Parameters ---------- parsed_content: :class:`bs4.Tag` The parsed content of the guild's page """ member_rows = parsed_content.find_all("tr", {'bgcolor': ["#D4C0A1", "#F1E0C6"]}) previous_rank = {} for row in member_rows: columns = row.find_all('td') values = tuple(c.text.replace("\u00a0", " ") for c in columns) if len(columns) == COLS_GUILD_MEMBER: self._parse_current_member(previous_rank, values) if len(columns) == COLS_INVITED_MEMBER: self._parse_invited_member(values)
[ "def", "_parse_guild_members", "(", "self", ",", "parsed_content", ")", ":", "member_rows", "=", "parsed_content", ".", "find_all", "(", "\"tr\"", ",", "{", "'bgcolor'", ":", "[", "\"#D4C0A1\"", ",", "\"#F1E0C6\"", "]", "}", ")", "previous_rank", "=", "{", "}", "for", "row", "in", "member_rows", ":", "columns", "=", "row", ".", "find_all", "(", "'td'", ")", "values", "=", "tuple", "(", "c", ".", "text", ".", "replace", "(", "\"\\u00a0\"", ",", "\" \"", ")", "for", "c", "in", "columns", ")", "if", "len", "(", "columns", ")", "==", "COLS_GUILD_MEMBER", ":", "self", ".", "_parse_current_member", "(", "previous_rank", ",", "values", ")", "if", "len", "(", "columns", ")", "==", "COLS_INVITED_MEMBER", ":", "self", ".", "_parse_invited_member", "(", "values", ")" ]
Parses the guild's member and invited list. Parameters ---------- parsed_content: :class:`bs4.Tag` The parsed content of the guild's page
[ "Parses", "the", "guild", "s", "member", "and", "invited", "list", "." ]
train
https://github.com/Galarzaa90/tibia.py/blob/02ba1a8f1e18177ef5c7dcd44affc8d761d59e12/tibiapy/guild.py#L326-L343
0.004071
google/transitfeed
transitfeed/route.py
Route.GetPatternIdTripDict
def GetPatternIdTripDict(self): """Return a dictionary that maps pattern_id to a list of Trip objects.""" d = {} for t in self._trips: d.setdefault(t.pattern_id, []).append(t) return d
python
def GetPatternIdTripDict(self): """Return a dictionary that maps pattern_id to a list of Trip objects.""" d = {} for t in self._trips: d.setdefault(t.pattern_id, []).append(t) return d
[ "def", "GetPatternIdTripDict", "(", "self", ")", ":", "d", "=", "{", "}", "for", "t", "in", "self", ".", "_trips", ":", "d", ".", "setdefault", "(", "t", ".", "pattern_id", ",", "[", "]", ")", ".", "append", "(", "t", ")", "return", "d" ]
Return a dictionary that maps pattern_id to a list of Trip objects.
[ "Return", "a", "dictionary", "that", "maps", "pattern_id", "to", "a", "list", "of", "Trip", "objects", "." ]
train
https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/route.py#L113-L118
0.009709
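GetPatternIdTripDict is the classic dict.setdefault grouping idiom: a single pass that appends each item to a list keyed by its group. A self-contained sketch of the same pattern (the (pattern_id, trip) pairs are illustrative stand-ins for Trip objects):

    pairs = [('p1', 'trip_a'), ('p1', 'trip_b'), ('p2', 'trip_c')]
    grouped = {}
    for pattern_id, trip in pairs:
        grouped.setdefault(pattern_id, []).append(trip)  # create the list on first sight of a key
    assert grouped == {'p1': ['trip_a', 'trip_b'], 'p2': ['trip_c']}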
tarmstrong/nbdiff
nbdiff/notebook_parser.py
NotebookParser.parse
def parse(self, json_data): """Parse a notebook .ipynb file. Parameters ---------- json_data : file A file handle for an .ipynb file. Returns ------- nb : An IPython Notebook data structure. """ data = current.read(json_data, 'ipynb') json_data.close() return data
python
def parse(self, json_data): """Parse a notebook .ipynb file. Parameters ---------- json_data : file A file handle for an .ipynb file. Returns ------- nb : An IPython Notebook data structure. """ data = current.read(json_data, 'ipynb') json_data.close() return data
[ "def", "parse", "(", "self", ",", "json_data", ")", ":", "data", "=", "current", ".", "read", "(", "json_data", ",", "'ipynb'", ")", "json_data", ".", "close", "(", ")", "return", "data" ]
Parse a notebook .ipynb file. Parameters ---------- json_data : file A file handle for an .ipynb file. Returns ------- nb : An IPython Notebook data structure.
[ "Parse", "a", "notebook", ".", "ipynb", "file", "." ]
train
https://github.com/tarmstrong/nbdiff/blob/3fdfb89f94fc0f4821bc04999ddf53b34d882ab9/nbdiff/notebook_parser.py#L6-L20
0.005464
StackStorm/pybind
pybind/slxos/v17r_2_00/interface/ethernet/__init__.py
ethernet._set_loopback
def _set_loopback(self, v, load=False): """ Setter method for loopback, mapped from YANG variable /interface/ethernet/loopback (enumeration) If this variable is read-only (config: false) in the source YANG file, then _set_loopback is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_loopback() directly. YANG Description: This leaf specifies the administratively configured loopback mode on the physical interface """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'phy': {'value': 1}},), is_leaf=True, yang_name="loopback", rest_name="loopback", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set loopback mode'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='enumeration', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """loopback must be of a type compatible with enumeration""", 'defined-type': "brocade-interface:enumeration", 'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'phy': {'value': 1}},), is_leaf=True, yang_name="loopback", rest_name="loopback", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set loopback mode'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='enumeration', is_config=True)""", }) self.__loopback = t if hasattr(self, '_set'): self._set()
python
def _set_loopback(self, v, load=False): """ Setter method for loopback, mapped from YANG variable /interface/ethernet/loopback (enumeration) If this variable is read-only (config: false) in the source YANG file, then _set_loopback is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_loopback() directly. YANG Description: This leaf specifies the administratively configured loopback mode on the physical interface """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'phy': {'value': 1}},), is_leaf=True, yang_name="loopback", rest_name="loopback", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set loopback mode'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='enumeration', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """loopback must be of a type compatible with enumeration""", 'defined-type': "brocade-interface:enumeration", 'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'phy': {'value': 1}},), is_leaf=True, yang_name="loopback", rest_name="loopback", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set loopback mode'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='enumeration', is_config=True)""", }) self.__loopback = t if hasattr(self, '_set'): self._set()
[ "def", "_set_loopback", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ",", "base", "=", "RestrictedClassType", "(", "base_type", "=", "unicode", ",", "restriction_type", "=", "\"dict_key\"", ",", "restriction_arg", "=", "{", "u'phy'", ":", "{", "'value'", ":", "1", "}", "}", ",", ")", ",", "is_leaf", "=", "True", ",", "yang_name", "=", "\"loopback\"", ",", "rest_name", "=", "\"loopback\"", ",", "parent", "=", "self", ",", "path_helper", "=", "self", ".", "_path_helper", ",", "extmethods", "=", "self", ".", "_extmethods", ",", "register_paths", "=", "True", ",", "extensions", "=", "{", "u'tailf-common'", ":", "{", "u'cli-full-command'", ":", "None", ",", "u'info'", ":", "u'Set loopback mode'", "}", "}", ",", "namespace", "=", "'urn:brocade.com:mgmt:brocade-interface'", ",", "defining_module", "=", "'brocade-interface'", ",", "yang_type", "=", "'enumeration'", ",", "is_config", "=", "True", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "ValueError", "(", "{", "'error-string'", ":", "\"\"\"loopback must be of a type compatible with enumeration\"\"\"", ",", "'defined-type'", ":", "\"brocade-interface:enumeration\"", ",", "'generated-type'", ":", "\"\"\"YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type=\"dict_key\", restriction_arg={u'phy': {'value': 1}},), is_leaf=True, yang_name=\"loopback\", rest_name=\"loopback\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set loopback mode'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='enumeration', is_config=True)\"\"\"", ",", "}", ")", "self", ".", "__loopback", "=", "t", "if", "hasattr", "(", "self", ",", "'_set'", ")", ":", "self", ".", "_set", "(", ")" ]
Setter method for loopback, mapped from YANG variable /interface/ethernet/loopback (enumeration) If this variable is read-only (config: false) in the source YANG file, then _set_loopback is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_loopback() directly. YANG Description: This leaf specifies the administratively configured loopback mode on the physical interface
[ "Setter", "method", "for", "loopback", "mapped", "from", "YANG", "variable", "/", "interface", "/", "ethernet", "/", "loopback", "(", "enumeration", ")", "If", "this", "variable", "is", "read", "-", "only", "(", "config", ":", "false", ")", "in", "the", "source", "YANG", "file", "then", "_set_loopback", "is", "considered", "as", "a", "private", "method", ".", "Backends", "looking", "to", "populate", "this", "variable", "should", "do", "so", "via", "calling", "thisObj", ".", "_set_loopback", "()", "directly", "." ]
train
https://github.com/StackStorm/pybind/blob/44c467e71b2b425be63867aba6e6fa28b2cfe7fb/pybind/slxos/v17r_2_00/interface/ethernet/__init__.py#L270-L294
0.004861
ska-sa/kittens
Kittens/pixmaps.py
QPixmapWrapper.pm
def pm(self): """Get QPixmap from wrapper""" if self._pm is None: self._pm = QPixmap(self._xpmstr) return self._pm
python
def pm(self): """Get QPixmap from wrapper""" if self._pm is None: self._pm = QPixmap(self._xpmstr) return self._pm
[ "def", "pm", "(", "self", ")", ":", "if", "self", ".", "_pm", "is", "None", ":", "self", ".", "_pm", "=", "QPixmap", "(", "self", ".", "_xpmstr", ")", "return", "self", ".", "_pm" ]
Get QPixmap from wrapper
[ "Get", "QPixmap", "from", "wrapper" ]
train
https://github.com/ska-sa/kittens/blob/92058e065ddffa5d00a44749145a6f917e0f31dc/Kittens/pixmaps.py#L94-L98
0.013333
chakki-works/seqeval
seqeval/metrics/sequence_labeling.py
recall_score
def recall_score(y_true, y_pred, average='micro', suffix=False): """Compute the recall. The recall is the ratio ``tp / (tp + fn)`` where ``tp`` is the number of true positives and ``fn`` the number of false negatives. The recall is intuitively the ability of the classifier to find all the positive samples. The best value is 1 and the worst value is 0. Args: y_true : 2d array. Ground truth (correct) target values. y_pred : 2d array. Estimated targets as returned by a tagger. Returns: score : float. Example: >>> from seqeval.metrics import recall_score >>> y_true = [['O', 'O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']] >>> y_pred = [['O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']] >>> recall_score(y_true, y_pred) 0.50 """ true_entities = set(get_entities(y_true, suffix)) pred_entities = set(get_entities(y_pred, suffix)) nb_correct = len(true_entities & pred_entities) nb_true = len(true_entities) score = nb_correct / nb_true if nb_true > 0 else 0 return score
python
def recall_score(y_true, y_pred, average='micro', suffix=False): """Compute the recall. The recall is the ratio ``tp / (tp + fn)`` where ``tp`` is the number of true positives and ``fn`` the number of false negatives. The recall is intuitively the ability of the classifier to find all the positive samples. The best value is 1 and the worst value is 0. Args: y_true : 2d array. Ground truth (correct) target values. y_pred : 2d array. Estimated targets as returned by a tagger. Returns: score : float. Example: >>> from seqeval.metrics import recall_score >>> y_true = [['O', 'O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']] >>> y_pred = [['O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']] >>> recall_score(y_true, y_pred) 0.50 """ true_entities = set(get_entities(y_true, suffix)) pred_entities = set(get_entities(y_pred, suffix)) nb_correct = len(true_entities & pred_entities) nb_true = len(true_entities) score = nb_correct / nb_true if nb_true > 0 else 0 return score
[ "def", "recall_score", "(", "y_true", ",", "y_pred", ",", "average", "=", "'micro'", ",", "suffix", "=", "False", ")", ":", "true_entities", "=", "set", "(", "get_entities", "(", "y_true", ",", "suffix", ")", ")", "pred_entities", "=", "set", "(", "get_entities", "(", "y_pred", ",", "suffix", ")", ")", "nb_correct", "=", "len", "(", "true_entities", "&", "pred_entities", ")", "nb_true", "=", "len", "(", "true_entities", ")", "score", "=", "nb_correct", "/", "nb_true", "if", "nb_true", ">", "0", "else", "0", "return", "score" ]
Compute the recall. The recall is the ratio ``tp / (tp + fn)`` where ``tp`` is the number of true positives and ``fn`` the number of false negatives. The recall is intuitively the ability of the classifier to find all the positive samples. The best value is 1 and the worst value is 0. Args: y_true : 2d array. Ground truth (correct) target values. y_pred : 2d array. Estimated targets as returned by a tagger. Returns: score : float. Example: >>> from seqeval.metrics import recall_score >>> y_true = [['O', 'O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']] >>> y_pred = [['O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']] >>> recall_score(y_true, y_pred) 0.50
[ "Compute", "the", "recall", "." ]
train
https://github.com/chakki-works/seqeval/blob/f1e5ff1a94da11500c47fd11d4d72617f7f55911/seqeval/metrics/sequence_labeling.py#L221-L252
0.002591
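The recall computation above reduces to set arithmetic over extracted entity spans: recall = |true ∩ pred| / |true|. The docstring's 0.50 can be reproduced by hand; in that example the MISC span is predicted with the wrong start, so only the PER span matches. A sketch with illustrative (type, start, end) spans:

    true_entities = {('MISC', 3, 5), ('PER', 7, 8)}   # illustrative offsets over the flattened sequences
    pred_entities = {('MISC', 2, 5), ('PER', 7, 8)}   # MISC predicted one token early, so it does not match
    recall = len(true_entities & pred_entities) / len(true_entities)
    assert recall == 0.5  # 1 matching entity out of 2 true entities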
dinoboff/schemabuilder
src/schemabuilder/schema.py
Ref.validate
def validate(self, data): """Validate the data against the schema. """ validator = self._schema.validator(self._id) validator.validate(data)
python
def validate(self, data): """Validate the data against the schema. """ validator = self._schema.validator(self._id) validator.validate(data)
[ "def", "validate", "(", "self", ",", "data", ")", ":", "validator", "=", "self", ".", "_schema", ".", "validator", "(", "self", ".", "_id", ")", "validator", ".", "validate", "(", "data", ")" ]
Validate the data against the schema.
[ "Validate", "the", "data", "against", "the", "schema", "." ]
train
https://github.com/dinoboff/schemabuilder/blob/9b9f3de2528836ad069f458d3d68b1b5f4efbe94/src/schemabuilder/schema.py#L93-L98
0.011561
praw-dev/prawcore
prawcore/rate_limit.py
RateLimiter.update
def update(self, response_headers): """Update the state of the rate limiter based on the response headers. This method should only be called following a HTTP request to reddit. Response headers that do not contain x-ratelimit fields will be treated as a single request. This behavior is to error on the safe-side as such responses should trigger exceptions that indicate invalid behavior. """ if "x-ratelimit-remaining" not in response_headers: if self.remaining is not None: self.remaining -= 1 self.used += 1 return now = time.time() prev_remaining = self.remaining seconds_to_reset = int(response_headers["x-ratelimit-reset"]) self.remaining = float(response_headers["x-ratelimit-remaining"]) self.used = int(response_headers["x-ratelimit-used"]) self.reset_timestamp = now + seconds_to_reset if self.remaining <= 0: self.next_request_timestamp = self.reset_timestamp return if prev_remaining is not None and prev_remaining > self.remaining: estimated_clients = prev_remaining - self.remaining else: estimated_clients = 1.0 self.next_request_timestamp = min( self.reset_timestamp, now + (estimated_clients * seconds_to_reset / self.remaining), )
python
def update(self, response_headers): """Update the state of the rate limiter based on the response headers. This method should only be called following a HTTP request to reddit. Response headers that do not contain x-ratelimit fields will be treated as a single request. This behavior is to error on the safe-side as such responses should trigger exceptions that indicate invalid behavior. """ if "x-ratelimit-remaining" not in response_headers: if self.remaining is not None: self.remaining -= 1 self.used += 1 return now = time.time() prev_remaining = self.remaining seconds_to_reset = int(response_headers["x-ratelimit-reset"]) self.remaining = float(response_headers["x-ratelimit-remaining"]) self.used = int(response_headers["x-ratelimit-used"]) self.reset_timestamp = now + seconds_to_reset if self.remaining <= 0: self.next_request_timestamp = self.reset_timestamp return if prev_remaining is not None and prev_remaining > self.remaining: estimated_clients = prev_remaining - self.remaining else: estimated_clients = 1.0 self.next_request_timestamp = min( self.reset_timestamp, now + (estimated_clients * seconds_to_reset / self.remaining), )
[ "def", "update", "(", "self", ",", "response_headers", ")", ":", "if", "\"x-ratelimit-remaining\"", "not", "in", "response_headers", ":", "if", "self", ".", "remaining", "is", "not", "None", ":", "self", ".", "remaining", "-=", "1", "self", ".", "used", "+=", "1", "return", "now", "=", "time", ".", "time", "(", ")", "prev_remaining", "=", "self", ".", "remaining", "seconds_to_reset", "=", "int", "(", "response_headers", "[", "\"x-ratelimit-reset\"", "]", ")", "self", ".", "remaining", "=", "float", "(", "response_headers", "[", "\"x-ratelimit-remaining\"", "]", ")", "self", ".", "used", "=", "int", "(", "response_headers", "[", "\"x-ratelimit-used\"", "]", ")", "self", ".", "reset_timestamp", "=", "now", "+", "seconds_to_reset", "if", "self", ".", "remaining", "<=", "0", ":", "self", ".", "next_request_timestamp", "=", "self", ".", "reset_timestamp", "return", "if", "prev_remaining", "is", "not", "None", "and", "prev_remaining", ">", "self", ".", "remaining", ":", "estimated_clients", "=", "prev_remaining", "-", "self", ".", "remaining", "else", ":", "estimated_clients", "=", "1.0", "self", ".", "next_request_timestamp", "=", "min", "(", "self", ".", "reset_timestamp", ",", "now", "+", "(", "estimated_clients", "*", "seconds_to_reset", "/", "self", ".", "remaining", ")", ",", ")" ]
Update the state of the rate limiter based on the response headers. This method should only be called following a HTTP request to reddit. Response headers that do not contain x-ratelimit fields will be treated as a single request. This behavior is to error on the safe-side as such responses should trigger exceptions that indicate invalid behavior.
[ "Update", "the", "state", "of", "the", "rate", "limiter", "based", "on", "the", "response", "headers", "." ]
train
https://github.com/praw-dev/prawcore/blob/b16ae88a1f2bf98095ed6fe64851cb7add7ed752/prawcore/rate_limit.py#L53-L89
0.001401
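The throttling arithmetic in update can be traced with concrete numbers: when the remaining budget drops by more than one between responses, the limiter assumes other clients share the budget and spreads its own requests over the reset window. A standalone sketch of that math (the header values are illustrative):

    import time

    now = time.time()
    prev_remaining, remaining = 60.0, 58.0      # x-ratelimit-remaining, before and after
    seconds_to_reset = 300                      # x-ratelimit-reset
    reset_timestamp = now + seconds_to_reset

    estimated_clients = prev_remaining - remaining  # 2.0: another client also consumed budget
    next_request_timestamp = min(
        reset_timestamp,
        now + estimated_clients * seconds_to_reset / remaining,
    )
    # ~10.3 seconds from now: ~2 clients sharing 58 requests over 300 seconds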
msiedlarek/wiring
wiring/interface.py
isimplementation
def isimplementation(obj, interfaces): """ Returns `True` if `obj` is a class implementing all of `interfaces` or an instance of such class. `interfaces` can be a single :term:`interface` class or an iterable of interface classes. """ if not inspect.isclass(obj): return isimplementation(obj.__class__, interfaces) if not isinstance(interfaces, collections.Iterable): interfaces = [interfaces] return frozenset(interfaces).issubset( get_implemented_interfaces(obj) )
python
def isimplementation(obj, interfaces): """ Returns `True` if `obj` is a class implementing all of `interfaces` or an instance of such class. `interfaces` can be a single :term:`interface` class or an iterable of interface classes. """ if not inspect.isclass(obj): return isimplementation(obj.__class__, interfaces) if not isinstance(interfaces, collections.Iterable): interfaces = [interfaces] return frozenset(interfaces).issubset( get_implemented_interfaces(obj) )
[ "def", "isimplementation", "(", "obj", ",", "interfaces", ")", ":", "if", "not", "inspect", ".", "isclass", "(", "obj", ")", ":", "return", "isimplementation", "(", "obj", ".", "__class__", ",", "interfaces", ")", "if", "not", "isinstance", "(", "interfaces", ",", "collections", ".", "Iterable", ")", ":", "interfaces", "=", "[", "interfaces", "]", "return", "frozenset", "(", "interfaces", ")", ".", "issubset", "(", "get_implemented_interfaces", "(", "obj", ")", ")" ]
Returns `True` if `obj` is a class implementing all of `interfaces` or an instance of such class. `interfaces` can be a single :term:`interface` class or an iterable of interface classes.
[ "Returns", "True", "if", "obj", "is", "a", "class", "implementing", "all", "of", "interfaces", "or", "an", "instance", "of", "such", "class", "." ]
train
https://github.com/msiedlarek/wiring/blob/c32165b680356fe9f1e422a1d11127f867065f94/wiring/interface.py#L364-L378
0.001908
ibis-project/ibis
ibis/impala/client.py
ImpalaClient.create_table
def create_table( self, table_name, obj=None, schema=None, database=None, external=False, force=False, # HDFS options format='parquet', location=None, partition=None, like_parquet=None, ): """ Create a new table in Impala using an Ibis table expression. This is currently designed for tables whose data is stored in HDFS (or eventually other filesystems). Parameters ---------- table_name : string obj : TableExpr or pandas.DataFrame, optional If passed, creates table from select statement results schema : ibis.Schema, optional Mutually exclusive with expr, creates an empty table with a particular schema database : string, default None (optional) force : boolean, default False Do not create table if table with indicated name already exists external : boolean, default False Create an external table; Impala will not delete the underlying data when the table is dropped format : {'parquet'} location : string, default None Specify the directory location where Impala reads and writes files for the table partition : list of strings Must pass a schema to use this. Cannot partition from an expression (create-table-as-select) like_parquet : string (HDFS path), optional Can specify in lieu of a schema Examples -------- >>> con.create_table('new_table_name', table_expr) # doctest: +SKIP """ if like_parquet is not None: raise NotImplementedError if obj is not None: if isinstance(obj, pd.DataFrame): from ibis.impala.pandas_interop import write_temp_dataframe writer, to_insert = write_temp_dataframe(self, obj) else: to_insert = obj ast = self._build_ast(to_insert, ImpalaDialect.make_context()) select = ast.queries[0] statement = ddl.CTAS( table_name, select, database=database, can_exist=force, format=format, external=external, partition=partition, path=location, ) elif schema is not None: statement = ddl.CreateTableWithSchema( table_name, schema, database=database, format=format, can_exist=force, external=external, path=location, partition=partition, ) else: raise com.IbisError('Must pass expr or schema') return self._execute(statement)
python
def create_table( self, table_name, obj=None, schema=None, database=None, external=False, force=False, # HDFS options format='parquet', location=None, partition=None, like_parquet=None, ): """ Create a new table in Impala using an Ibis table expression. This is currently designed for tables whose data is stored in HDFS (or eventually other filesystems). Parameters ---------- table_name : string obj : TableExpr or pandas.DataFrame, optional If passed, creates table from select statement results schema : ibis.Schema, optional Mutually exclusive with expr, creates an empty table with a particular schema database : string, default None (optional) force : boolean, default False Do not create table if table with indicated name already exists external : boolean, default False Create an external table; Impala will not delete the underlying data when the table is dropped format : {'parquet'} location : string, default None Specify the directory location where Impala reads and writes files for the table partition : list of strings Must pass a schema to use this. Cannot partition from an expression (create-table-as-select) like_parquet : string (HDFS path), optional Can specify in lieu of a schema Examples -------- >>> con.create_table('new_table_name', table_expr) # doctest: +SKIP """ if like_parquet is not None: raise NotImplementedError if obj is not None: if isinstance(obj, pd.DataFrame): from ibis.impala.pandas_interop import write_temp_dataframe writer, to_insert = write_temp_dataframe(self, obj) else: to_insert = obj ast = self._build_ast(to_insert, ImpalaDialect.make_context()) select = ast.queries[0] statement = ddl.CTAS( table_name, select, database=database, can_exist=force, format=format, external=external, partition=partition, path=location, ) elif schema is not None: statement = ddl.CreateTableWithSchema( table_name, schema, database=database, format=format, can_exist=force, external=external, path=location, partition=partition, ) else: raise com.IbisError('Must pass expr or schema') return self._execute(statement)
[ "def", "create_table", "(", "self", ",", "table_name", ",", "obj", "=", "None", ",", "schema", "=", "None", ",", "database", "=", "None", ",", "external", "=", "False", ",", "force", "=", "False", ",", "# HDFS options", "format", "=", "'parquet'", ",", "location", "=", "None", ",", "partition", "=", "None", ",", "like_parquet", "=", "None", ",", ")", ":", "if", "like_parquet", "is", "not", "None", ":", "raise", "NotImplementedError", "if", "obj", "is", "not", "None", ":", "if", "isinstance", "(", "obj", ",", "pd", ".", "DataFrame", ")", ":", "from", "ibis", ".", "impala", ".", "pandas_interop", "import", "write_temp_dataframe", "writer", ",", "to_insert", "=", "write_temp_dataframe", "(", "self", ",", "obj", ")", "else", ":", "to_insert", "=", "obj", "ast", "=", "self", ".", "_build_ast", "(", "to_insert", ",", "ImpalaDialect", ".", "make_context", "(", ")", ")", "select", "=", "ast", ".", "queries", "[", "0", "]", "statement", "=", "ddl", ".", "CTAS", "(", "table_name", ",", "select", ",", "database", "=", "database", ",", "can_exist", "=", "force", ",", "format", "=", "format", ",", "external", "=", "external", ",", "partition", "=", "partition", ",", "path", "=", "location", ",", ")", "elif", "schema", "is", "not", "None", ":", "statement", "=", "ddl", ".", "CreateTableWithSchema", "(", "table_name", ",", "schema", ",", "database", "=", "database", ",", "format", "=", "format", ",", "can_exist", "=", "force", ",", "external", "=", "external", ",", "path", "=", "location", ",", "partition", "=", "partition", ",", ")", "else", ":", "raise", "com", ".", "IbisError", "(", "'Must pass expr or schema'", ")", "return", "self", ".", "_execute", "(", "statement", ")" ]
Create a new table in Impala using an Ibis table expression. This is currently designed for tables whose data is stored in HDFS (or eventually other filesystems). Parameters ---------- table_name : string obj : TableExpr or pandas.DataFrame, optional If passed, creates table from select statement results schema : ibis.Schema, optional Mutually exclusive with expr, creates an empty table with a particular schema database : string, default None (optional) force : boolean, default False Do not create table if table with indicated name already exists external : boolean, default False Create an external table; Impala will not delete the underlying data when the table is dropped format : {'parquet'} location : string, default None Specify the directory location where Impala reads and writes files for the table partition : list of strings Must pass a schema to use this. Cannot partition from an expression (create-table-as-select) like_parquet : string (HDFS path), optional Can specify in lieu of a schema Examples -------- >>> con.create_table('new_table_name', table_expr) # doctest: +SKIP
[ "Create", "a", "new", "table", "in", "Impala", "using", "an", "Ibis", "table", "expression", ".", "This", "is", "currently", "designed", "for", "tables", "whose", "data", "is", "stored", "in", "HDFS", "(", "or", "eventually", "other", "filesystems", ")", "." ]
train
https://github.com/ibis-project/ibis/blob/1e39a5fd9ef088b45c155e8a5f541767ee8ef2e7/ibis/impala/client.py#L1117-L1201
0.001037
mlavin/django-all-access
allaccess/clients.py
BaseOAuthClient.get_profile_info
def get_profile_info(self, raw_token, profile_info_params={}): "Fetch user profile information." try: response = self.request('get', self.provider.profile_url, token=raw_token, params=profile_info_params) response.raise_for_status() except RequestException as e: logger.error('Unable to fetch user profile: {0}'.format(e)) return None else: return response.json() or response.text
python
def get_profile_info(self, raw_token, profile_info_params={}): "Fetch user profile information." try: response = self.request('get', self.provider.profile_url, token=raw_token, params=profile_info_params) response.raise_for_status() except RequestException as e: logger.error('Unable to fetch user profile: {0}'.format(e)) return None else: return response.json() or response.text
[ "def", "get_profile_info", "(", "self", ",", "raw_token", ",", "profile_info_params", "=", "{", "}", ")", ":", "try", ":", "response", "=", "self", ".", "request", "(", "'get'", ",", "self", ".", "provider", ".", "profile_url", ",", "token", "=", "raw_token", ",", "params", "=", "profile_info_params", ")", "response", ".", "raise_for_status", "(", ")", "except", "RequestException", "as", "e", ":", "logger", ".", "error", "(", "'Unable to fetch user profile: {0}'", ".", "format", "(", "e", ")", ")", "return", "None", "else", ":", "return", "response", ".", "json", "(", ")", "or", "response", ".", "text" ]
Fetch user profile information.
[ "Fetch", "user", "profile", "information", "." ]
train
https://github.com/mlavin/django-all-access/blob/4b15b6c9dedf8080a7c477e0af1142c609ec5598/allaccess/clients.py#L29-L38
0.006356
mitsei/dlkit
dlkit/json_/grading/objects.py
GradeForm.set_input_score_end_range
def set_input_score_end_range(self, score): """Sets the input score end range. arg: score (decimal): the new end range raise: InvalidArgument - ``score`` is invalid raise: NoAccess - ``range`` cannot be modified *compliance: mandatory -- This method must be implemented.* """ # Implemented from template for osid.grading.GradeSystemForm.set_lowest_numeric_score if self.get_input_score_end_range_metadata().is_read_only(): raise errors.NoAccess() try: score = float(score) except ValueError: raise errors.InvalidArgument() if not self._is_valid_decimal(score, self.get_input_score_end_range_metadata()): raise errors.InvalidArgument() self._my_map['inputScoreEndRange'] = score
python
def set_input_score_end_range(self, score): """Sets the input score end range. arg: score (decimal): the new end range raise: InvalidArgument - ``score`` is invalid raise: NoAccess - ``range`` cannot be modified *compliance: mandatory -- This method must be implemented.* """ # Implemented from template for osid.grading.GradeSystemForm.set_lowest_numeric_score if self.get_input_score_end_range_metadata().is_read_only(): raise errors.NoAccess() try: score = float(score) except ValueError: raise errors.InvalidArgument() if not self._is_valid_decimal(score, self.get_input_score_end_range_metadata()): raise errors.InvalidArgument() self._my_map['inputScoreEndRange'] = score
[ "def", "set_input_score_end_range", "(", "self", ",", "score", ")", ":", "# Implemented from template for osid.grading.GradeSystemForm.set_lowest_numeric_score", "if", "self", ".", "get_input_score_end_range_metadata", "(", ")", ".", "is_read_only", "(", ")", ":", "raise", "errors", ".", "NoAccess", "(", ")", "try", ":", "score", "=", "float", "(", "score", ")", "except", "ValueError", ":", "raise", "errors", ".", "InvalidArgument", "(", ")", "if", "not", "self", ".", "_is_valid_decimal", "(", "score", ",", "self", ".", "get_input_score_end_range_metadata", "(", ")", ")", ":", "raise", "errors", ".", "InvalidArgument", "(", ")", "self", ".", "_my_map", "[", "'inputScoreEndRange'", "]", "=", "score" ]
Sets the input score end range. arg: score (decimal): the new end range raise: InvalidArgument - ``score`` is invalid raise: NoAccess - ``range`` cannot be modified *compliance: mandatory -- This method must be implemented.*
[ "Sets", "the", "input", "score", "end", "range", "." ]
train
https://github.com/mitsei/dlkit/blob/445f968a175d61c8d92c0f617a3c17dc1dc7c584/dlkit/json_/grading/objects.py#L255-L273
0.004808
miguelgrinberg/python-engineio
engineio/async_drivers/gevent_uwsgi.py
uWSGIWebSocket._decode_received
def _decode_received(self, msg): """Returns either bytes or str, depending on message type.""" if not isinstance(msg, six.binary_type): # already decoded - do nothing return msg # only decode from utf-8 if message is not binary data type = six.byte2int(msg[0:1]) if type >= 48: # no binary return msg.decode('utf-8') # binary message, don't try to decode return msg
python
def _decode_received(self, msg): """Returns either bytes or str, depending on message type.""" if not isinstance(msg, six.binary_type): # already decoded - do nothing return msg # only decode from utf-8 if message is not binary data type = six.byte2int(msg[0:1]) if type >= 48: # no binary return msg.decode('utf-8') # binary message, don't try to decode return msg
[ "def", "_decode_received", "(", "self", ",", "msg", ")", ":", "if", "not", "isinstance", "(", "msg", ",", "six", ".", "binary_type", ")", ":", "# already decoded - do nothing", "return", "msg", "# only decode from utf-8 if message is not binary data", "type", "=", "six", ".", "byte2int", "(", "msg", "[", "0", ":", "1", "]", ")", "if", "type", ">=", "48", ":", "# no binary", "return", "msg", ".", "decode", "(", "'utf-8'", ")", "# binary message, don't try to decode", "return", "msg" ]
Returns either bytes or str, depending on message type.
[ "Returns", "either", "bytes", "or", "str", "depending", "on", "message", "type", "." ]
train
https://github.com/miguelgrinberg/python-engineio/blob/261fd67103cb5d9a44369415748e66fdf62de6fb/engineio/async_drivers/gevent_uwsgi.py#L87-L97
0.004367
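The magic number 48 in _decode_received is ord('0'): engine.io frames a text packet with an ASCII-digit packet type ('0' through '6'), so a first byte of 48 or above marks UTF-8 text, while anything lower is treated as binary and passed through untouched. A small sketch of the same check (the sample payloads are illustrative):

    text_frame = b'4hello'           # '4' == message packet, text payload
    binary_frame = b'\x04\x01\x02'   # first byte below ord('0') -> keep as bytes

    for msg in (text_frame, binary_frame):
        first = msg[0]  # Python 3 indexing yields an int, like six.byte2int(msg[0:1])
        decoded = msg.decode('utf-8') if first >= 48 else msg
        print(type(decoded).__name__)  # prints 'str', then 'bytes'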
jaredLunde/vital-tools
vital/debug/__init__.py
Logg.format_message
def format_message(self, message): """ Formats a message with :class:Look """ look = Look(message) return look.pretty(display=False)
python
def format_message(self, message): """ Formats a message with :class:Look """ look = Look(message) return look.pretty(display=False)
[ "def", "format_message", "(", "self", ",", "message", ")", ":", "look", "=", "Look", "(", "message", ")", "return", "look", ".", "pretty", "(", "display", "=", "False", ")" ]
Formats a message with :class:Look
[ "Formats", "a", "message", "with", ":", "class", ":", "Look" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/debug/__init__.py#L1804-L1807
0.012821
saltstack/salt
salt/transport/tcp.py
TCPPubServerChannel._publish_daemon
def _publish_daemon(self, **kwargs): ''' Bind to the interface specified in the configuration file ''' salt.utils.process.appendproctitle(self.__class__.__name__) log_queue = kwargs.get('log_queue') if log_queue is not None: salt.log.setup.set_multiprocessing_logging_queue(log_queue) log_queue_level = kwargs.get('log_queue_level') if log_queue_level is not None: salt.log.setup.set_multiprocessing_logging_level(log_queue_level) salt.log.setup.setup_multiprocessing_logging(log_queue) # Check if io_loop was set outside if self.io_loop is None: self.io_loop = tornado.ioloop.IOLoop.current() # Spin up the publisher pub_server = PubServer(self.opts, io_loop=self.io_loop) sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) _set_tcp_keepalive(sock, self.opts) sock.setblocking(0) sock.bind((self.opts['interface'], int(self.opts['publish_port']))) sock.listen(self.backlog) # pub_server will take ownership of the socket pub_server.add_socket(sock) # Set up Salt IPC server if self.opts.get('ipc_mode', '') == 'tcp': pull_uri = int(self.opts.get('tcp_master_publish_pull', 4514)) else: pull_uri = os.path.join(self.opts['sock_dir'], 'publish_pull.ipc') pull_sock = salt.transport.ipc.IPCMessageServer( self.opts, pull_uri, io_loop=self.io_loop, payload_handler=pub_server.publish_payload, ) # Securely create socket log.info('Starting the Salt Puller on %s', pull_uri) with salt.utils.files.set_umask(0o177): pull_sock.start() # run forever try: self.io_loop.start() except (KeyboardInterrupt, SystemExit): salt.log.setup.shutdown_multiprocessing_logging()
python
def _publish_daemon(self, **kwargs): ''' Bind to the interface specified in the configuration file ''' salt.utils.process.appendproctitle(self.__class__.__name__) log_queue = kwargs.get('log_queue') if log_queue is not None: salt.log.setup.set_multiprocessing_logging_queue(log_queue) log_queue_level = kwargs.get('log_queue_level') if log_queue_level is not None: salt.log.setup.set_multiprocessing_logging_level(log_queue_level) salt.log.setup.setup_multiprocessing_logging(log_queue) # Check if io_loop was set outside if self.io_loop is None: self.io_loop = tornado.ioloop.IOLoop.current() # Spin up the publisher pub_server = PubServer(self.opts, io_loop=self.io_loop) sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) _set_tcp_keepalive(sock, self.opts) sock.setblocking(0) sock.bind((self.opts['interface'], int(self.opts['publish_port']))) sock.listen(self.backlog) # pub_server will take ownership of the socket pub_server.add_socket(sock) # Set up Salt IPC server if self.opts.get('ipc_mode', '') == 'tcp': pull_uri = int(self.opts.get('tcp_master_publish_pull', 4514)) else: pull_uri = os.path.join(self.opts['sock_dir'], 'publish_pull.ipc') pull_sock = salt.transport.ipc.IPCMessageServer( self.opts, pull_uri, io_loop=self.io_loop, payload_handler=pub_server.publish_payload, ) # Securely create socket log.info('Starting the Salt Puller on %s', pull_uri) with salt.utils.files.set_umask(0o177): pull_sock.start() # run forever try: self.io_loop.start() except (KeyboardInterrupt, SystemExit): salt.log.setup.shutdown_multiprocessing_logging()
[ "def", "_publish_daemon", "(", "self", ",", "*", "*", "kwargs", ")", ":", "salt", ".", "utils", ".", "process", ".", "appendproctitle", "(", "self", ".", "__class__", ".", "__name__", ")", "log_queue", "=", "kwargs", ".", "get", "(", "'log_queue'", ")", "if", "log_queue", "is", "not", "None", ":", "salt", ".", "log", ".", "setup", ".", "set_multiprocessing_logging_queue", "(", "log_queue", ")", "log_queue_level", "=", "kwargs", ".", "get", "(", "'log_queue_level'", ")", "if", "log_queue_level", "is", "not", "None", ":", "salt", ".", "log", ".", "setup", ".", "set_multiprocessing_logging_level", "(", "log_queue_level", ")", "salt", ".", "log", ".", "setup", ".", "setup_multiprocessing_logging", "(", "log_queue", ")", "# Check if io_loop was set outside", "if", "self", ".", "io_loop", "is", "None", ":", "self", ".", "io_loop", "=", "tornado", ".", "ioloop", ".", "IOLoop", ".", "current", "(", ")", "# Spin up the publisher", "pub_server", "=", "PubServer", "(", "self", ".", "opts", ",", "io_loop", "=", "self", ".", "io_loop", ")", "sock", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_STREAM", ")", "sock", ".", "setsockopt", "(", "socket", ".", "SOL_SOCKET", ",", "socket", ".", "SO_REUSEADDR", ",", "1", ")", "_set_tcp_keepalive", "(", "sock", ",", "self", ".", "opts", ")", "sock", ".", "setblocking", "(", "0", ")", "sock", ".", "bind", "(", "(", "self", ".", "opts", "[", "'interface'", "]", ",", "int", "(", "self", ".", "opts", "[", "'publish_port'", "]", ")", ")", ")", "sock", ".", "listen", "(", "self", ".", "backlog", ")", "# pub_server will take ownership of the socket", "pub_server", ".", "add_socket", "(", "sock", ")", "# Set up Salt IPC server", "if", "self", ".", "opts", ".", "get", "(", "'ipc_mode'", ",", "''", ")", "==", "'tcp'", ":", "pull_uri", "=", "int", "(", "self", ".", "opts", ".", "get", "(", "'tcp_master_publish_pull'", ",", "4514", ")", ")", "else", ":", "pull_uri", "=", "os", ".", "path", ".", "join", "(", "self", ".", "opts", "[", "'sock_dir'", "]", ",", "'publish_pull.ipc'", ")", "pull_sock", "=", "salt", ".", "transport", ".", "ipc", ".", "IPCMessageServer", "(", "self", ".", "opts", ",", "pull_uri", ",", "io_loop", "=", "self", ".", "io_loop", ",", "payload_handler", "=", "pub_server", ".", "publish_payload", ",", ")", "# Securely create socket", "log", ".", "info", "(", "'Starting the Salt Puller on %s'", ",", "pull_uri", ")", "with", "salt", ".", "utils", ".", "files", ".", "set_umask", "(", "0o177", ")", ":", "pull_sock", ".", "start", "(", ")", "# run forever", "try", ":", "self", ".", "io_loop", ".", "start", "(", ")", "except", "(", "KeyboardInterrupt", ",", "SystemExit", ")", ":", "salt", ".", "log", ".", "setup", ".", "shutdown_multiprocessing_logging", "(", ")" ]
Bind to the interface specified in the configuration file
[ "Bind", "to", "the", "interface", "specified", "in", "the", "configuration", "file" ]
train
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/transport/tcp.py#L1418-L1469
0.000986
ThreatConnect-Inc/tcex
tcex/tcex_ti_batch.py
TcExBatch.errors
def errors(self, batch_id, halt_on_error=True): """Retrieve Batch errors from the ThreatConnect API. .. code-block:: javascript [{ "errorReason": "Incident incident-001 has an invalid status.", "errorSource": "incident-001 is not valid." }, { "errorReason": "Incident incident-002 has an invalid status.", "errorSource":"incident-002 is not valid." }] Args: batch_id (str): The ID returned from the ThreatConnect API for the current batch job. halt_on_error (bool, default:True): If True any exception will raise an error. """ errors = [] try: r = self.tcex.session.get('/v2/batch/{}/errors'.format(batch_id)) # if r.status_code == 404: # time.sleep(5) # allow time for errors to be processed # r = self.tcex.session.get('/v2/batch/{}/errors'.format(batch_id)) self.tcex.log.debug( 'Retrieve Errors for ID {}: status code {}, errors {}'.format( batch_id, r.status_code, r.text ) ) # self.tcex.log.debug('Retrieve Errors URL {}'.format(r.url)) # API does not return correct content type if r.ok: errors = json.loads(r.text) # temporarily process errors to find "critical" errors. # FR in core to return error codes. for error in errors: error_reason = error.get('errorReason') for error_msg in self._critical_failures: if re.findall(error_msg, error_reason): self.tcex.handle_error(10500, [error_reason], halt_on_error) return errors except Exception as e: self.tcex.handle_error(560, [e], halt_on_error)
python
def errors(self, batch_id, halt_on_error=True):
    """Retrieve Batch errors from the ThreatConnect API.

    .. code-block:: javascript

        [{
            "errorReason": "Incident incident-001 has an invalid status.",
            "errorSource": "incident-001 is not valid."
        }, {
            "errorReason": "Incident incident-002 has an invalid status.",
            "errorSource": "incident-002 is not valid."
        }]

    Args:
        batch_id (str): The ID returned from the ThreatConnect API for the current batch job.
        halt_on_error (bool, default:True): If True any exception will raise an error.
    """
    errors = []
    try:
        r = self.tcex.session.get('/v2/batch/{}/errors'.format(batch_id))
        # if r.status_code == 404:
        #     time.sleep(5)  # allow time for errors to be processed
        #     r = self.tcex.session.get('/v2/batch/{}/errors'.format(batch_id))
        self.tcex.log.debug(
            'Retrieve Errors for ID {}: status code {}, errors {}'.format(
                batch_id, r.status_code, r.text
            )
        )
        # self.tcex.log.debug('Retrieve Errors URL {}'.format(r.url))
        # API does not return correct content type
        if r.ok:
            errors = json.loads(r.text)
        # temporarily process errors to find "critical" errors.
        # FR in core to return error codes.
        for error in errors:
            error_reason = error.get('errorReason')
            for error_msg in self._critical_failures:
                if re.findall(error_msg, error_reason):
                    self.tcex.handle_error(10500, [error_reason], halt_on_error)
        return errors
    except Exception as e:
        self.tcex.handle_error(560, [e], halt_on_error)
[ "def", "errors", "(", "self", ",", "batch_id", ",", "halt_on_error", "=", "True", ")", ":", "errors", "=", "[", "]", "try", ":", "r", "=", "self", ".", "tcex", ".", "session", ".", "get", "(", "'/v2/batch/{}/errors'", ".", "format", "(", "batch_id", ")", ")", "# if r.status_code == 404:", "# time.sleep(5) # allow time for errors to be processed", "# r = self.tcex.session.get('/v2/batch/{}/errors'.format(batch_id))", "self", ".", "tcex", ".", "log", ".", "debug", "(", "'Retrieve Errors for ID {}: status code {}, errors {}'", ".", "format", "(", "batch_id", ",", "r", ".", "status_code", ",", "r", ".", "text", ")", ")", "# self.tcex.log.debug('Retrieve Errors URL {}'.format(r.url))", "# API does not return correct content type", "if", "r", ".", "ok", ":", "errors", "=", "json", ".", "loads", "(", "r", ".", "text", ")", "# temporarily process errors to find \"critical\" errors.", "# FR in core to return error codes.", "for", "error", "in", "errors", ":", "error_reason", "=", "error", ".", "get", "(", "'errorReason'", ")", "for", "error_msg", "in", "self", ".", "_critical_failures", ":", "if", "re", ".", "findall", "(", "error_msg", ",", "error_reason", ")", ":", "self", ".", "tcex", ".", "handle_error", "(", "10500", ",", "[", "error_reason", "]", ",", "halt_on_error", ")", "return", "errors", "except", "Exception", "as", "e", ":", "self", ".", "tcex", ".", "handle_error", "(", "560", ",", "[", "e", "]", ",", "halt_on_error", ")" ]
Retrieve Batch errors from the ThreatConnect API.

.. code-block:: javascript

    [{
        "errorReason": "Incident incident-001 has an invalid status.",
        "errorSource": "incident-001 is not valid."
    }, {
        "errorReason": "Incident incident-002 has an invalid status.",
        "errorSource": "incident-002 is not valid."
    }]

Args:
    batch_id (str): The ID returned from the ThreatConnect API for the current batch job.
    halt_on_error (bool, default:True): If True any exception will raise an error.
[ "Retrieve", "Batch", "errors", "to", "ThreatConnect", "API", "." ]
train
https://github.com/ThreatConnect-Inc/tcex/blob/dd4d7a1ef723af1561687120191886b9a2fd4b47/tcex/tcex_ti_batch.py#L657-L698
0.003156
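The critical-error scan inside errors() above is just a regex pass over the returned payload. A minimal standalone sketch; the error records and the pattern list are assumed values, not real ThreatConnect output or the module's actual _critical_failures contents:

import re

errors = [{'errorReason': 'Incident incident-001 has an invalid status.'},
          {'errorReason': 'Incident incident-002 is missing a name.'}]
critical_patterns = [r'invalid status']  # hypothetical stand-in for _critical_failures

for error in errors:
    reason = error.get('errorReason', '')
    if any(re.findall(pattern, reason) for pattern in critical_patterns):
        # the batch module would call self.tcex.handle_error(10500, ...) here
        print('critical:', reason)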
log2timeline/plaso
plaso/parsers/dsv_parser.py
DSVParser._HasExpectedLineLength
def _HasExpectedLineLength(self, file_object): """Determines if a file begins with lines of the expected length. As we know the maximum length of valid lines in the DSV file, the presence of lines longer than this indicates that the file will not be parsed successfully, without reading excessive data from a large file. Args: file_object (dfvfs.FileIO): file-like object. Returns: bool: True if the file has lines of the expected length. """ original_file_position = file_object.tell() line_reader = self._CreateLineReader(file_object) for _ in range(0, 20): # Attempt to read a line that is longer than any line that should be in # the file. sample_line = line_reader.readline(self._maximum_line_length + 1) if len(sample_line) > self._maximum_line_length: file_object.seek(original_file_position) return False file_object.seek(original_file_position) return True
python
def _HasExpectedLineLength(self, file_object): """Determines if a file begins with lines of the expected length. As we know the maximum length of valid lines in the DSV file, the presence of lines longer than this indicates that the file will not be parsed successfully, without reading excessive data from a large file. Args: file_object (dfvfs.FileIO): file-like object. Returns: bool: True if the file has lines of the expected length. """ original_file_position = file_object.tell() line_reader = self._CreateLineReader(file_object) for _ in range(0, 20): # Attempt to read a line that is longer than any line that should be in # the file. sample_line = line_reader.readline(self._maximum_line_length + 1) if len(sample_line) > self._maximum_line_length: file_object.seek(original_file_position) return False file_object.seek(original_file_position) return True
[ "def", "_HasExpectedLineLength", "(", "self", ",", "file_object", ")", ":", "original_file_position", "=", "file_object", ".", "tell", "(", ")", "line_reader", "=", "self", ".", "_CreateLineReader", "(", "file_object", ")", "for", "_", "in", "range", "(", "0", ",", "20", ")", ":", "# Attempt to read a line that is longer than any line that should be in", "# the file.", "sample_line", "=", "line_reader", ".", "readline", "(", "self", ".", "_maximum_line_length", "+", "1", ")", "if", "len", "(", "sample_line", ")", ">", "self", ".", "_maximum_line_length", ":", "file_object", ".", "seek", "(", "original_file_position", ")", "return", "False", "file_object", ".", "seek", "(", "original_file_position", ")", "return", "True" ]
Determines if a file begins with lines of the expected length. As we know the maximum length of valid lines in the DSV file, the presence of lines longer than this indicates that the file will not be parsed successfully, without reading excessive data from a large file. Args: file_object (dfvfs.FileIO): file-like object. Returns: bool: True if the file has lines of the expected length.
[ "Determines", "if", "a", "file", "begins", "with", "lines", "of", "the", "expected", "length", "." ]
train
https://github.com/log2timeline/plaso/blob/9c564698d2da3ffbe23607a3c54c0582ea18a6cc/plaso/parsers/dsv_parser.py#L168-L191
0.005165
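The sniffing trick in _HasExpectedLineLength is worth seeing in isolation: ask readline for one byte more than the longest valid line, and any longer result disqualifies the file without reading it all. A self-contained sketch on an in-memory file; the 16-byte limit is an arbitrary assumption:

import io

def has_expected_line_length(file_object, maximum_line_length, lines_to_check=20):
    # Read at most maximum_line_length + 1 bytes per line; getting back more
    # than maximum_line_length means the file cannot match the expected format.
    position = file_object.tell()
    try:
        for _ in range(lines_to_check):
            if len(file_object.readline(maximum_line_length + 1)) > maximum_line_length:
                return False
        return True
    finally:
        file_object.seek(position)

print(has_expected_line_length(io.BytesIO(b'a,b,c\n1,2,3\n'), maximum_line_length=16))  # True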
awslabs/aws-sam-cli
samcli/local/docker/lambda_build_container.py
LambdaBuildContainer._convert_to_container_dirs
def _convert_to_container_dirs(host_paths_to_convert, host_to_container_path_mapping): """ Use this method to convert a list of host paths to a list of equivalent paths within the container where the given host path is mounted. This is necessary when SAM CLI needs to pass path information to the Lambda Builder running within the container. If a host path is not mounted within the container, then this method simply passes the path to the result without any changes. Ex: [ "/home/foo", "/home/bar", "/home/not/mounted"] => ["/tmp/source", "/tmp/manifest", "/home/not/mounted"] Parameters ---------- host_paths_to_convert : list List of paths in host that needs to be converted host_to_container_path_mapping : dict Mapping of paths in host to the equivalent paths within the container Returns ------- list Equivalent paths within the container """ if not host_paths_to_convert: # Nothing to do return host_paths_to_convert # Make sure the key is absolute host path. Relative paths are tricky to work with because two different # relative paths can point to the same directory ("../foo", "../../foo") mapping = {str(pathlib.Path(p).resolve()): v for p, v in host_to_container_path_mapping.items()} result = [] for original_path in host_paths_to_convert: abspath = str(pathlib.Path(original_path).resolve()) if abspath in mapping: result.append(mapping[abspath]) else: result.append(original_path) LOG.debug("Cannot convert host path '%s' to its equivalent path within the container. " "Host path is not mounted within the container", abspath) return result
python
def _convert_to_container_dirs(host_paths_to_convert, host_to_container_path_mapping): """ Use this method to convert a list of host paths to a list of equivalent paths within the container where the given host path is mounted. This is necessary when SAM CLI needs to pass path information to the Lambda Builder running within the container. If a host path is not mounted within the container, then this method simply passes the path to the result without any changes. Ex: [ "/home/foo", "/home/bar", "/home/not/mounted"] => ["/tmp/source", "/tmp/manifest", "/home/not/mounted"] Parameters ---------- host_paths_to_convert : list List of paths in host that needs to be converted host_to_container_path_mapping : dict Mapping of paths in host to the equivalent paths within the container Returns ------- list Equivalent paths within the container """ if not host_paths_to_convert: # Nothing to do return host_paths_to_convert # Make sure the key is absolute host path. Relative paths are tricky to work with because two different # relative paths can point to the same directory ("../foo", "../../foo") mapping = {str(pathlib.Path(p).resolve()): v for p, v in host_to_container_path_mapping.items()} result = [] for original_path in host_paths_to_convert: abspath = str(pathlib.Path(original_path).resolve()) if abspath in mapping: result.append(mapping[abspath]) else: result.append(original_path) LOG.debug("Cannot convert host path '%s' to its equivalent path within the container. " "Host path is not mounted within the container", abspath) return result
[ "def", "_convert_to_container_dirs", "(", "host_paths_to_convert", ",", "host_to_container_path_mapping", ")", ":", "if", "not", "host_paths_to_convert", ":", "# Nothing to do", "return", "host_paths_to_convert", "# Make sure the key is absolute host path. Relative paths are tricky to work with because two different", "# relative paths can point to the same directory (\"../foo\", \"../../foo\")", "mapping", "=", "{", "str", "(", "pathlib", ".", "Path", "(", "p", ")", ".", "resolve", "(", ")", ")", ":", "v", "for", "p", ",", "v", "in", "host_to_container_path_mapping", ".", "items", "(", ")", "}", "result", "=", "[", "]", "for", "original_path", "in", "host_paths_to_convert", ":", "abspath", "=", "str", "(", "pathlib", ".", "Path", "(", "original_path", ")", ".", "resolve", "(", ")", ")", "if", "abspath", "in", "mapping", ":", "result", ".", "append", "(", "mapping", "[", "abspath", "]", ")", "else", ":", "result", ".", "append", "(", "original_path", ")", "LOG", ".", "debug", "(", "\"Cannot convert host path '%s' to its equivalent path within the container. \"", "\"Host path is not mounted within the container\"", ",", "abspath", ")", "return", "result" ]
Use this method to convert a list of host paths to a list of equivalent paths within the container where the given host path is mounted. This is necessary when SAM CLI needs to pass path information to the Lambda Builder running within the container. If a host path is not mounted within the container, then this method simply passes the path to the result without any changes. Ex: [ "/home/foo", "/home/bar", "/home/not/mounted"] => ["/tmp/source", "/tmp/manifest", "/home/not/mounted"] Parameters ---------- host_paths_to_convert : list List of paths in host that needs to be converted host_to_container_path_mapping : dict Mapping of paths in host to the equivalent paths within the container Returns ------- list Equivalent paths within the container
[ "Use", "this", "method", "to", "convert", "a", "list", "of", "host", "paths", "to", "a", "list", "of", "equivalent", "paths", "within", "the", "container", "where", "the", "given", "host", "path", "is", "mounted", ".", "This", "is", "necessary", "when", "SAM", "CLI", "needs", "to", "pass", "path", "information", "to", "the", "Lambda", "Builder", "running", "within", "the", "container", "." ]
train
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/docker/lambda_build_container.py#L183-L228
0.00675
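The host-to-container translation above reduces to a dictionary lookup keyed on resolved absolute paths. A minimal sketch with made-up mount points (the paths here are illustrative, not real SAM CLI mounts):

import pathlib

def convert_to_container_dirs(host_paths, host_to_container):
    # Resolve keys so '../foo' and an absolute spelling of the same
    # directory compare equal, mirroring the method's normalization step.
    mapping = {str(pathlib.Path(p).resolve()): v for p, v in host_to_container.items()}
    return [mapping.get(str(pathlib.Path(p).resolve()), p) for p in host_paths]

print(convert_to_container_dirs(
    ['/home/foo', '/home/not/mounted'],
    {'/home/foo': '/tmp/source'}))  # ['/tmp/source', '/home/not/mounted']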
stephan-mclean/KickassTorrentsAPI
kat.py
Search.search
def search(self, term=None, category=None, pages=1, url=search_url, sort=None, order=None): """Search a given URL for torrent results.""" if not self.current_url: self.current_url = url if self.current_url == Search.base_url: # Searching home page so no formatting results = self._get_results(self.current_url) self._add_results(results) else: search = self._format_search(term, category) sorting = self._format_sort(sort, order) # Now get the results. for i in range(pages): results = self._get_results(search + "/" + str(self._current_page) + "/" + sorting) self._add_results(results) self._current_page += 1 self._current_page -= 1
python
def search(self, term=None, category=None, pages=1, url=search_url, sort=None, order=None): """Search a given URL for torrent results.""" if not self.current_url: self.current_url = url if self.current_url == Search.base_url: # Searching home page so no formatting results = self._get_results(self.current_url) self._add_results(results) else: search = self._format_search(term, category) sorting = self._format_sort(sort, order) # Now get the results. for i in range(pages): results = self._get_results(search + "/" + str(self._current_page) + "/" + sorting) self._add_results(results) self._current_page += 1 self._current_page -= 1
[ "def", "search", "(", "self", ",", "term", "=", "None", ",", "category", "=", "None", ",", "pages", "=", "1", ",", "url", "=", "search_url", ",", "sort", "=", "None", ",", "order", "=", "None", ")", ":", "if", "not", "self", ".", "current_url", ":", "self", ".", "current_url", "=", "url", "if", "self", ".", "current_url", "==", "Search", ".", "base_url", ":", "# Searching home page so no formatting", "results", "=", "self", ".", "_get_results", "(", "self", ".", "current_url", ")", "self", ".", "_add_results", "(", "results", ")", "else", ":", "search", "=", "self", ".", "_format_search", "(", "term", ",", "category", ")", "sorting", "=", "self", ".", "_format_sort", "(", "sort", ",", "order", ")", "# Now get the results.", "for", "i", "in", "range", "(", "pages", ")", ":", "results", "=", "self", ".", "_get_results", "(", "search", "+", "\"/\"", "+", "str", "(", "self", ".", "_current_page", ")", "+", "\"/\"", "+", "sorting", ")", "self", ".", "_add_results", "(", "results", ")", "self", ".", "_current_page", "+=", "1", "self", ".", "_current_page", "-=", "1" ]
Search a given URL for torrent results.
[ "Search", "a", "given", "URL", "for", "torrent", "results", "." ]
train
https://github.com/stephan-mclean/KickassTorrentsAPI/blob/4d867a090c06ce95b9ed996b48092cb5bfe28bbd/kat.py#L161-L183
0.031429
apache/spark
python/pyspark/sql/conf.py
RuntimeConfig._checkType
def _checkType(self, obj, identifier): """Assert that an object is of type str.""" if not isinstance(obj, basestring): raise TypeError("expected %s '%s' to be a string (was '%s')" % (identifier, obj, type(obj).__name__))
python
def _checkType(self, obj, identifier): """Assert that an object is of type str.""" if not isinstance(obj, basestring): raise TypeError("expected %s '%s' to be a string (was '%s')" % (identifier, obj, type(obj).__name__))
[ "def", "_checkType", "(", "self", ",", "obj", ",", "identifier", ")", ":", "if", "not", "isinstance", "(", "obj", ",", "basestring", ")", ":", "raise", "TypeError", "(", "\"expected %s '%s' to be a string (was '%s')\"", "%", "(", "identifier", ",", "obj", ",", "type", "(", "obj", ")", ".", "__name__", ")", ")" ]
Assert that an object is of type str.
[ "Assert", "that", "an", "object", "is", "of", "type", "str", "." ]
train
https://github.com/apache/spark/blob/618d6bff71073c8c93501ab7392c3cc579730f0b/python/pyspark/sql/conf.py#L63-L67
0.007246
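On Python 3 the same guard is written against str (Spark's compatibility shim aliases basestring to str there); a tiny sketch:

def check_type(obj, identifier):
    if not isinstance(obj, str):
        raise TypeError("expected %s '%s' to be a string (was '%s')"
                        % (identifier, obj, type(obj).__name__))

check_type('spark.sql.shuffle.partitions', 'key')  # passes silently
# check_type(200, 'value') would raise:
# TypeError: expected value '200' to be a string (was 'int')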
redhat-openstack/python-tripleo-helper
tripleohelper/undercloud.py
Undercloud.set_flavor
def set_flavor(self, node, flavor):
    """Set a flavor to a given ironic node.

    :param node: the ironic node object (its UUID is used in the update)
    :param flavor: the flavor name
    """
    command = (
        'ironic node-update {uuid} add '
        'properties/capabilities=profile:{flavor},boot_option:local').format(
            uuid=node.uuid, flavor=flavor)
    node.flavor = flavor
    self.add_environment_file(user='stack', filename='stackrc')
    self.run(command, user='stack')
python
def set_flavor(self, node, flavor):
    """Set a flavor to a given ironic node.

    :param node: the ironic node object (its UUID is used in the update)
    :param flavor: the flavor name
    """
    command = (
        'ironic node-update {uuid} add '
        'properties/capabilities=profile:{flavor},boot_option:local').format(
            uuid=node.uuid, flavor=flavor)
    node.flavor = flavor
    self.add_environment_file(user='stack', filename='stackrc')
    self.run(command, user='stack')
[ "def", "set_flavor", "(", "self", ",", "node", ",", "flavor", ")", ":", "command", "=", "(", "'ironic node-update {uuid} add '", "'properties/capabilities=profile:{flavor},boot_option:local'", ")", ".", "format", "(", "uuid", "=", "node", ".", "uuid", ",", "flavor", "=", "flavor", ")", "node", ".", "flavor", "=", "flavor", "self", ".", "add_environment_file", "(", "user", "=", "'stack'", ",", "filename", "=", "'stackrc'", ")", "self", ".", "run", "(", "command", ",", "user", "=", "'stack'", ")" ]
Set a flavor to a given ironic node.

:param node: the ironic node object (its UUID is used in the update)
:param flavor: the flavor name
[ "Set", "a", "flavor", "to", "a", "given", "ironic", "node", "." ]
train
https://github.com/redhat-openstack/python-tripleo-helper/blob/bfa165538335edb1088170c7a92f097167225c81/tripleohelper/undercloud.py#L206-L219
0.005894
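The command string the method assembles, shown with hypothetical values for the node UUID and flavor:

uuid, flavor = '7d85ef4c-0001', 'control'  # hypothetical values
command = ('ironic node-update {uuid} add '
           'properties/capabilities=profile:{flavor},boot_option:local').format(
               uuid=uuid, flavor=flavor)
print(command)
# ironic node-update 7d85ef4c-0001 add properties/capabilities=profile:control,boot_option:local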
django-leonardo/django-leonardo
leonardo/models.py
register_widgets
def register_widgets():
    """
    Register all collected widgets from settings

    WIDGETS = [('mymodule.models.MyWidget', {'mykwargs': 'mykwarg'})]
    WIDGETS = ['mymodule.models.MyWidget', MyClass]
    """
    # special case
    # register external apps
    Page.create_content_type(
        ApplicationWidget,
        APPLICATIONS=settings.APPLICATION_CHOICES)

    for _optgroup, _widgets in six.iteritems(settings.WIDGETS):
        optgroup = _optgroup if _optgroup != 'ungrouped' else None
        for widget in _widgets:
            kwargs = {'optgroup': optgroup}
            # load class from strings
            if isinstance(widget, six.string_types):
                try:
                    WidgetCls = get_class_from_string(widget)
                except:
                    exc_info = sys.exc_info()
                    raise six.reraise(*exc_info)
            elif isinstance(widget, tuple):
                try:
                    WidgetCls = get_class_from_string(widget[0])
                    if len(widget) > 1:
                        kwargs.update(widget[1])
                except Exception as e:
                    raise Exception('%s: %s' % (widget[0], e))
            else:
                WidgetCls = widget
            Page.create_content_type(
                WidgetCls, **kwargs)
python
def register_widgets():
    """
    Register all collected widgets from settings

    WIDGETS = [('mymodule.models.MyWidget', {'mykwargs': 'mykwarg'})]
    WIDGETS = ['mymodule.models.MyWidget', MyClass]
    """
    # special case
    # register external apps
    Page.create_content_type(
        ApplicationWidget,
        APPLICATIONS=settings.APPLICATION_CHOICES)

    for _optgroup, _widgets in six.iteritems(settings.WIDGETS):
        optgroup = _optgroup if _optgroup != 'ungrouped' else None
        for widget in _widgets:
            kwargs = {'optgroup': optgroup}
            # load class from strings
            if isinstance(widget, six.string_types):
                try:
                    WidgetCls = get_class_from_string(widget)
                except:
                    exc_info = sys.exc_info()
                    raise six.reraise(*exc_info)
            elif isinstance(widget, tuple):
                try:
                    WidgetCls = get_class_from_string(widget[0])
                    if len(widget) > 1:
                        kwargs.update(widget[1])
                except Exception as e:
                    raise Exception('%s: %s' % (widget[0], e))
            else:
                WidgetCls = widget
            Page.create_content_type(
                WidgetCls, **kwargs)
[ "def", "register_widgets", "(", ")", ":", "# special case", "# register external apps", "Page", ".", "create_content_type", "(", "ApplicationWidget", ",", "APPLICATIONS", "=", "settings", ".", "APPLICATION_CHOICES", ")", "for", "_optgroup", ",", "_widgets", "in", "six", ".", "iteritems", "(", "settings", ".", "WIDGETS", ")", ":", "optgroup", "=", "_optgroup", "if", "_optgroup", "!=", "'ungrouped'", "else", "None", "for", "widget", "in", "_widgets", ":", "kwargs", "=", "{", "'optgroup'", ":", "optgroup", "}", "# load class from strings", "if", "isinstance", "(", "widget", ",", "six", ".", "string_types", ")", ":", "try", ":", "WidgetCls", "=", "get_class_from_string", "(", "widget", ")", "except", ":", "exc_info", "=", "sys", ".", "exc_info", "(", ")", "raise", "six", ".", "reraise", "(", "*", "exc_info", ")", "elif", "isinstance", "(", "widget", ",", "tuple", ")", ":", "try", ":", "WidgetCls", "=", "get_class_from_string", "(", "widget", "[", "0", "]", ")", "if", "len", "(", "widget", ")", ">", "1", ":", "kwargs", ".", "update", "(", "widget", "[", "1", "]", ")", "except", "Exception", "as", "e", ":", "raise", "Exception", "(", "'%s: %s'", "%", "(", "mod", ",", "e", ")", ")", "else", ":", "WidgetCls", "=", "widget", "Page", ".", "create_content_type", "(", "WidgetCls", ",", "*", "*", "kwargs", ")" ]
Register all collected widgets from settings WIDGETS = [('mymodule.models.MyWidget', {'mykwargs': 'mykwarg'})] WIDGETS = ['mymodule.models.MyWidget', MyClass]
[ "Register", "all", "collected", "widgets", "from", "settings", "WIDGETS", "=", "[", "(", "mymodule", ".", "models", ".", "MyWidget", "{", "mykwargs", ":", "mykwarg", "}", ")", "]", "WIDGETS", "=", "[", "mymodule", ".", "models", ".", "MyWidget", "MyClass", "]" ]
train
https://github.com/django-leonardo/django-leonardo/blob/4b933e1792221a13b4028753d5f1d3499b0816d4/leonardo/models.py#L17-L53
0.001531
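The helper get_class_from_string is not shown in this record; a plausible implementation, assuming (as its name and usage suggest) that it maps a dotted path to a class:

import importlib

def get_class_from_string(path):
    # 'package.module.ClassName' -> ClassName (assumed behaviour of the helper)
    module_name, class_name = path.rsplit('.', 1)
    return getattr(importlib.import_module(module_name), class_name)

print(get_class_from_string('collections.OrderedDict'))  # <class 'collections.OrderedDict'>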
aacanakin/glim
glim/core.py
Facade.boot
def boot(cls, *args, **kwargs): """ Function creates the instance of accessor with dynamic positional & keyword arguments. Args ---- args (positional arguments): the positional arguments that are passed to the class of accessor. kwargs (keyword arguments): the keyword arguments that are passed to the class of accessor. """ if cls.accessor is not None: if cls.instance is None: cls.instance = cls.accessor(*args, **kwargs)
python
def boot(cls, *args, **kwargs): """ Function creates the instance of accessor with dynamic positional & keyword arguments. Args ---- args (positional arguments): the positional arguments that are passed to the class of accessor. kwargs (keyword arguments): the keyword arguments that are passed to the class of accessor. """ if cls.accessor is not None: if cls.instance is None: cls.instance = cls.accessor(*args, **kwargs)
[ "def", "boot", "(", "cls", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "cls", ".", "accessor", "is", "not", "None", ":", "if", "cls", ".", "instance", "is", "None", ":", "cls", ".", "instance", "=", "cls", ".", "accessor", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
Function creates the instance of accessor with dynamic positional & keyword arguments. Args ---- args (positional arguments): the positional arguments that are passed to the class of accessor. kwargs (keyword arguments): the keyword arguments that are passed to the class of accessor.
[ "Function", "creates", "the", "instance", "of", "accessor", "with", "dynamic", "positional", "&", "keyword", "arguments", "." ]
train
https://github.com/aacanakin/glim/blob/71a20ac149a1292c0d6c1dc7414985ea51854f7a/glim/core.py#L150-L164
0.003623
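The boot classmethod implements a lazy singleton: only the first call constructs the accessor, and later calls are no-ops. A runnable sketch with a stand-in accessor class:

class Counter:
    def __init__(self, start):
        self.value = start

class Facade:
    accessor = Counter
    instance = None

    @classmethod
    def boot(cls, *args, **kwargs):
        # construct the accessor once; subsequent boots are ignored
        if cls.accessor is not None and cls.instance is None:
            cls.instance = cls.accessor(*args, **kwargs)

Facade.boot(42)
Facade.boot(99)               # ignored: the instance already exists
print(Facade.instance.value)  # 42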
benley/butcher
butcher/cache.py
CacheManager.get_obj
def get_obj(self, objpath, metahash, dst_path): """Get object from cache, write it to dst_path. Args: objpath: filename relative to buildroot (example: mini-boot/blahblah/somefile.bin) metahash: metahash. See targets/base.py dst_path: Absolute path where the file should be written. Raises: CacheMiss: if the item is not in the cache """ incachepath = self.path_in_cache(objpath, metahash) if not os.path.exists(incachepath): raise CacheMiss('%s not in cache.' % incachepath) else: log.debug('Cache hit! %s~%s', objpath, metahash.hexdigest()) if not os.path.exists(os.path.dirname(dst_path)): os.makedirs(os.path.dirname(dst_path)) os.link(incachepath, dst_path)
python
def get_obj(self, objpath, metahash, dst_path): """Get object from cache, write it to dst_path. Args: objpath: filename relative to buildroot (example: mini-boot/blahblah/somefile.bin) metahash: metahash. See targets/base.py dst_path: Absolute path where the file should be written. Raises: CacheMiss: if the item is not in the cache """ incachepath = self.path_in_cache(objpath, metahash) if not os.path.exists(incachepath): raise CacheMiss('%s not in cache.' % incachepath) else: log.debug('Cache hit! %s~%s', objpath, metahash.hexdigest()) if not os.path.exists(os.path.dirname(dst_path)): os.makedirs(os.path.dirname(dst_path)) os.link(incachepath, dst_path)
[ "def", "get_obj", "(", "self", ",", "objpath", ",", "metahash", ",", "dst_path", ")", ":", "incachepath", "=", "self", ".", "path_in_cache", "(", "objpath", ",", "metahash", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "incachepath", ")", ":", "raise", "CacheMiss", "(", "'%s not in cache.'", "%", "incachepath", ")", "else", ":", "log", ".", "debug", "(", "'Cache hit! %s~%s'", ",", "objpath", ",", "metahash", ".", "hexdigest", "(", ")", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "dirname", "(", "dst_path", ")", ")", ":", "os", ".", "makedirs", "(", "os", ".", "path", ".", "dirname", "(", "dst_path", ")", ")", "os", ".", "link", "(", "incachepath", ",", "dst_path", ")" ]
Get object from cache, write it to dst_path. Args: objpath: filename relative to buildroot (example: mini-boot/blahblah/somefile.bin) metahash: metahash. See targets/base.py dst_path: Absolute path where the file should be written. Raises: CacheMiss: if the item is not in the cache
[ "Get", "object", "from", "cache", "write", "it", "to", "dst_path", "." ]
train
https://github.com/benley/butcher/blob/8b18828ea040af56b7835beab5fd03eab23cc9ee/butcher/cache.py#L128-L146
0.002375
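The cache-hit path relies on os.link, so a hit costs a hard link rather than a data copy. A small demonstration in a temp directory (assumes a filesystem that supports hard links):

import os, tempfile

workdir = tempfile.mkdtemp()
cached = os.path.join(workdir, 'somefile.bin')
with open(cached, 'wb') as f:
    f.write(b'payload')

dst = os.path.join(workdir, 'buildroot', 'somefile.bin')
os.makedirs(os.path.dirname(dst), exist_ok=True)
os.link(cached, dst)  # same inode, no data copied
print(os.path.samefile(cached, dst))  # True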
fictorial/pygameui
pygameui/imageview.py
view_for_image_named
def view_for_image_named(image_name): """Create an ImageView for the given image.""" image = resource.get_image(image_name) if not image: return None return ImageView(pygame.Rect(0, 0, 0, 0), image)
python
def view_for_image_named(image_name): """Create an ImageView for the given image.""" image = resource.get_image(image_name) if not image: return None return ImageView(pygame.Rect(0, 0, 0, 0), image)
[ "def", "view_for_image_named", "(", "image_name", ")", ":", "image", "=", "resource", ".", "get_image", "(", "image_name", ")", "if", "not", "image", ":", "return", "None", "return", "ImageView", "(", "pygame", ".", "Rect", "(", "0", ",", "0", ",", "0", ",", "0", ")", ",", "image", ")" ]
Create an ImageView for the given image.
[ "Create", "an", "ImageView", "for", "the", "given", "image", "." ]
train
https://github.com/fictorial/pygameui/blob/af6a35f347d6fafa66c4255bbbe38736d842ff65/pygameui/imageview.py#L64-L72
0.004444
rwl/pylon
pylon/opf.py
OPFModel.get_var
def get_var(self, name): """ Returns the variable set with the given name. """ for var in self.vars: if var.name == name: return var else: raise ValueError
python
def get_var(self, name): """ Returns the variable set with the given name. """ for var in self.vars: if var.name == name: return var else: raise ValueError
[ "def", "get_var", "(", "self", ",", "name", ")", ":", "for", "var", "in", "self", ".", "vars", ":", "if", "var", ".", "name", "==", "name", ":", "return", "var", "else", ":", "raise", "ValueError" ]
Returns the variable set with the given name.
[ "Returns", "the", "variable", "set", "with", "the", "given", "name", "." ]
train
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/opf.py#L750-L757
0.008811
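The for/else here is a deliberate idiom: the else clause runs only when the loop finishes without hitting the return. Shown in isolation with plain strings instead of variable objects:

def get_var(variables, name):
    for var in variables:
        if var == name:
            return var
    else:
        # reached only if no variable matched
        raise ValueError(name)

print(get_var(['Pg', 'Qg', 'Vm'], 'Vm'))  # Vm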
softlayer/softlayer-python
SoftLayer/CLI/object_storage/credential/list.py
cli
def cli(env, identifier): """Retrieve credentials used for generating an AWS signature. Max of 2.""" mgr = SoftLayer.ObjectStorageManager(env.client) credential_list = mgr.list_credential(identifier) table = formatting.Table(['id', 'password', 'username', 'type_name']) for credential in credential_list: table.add_row([ credential['id'], credential['password'], credential['username'], credential['type']['name'] ]) env.fout(table)
python
def cli(env, identifier): """Retrieve credentials used for generating an AWS signature. Max of 2.""" mgr = SoftLayer.ObjectStorageManager(env.client) credential_list = mgr.list_credential(identifier) table = formatting.Table(['id', 'password', 'username', 'type_name']) for credential in credential_list: table.add_row([ credential['id'], credential['password'], credential['username'], credential['type']['name'] ]) env.fout(table)
[ "def", "cli", "(", "env", ",", "identifier", ")", ":", "mgr", "=", "SoftLayer", ".", "ObjectStorageManager", "(", "env", ".", "client", ")", "credential_list", "=", "mgr", ".", "list_credential", "(", "identifier", ")", "table", "=", "formatting", ".", "Table", "(", "[", "'id'", ",", "'password'", ",", "'username'", ",", "'type_name'", "]", ")", "for", "credential", "in", "credential_list", ":", "table", ".", "add_row", "(", "[", "credential", "[", "'id'", "]", ",", "credential", "[", "'password'", "]", ",", "credential", "[", "'username'", "]", ",", "credential", "[", "'type'", "]", "[", "'name'", "]", "]", ")", "env", ".", "fout", "(", "table", ")" ]
Retrieve credentials used for generating an AWS signature. Max of 2.
[ "Retrieve", "credentials", "used", "for", "generating", "an", "AWS", "signature", ".", "Max", "of", "2", "." ]
train
https://github.com/softlayer/softlayer-python/blob/9f181be08cc3668353b05a6de0cb324f52cff6fa/SoftLayer/CLI/object_storage/credential/list.py#L14-L29
0.001912
yyuu/botornado
boto/mturk/connection.py
MTurkConnection._get_pages
def _get_pages(page_size, total_records):
    """
    Given a page size (records per page) and a total number of records,
    return the page numbers to be retrieved.
    """
    pages = total_records // page_size + bool(total_records % page_size)
    return range(1, pages + 1)
python
def _get_pages(page_size, total_records):
    """
    Given a page size (records per page) and a total number of records,
    return the page numbers to be retrieved.
    """
    pages = total_records // page_size + bool(total_records % page_size)
    return range(1, pages + 1)
[ "def", "_get_pages", "(", "page_size", ",", "total_records", ")", ":", "pages", "=", "total_records", "/", "page_size", "+", "bool", "(", "total_records", "%", "page_size", ")", "return", "range", "(", "1", ",", "pages", "+", "1", ")" ]
Given a page size (records per page) and a total number of records, return the page numbers to be retrieved.
[ "Given", "a", "page", "size", "(", "records", "per", "page", ")", "and", "a", "total", "number", "of", "records", "return", "the", "page", "numbers", "to", "be", "retrieved", "." ]
train
https://github.com/yyuu/botornado/blob/fffb056f5ff2324d1d5c1304014cfb1d899f602e/boto/mturk/connection.py#L261-L267
0.010239
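The page arithmetic written out with sample numbers: floor division gives the full pages, and the bool() of the remainder adds one partial page when needed (the // form keeps this working on Python 3, where / would yield a float):

def get_pages(page_size, total_records):
    pages = total_records // page_size + bool(total_records % page_size)
    return range(1, pages + 1)

print(list(get_pages(10, 25)))  # [1, 2, 3]
print(list(get_pages(10, 30)))  # [1, 2, 3]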
jupyter-widgets/ipywidgets
ipywidgets/widgets/widget.py
Widget.send_state
def send_state(self, key=None): """Sends the widget state, or a piece of it, to the front-end, if it exists. Parameters ---------- key : unicode, or iterable (optional) A single property's name or iterable of property names to sync with the front-end. """ state = self.get_state(key=key) if len(state) > 0: if self._property_lock: # we need to keep this dict up to date with the front-end values for name, value in state.items(): if name in self._property_lock: self._property_lock[name] = value state, buffer_paths, buffers = _remove_buffers(state) msg = {'method': 'update', 'state': state, 'buffer_paths': buffer_paths} self._send(msg, buffers=buffers)
python
def send_state(self, key=None): """Sends the widget state, or a piece of it, to the front-end, if it exists. Parameters ---------- key : unicode, or iterable (optional) A single property's name or iterable of property names to sync with the front-end. """ state = self.get_state(key=key) if len(state) > 0: if self._property_lock: # we need to keep this dict up to date with the front-end values for name, value in state.items(): if name in self._property_lock: self._property_lock[name] = value state, buffer_paths, buffers = _remove_buffers(state) msg = {'method': 'update', 'state': state, 'buffer_paths': buffer_paths} self._send(msg, buffers=buffers)
[ "def", "send_state", "(", "self", ",", "key", "=", "None", ")", ":", "state", "=", "self", ".", "get_state", "(", "key", "=", "key", ")", "if", "len", "(", "state", ")", ">", "0", ":", "if", "self", ".", "_property_lock", ":", "# we need to keep this dict up to date with the front-end values", "for", "name", ",", "value", "in", "state", ".", "items", "(", ")", ":", "if", "name", "in", "self", ".", "_property_lock", ":", "self", ".", "_property_lock", "[", "name", "]", "=", "value", "state", ",", "buffer_paths", ",", "buffers", "=", "_remove_buffers", "(", "state", ")", "msg", "=", "{", "'method'", ":", "'update'", ",", "'state'", ":", "state", ",", "'buffer_paths'", ":", "buffer_paths", "}", "self", ".", "_send", "(", "msg", ",", "buffers", "=", "buffers", ")" ]
Sends the widget state, or a piece of it, to the front-end, if it exists. Parameters ---------- key : unicode, or iterable (optional) A single property's name or iterable of property names to sync with the front-end.
[ "Sends", "the", "widget", "state", "or", "a", "piece", "of", "it", "to", "the", "front", "-", "end", "if", "it", "exists", "." ]
train
https://github.com/jupyter-widgets/ipywidgets/blob/36fe37594cd5a268def228709ca27e37b99ac606/ipywidgets/widgets/widget.py#L472-L488
0.007203
xingjiepan/cylinder_fitting
cylinder_fitting/fitting.py
calc_A_hat
def calc_A_hat(A, S): '''Return the A_hat matrix of A given the skew matrix S''' return np.dot(S, np.dot(A, np.transpose(S)))
python
def calc_A_hat(A, S): '''Return the A_hat matrix of A given the skew matrix S''' return np.dot(S, np.dot(A, np.transpose(S)))
[ "def", "calc_A_hat", "(", "A", ",", "S", ")", ":", "return", "np", ".", "dot", "(", "S", ",", "np", ".", "dot", "(", "A", ",", "np", ".", "transpose", "(", "S", ")", ")", ")" ]
Return the A_hat matrix of A given the skew matrix S
[ "Return", "the", "A_hat", "matrix", "of", "A", "given", "the", "skew", "matrix", "S" ]
train
https://github.com/xingjiepan/cylinder_fitting/blob/f96d79732bc49cbc0cb4b39f008af7ce42aeb213/cylinder_fitting/fitting.py#L26-L28
0.007519
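With S the skew-symmetric cross-product matrix of a direction w, calc_A_hat computes A_hat = S * A * S^T. A numeric check with an assumed axis direction (w and A here are made-up test values):

import numpy as np

A = np.diag([1.0, 2.0, 3.0])
w = np.array([0.0, 0.0, 1.0])        # assumed cylinder axis direction
S = np.array([[0.0, -w[2],  w[1]],   # skew-symmetric matrix of w
              [w[2],  0.0, -w[0]],
              [-w[1], w[0],  0.0]])

A_hat = np.dot(S, np.dot(A, np.transpose(S)))
print(A_hat)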
apache/incubator-mxnet
python/mxnet/gluon/parameter.py
Parameter.data
def data(self, ctx=None): """Returns a copy of this parameter on one context. Must have been initialized on this context before. For sparse parameters, use :py:meth:`Parameter.row_sparse_data` instead. Parameters ---------- ctx : Context Desired context. Returns ------- NDArray on ctx """ if self._stype != 'default': raise RuntimeError("Cannot return a copy of Parameter '%s' on ctx %s via data() " \ "because its storage type is %s. Please use row_sparse_data() " \ "instead." % (self.name, str(ctx), self._stype)) return self._check_and_get(self._data, ctx)
python
def data(self, ctx=None): """Returns a copy of this parameter on one context. Must have been initialized on this context before. For sparse parameters, use :py:meth:`Parameter.row_sparse_data` instead. Parameters ---------- ctx : Context Desired context. Returns ------- NDArray on ctx """ if self._stype != 'default': raise RuntimeError("Cannot return a copy of Parameter '%s' on ctx %s via data() " \ "because its storage type is %s. Please use row_sparse_data() " \ "instead." % (self.name, str(ctx), self._stype)) return self._check_and_get(self._data, ctx)
[ "def", "data", "(", "self", ",", "ctx", "=", "None", ")", ":", "if", "self", ".", "_stype", "!=", "'default'", ":", "raise", "RuntimeError", "(", "\"Cannot return a copy of Parameter '%s' on ctx %s via data() \"", "\"because its storage type is %s. Please use row_sparse_data() \"", "\"instead.\"", "%", "(", "self", ".", "name", ",", "str", "(", "ctx", ")", ",", "self", ".", "_stype", ")", ")", "return", "self", ".", "_check_and_get", "(", "self", ".", "_data", ",", "ctx", ")" ]
Returns a copy of this parameter on one context. Must have been initialized on this context before. For sparse parameters, use :py:meth:`Parameter.row_sparse_data` instead. Parameters ---------- ctx : Context Desired context. Returns ------- NDArray on ctx
[ "Returns", "a", "copy", "of", "this", "parameter", "on", "one", "context", ".", "Must", "have", "been", "initialized", "on", "this", "context", "before", ".", "For", "sparse", "parameters", "use", ":", "py", ":", "meth", ":", "Parameter", ".", "row_sparse_data", "instead", "." ]
train
https://github.com/apache/incubator-mxnet/blob/1af29e9c060a4c7d60eeaacba32afdb9a7775ba7/python/mxnet/gluon/parameter.py#L476-L494
0.008054
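Typical usage of Parameter.data(), assuming MXNet is installed (this sketch will not run without it, and initialization must happen before data() is called):

import mxnet as mx
from mxnet import gluon

p = gluon.Parameter('weight', shape=(2, 2))
p.initialize(ctx=mx.cpu())    # data() raises if the parameter is uninitialized
print(p.data(ctx=mx.cpu()))   # a 2x2 NDArray on cpu(0)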
dustinmm80/healthy
checks.py
check_license
def check_license(package_info, *args): """ Does the package have a license classifier? :param package_info: package_info dictionary :return: Tuple (is the condition True or False?, reason if it is False else None, score to be applied) """ classifiers = package_info.get('classifiers') reason = "No License" result = False if len([c for c in classifiers if c.startswith('License ::')]) > 0: result = True return result, reason, HAS_LICENSE
python
def check_license(package_info, *args): """ Does the package have a license classifier? :param package_info: package_info dictionary :return: Tuple (is the condition True or False?, reason if it is False else None, score to be applied) """ classifiers = package_info.get('classifiers') reason = "No License" result = False if len([c for c in classifiers if c.startswith('License ::')]) > 0: result = True return result, reason, HAS_LICENSE
[ "def", "check_license", "(", "package_info", ",", "*", "args", ")", ":", "classifiers", "=", "package_info", ".", "get", "(", "'classifiers'", ")", "reason", "=", "\"No License\"", "result", "=", "False", "if", "len", "(", "[", "c", "for", "c", "in", "classifiers", "if", "c", ".", "startswith", "(", "'License ::'", ")", "]", ")", ">", "0", ":", "result", "=", "True", "return", "result", ",", "reason", ",", "HAS_LICENSE" ]
Does the package have a license classifier? :param package_info: package_info dictionary :return: Tuple (is the condition True or False?, reason if it is False else None, score to be applied)
[ "Does", "the", "package", "have", "a", "license", "classifier?", ":", "param", "package_info", ":", "package_info", "dictionary", ":", "return", ":", "Tuple", "(", "is", "the", "condition", "True", "or", "False?", "reason", "if", "it", "is", "False", "else", "None", "score", "to", "be", "applied", ")" ]
train
https://github.com/dustinmm80/healthy/blob/b59016c3f578ca45b6ce857a2d5c4584b8542288/checks.py#L33-L46
0.00409
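The check reduces to one list comprehension over trove classifiers; a runnable sketch with a sample classifier list:

classifiers = [
    'Programming Language :: Python :: 3',
    'License :: OSI Approved :: MIT License',
]
has_license = len([c for c in classifiers if c.startswith('License ::')]) > 0
print(has_license)  # True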
apple/turicreate
deps/src/libxml2-2.9.1/python/libxml2.py
xmlNode.textMerge
def textMerge(self, second): """Merge two text nodes into one """ if second is None: second__o = None else: second__o = second._o ret = libxml2mod.xmlTextMerge(self._o, second__o) if ret is None:raise treeError('xmlTextMerge() failed') __tmp = xmlNode(_obj=ret) return __tmp
python
def textMerge(self, second): """Merge two text nodes into one """ if second is None: second__o = None else: second__o = second._o ret = libxml2mod.xmlTextMerge(self._o, second__o) if ret is None:raise treeError('xmlTextMerge() failed') __tmp = xmlNode(_obj=ret) return __tmp
[ "def", "textMerge", "(", "self", ",", "second", ")", ":", "if", "second", "is", "None", ":", "second__o", "=", "None", "else", ":", "second__o", "=", "second", ".", "_o", "ret", "=", "libxml2mod", ".", "xmlTextMerge", "(", "self", ".", "_o", ",", "second__o", ")", "if", "ret", "is", "None", ":", "raise", "treeError", "(", "'xmlTextMerge() failed'", ")", "__tmp", "=", "xmlNode", "(", "_obj", "=", "ret", ")", "return", "__tmp" ]
Merge two text nodes into one
[ "Merge", "two", "text", "nodes", "into", "one" ]
train
https://github.com/apple/turicreate/blob/74514c3f99e25b46f22c6e02977fe3da69221c2e/deps/src/libxml2-2.9.1/python/libxml2.py#L3606-L3613
0.018182
fermiPy/fermipy
fermipy/roi_model.py
ROIModel.load_source
def load_source(self, src, build_index=True, merge_sources=True, **kwargs): """ Load a single source. Parameters ---------- src : `~fermipy.roi_model.Source` Source object that will be added to the ROI. merge_sources : bool When a source matches an existing source in the model update that source with the properties of the new source. build_index : bool Re-make the source index after loading this source. """ src = copy.deepcopy(src) name = src.name.replace(' ', '').lower() min_sep = kwargs.get('min_separation', None) if min_sep is not None: sep = src.skydir.separation(self._src_skydir).deg if len(sep) > 0 and np.min(sep) < min_sep: return match_srcs = self.match_source(src) if len(match_srcs) == 1: # self.logger.debug('Found matching source for %s : %s', # src.name, match_srcs[0].name) if merge_sources: match_srcs[0].update_from_source(src) else: match_srcs[0].add_name(src.name) self._add_source_alias(src.name.replace(' ', '').lower(), match_srcs[0]) return elif len(match_srcs) > 2: raise Exception('Multiple sources with name %s' % name) self._add_source_alias(src.name, src) for name in src.names: self._add_source_alias(name.replace(' ', '').lower(), src) if isinstance(src, Source): self._srcs.append(src) else: self._diffuse_srcs.append(src) if build_index: self._build_src_index()
python
def load_source(self, src, build_index=True, merge_sources=True, **kwargs): """ Load a single source. Parameters ---------- src : `~fermipy.roi_model.Source` Source object that will be added to the ROI. merge_sources : bool When a source matches an existing source in the model update that source with the properties of the new source. build_index : bool Re-make the source index after loading this source. """ src = copy.deepcopy(src) name = src.name.replace(' ', '').lower() min_sep = kwargs.get('min_separation', None) if min_sep is not None: sep = src.skydir.separation(self._src_skydir).deg if len(sep) > 0 and np.min(sep) < min_sep: return match_srcs = self.match_source(src) if len(match_srcs) == 1: # self.logger.debug('Found matching source for %s : %s', # src.name, match_srcs[0].name) if merge_sources: match_srcs[0].update_from_source(src) else: match_srcs[0].add_name(src.name) self._add_source_alias(src.name.replace(' ', '').lower(), match_srcs[0]) return elif len(match_srcs) > 2: raise Exception('Multiple sources with name %s' % name) self._add_source_alias(src.name, src) for name in src.names: self._add_source_alias(name.replace(' ', '').lower(), src) if isinstance(src, Source): self._srcs.append(src) else: self._diffuse_srcs.append(src) if build_index: self._build_src_index()
[ "def", "load_source", "(", "self", ",", "src", ",", "build_index", "=", "True", ",", "merge_sources", "=", "True", ",", "*", "*", "kwargs", ")", ":", "src", "=", "copy", ".", "deepcopy", "(", "src", ")", "name", "=", "src", ".", "name", ".", "replace", "(", "' '", ",", "''", ")", ".", "lower", "(", ")", "min_sep", "=", "kwargs", ".", "get", "(", "'min_separation'", ",", "None", ")", "if", "min_sep", "is", "not", "None", ":", "sep", "=", "src", ".", "skydir", ".", "separation", "(", "self", ".", "_src_skydir", ")", ".", "deg", "if", "len", "(", "sep", ")", ">", "0", "and", "np", ".", "min", "(", "sep", ")", "<", "min_sep", ":", "return", "match_srcs", "=", "self", ".", "match_source", "(", "src", ")", "if", "len", "(", "match_srcs", ")", "==", "1", ":", "# self.logger.debug('Found matching source for %s : %s',", "# src.name, match_srcs[0].name)", "if", "merge_sources", ":", "match_srcs", "[", "0", "]", ".", "update_from_source", "(", "src", ")", "else", ":", "match_srcs", "[", "0", "]", ".", "add_name", "(", "src", ".", "name", ")", "self", ".", "_add_source_alias", "(", "src", ".", "name", ".", "replace", "(", "' '", ",", "''", ")", ".", "lower", "(", ")", ",", "match_srcs", "[", "0", "]", ")", "return", "elif", "len", "(", "match_srcs", ")", ">", "2", ":", "raise", "Exception", "(", "'Multiple sources with name %s'", "%", "name", ")", "self", ".", "_add_source_alias", "(", "src", ".", "name", ",", "src", ")", "for", "name", "in", "src", ".", "names", ":", "self", ".", "_add_source_alias", "(", "name", ".", "replace", "(", "' '", ",", "''", ")", ".", "lower", "(", ")", ",", "src", ")", "if", "isinstance", "(", "src", ",", "Source", ")", ":", "self", ".", "_srcs", ".", "append", "(", "src", ")", "else", ":", "self", ".", "_diffuse_srcs", ".", "append", "(", "src", ")", "if", "build_index", ":", "self", ".", "_build_src_index", "(", ")" ]
Load a single source. Parameters ---------- src : `~fermipy.roi_model.Source` Source object that will be added to the ROI. merge_sources : bool When a source matches an existing source in the model update that source with the properties of the new source. build_index : bool Re-make the source index after loading this source.
[ "Load", "a", "single", "source", "." ]
train
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/roi_model.py#L1575-L1634
0.002769
senaite/senaite.core
bika/lims/api/__init__.py
get_user_properties
def get_user_properties(user_or_username): """Return User Properties :param user_or_username: Plone group identifier :returns: Plone MemberData """ user = get_user(user_or_username) if user is None: return {} if not callable(user.getUser): return {} out = {} plone_user = user.getUser() for sheet in plone_user.listPropertysheets(): ps = plone_user.getPropertysheet(sheet) out.update(dict(ps.propertyItems())) return out
python
def get_user_properties(user_or_username): """Return User Properties :param user_or_username: Plone group identifier :returns: Plone MemberData """ user = get_user(user_or_username) if user is None: return {} if not callable(user.getUser): return {} out = {} plone_user = user.getUser() for sheet in plone_user.listPropertysheets(): ps = plone_user.getPropertysheet(sheet) out.update(dict(ps.propertyItems())) return out
[ "def", "get_user_properties", "(", "user_or_username", ")", ":", "user", "=", "get_user", "(", "user_or_username", ")", "if", "user", "is", "None", ":", "return", "{", "}", "if", "not", "callable", "(", "user", ".", "getUser", ")", ":", "return", "{", "}", "out", "=", "{", "}", "plone_user", "=", "user", ".", "getUser", "(", ")", "for", "sheet", "in", "plone_user", ".", "listPropertysheets", "(", ")", ":", "ps", "=", "plone_user", ".", "getPropertysheet", "(", "sheet", ")", "out", ".", "update", "(", "dict", "(", "ps", ".", "propertyItems", "(", ")", ")", ")", "return", "out" ]
Return User Properties :param user_or_username: Plone group identifier :returns: Plone MemberData
[ "Return", "User", "Properties" ]
train
https://github.com/senaite/senaite.core/blob/7602ce2ea2f9e81eb34e20ce17b98a3e70713f85/bika/lims/api/__init__.py#L998-L1014
0.002012
transitland/mapzen-gtfs
mzgtfs/feed.py
Feed.make_zip
def make_zip(self, filename, files=None, path=None, clone=None, compress=True): """Create a Zip archive. Provide any of the following: files - A list of files path - A directory of .txt files clone - Copy any files from a zip archive not specified above Duplicate files will be ignored. The 'files' argument will be used first, then files found in the specified 'path', then in the specified 'clone' archive. """ if filename and os.path.exists(filename): raise IOError('File exists: %s'%filename) files = files or [] arcnames = [] if path and os.path.isdir(path): files += glob.glob(os.path.join(path, '*.txt')) if compress: compress_level = zipfile.ZIP_DEFLATED else: compress_level = zipfile.ZIP_STORED # Write files. self.log("Creating zip archive: %s"%filename) zf = zipfile.ZipFile(filename, 'a', compression=compress_level) for f in files: base = os.path.basename(f) if base in arcnames: self.log('... skipping: %s'%f) else: self.log('... adding: %s'%f) arcnames.append(base) zf.write(f, base) # Clone from existing zip archive. if clone and os.path.exists(clone): zc = zipfile.ZipFile(clone) for f in zc.namelist(): base = os.path.basename(f) if os.path.splitext(base)[-1] != '.txt': pass # self.log('... skipping from clone: %s'%f) elif base in arcnames: self.log('... skipping from clone: %s'%f) else: self.log('... adding from clone: %s'%f) arcnames.append(base) with zc.open(f) as i: data = i.read() zf.writestr(base, data) zf.close()
python
def make_zip(self, filename, files=None, path=None, clone=None, compress=True): """Create a Zip archive. Provide any of the following: files - A list of files path - A directory of .txt files clone - Copy any files from a zip archive not specified above Duplicate files will be ignored. The 'files' argument will be used first, then files found in the specified 'path', then in the specified 'clone' archive. """ if filename and os.path.exists(filename): raise IOError('File exists: %s'%filename) files = files or [] arcnames = [] if path and os.path.isdir(path): files += glob.glob(os.path.join(path, '*.txt')) if compress: compress_level = zipfile.ZIP_DEFLATED else: compress_level = zipfile.ZIP_STORED # Write files. self.log("Creating zip archive: %s"%filename) zf = zipfile.ZipFile(filename, 'a', compression=compress_level) for f in files: base = os.path.basename(f) if base in arcnames: self.log('... skipping: %s'%f) else: self.log('... adding: %s'%f) arcnames.append(base) zf.write(f, base) # Clone from existing zip archive. if clone and os.path.exists(clone): zc = zipfile.ZipFile(clone) for f in zc.namelist(): base = os.path.basename(f) if os.path.splitext(base)[-1] != '.txt': pass # self.log('... skipping from clone: %s'%f) elif base in arcnames: self.log('... skipping from clone: %s'%f) else: self.log('... adding from clone: %s'%f) arcnames.append(base) with zc.open(f) as i: data = i.read() zf.writestr(base, data) zf.close()
[ "def", "make_zip", "(", "self", ",", "filename", ",", "files", "=", "None", ",", "path", "=", "None", ",", "clone", "=", "None", ",", "compress", "=", "True", ")", ":", "if", "filename", "and", "os", ".", "path", ".", "exists", "(", "filename", ")", ":", "raise", "IOError", "(", "'File exists: %s'", "%", "filename", ")", "files", "=", "files", "or", "[", "]", "arcnames", "=", "[", "]", "if", "path", "and", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "files", "+=", "glob", ".", "glob", "(", "os", ".", "path", ".", "join", "(", "path", ",", "'*.txt'", ")", ")", "if", "compress", ":", "compress_level", "=", "zipfile", ".", "ZIP_DEFLATED", "else", ":", "compress_level", "=", "zipfile", ".", "ZIP_STORED", "# Write files.", "self", ".", "log", "(", "\"Creating zip archive: %s\"", "%", "filename", ")", "zf", "=", "zipfile", ".", "ZipFile", "(", "filename", ",", "'a'", ",", "compression", "=", "compress_level", ")", "for", "f", "in", "files", ":", "base", "=", "os", ".", "path", ".", "basename", "(", "f", ")", "if", "base", "in", "arcnames", ":", "self", ".", "log", "(", "'... skipping: %s'", "%", "f", ")", "else", ":", "self", ".", "log", "(", "'... adding: %s'", "%", "f", ")", "arcnames", ".", "append", "(", "base", ")", "zf", ".", "write", "(", "f", ",", "base", ")", "# Clone from existing zip archive.", "if", "clone", "and", "os", ".", "path", ".", "exists", "(", "clone", ")", ":", "zc", "=", "zipfile", ".", "ZipFile", "(", "clone", ")", "for", "f", "in", "zc", ".", "namelist", "(", ")", ":", "base", "=", "os", ".", "path", ".", "basename", "(", "f", ")", "if", "os", ".", "path", ".", "splitext", "(", "base", ")", "[", "-", "1", "]", "!=", "'.txt'", ":", "pass", "# self.log('... skipping from clone: %s'%f)", "elif", "base", "in", "arcnames", ":", "self", ".", "log", "(", "'... skipping from clone: %s'", "%", "f", ")", "else", ":", "self", ".", "log", "(", "'... adding from clone: %s'", "%", "f", ")", "arcnames", ".", "append", "(", "base", ")", "with", "zc", ".", "open", "(", "f", ")", "as", "i", ":", "data", "=", "i", ".", "read", "(", ")", "zf", ".", "writestr", "(", "base", ",", "data", ")", "zf", ".", "close", "(", ")" ]
Create a Zip archive. Provide any of the following: files - A list of files path - A directory of .txt files clone - Copy any files from a zip archive not specified above Duplicate files will be ignored. The 'files' argument will be used first, then files found in the specified 'path', then in the specified 'clone' archive.
[ "Create", "a", "Zip", "archive", "." ]
train
https://github.com/transitland/mapzen-gtfs/blob/d445f1588ed10713eea9a1ca2878eef792121eca/mzgtfs/feed.py#L154-L205
0.013196
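A minimal demonstration of the append-mode zip writing at the core of make_zip, in a temp directory with a made-up GTFS-style file:

import os, tempfile, zipfile

workdir = tempfile.mkdtemp()
txt = os.path.join(workdir, 'stops.txt')
with open(txt, 'w') as f:
    f.write('stop_id,stop_name\n1,Main St\n')

archive = os.path.join(workdir, 'feed.zip')
with zipfile.ZipFile(archive, 'a', compression=zipfile.ZIP_DEFLATED) as zf:
    zf.write(txt, os.path.basename(txt))  # arcname strips the directory

print(zipfile.ZipFile(archive).namelist())  # ['stops.txt']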
saltstack/salt
salt/states/vbox_guest.py
additions_installed
def additions_installed(name, reboot=False, upgrade_os=False):
    '''
    Ensure that the VirtualBox Guest Additions are installed. Uses the CD,
    connected by VirtualBox.

    name
        The name has no functional value and is only used as a tracking
        reference.
    reboot : False
        Restart OS to complete installation.
    upgrade_os : False
        Upgrade OS (to ensure the latest version of kernel and developer
        tools are installed).
    '''
    ret = {'name': name,
           'changes': {},
           'result': False,
           'comment': ''}
    current_state = __salt__['vbox_guest.additions_version']()
    if current_state:
        ret['result'] = True
        ret['comment'] = 'System already in the correct state'
        return ret
    if __opts__['test']:
        ret['comment'] = ('The state of VirtualBox Guest Additions will be '
                          'changed.')
        ret['changes'] = {
            'old': current_state,
            'new': True,
        }
        ret['result'] = None
        return ret
    new_state = __salt__['vbox_guest.additions_install'](
        reboot=reboot, upgrade_os=upgrade_os)
    ret['comment'] = 'The state of VirtualBox Guest Additions was changed!'
    ret['changes'] = {
        'old': current_state,
        'new': new_state,
    }
    ret['result'] = bool(new_state)
    return ret
python
def additions_installed(name, reboot=False, upgrade_os=False):
    '''
    Ensure that the VirtualBox Guest Additions are installed. Uses the CD,
    connected by VirtualBox.

    name
        The name has no functional value and is only used as a tracking
        reference.
    reboot : False
        Restart OS to complete installation.
    upgrade_os : False
        Upgrade OS (to ensure the latest version of kernel and developer
        tools are installed).
    '''
    ret = {'name': name,
           'changes': {},
           'result': False,
           'comment': ''}
    current_state = __salt__['vbox_guest.additions_version']()
    if current_state:
        ret['result'] = True
        ret['comment'] = 'System already in the correct state'
        return ret
    if __opts__['test']:
        ret['comment'] = ('The state of VirtualBox Guest Additions will be '
                          'changed.')
        ret['changes'] = {
            'old': current_state,
            'new': True,
        }
        ret['result'] = None
        return ret
    new_state = __salt__['vbox_guest.additions_install'](
        reboot=reboot, upgrade_os=upgrade_os)
    ret['comment'] = 'The state of VirtualBox Guest Additions was changed!'
    ret['changes'] = {
        'old': current_state,
        'new': new_state,
    }
    ret['result'] = bool(new_state)
    return ret
[ "def", "additions_installed", "(", "name", ",", "reboot", "=", "False", ",", "upgrade_os", "=", "False", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'changes'", ":", "{", "}", ",", "'result'", ":", "False", ",", "'comment'", ":", "''", "}", "current_state", "=", "__salt__", "[", "'vbox_guest.additions_version'", "]", "(", ")", "if", "current_state", ":", "ret", "[", "'result'", "]", "=", "True", "ret", "[", "'comment'", "]", "=", "'System already in the correct state'", "return", "ret", "if", "__opts__", "[", "'test'", "]", ":", "ret", "[", "'comment'", "]", "=", "(", "'The state of VirtualBox Guest Additions will be '", "'changed.'", ")", "ret", "[", "'changes'", "]", "=", "{", "'old'", ":", "current_state", ",", "'new'", ":", "True", ",", "}", "ret", "[", "'result'", "]", "=", "None", "return", "ret", "new_state", "=", "__salt__", "[", "'vbox_guest.additions_install'", "]", "(", "reboot", "=", "reboot", ",", "upgrade_os", "=", "upgrade_os", ")", "ret", "[", "'comment'", "]", "=", "'The state of VirtualBox Guest Additions was changed!'", "ret", "[", "'changes'", "]", "=", "{", "'old'", ":", "current_state", ",", "'new'", ":", "new_state", ",", "}", "ret", "[", "'result'", "]", "=", "bool", "(", "new_state", ")", "return", "ret" ]
Ensure that the VirtualBox Guest Additions are installed. Uses the CD,
connected by VirtualBox.

name
    The name has no functional value and is only used as a tracking
    reference.
reboot : False
    Restart OS to complete installation.
upgrade_os : False
    Upgrade OS (to ensure the latest version of kernel and developer
    tools are installed).
[ "Ensure", "that", "the", "VirtualBox", "Guest", "Additions", "are", "installed", ".", "Uses", "the", "CD", "connected", "by", "VirtualBox", "." ]
train
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/states/vbox_guest.py#L13-L52
0.000746
swevm/scaleio-py
scaleiopy/api/scaleio/cluster/sds.py
Sds.set_sds_name
def set_sds_name(self, name, sdsObj):
    """
    Set name for SDS
    :param name: Name of SDS
    :param sdsObj: ScaleIO SDS object
    :return: POST request response
    :rtype: Requests POST response object
    """
    # TODO:
    # Check if object parameters are the correct type, otherwise throw error
    # UNSURE IF THIS IS CORRECT WAY TO SET SDS NAME
    self.conn.connection._check_login()
    sdsNameDict = {'sdsName': name}
    response = self.conn.connection._do_post("{}/{}{}/{}".format(
        self.conn.connection._api_url, "instances/Sds::", sdsObj.id, 'action/setSdsName'),
        json=sdsNameDict)
    return response
python
def set_sds_name(self, name, sdsObj): """ Set name for SDS :param name: Name of SDS :param sdsObj: ScaleIO SDS object :return: POST request response :rtype: Requests POST response object """ # TODO: # Check if object parameters are the correct type, otherwise throw error # UNSURE IF THIS IS CORRECT WAY TO SET SDS NAME self.conn.connection._check_login() sdsNameDict = {'sdsName': name} response = self.conn.connection._do_post("{}/{}{}/{}".format(self.conn.connection._api_url, "instances/Sds::", sdsObj.id, 'action/setSdsName'), json=sdsNameDict) return response
[ "def", "set_sds_name", "(", "self", ",", "name", ",", "sdsObj", ")", ":", "# TODO:", "# Check if object parameters are the correct type, otherwise throw error", "# UNSURE IF THIS IS CORRECT WAY TO SET SDS NAME", "self", ".", "conn", ".", "connection", ".", "_check_login", "(", ")", "sdsNameDict", "=", "{", "'sdsName'", ":", "name", "}", "response", "=", "self", ".", "conn", ".", "connection", ".", "_do_post", "(", "\"{}/{}{}/{}\"", ".", "format", "(", "self", ".", "conn", ".", "connection", ".", "_api_url", ",", "\"instances/Sds::\"", ",", "sdcObj", ".", "id", ",", "'action/setSdsName'", ")", ",", "json", "=", "sdsNameDict", ")", "return", "response" ]
Set name for SDS :param name: Name of SDS :param sdsObj: ScaleIO SDS object :return: POST request response :rtype: Requests POST response object
[ "Set", "name", "for", "SDS", ":", "param", "name", ":", "Name", "of", "SDS", ":", "param", "sdsObj", ":", "ScaleIO", "SDS", "object", ":", "return", ":", "POST", "request", "response", ":", "rtype", ":", "Requests", "POST", "response", "object" ]
train
https://github.com/swevm/scaleio-py/blob/d043a0137cb925987fd5c895a3210968ce1d9028/scaleiopy/api/scaleio/cluster/sds.py#L33-L47
0.007342
klingtnet/sblgntparser
sblgntparser/tools.py
particles
def particles(category=None): ''' Returns a dict containing old greek particles grouped by category. ''' filepath = os.path.join(os.path.dirname(__file__), './particles.json') with open(filepath) as f: try: particles = json.load(f) except ValueError as e: log.error('Bad json format in "{}"'.format(filepath)) else: if category: if category in particles: return particles[category] else: log.warn('Category "{}" not contained in particle dictionary!'.format(category)) return particles
python
def particles(category=None): ''' Returns a dict containing old greek particles grouped by category. ''' filepath = os.path.join(os.path.dirname(__file__), './particles.json') with open(filepath) as f: try: particles = json.load(f) except ValueError as e: log.error('Bad json format in "{}"'.format(filepath)) else: if category: if category in particles: return particles[category] else: log.warn('Category "{}" not contained in particle dictionary!'.format(category)) return particles
[ "def", "particles", "(", "category", "=", "None", ")", ":", "filepath", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "'./particles.json'", ")", "with", "open", "(", "filepath", ")", "as", "f", ":", "try", ":", "particles", "=", "json", ".", "load", "(", "f", ")", "except", "ValueError", "as", "e", ":", "log", ".", "error", "(", "'Bad json format in \"{}\"'", ".", "format", "(", "filepath", ")", ")", "else", ":", "if", "category", ":", "if", "category", "in", "particles", ":", "return", "particles", "[", "category", "]", "else", ":", "log", ".", "warn", "(", "'Category \"{}\" not contained in particle dictionary!'", ".", "format", "(", "category", ")", ")", "return", "particles" ]
Returns a dict containing old greek particles grouped by category.
[ "Returns", "a", "dict", "containing", "old", "greek", "particles", "grouped", "by", "category", "." ]
train
https://github.com/klingtnet/sblgntparser/blob/535931a833203e5d9065072ec988c575b493d67f/sblgntparser/tools.py#L23-L39
0.003082
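A usage sketch for the loader above; the category key is hypothetical, since the record does not list the keys of particles.json:

all_particles = particles()       # full category -> particles mapping
conj = particles('conjunction')   # hypothetical category key; if absent,
                                  # the loader warns and returns the full dict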
icgood/pymap
pymap/mime/parsed.py
ParsedHeaders.message_id
def message_id(self) -> Optional[UnstructuredHeader]: """The ``Message-Id`` header.""" try: return cast(UnstructuredHeader, self[b'message-id'][0]) except (KeyError, IndexError): return None
python
def message_id(self) -> Optional[UnstructuredHeader]: """The ``Message-Id`` header.""" try: return cast(UnstructuredHeader, self[b'message-id'][0]) except (KeyError, IndexError): return None
[ "def", "message_id", "(", "self", ")", "->", "Optional", "[", "UnstructuredHeader", "]", ":", "try", ":", "return", "cast", "(", "UnstructuredHeader", ",", "self", "[", "b'message-id'", "]", "[", "0", "]", ")", "except", "(", "KeyError", ",", "IndexError", ")", ":", "return", "None" ]
The ``Message-Id`` header.
[ "The", "Message", "-", "Id", "header", "." ]
train
https://github.com/icgood/pymap/blob/e77d9a54d760e3cbe044a548883bb4299ed61dc2/pymap/mime/parsed.py#L143-L148
0.008403
richardkiss/pycoin
pycoin/contrib/msg_signing.py
MessageSigner.pair_for_message_hash
def pair_for_message_hash(self, signature, msg_hash): """ Take a signature, encoded in Base64, and return the pair it was signed by. May raise EncodingError (from _decode_signature) """ # Decode base64 and a bitmask in first byte. is_compressed, recid, r, s = self._decode_signature(signature) # Calculate the specific public key used to sign this message. y_parity = recid & 1 q = self._generator.possible_public_pairs_for_signature(msg_hash, (r, s), y_parity=y_parity)[0] if recid > 1: order = self._generator.order() q = self._generator.Point(q[0] + order, q[1]) return q, is_compressed
python
def pair_for_message_hash(self, signature, msg_hash): """ Take a signature, encoded in Base64, and return the pair it was signed by. May raise EncodingError (from _decode_signature) """ # Decode base64 and a bitmask in first byte. is_compressed, recid, r, s = self._decode_signature(signature) # Calculate the specific public key used to sign this message. y_parity = recid & 1 q = self._generator.possible_public_pairs_for_signature(msg_hash, (r, s), y_parity=y_parity)[0] if recid > 1: order = self._generator.order() q = self._generator.Point(q[0] + order, q[1]) return q, is_compressed
[ "def", "pair_for_message_hash", "(", "self", ",", "signature", ",", "msg_hash", ")", ":", "# Decode base64 and a bitmask in first byte.", "is_compressed", ",", "recid", ",", "r", ",", "s", "=", "self", ".", "_decode_signature", "(", "signature", ")", "# Calculate the specific public key used to sign this message.", "y_parity", "=", "recid", "&", "1", "q", "=", "self", ".", "_generator", ".", "possible_public_pairs_for_signature", "(", "msg_hash", ",", "(", "r", ",", "s", ")", ",", "y_parity", "=", "y_parity", ")", "[", "0", "]", "if", "recid", ">", "1", ":", "order", "=", "self", ".", "_generator", ".", "order", "(", ")", "q", "=", "self", ".", "_generator", ".", "Point", "(", "q", "[", "0", "]", "+", "order", ",", "q", "[", "1", "]", ")", "return", "q", ",", "is_compressed" ]
Take a signature, encoded in Base64, and return the pair it was signed by. May raise EncodingError (from _decode_signature)
[ "Take", "a", "signature", "encoded", "in", "Base64", "and", "return", "the", "pair", "it", "was", "signed", "by", ".", "May", "raise", "EncodingError", "(", "from", "_decode_signature", ")" ]
train
https://github.com/richardkiss/pycoin/blob/1e8d0d9fe20ce0347b97847bb529cd1bd84c7442/pycoin/contrib/msg_signing.py#L139-L154
0.00569
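The recid bitmask decoded above selects which candidate curve point matches the signer, so recovering the key pair is a single call. A hedged sketch: signer is a MessageSigner built elsewhere, and msg_hash is the integer hash of the signed message; neither is shown in this record.

# `signature` is the Base64 string, `msg_hash` the message hash (assumed inputs)
public_pair, is_compressed = signer.pair_for_message_hash(signature, msg_hash)
print('recovered public pair:', public_pair, 'compressed key:', is_compressed)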
GoogleCloudPlatform/datastore-ndb-python
ndb/model.py
Model._put_async
def _put_async(self, **ctx_options): """Write this entity to Cloud Datastore. This is the asynchronous version of Model._put(). """ if self._projection: raise datastore_errors.BadRequestError('Cannot put a partial entity') from . import tasklets ctx = tasklets.get_context() self._prepare_for_put() if self._key is None: self._key = Key(self._get_kind(), None) self._pre_put_hook() fut = ctx.put(self, **ctx_options) post_hook = self._post_put_hook if not self._is_default_hook(Model._default_post_put_hook, post_hook): fut.add_immediate_callback(post_hook, fut) return fut
python
def _put_async(self, **ctx_options): """Write this entity to Cloud Datastore. This is the asynchronous version of Model._put(). """ if self._projection: raise datastore_errors.BadRequestError('Cannot put a partial entity') from . import tasklets ctx = tasklets.get_context() self._prepare_for_put() if self._key is None: self._key = Key(self._get_kind(), None) self._pre_put_hook() fut = ctx.put(self, **ctx_options) post_hook = self._post_put_hook if not self._is_default_hook(Model._default_post_put_hook, post_hook): fut.add_immediate_callback(post_hook, fut) return fut
[ "def", "_put_async", "(", "self", ",", "*", "*", "ctx_options", ")", ":", "if", "self", ".", "_projection", ":", "raise", "datastore_errors", ".", "BadRequestError", "(", "'Cannot put a partial entity'", ")", "from", ".", "import", "tasklets", "ctx", "=", "tasklets", ".", "get_context", "(", ")", "self", ".", "_prepare_for_put", "(", ")", "if", "self", ".", "_key", "is", "None", ":", "self", ".", "_key", "=", "Key", "(", "self", ".", "_get_kind", "(", ")", ",", "None", ")", "self", ".", "_pre_put_hook", "(", ")", "fut", "=", "ctx", ".", "put", "(", "self", ",", "*", "*", "ctx_options", ")", "post_hook", "=", "self", ".", "_post_put_hook", "if", "not", "self", ".", "_is_default_hook", "(", "Model", ".", "_default_post_put_hook", ",", "post_hook", ")", ":", "fut", ".", "add_immediate_callback", "(", "post_hook", ",", "fut", ")", "return", "fut" ]
Write this entity to Cloud Datastore. This is the asynchronous version of Model._put().
[ "Write", "this", "entity", "to", "Cloud", "Datastore", "." ]
train
https://github.com/GoogleCloudPlatform/datastore-ndb-python/blob/cf4cab3f1f69cd04e1a9229871be466b53729f3f/ndb/model.py#L3461-L3478
0.006211
HdrHistogram/HdrHistogram_py
hdrh/histogram.py
HdrHistogram.record_value
def record_value(self, value, count=1): '''Record a new value into the histogram Args: value: the value to record (must be in the valid range) count: incremental count (defaults to 1) ''' if value < 0: return False counts_index = self._counts_index_for(value) if (counts_index < 0) or (self.counts_len <= counts_index): return False self.counts[counts_index] += count self.total_count += count self.min_value = min(self.min_value, value) self.max_value = max(self.max_value, value) return True
python
def record_value(self, value, count=1): '''Record a new value into the histogram Args: value: the value to record (must be in the valid range) count: incremental count (defaults to 1) ''' if value < 0: return False counts_index = self._counts_index_for(value) if (counts_index < 0) or (self.counts_len <= counts_index): return False self.counts[counts_index] += count self.total_count += count self.min_value = min(self.min_value, value) self.max_value = max(self.max_value, value) return True
[ "def", "record_value", "(", "self", ",", "value", ",", "count", "=", "1", ")", ":", "if", "value", "<", "0", ":", "return", "False", "counts_index", "=", "self", ".", "_counts_index_for", "(", "value", ")", "if", "(", "counts_index", "<", "0", ")", "or", "(", "self", ".", "counts_len", "<=", "counts_index", ")", ":", "return", "False", "self", ".", "counts", "[", "counts_index", "]", "+=", "count", "self", ".", "total_count", "+=", "count", "self", ".", "min_value", "=", "min", "(", "self", ".", "min_value", ",", "value", ")", "self", ".", "max_value", "=", "max", "(", "self", ".", "max_value", ",", "value", ")", "return", "True" ]
Record a new value into the histogram Args: value: the value to record (must be in the valid range) count: incremental count (defaults to 1)
[ "Record", "a", "new", "value", "into", "the", "histogram" ]
train
https://github.com/HdrHistogram/HdrHistogram_py/blob/cb99981b0564a62e1aa02bd764efa6445923f8f7/hdrh/histogram.py#L175-L191
0.003175
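A usage sketch for the recorder above, assuming the hdrh constructor takes (lowest_trackable_value, highest_trackable_value, significant_figures); only record_value itself is shown in this record, so treat the rest as assumptions:

from hdrh.histogram import HdrHistogram

# track latencies from 1 to 3,600,000,000 microseconds at 3 significant digits
hist = HdrHistogram(1, 3600 * 1000 * 1000, 3)
for latency_us in (87, 120, 450, 450, 3200):
    hist.record_value(latency_us)  # returns False if the value is out of range

print(hist.total_count)                    # 5
print(hist.get_value_at_percentile(99.0))  # approximate p99 latency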
ssalentin/plip
plip/modules/preparation.py
PLInteraction.refine_hydrophobic
def refine_hydrophobic(self, all_h, pistacks): """Apply several rules to reduce the number of hydrophobic interactions.""" sel = {} # 1. Rings interacting via stacking can't have additional hydrophobic contacts between each other. for pistack, h in itertools.product(pistacks, all_h): h1, h2 = h.bsatom.idx, h.ligatom.idx brs, lrs = [p1.idx for p1 in pistack.proteinring.atoms], [p2.idx for p2 in pistack.ligandring.atoms] if h1 in brs and h2 in lrs: sel[(h1, h2)] = "EXCLUDE" hydroph = [h for h in all_h if not (h.bsatom.idx, h.ligatom.idx) in sel] sel2 = {} # 2. If a ligand atom interacts with several binding site atoms in the same residue, # keep only the one with the closest distance for h in hydroph: if not (h.ligatom.idx, h.resnr) in sel2: sel2[(h.ligatom.idx, h.resnr)] = h else: if sel2[(h.ligatom.idx, h.resnr)].distance > h.distance: sel2[(h.ligatom.idx, h.resnr)] = h hydroph = [h for h in sel2.values()] hydroph_final = [] bsclust = {} # 3. If a protein atom interacts with several neighboring ligand atoms, just keep the one with the closest dist for h in hydroph: if h.bsatom.idx not in bsclust: bsclust[h.bsatom.idx] = [h, ] else: bsclust[h.bsatom.idx].append(h) idx_to_h = {} for bs in [a for a in bsclust if len(bsclust[a]) == 1]: hydroph_final.append(bsclust[bs][0]) # A list of tuples with the idx of an atom and one of its neighbours is created for bs in [a for a in bsclust if not len(bsclust[a]) == 1]: tuples = [] all_idx = [i.ligatom.idx for i in bsclust[bs]] for b in bsclust[bs]: idx = b.ligatom.idx neigh = [na for na in pybel.ob.OBAtomAtomIter(b.ligatom.OBAtom)] for n in neigh: n_idx = n.GetIdx() if n_idx in all_idx: if n_idx < idx: tuples.append((n_idx, idx)) else: tuples.append((idx, n_idx)) idx_to_h[idx] = b tuples = list(set(tuples)) tuples = sorted(tuples, key=itemgetter(1)) clusters = cluster_doubles(tuples) # Cluster connected atoms (i.e. find hydrophobic patches) for cluster in clusters: min_dist = float('inf') min_h = None for atm_idx in cluster: h = idx_to_h[atm_idx] if h.distance < min_dist: min_dist = h.distance min_h = h hydroph_final.append(min_h) before, reduced = len(all_h), len(hydroph_final) if not before == 0 and not before == reduced: write_message('Reduced number of hydrophobic contacts from %i to %i.\n' % (before, reduced), indent=True) return hydroph_final
python
def refine_hydrophobic(self, all_h, pistacks): """Apply several rules to reduce the number of hydrophobic interactions.""" sel = {} # 1. Rings interacting via stacking can't have additional hydrophobic contacts between each other. for pistack, h in itertools.product(pistacks, all_h): h1, h2 = h.bsatom.idx, h.ligatom.idx brs, lrs = [p1.idx for p1 in pistack.proteinring.atoms], [p2.idx for p2 in pistack.ligandring.atoms] if h1 in brs and h2 in lrs: sel[(h1, h2)] = "EXCLUDE" hydroph = [h for h in all_h if not (h.bsatom.idx, h.ligatom.idx) in sel] sel2 = {} # 2. If a ligand atom interacts with several binding site atoms in the same residue, # keep only the one with the closest distance for h in hydroph: if not (h.ligatom.idx, h.resnr) in sel2: sel2[(h.ligatom.idx, h.resnr)] = h else: if sel2[(h.ligatom.idx, h.resnr)].distance > h.distance: sel2[(h.ligatom.idx, h.resnr)] = h hydroph = [h for h in sel2.values()] hydroph_final = [] bsclust = {} # 3. If a protein atom interacts with several neighboring ligand atoms, just keep the one with the closest dist for h in hydroph: if h.bsatom.idx not in bsclust: bsclust[h.bsatom.idx] = [h, ] else: bsclust[h.bsatom.idx].append(h) idx_to_h = {} for bs in [a for a in bsclust if len(bsclust[a]) == 1]: hydroph_final.append(bsclust[bs][0]) # A list of tuples with the idx of an atom and one of its neighbours is created for bs in [a for a in bsclust if not len(bsclust[a]) == 1]: tuples = [] all_idx = [i.ligatom.idx for i in bsclust[bs]] for b in bsclust[bs]: idx = b.ligatom.idx neigh = [na for na in pybel.ob.OBAtomAtomIter(b.ligatom.OBAtom)] for n in neigh: n_idx = n.GetIdx() if n_idx in all_idx: if n_idx < idx: tuples.append((n_idx, idx)) else: tuples.append((idx, n_idx)) idx_to_h[idx] = b tuples = list(set(tuples)) tuples = sorted(tuples, key=itemgetter(1)) clusters = cluster_doubles(tuples) # Cluster connected atoms (i.e. find hydrophobic patches) for cluster in clusters: min_dist = float('inf') min_h = None for atm_idx in cluster: h = idx_to_h[atm_idx] if h.distance < min_dist: min_dist = h.distance min_h = h hydroph_final.append(min_h) before, reduced = len(all_h), len(hydroph_final) if not before == 0 and not before == reduced: write_message('Reduced number of hydrophobic contacts from %i to %i.\n' % (before, reduced), indent=True) return hydroph_final
[ "def", "refine_hydrophobic", "(", "self", ",", "all_h", ",", "pistacks", ")", ":", "sel", "=", "{", "}", "# 1. Rings interacting via stacking can't have additional hydrophobic contacts between each other.", "for", "pistack", ",", "h", "in", "itertools", ".", "product", "(", "pistacks", ",", "all_h", ")", ":", "h1", ",", "h2", "=", "h", ".", "bsatom", ".", "idx", ",", "h", ".", "ligatom", ".", "idx", "brs", ",", "lrs", "=", "[", "p1", ".", "idx", "for", "p1", "in", "pistack", ".", "proteinring", ".", "atoms", "]", ",", "[", "p2", ".", "idx", "for", "p2", "in", "pistack", ".", "ligandring", ".", "atoms", "]", "if", "h1", "in", "brs", "and", "h2", "in", "lrs", ":", "sel", "[", "(", "h1", ",", "h2", ")", "]", "=", "\"EXCLUDE\"", "hydroph", "=", "[", "h", "for", "h", "in", "all_h", "if", "not", "(", "h", ".", "bsatom", ".", "idx", ",", "h", ".", "ligatom", ".", "idx", ")", "in", "sel", "]", "sel2", "=", "{", "}", "# 2. If a ligand atom interacts with several binding site atoms in the same residue,", "# keep only the one with the closest distance", "for", "h", "in", "hydroph", ":", "if", "not", "(", "h", ".", "ligatom", ".", "idx", ",", "h", ".", "resnr", ")", "in", "sel2", ":", "sel2", "[", "(", "h", ".", "ligatom", ".", "idx", ",", "h", ".", "resnr", ")", "]", "=", "h", "else", ":", "if", "sel2", "[", "(", "h", ".", "ligatom", ".", "idx", ",", "h", ".", "resnr", ")", "]", ".", "distance", ">", "h", ".", "distance", ":", "sel2", "[", "(", "h", ".", "ligatom", ".", "idx", ",", "h", ".", "resnr", ")", "]", "=", "h", "hydroph", "=", "[", "h", "for", "h", "in", "sel2", ".", "values", "(", ")", "]", "hydroph_final", "=", "[", "]", "bsclust", "=", "{", "}", "# 3. If a protein atom interacts with several neighboring ligand atoms, just keep the one with the closest dist", "for", "h", "in", "hydroph", ":", "if", "h", ".", "bsatom", ".", "idx", "not", "in", "bsclust", ":", "bsclust", "[", "h", ".", "bsatom", ".", "idx", "]", "=", "[", "h", ",", "]", "else", ":", "bsclust", "[", "h", ".", "bsatom", ".", "idx", "]", ".", "append", "(", "h", ")", "idx_to_h", "=", "{", "}", "for", "bs", "in", "[", "a", "for", "a", "in", "bsclust", "if", "len", "(", "bsclust", "[", "a", "]", ")", "==", "1", "]", ":", "hydroph_final", ".", "append", "(", "bsclust", "[", "bs", "]", "[", "0", "]", ")", "# A list of tuples with the idx of an atom and one of its neighbours is created", "for", "bs", "in", "[", "a", "for", "a", "in", "bsclust", "if", "not", "len", "(", "bsclust", "[", "a", "]", ")", "==", "1", "]", ":", "tuples", "=", "[", "]", "all_idx", "=", "[", "i", ".", "ligatom", ".", "idx", "for", "i", "in", "bsclust", "[", "bs", "]", "]", "for", "b", "in", "bsclust", "[", "bs", "]", ":", "idx", "=", "b", ".", "ligatom", ".", "idx", "neigh", "=", "[", "na", "for", "na", "in", "pybel", ".", "ob", ".", "OBAtomAtomIter", "(", "b", ".", "ligatom", ".", "OBAtom", ")", "]", "for", "n", "in", "neigh", ":", "n_idx", "=", "n", ".", "GetIdx", "(", ")", "if", "n_idx", "in", "all_idx", ":", "if", "n_idx", "<", "idx", ":", "tuples", ".", "append", "(", "(", "n_idx", ",", "idx", ")", ")", "else", ":", "tuples", ".", "append", "(", "(", "idx", ",", "n_idx", ")", ")", "idx_to_h", "[", "idx", "]", "=", "b", "tuples", "=", "list", "(", "set", "(", "tuples", ")", ")", "tuples", "=", "sorted", "(", "tuples", ",", "key", "=", "itemgetter", "(", "1", ")", ")", "clusters", "=", "cluster_doubles", "(", "tuples", ")", "# Cluster connected atoms (i.e. 
find hydrophobic patches)", "for", "cluster", "in", "clusters", ":", "min_dist", "=", "float", "(", "'inf'", ")", "min_h", "=", "None", "for", "atm_idx", "in", "cluster", ":", "h", "=", "idx_to_h", "[", "atm_idx", "]", "if", "h", ".", "distance", "<", "min_dist", ":", "min_dist", "=", "h", ".", "distance", "min_h", "=", "h", "hydroph_final", ".", "append", "(", "min_h", ")", "before", ",", "reduced", "=", "len", "(", "all_h", ")", ",", "len", "(", "hydroph_final", ")", "if", "not", "before", "==", "0", "and", "not", "before", "==", "reduced", ":", "write_message", "(", "'Reduced number of hydrophobic contacts from %i to %i.\\n'", "%", "(", "before", ",", "reduced", ")", ",", "indent", "=", "True", ")", "return", "hydroph_final" ]
Apply several rules to reduce the number of hydrophobic interactions.
[ "Apply", "several", "rules", "to", "reduce", "the", "number", "of", "hydrophobic", "interactions", "." ]
train
https://github.com/ssalentin/plip/blob/906c8d36463689779b403f6c2c9ed06174acaf9a/plip/modules/preparation.py#L710-L775
0.003795
yahoo/TensorFlowOnSpark
examples/imagenet/inception/slim/losses.py
l2_regularizer
def l2_regularizer(weight=1.0, scope=None): """Define a L2 regularizer. Args: weight: scale the loss by this factor. scope: Optional scope for name_scope. Returns: a regularizer function. """ def regularizer(tensor): with tf.name_scope(scope, 'L2Regularizer', [tensor]): l2_weight = tf.convert_to_tensor(weight, dtype=tensor.dtype.base_dtype, name='weight') return tf.multiply(l2_weight, tf.nn.l2_loss(tensor), name='value') return regularizer
python
def l2_regularizer(weight=1.0, scope=None): """Define a L2 regularizer. Args: weight: scale the loss by this factor. scope: Optional scope for name_scope. Returns: a regularizer function. """ def regularizer(tensor): with tf.name_scope(scope, 'L2Regularizer', [tensor]): l2_weight = tf.convert_to_tensor(weight, dtype=tensor.dtype.base_dtype, name='weight') return tf.multiply(l2_weight, tf.nn.l2_loss(tensor), name='value') return regularizer
[ "def", "l2_regularizer", "(", "weight", "=", "1.0", ",", "scope", "=", "None", ")", ":", "def", "regularizer", "(", "tensor", ")", ":", "with", "tf", ".", "name_scope", "(", "scope", ",", "'L2Regularizer'", ",", "[", "tensor", "]", ")", ":", "l2_weight", "=", "tf", ".", "convert_to_tensor", "(", "weight", ",", "dtype", "=", "tensor", ".", "dtype", ".", "base_dtype", ",", "name", "=", "'weight'", ")", "return", "tf", ".", "multiply", "(", "l2_weight", ",", "tf", ".", "nn", ".", "l2_loss", "(", "tensor", ")", ",", "name", "=", "'value'", ")", "return", "regularizer" ]
Define a L2 regularizer. Args: weight: scale the loss by this factor. scope: Optional scope for name_scope. Returns: a regularizer function.
[ "Define", "a", "L2", "regularizer", "." ]
train
https://github.com/yahoo/TensorFlowOnSpark/blob/5e4b6c185ab722fd0104ede0377e1149ea8d6f7c/examples/imagenet/inception/slim/losses.py#L56-L72
0.010657
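Since the factory returns a closure, usage is plain function application: call l2_regularizer once to fix the scale, then apply the result to each weight tensor. A TF1 graph-mode sketch in the style of the code above; the variable shape is illustrative:

import tensorflow as tf  # TF1-style API, as used by this slim module

weights = tf.Variable(tf.truncated_normal([256, 128], stddev=0.1))
regularizer = l2_regularizer(weight=0.004, scope='conv1_reg')
reg_loss = regularizer(weights)  # equals 0.004 * tf.nn.l2_loss(weights)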
quodlibet/mutagen
mutagen/dsf.py
delete
def delete(filething): """Remove tags from a file. Args: filething (filething) Raises: mutagen.MutagenError """ dsf_file = DSFFile(filething.fileobj) if dsf_file.dsd_chunk.offset_metdata_chunk != 0: id3_location = dsf_file.dsd_chunk.offset_metdata_chunk dsf_file.dsd_chunk.offset_metdata_chunk = 0 dsf_file.dsd_chunk.write() filething.fileobj.seek(id3_location) filething.fileobj.truncate()
python
def delete(filething): """Remove tags from a file. Args: filething (filething) Raises: mutagen.MutagenError """ dsf_file = DSFFile(filething.fileobj) if dsf_file.dsd_chunk.offset_metdata_chunk != 0: id3_location = dsf_file.dsd_chunk.offset_metdata_chunk dsf_file.dsd_chunk.offset_metdata_chunk = 0 dsf_file.dsd_chunk.write() filething.fileobj.seek(id3_location) filething.fileobj.truncate()
[ "def", "delete", "(", "filething", ")", ":", "dsf_file", "=", "DSFFile", "(", "filething", ".", "fileobj", ")", "if", "dsf_file", ".", "dsd_chunk", ".", "offset_metdata_chunk", "!=", "0", ":", "id3_location", "=", "dsf_file", ".", "dsd_chunk", ".", "offset_metdata_chunk", "dsf_file", ".", "dsd_chunk", ".", "offset_metdata_chunk", "=", "0", "dsf_file", ".", "dsd_chunk", ".", "write", "(", ")", "filething", ".", "fileobj", ".", "seek", "(", "id3_location", ")", "filething", ".", "fileobj", ".", "truncate", "(", ")" ]
Remove tags from a file. Args: filething (filething) Raises: mutagen.MutagenError
[ "Remove", "tags", "from", "a", "file", "." ]
train
https://github.com/quodlibet/mutagen/blob/e393df5971ba41ba5a50de9c2c9e7e5484d82c4e/mutagen/dsf.py#L338-L355
0.00211
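Callers normally reach this helper through mutagen's file wrapper rather than directly. A hedged sketch, assuming the DSF file type that mutagen.dsf defines alongside this function (the path is hypothetical):

from mutagen.dsf import DSF

audio = DSF('track.dsf')  # hypothetical file path
audio.delete()            # zeroes the metadata offset and truncates the ID3 chunk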
has2k1/plotnine
plotnine/positions/position.py
position.from_geom
def from_geom(geom): """ Create and return a position object for the geom Parameters ---------- geom : geom An instantiated geom object. Returns ------- out : position A position object Raises :class:`PlotnineError` if unable to create a `position`. """ name = geom.params['position'] if issubclass(type(name), position): return name if isinstance(name, type) and issubclass(name, position): klass = name elif is_string(name): if not name.startswith('position_'): name = 'position_{}'.format(name) klass = Registry[name] else: raise PlotnineError( 'Unknown position of type {}'.format(type(name))) return klass()
python
def from_geom(geom): """ Create and return a position object for the geom Parameters ---------- geom : geom An instantiated geom object. Returns ------- out : position A position object Raises :class:`PlotnineError` if unable to create a `position`. """ name = geom.params['position'] if issubclass(type(name), position): return name if isinstance(name, type) and issubclass(name, position): klass = name elif is_string(name): if not name.startswith('position_'): name = 'position_{}'.format(name) klass = Registry[name] else: raise PlotnineError( 'Unknown position of type {}'.format(type(name))) return klass()
[ "def", "from_geom", "(", "geom", ")", ":", "name", "=", "geom", ".", "params", "[", "'position'", "]", "if", "issubclass", "(", "type", "(", "name", ")", ",", "position", ")", ":", "return", "name", "if", "isinstance", "(", "name", ",", "type", ")", "and", "issubclass", "(", "name", ",", "position", ")", ":", "klass", "=", "name", "elif", "is_string", "(", "name", ")", ":", "if", "not", "name", ".", "startswith", "(", "'position_'", ")", ":", "name", "=", "'position_{}'", ".", "format", "(", "name", ")", "klass", "=", "Registry", "[", "name", "]", "else", ":", "raise", "PlotnineError", "(", "'Unknown position of type {}'", ".", "format", "(", "type", "(", "name", ")", ")", ")", "return", "klass", "(", ")" ]
Create and return a position object for the geom Parameters ---------- geom : geom An instantiated geom object. Returns ------- out : position A position object Raises :class:`PlotnineError` if unable to create a `position`.
[ "Create", "and", "return", "a", "position", "object", "for", "the", "geom" ]
train
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/positions/position.py#L106-L136
0.002323
blockstack/blockstack-core
blockstack/lib/atlas.py
atlas_peer_update_health
def atlas_peer_update_health( peer_hostport, received_response, peer_table=None ): """ Mark the given peer as alive at this time. Update times at which we contacted it, and update its health score. Use the global health table by default, or use the given health info if set. """ with AtlasPeerTableLocked(peer_table) as ptbl: if peer_hostport not in ptbl.keys(): return False # record that we contacted this peer, and whether or not we useful info from it now = time_now() # update timestamps; remove old data new_times = [] for (t, r) in ptbl[peer_hostport]['time']: if t + atlas_peer_lifetime_interval() < now: continue new_times.append((t, r)) new_times.append((now, received_response)) ptbl[peer_hostport]['time'] = new_times return True
python
def atlas_peer_update_health( peer_hostport, received_response, peer_table=None ): """ Mark the given peer as alive at this time. Update times at which we contacted it, and update its health score. Use the global health table by default, or use the given health info if set. """ with AtlasPeerTableLocked(peer_table) as ptbl: if peer_hostport not in ptbl.keys(): return False # record that we contacted this peer, and whether or not we got useful info from it now = time_now() # update timestamps; remove old data new_times = [] for (t, r) in ptbl[peer_hostport]['time']: if t + atlas_peer_lifetime_interval() < now: continue new_times.append((t, r)) new_times.append((now, received_response)) ptbl[peer_hostport]['time'] = new_times return True
[ "def", "atlas_peer_update_health", "(", "peer_hostport", ",", "received_response", ",", "peer_table", "=", "None", ")", ":", "with", "AtlasPeerTableLocked", "(", "peer_table", ")", "as", "ptbl", ":", "if", "peer_hostport", "not", "in", "ptbl", ".", "keys", "(", ")", ":", "return", "False", "# record that we contacted this peer, and whether or not we useful info from it", "now", "=", "time_now", "(", ")", "# update timestamps; remove old data", "new_times", "=", "[", "]", "for", "(", "t", ",", "r", ")", "in", "ptbl", "[", "peer_hostport", "]", "[", "'time'", "]", ":", "if", "t", "+", "atlas_peer_lifetime_interval", "(", ")", "<", "now", ":", "continue", "new_times", ".", "append", "(", "(", "t", ",", "r", ")", ")", "new_times", ".", "append", "(", "(", "now", ",", "received_response", ")", ")", "ptbl", "[", "peer_hostport", "]", "[", "'time'", "]", "=", "new_times", "return", "True" ]
Mark the given peer as alive at this time. Update times at which we contacted it, and update its health score. Use the global health table by default, or use the given health info if set.
[ "Mark", "the", "given", "peer", "as", "alive", "at", "this", "time", ".", "Update", "times", "at", "which", "we", "contacted", "it", "and", "update", "its", "health", "score", "." ]
train
https://github.com/blockstack/blockstack-core/blob/1dcfdd39b152d29ce13e736a6a1a0981401a0505/blockstack/lib/atlas.py#L1902-L1930
0.007667
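The bookkeeping above is a sliding time window: samples older than the lifetime interval are dropped before the new (timestamp, response) sample is appended. The same pruning pattern in standalone form (LIFETIME is an assumed window length, not the value Atlas uses):

import time

LIFETIME = 3600.0  # assumed window length in seconds

def update_window(samples, got_response, now=None):
    """Drop (timestamp, response) pairs older than LIFETIME, then append the new sample."""
    now = time.time() if now is None else now
    fresh = [(t, r) for (t, r) in samples if t + LIFETIME >= now]
    fresh.append((now, got_response))
    return fresh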
manahl/arctic
arctic/store/_ndarray_store.py
_update_fw_pointers
def _update_fw_pointers(collection, symbol, version, previous_version, is_append, shas_to_add=None): """ This function will decide whether to update the version document with forward pointers to segments. It detects cases where no prior writes/appends have been performed with FW pointers, and extracts the segment IDs. It also sets the metadata which indicate the mode of operation at the time of the version creation. """ version[FW_POINTERS_CONFIG_KEY] = ARCTIC_FORWARD_POINTERS_CFG.name # get the str as enum is not BSON serializable if ARCTIC_FORWARD_POINTERS_CFG is FwPointersCfg.DISABLED: return version_shas = set() if is_append: # Appends are tricky, as we extract the SHAs from the previous version (assuming it has FW pointers info) prev_fw_cfg = get_fwptr_config(previous_version) if prev_fw_cfg is FwPointersCfg.DISABLED.name: version_shas.update(Binary(sha) for sha in collection.find( {'symbol': symbol, 'parent': version_base_or_id(previous_version), 'segment': {'$lt': previous_version['up_to']}}, {'sha': 1})) else: version_shas.update(previous_version[FW_POINTERS_REFS_KEY]) # It is a write (we always get the all-inclusive set of SHAs), so no need to obtain previous SHAs version_shas.update(shas_to_add) # Verify here the number of seen segments vs expected ones if len(version_shas) != version['segment_count']: raise pymongo.errors.OperationFailure("Mismatched number of forward pointers to segments for {}: {} != {})" "Is append: {}. Previous version: {}. " "Gathered forward pointers segment shas: {}.".format( symbol, len(version_shas), version['segment_count'], is_append, previous_version['_id'], version_shas)) version[FW_POINTERS_REFS_KEY] = list(version_shas)
python
def _update_fw_pointers(collection, symbol, version, previous_version, is_append, shas_to_add=None): """ This function will decide whether to update the version document with forward pointers to segments. It detects cases where no prior writes/appends have been performed with FW pointers, and extracts the segment IDs. It also sets the metadata which indicate the mode of operation at the time of the version creation. """ version[FW_POINTERS_CONFIG_KEY] = ARCTIC_FORWARD_POINTERS_CFG.name # get the str as enum is not BSON serializable if ARCTIC_FORWARD_POINTERS_CFG is FwPointersCfg.DISABLED: return version_shas = set() if is_append: # Appends are tricky, as we extract the SHAs from the previous version (assuming it has FW pointers info) prev_fw_cfg = get_fwptr_config(previous_version) if prev_fw_cfg is FwPointersCfg.DISABLED.name: version_shas.update(Binary(sha) for sha in collection.find( {'symbol': symbol, 'parent': version_base_or_id(previous_version), 'segment': {'$lt': previous_version['up_to']}}, {'sha': 1})) else: version_shas.update(previous_version[FW_POINTERS_REFS_KEY]) # It is a write (we always get the all-inclusive set of SHAs), so no need to obtain previous SHAs version_shas.update(shas_to_add) # Verify here the number of seen segments vs expected ones if len(version_shas) != version['segment_count']: raise pymongo.errors.OperationFailure("Mismatched number of forward pointers to segments for {}: {} != {})" "Is append: {}. Previous version: {}. " "Gathered forward pointers segment shas: {}.".format( symbol, len(version_shas), version['segment_count'], is_append, previous_version['_id'], version_shas)) version[FW_POINTERS_REFS_KEY] = list(version_shas)
[ "def", "_update_fw_pointers", "(", "collection", ",", "symbol", ",", "version", ",", "previous_version", ",", "is_append", ",", "shas_to_add", "=", "None", ")", ":", "version", "[", "FW_POINTERS_CONFIG_KEY", "]", "=", "ARCTIC_FORWARD_POINTERS_CFG", ".", "name", "# get the str as enum is not BSON serializable", "if", "ARCTIC_FORWARD_POINTERS_CFG", "is", "FwPointersCfg", ".", "DISABLED", ":", "return", "version_shas", "=", "set", "(", ")", "if", "is_append", ":", "# Appends are tricky, as we extract the SHAs from the previous version (assuming it has FW pointers info)", "prev_fw_cfg", "=", "get_fwptr_config", "(", "previous_version", ")", "if", "prev_fw_cfg", "is", "FwPointersCfg", ".", "DISABLED", ".", "name", ":", "version_shas", ".", "update", "(", "Binary", "(", "sha", ")", "for", "sha", "in", "collection", ".", "find", "(", "{", "'symbol'", ":", "symbol", ",", "'parent'", ":", "version_base_or_id", "(", "previous_version", ")", ",", "'segment'", ":", "{", "'$lt'", ":", "previous_version", "[", "'up_to'", "]", "}", "}", ",", "{", "'sha'", ":", "1", "}", ")", ")", "else", ":", "version_shas", ".", "update", "(", "previous_version", "[", "FW_POINTERS_REFS_KEY", "]", ")", "# It is a write (we always get the all-inclusive set of SHAs), so no need to obtain previous SHAs", "version_shas", ".", "update", "(", "shas_to_add", ")", "# Verify here the number of seen segments vs expected ones", "if", "len", "(", "version_shas", ")", "!=", "version", "[", "'segment_count'", "]", ":", "raise", "pymongo", ".", "errors", ".", "OperationFailure", "(", "\"Mismatched number of forward pointers to segments for {}: {} != {})\"", "\"Is append: {}. Previous version: {}. \"", "\"Gathered forward pointers segment shas: {}.\"", ".", "format", "(", "symbol", ",", "len", "(", "version_shas", ")", ",", "version", "[", "'segment_count'", "]", ",", "is_append", ",", "previous_version", "[", "'_id'", "]", ",", "version_shas", ")", ")", "version", "[", "FW_POINTERS_REFS_KEY", "]", "=", "list", "(", "version_shas", ")" ]
This function will decide whether to update the version document with forward pointers to segments. It detects cases where no prior writes/appends have been performed with FW pointers, and extracts the segment IDs. It also sets the metadata which indicate the mode of operation at the time of the version creation.
[ "This", "function", "will", "decide", "whether", "to", "update", "the", "version", "document", "with", "forward", "pointers", "to", "segments", ".", "It", "detects", "cases", "where", "no", "prior", "writes", "/", "appends", "have", "been", "performed", "with", "FW", "pointers", "and", "extracts", "the", "segment", "IDs", ".", "It", "also", "sets", "the", "metadata", "which", "indicate", "the", "mode", "of", "operation", "at", "the", "time", "of", "the", "version", "creation", "." ]
train
https://github.com/manahl/arctic/blob/57e110b6e182dbab00e7e214dc26f7d9ec47c120/arctic/store/_ndarray_store.py#L121-L156
0.006516
singularityhub/singularity-cli
spython/image/__init__.py
Image.get_hash
def get_hash(self, image=None): '''return an md5 hash of the file based on a criteria level. This is intended to give the file a reasonable version. This only is useful for actual image files. Parameters ========== image: the image path to get hash for (first priority). Second priority is image path saved with image object, if exists. ''' hasher = hashlib.md5() image = image or self.image if os.path.exists(image): with open(image, "rb") as f: for chunk in iter(lambda: f.read(4096), b""): hasher.update(chunk) return hasher.hexdigest() bot.warning('%s does not exist.' %image)
python
def get_hash(self, image=None): '''return an md5 hash of the file based on a criteria level. This is intended to give the file a reasonable version. This only is useful for actual image files. Parameters ========== image: the image path to get hash for (first priority). Second priority is image path saved with image object, if exists. ''' hasher = hashlib.md5() image = image or self.image if os.path.exists(image): with open(image, "rb") as f: for chunk in iter(lambda: f.read(4096), b""): hasher.update(chunk) return hasher.hexdigest() bot.warning('%s does not exist.' %image)
[ "def", "get_hash", "(", "self", ",", "image", "=", "None", ")", ":", "hasher", "=", "hashlib", ".", "md5", "(", ")", "image", "=", "image", "or", "self", ".", "image", "if", "os", ".", "path", ".", "exists", "(", "image", ")", ":", "with", "open", "(", "image", ",", "\"rb\"", ")", "as", "f", ":", "for", "chunk", "in", "iter", "(", "lambda", ":", "f", ".", "read", "(", "4096", ")", ",", "b\"\"", ")", ":", "hasher", ".", "update", "(", "chunk", ")", "return", "hasher", ".", "hexdigest", "(", ")", "bot", ".", "warning", "(", "'%s does not exist.'", "%", "image", ")" ]
return an md5 hash of the file based on a criteria level. This is intended to give the file a reasonable version. This only is useful for actual image files. Parameters ========== image: the image path to get hash for (first priority). Second priority is image path saved with image object, if exists.
[ "return", "an", "md5", "hash", "of", "the", "file", "based", "on", "a", "criteria", "level", ".", "This", "is", "intended", "to", "give", "the", "file", "a", "reasonable", "version", ".", "This", "only", "is", "useful", "for", "actual", "image", "files", ".", "Parameters", "==========", "image", ":", "the", "image", "path", "to", "get", "hash", "for", "(", "first", "priority", ")", ".", "Second", "priority", "is", "image", "path", "saved", "with", "image", "object", "if", "exists", "." ]
train
https://github.com/singularityhub/singularity-cli/blob/cb36b4504812ca87e29c6a40b222a545d1865799/spython/image/__init__.py#L82-L102
0.00646
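The loop above is the standard chunked-read idiom for hashing a file without loading it into memory all at once; the same pattern as a self-contained helper:

import hashlib

def file_md5(path, chunk_size=4096):
    """Stream a file through md5 in fixed-size chunks and return the hex digest."""
    hasher = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            hasher.update(chunk)
    return hasher.hexdigest()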
jaraco/path.py
path/__init__.py
Path.uncshare
def uncshare(self): """ The UNC mount point for this path. This is empty for paths on local drives. """ unc, r = self.module.splitunc(self) return self._next_class(unc)
python
def uncshare(self): """ The UNC mount point for this path. This is empty for paths on local drives. """ unc, r = self.module.splitunc(self) return self._next_class(unc)
[ "def", "uncshare", "(", "self", ")", ":", "unc", ",", "r", "=", "self", ".", "module", ".", "splitunc", "(", "self", ")", "return", "self", ".", "_next_class", "(", "unc", ")" ]
The UNC mount point for this path. This is empty for paths on local drives.
[ "The", "UNC", "mount", "point", "for", "this", "path", ".", "This", "is", "empty", "for", "paths", "on", "local", "drives", "." ]
train
https://github.com/jaraco/path.py/blob/bbe7d99e7a64a004f866ace9ec12bd9b296908f5/path/__init__.py#L391-L397
0.009259
archman/beamline
beamline/lattice.py
Lattice.formatElement
def formatElement(self, kw, format='elegant'): """ convert json/dict of element configuration into elegant/mad format :param kw: keyword """ etype = self.getElementType(kw) econf_dict = self.getElementConf(kw) econf_str = '' for k, v in econf_dict.items(): econf_str += (k + ' = ' + '"' + str(v) + '"' + ', ') if format == 'elegant': fmtstring = '{eid:<10s}:{etype:>10s}, {econf}'.format(eid=kw.upper(), etype=etype.upper(), econf=econf_str[ :-2]) # [:-2] slicing to remove trailing space and ',' elif format == 'mad': raise NotImplementedError("Not implemented, yet") return fmtstring
python
def formatElement(self, kw, format='elegant'): """ convert json/dict of element configuration into elegant/mad format :param kw: keyword """ etype = self.getElementType(kw) econf_dict = self.getElementConf(kw) econf_str = '' for k, v in econf_dict.items(): econf_str += (k + ' = ' + '"' + str(v) + '"' + ', ') if format == 'elegant': fmtstring = '{eid:<10s}:{etype:>10s}, {econf}'.format(eid=kw.upper(), etype=etype.upper(), econf=econf_str[ :-2]) # [:-2] slicing to remove trailing space and ',' elif format == 'mad': raise NotImplementedError("Not implemented, yet") return fmtstring
[ "def", "formatElement", "(", "self", ",", "kw", ",", "format", "=", "'elegant'", ")", ":", "etype", "=", "self", ".", "getElementType", "(", "kw", ")", "econf_dict", "=", "self", ".", "getElementConf", "(", "kw", ")", "econf_str", "=", "''", "for", "k", ",", "v", "in", "econf_dict", ".", "items", "(", ")", ":", "econf_str", "+=", "(", "k", "+", "' = '", "+", "'\"'", "+", "str", "(", "v", ")", "+", "'\"'", "+", "', '", ")", "if", "format", "==", "'elegant'", ":", "fmtstring", "=", "'{eid:<10s}:{etype:>10s}, {econf}'", ".", "format", "(", "eid", "=", "kw", ".", "upper", "(", ")", ",", "etype", "=", "etype", ".", "upper", "(", ")", ",", "econf", "=", "econf_str", "[", ":", "-", "2", "]", ")", "# [:-2] slicing to remove trailing space and ','", "elif", "format", "==", "'mad'", ":", "raise", "NotImplementedError", "(", "\"Not implemented, yet\"", ")", "return", "fmtstring" ]
convert json/dict of element configuration into elegant/mad format :param kw: keyword
[ "convert", "json", "/", "dict", "of", "element", "configuration", "into", "elegant", "/", "mad", "format", ":", "param", "kw", ":", "keyword" ]
train
https://github.com/archman/beamline/blob/417bc5dc13e754bc89d246427984590fced64d07/beamline/lattice.py#L591-L610
0.005441
StackStorm/pybind
pybind/nos/v6_0_2f/brocade_vcs_rpc/__init__.py
brocade_vcs._set_get_vcs_details
def _set_get_vcs_details(self, v, load=False): """ Setter method for get_vcs_details, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details (rpc) If this variable is read-only (config: false) in the source YANG file, then _set_get_vcs_details is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_get_vcs_details() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=get_vcs_details.get_vcs_details, is_leaf=True, yang_name="get-vcs-details", rest_name="get-vcs-details", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'rpccmd', u'actionpoint': u'getvcsdetails-action-point'}}, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='rpc', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """get_vcs_details must be of a type compatible with rpc""", 'defined-type': "rpc", 'generated-type': """YANGDynClass(base=get_vcs_details.get_vcs_details, is_leaf=True, yang_name="get-vcs-details", rest_name="get-vcs-details", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'rpccmd', u'actionpoint': u'getvcsdetails-action-point'}}, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='rpc', is_config=True)""", }) self.__get_vcs_details = t if hasattr(self, '_set'): self._set()
python
def _set_get_vcs_details(self, v, load=False): """ Setter method for get_vcs_details, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details (rpc) If this variable is read-only (config: false) in the source YANG file, then _set_get_vcs_details is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_get_vcs_details() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=get_vcs_details.get_vcs_details, is_leaf=True, yang_name="get-vcs-details", rest_name="get-vcs-details", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'rpccmd', u'actionpoint': u'getvcsdetails-action-point'}}, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='rpc', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """get_vcs_details must be of a type compatible with rpc""", 'defined-type': "rpc", 'generated-type': """YANGDynClass(base=get_vcs_details.get_vcs_details, is_leaf=True, yang_name="get-vcs-details", rest_name="get-vcs-details", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'rpccmd', u'actionpoint': u'getvcsdetails-action-point'}}, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='rpc', is_config=True)""", }) self.__get_vcs_details = t if hasattr(self, '_set'): self._set()
[ "def", "_set_get_vcs_details", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ",", "base", "=", "get_vcs_details", ".", "get_vcs_details", ",", "is_leaf", "=", "True", ",", "yang_name", "=", "\"get-vcs-details\"", ",", "rest_name", "=", "\"get-vcs-details\"", ",", "parent", "=", "self", ",", "path_helper", "=", "self", ".", "_path_helper", ",", "extmethods", "=", "self", ".", "_extmethods", ",", "register_paths", "=", "False", ",", "extensions", "=", "{", "u'tailf-common'", ":", "{", "u'hidden'", ":", "u'rpccmd'", ",", "u'actionpoint'", ":", "u'getvcsdetails-action-point'", "}", "}", ",", "namespace", "=", "'urn:brocade.com:mgmt:brocade-vcs'", ",", "defining_module", "=", "'brocade-vcs'", ",", "yang_type", "=", "'rpc'", ",", "is_config", "=", "True", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "ValueError", "(", "{", "'error-string'", ":", "\"\"\"get_vcs_details must be of a type compatible with rpc\"\"\"", ",", "'defined-type'", ":", "\"rpc\"", ",", "'generated-type'", ":", "\"\"\"YANGDynClass(base=get_vcs_details.get_vcs_details, is_leaf=True, yang_name=\"get-vcs-details\", rest_name=\"get-vcs-details\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'rpccmd', u'actionpoint': u'getvcsdetails-action-point'}}, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='rpc', is_config=True)\"\"\"", ",", "}", ")", "self", ".", "__get_vcs_details", "=", "t", "if", "hasattr", "(", "self", ",", "'_set'", ")", ":", "self", ".", "_set", "(", ")" ]
Setter method for get_vcs_details, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details (rpc) If this variable is read-only (config: false) in the source YANG file, then _set_get_vcs_details is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_get_vcs_details() directly.
[ "Setter", "method", "for", "get_vcs_details", "mapped", "from", "YANG", "variable", "/", "brocade_vcs_rpc", "/", "get_vcs_details", "(", "rpc", ")", "If", "this", "variable", "is", "read", "-", "only", "(", "config", ":", "false", ")", "in", "the", "source", "YANG", "file", "then", "_set_get_vcs_details", "is", "considered", "as", "a", "private", "method", ".", "Backends", "looking", "to", "populate", "this", "variable", "should", "do", "so", "via", "calling", "thisObj", ".", "_set_get_vcs_details", "()", "directly", "." ]
train
https://github.com/StackStorm/pybind/blob/44c467e71b2b425be63867aba6e6fa28b2cfe7fb/pybind/nos/v6_0_2f/brocade_vcs_rpc/__init__.py#L227-L248
0.006024
thewca/wca-regulations-compiler
wrc/parse/parser.py
WCAParser.p_text
def p_text(self, text): '''text : TEXT PARBREAK | TEXT | PARBREAK''' item = text[1] text[0] = item if item[0] != "\n" else u"" if len(text) > 2: text[0] += "\n"
python
def p_text(self, text): '''text : TEXT PARBREAK | TEXT | PARBREAK''' item = text[1] text[0] = item if item[0] != "\n" else u"" if len(text) > 2: text[0] += "\n"
[ "def", "p_text", "(", "self", ",", "text", ")", ":", "item", "=", "text", "[", "1", "]", "text", "[", "0", "]", "=", "item", "if", "item", "[", "0", "]", "!=", "\"\\n\"", "else", "u\"\"", "if", "len", "(", "text", ")", ">", "2", ":", "text", "[", "0", "]", "+=", "\"\\n\"" ]
text : TEXT PARBREAK | TEXT | PARBREAK
[ "text", ":", "TEXT", "PARBREAK", "|", "TEXT", "|", "PARBREAK" ]
train
https://github.com/thewca/wca-regulations-compiler/blob/3ebbd8fe8fec7c9167296f59b2677696fe61a954/wrc/parse/parser.py#L98-L105
0.008475
cedricbonhomme/Stegano
stegano/lsb/lsb.py
hide
def hide( input_image: Union[str, IO[bytes]], message: str, encoding: str = "UTF-8", auto_convert_rgb: bool = False, ): """Hide a message (string) in an image with the LSB (Least Significant Bit) technique. """ message_length = len(message) assert message_length != 0, "message length is zero" img = tools.open_image(input_image) if img.mode not in ["RGB", "RGBA"]: if not auto_convert_rgb: print( "The mode of the image is not RGB. Mode is {}".format(img.mode) ) answer = input("Convert the image to RGB ? [Y / n]\n") or "Y" if answer.lower() == "n": raise Exception("Not a RGB image.") img = img.convert("RGB") encoded = img.copy() width, height = img.size index = 0 message = str(message_length) + ":" + str(message) message_bits = "".join(tools.a2bits_list(message, encoding)) message_bits += "0" * ((3 - (len(message_bits) % 3)) % 3) npixels = width * height len_message_bits = len(message_bits) if len_message_bits > npixels * 3: raise Exception( "The message you want to hide is too long: {}".format( message_length ) ) for row in range(height): for col in range(width): if index + 3 <= len_message_bits: # Get the colour component. pixel = img.getpixel((col, row)) r = pixel[0] g = pixel[1] b = pixel[2] # Change the Least Significant Bit of each colour component. r = tools.setlsb(r, message_bits[index]) g = tools.setlsb(g, message_bits[index + 1]) b = tools.setlsb(b, message_bits[index + 2]) # Save the new pixel if img.mode == "RGBA": encoded.putpixel((col, row), (r, g, b, pixel[3])) else: encoded.putpixel((col, row), (r, g, b)) index += 3 else: img.close() return encoded
python
def hide( input_image: Union[str, IO[bytes]], message: str, encoding: str = "UTF-8", auto_convert_rgb: bool = False, ): """Hide a message (string) in an image with the LSB (Least Significant Bit) technique. """ message_length = len(message) assert message_length != 0, "message length is zero" img = tools.open_image(input_image) if img.mode not in ["RGB", "RGBA"]: if not auto_convert_rgb: print( "The mode of the image is not RGB. Mode is {}".format(img.mode) ) answer = input("Convert the image to RGB ? [Y / n]\n") or "Y" if answer.lower() == "n": raise Exception("Not a RGB image.") img = img.convert("RGB") encoded = img.copy() width, height = img.size index = 0 message = str(message_length) + ":" + str(message) message_bits = "".join(tools.a2bits_list(message, encoding)) message_bits += "0" * ((3 - (len(message_bits) % 3)) % 3) npixels = width * height len_message_bits = len(message_bits) if len_message_bits > npixels * 3: raise Exception( "The message you want to hide is too long: {}".format( message_length ) ) for row in range(height): for col in range(width): if index + 3 <= len_message_bits: # Get the colour component. pixel = img.getpixel((col, row)) r = pixel[0] g = pixel[1] b = pixel[2] # Change the Least Significant Bit of each colour component. r = tools.setlsb(r, message_bits[index]) g = tools.setlsb(g, message_bits[index + 1]) b = tools.setlsb(b, message_bits[index + 2]) # Save the new pixel if img.mode == "RGBA": encoded.putpixel((col, row), (r, g, b, pixel[3])) else: encoded.putpixel((col, row), (r, g, b)) index += 3 else: img.close() return encoded
[ "def", "hide", "(", "input_image", ":", "Union", "[", "str", ",", "IO", "[", "bytes", "]", "]", ",", "message", ":", "str", ",", "encoding", ":", "str", "=", "\"UTF-8\"", ",", "auto_convert_rgb", ":", "bool", "=", "False", ",", ")", ":", "message_length", "=", "len", "(", "message", ")", "assert", "message_length", "!=", "0", ",", "\"message length is zero\"", "img", "=", "tools", ".", "open_image", "(", "input_image", ")", "if", "img", ".", "mode", "not", "in", "[", "\"RGB\"", ",", "\"RGBA\"", "]", ":", "if", "not", "auto_convert_rgb", ":", "print", "(", "\"The mode of the image is not RGB. Mode is {}\"", ".", "format", "(", "img", ".", "mode", ")", ")", "answer", "=", "input", "(", "\"Convert the image to RGB ? [Y / n]\\n\"", ")", "or", "\"Y\"", "if", "answer", ".", "lower", "(", ")", "==", "\"n\"", ":", "raise", "Exception", "(", "\"Not a RGB image.\"", ")", "img", "=", "img", ".", "convert", "(", "\"RGB\"", ")", "encoded", "=", "img", ".", "copy", "(", ")", "width", ",", "height", "=", "img", ".", "size", "index", "=", "0", "message", "=", "str", "(", "message_length", ")", "+", "\":\"", "+", "str", "(", "message", ")", "message_bits", "=", "\"\"", ".", "join", "(", "tools", ".", "a2bits_list", "(", "message", ",", "encoding", ")", ")", "message_bits", "+=", "\"0\"", "*", "(", "(", "3", "-", "(", "len", "(", "message_bits", ")", "%", "3", ")", ")", "%", "3", ")", "npixels", "=", "width", "*", "height", "len_message_bits", "=", "len", "(", "message_bits", ")", "if", "len_message_bits", ">", "npixels", "*", "3", ":", "raise", "Exception", "(", "\"The message you want to hide is too long: {}\"", ".", "format", "(", "message_length", ")", ")", "for", "row", "in", "range", "(", "height", ")", ":", "for", "col", "in", "range", "(", "width", ")", ":", "if", "index", "+", "3", "<=", "len_message_bits", ":", "# Get the colour component.", "pixel", "=", "img", ".", "getpixel", "(", "(", "col", ",", "row", ")", ")", "r", "=", "pixel", "[", "0", "]", "g", "=", "pixel", "[", "1", "]", "b", "=", "pixel", "[", "2", "]", "# Change the Least Significant Bit of each colour component.", "r", "=", "tools", ".", "setlsb", "(", "r", ",", "message_bits", "[", "index", "]", ")", "g", "=", "tools", ".", "setlsb", "(", "g", ",", "message_bits", "[", "index", "+", "1", "]", ")", "b", "=", "tools", ".", "setlsb", "(", "b", ",", "message_bits", "[", "index", "+", "2", "]", ")", "# Save the new pixel", "if", "img", ".", "mode", "==", "\"RGBA\"", ":", "encoded", ".", "putpixel", "(", "(", "col", ",", "row", ")", ",", "(", "r", ",", "g", ",", "b", ",", "pixel", "[", "3", "]", ")", ")", "else", ":", "encoded", ".", "putpixel", "(", "(", "col", ",", "row", ")", ",", "(", "r", ",", "g", ",", "b", ")", ")", "index", "+=", "3", "else", ":", "img", ".", "close", "(", ")", "return", "encoded" ]
Hide a message (string) in an image with the LSB (Least Significant Bit) technique.
[ "Hide", "a", "message", "(", "string", ")", "in", "an", "image", "with", "the", "LSB", "(", "Least", "Significant", "Bit", ")", "technique", "." ]
train
https://github.com/cedricbonhomme/Stegano/blob/502e6303791d348e479290c22108551ba3be254f/stegano/lsb/lsb.py#L33-L97
0.000466
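A round-trip sketch for the function above; the reveal counterpart is assumed to sit next to hide in stegano.lsb, and the file names are hypothetical:

from stegano import lsb

secret = lsb.hide('./original.png', 'Hello, LSB!')  # returns the encoded PIL image
secret.save('./stego.png')

print(lsb.reveal('./stego.png'))  # 'Hello, LSB!'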
COALAIP/pycoalaip
coalaip/model_validators.py
is_creation_model
def is_creation_model(instance, attribute, value): """Must include at least a ``name`` key.""" creation_name = value.get('name') if not isinstance(creation_name, str): instance_name = instance.__class__.__name__ err_str = ("'name' must be given as a string in the '{attr}' " "parameter of a '{cls}'. Given " "'{value}'").format(attr=attribute.name, cls=instance_name, value=creation_name) raise ModelDataError(err_str)
python
def is_creation_model(instance, attribute, value): """Must include at least a ``name`` key.""" creation_name = value.get('name') if not isinstance(creation_name, str): instance_name = instance.__class__.__name__ err_str = ("'name' must be given as a string in the '{attr}' " "parameter of a '{cls}'. Given " "'{value}'").format(attr=attribute.name, cls=instance_name, value=creation_name) raise ModelDataError(err_str)
[ "def", "is_creation_model", "(", "instance", ",", "attribute", ",", "value", ")", ":", "creation_name", "=", "value", ".", "get", "(", "'name'", ")", "if", "not", "isinstance", "(", "creation_name", ",", "str", ")", ":", "instance_name", "=", "instance", ".", "__class__", ".", "__name__", "err_str", "=", "(", "\"'name' must be given as a string in the '{attr}' \"", "\"parameter of a '{cls}'. Given \"", "\"'{value}'\"", ")", ".", "format", "(", "attr", "=", "attribute", ".", "name", ",", "cls", "=", "instance_name", ",", "value", "=", "creation_name", ")", "raise", "ModelDataError", "(", "err_str", ")" ]
Must include at least a ``name`` key.
[ "Must", "include", "at", "least", "a", "name", "key", "." ]
train
https://github.com/COALAIP/pycoalaip/blob/cecc8f6ff4733f0525fafcee63647753e832f0be/coalaip/model_validators.py#L45-L56
0.001751
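The (instance, attribute, value) signature is the attrs validator protocol, so a hedged sketch of how is_creation_model would typically be attached (the class and field below are hypothetical, not from pycoalaip):

import attr

from coalaip.model_validators import is_creation_model

@attr.s
class CreationModel:
    # Runs at construction time; raises ModelDataError unless the dict
    # carries a string under the 'name' key.
    data = attr.ib(validator=is_creation_model)

CreationModel(data={'name': 'My Work'})  # passes validation
CreationModel(data={'name': 42})         # raises ModelDataError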
wavefrontHQ/python-client
wavefront_api_client/api/search_api.py
SearchApi.search_web_hook_entities
def search_web_hook_entities(self, **kwargs):  # noqa: E501
    """Search over a customer's webhooks  # noqa: E501

      # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.search_web_hook_entities(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param SortableSearchRequest body:
    :return: ResponseContainerPagedNotificant
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.search_web_hook_entities_with_http_info(**kwargs)  # noqa: E501
    else:
        (data) = self.search_web_hook_entities_with_http_info(**kwargs)  # noqa: E501
        return data
python
def search_web_hook_entities(self, **kwargs):  # noqa: E501
    """Search over a customer's webhooks  # noqa: E501

      # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.search_web_hook_entities(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param SortableSearchRequest body:
    :return: ResponseContainerPagedNotificant
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.search_web_hook_entities_with_http_info(**kwargs)  # noqa: E501
    else:
        (data) = self.search_web_hook_entities_with_http_info(**kwargs)  # noqa: E501
        return data
[ "def", "search_web_hook_entities", "(", "self", ",", "*", "*", "kwargs", ")", ":", "# noqa: E501", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'async_req'", ")", ":", "return", "self", ".", "search_web_hook_entities_with_http_info", "(", "*", "*", "kwargs", ")", "# noqa: E501", "else", ":", "(", "data", ")", "=", "self", ".", "search_web_hook_entities_with_http_info", "(", "*", "*", "kwargs", ")", "# noqa: E501", "return", "data" ]
Search over a customer's webhooks  # noqa: E501

  # noqa: E501

This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_web_hook_entities(async_req=True)
>>> result = thread.get()

:param async_req bool
:param SortableSearchRequest body:
:return: ResponseContainerPagedNotificant
         If the method is called asynchronously,
         returns the request thread.
[ "Search", "over", "a", "customer", "s", "webhooks", "#", "noqa", ":", "E501" ]
train
https://github.com/wavefrontHQ/python-client/blob/b0f1046a8f68c2c7d69e395f7167241f224c738a/wavefront_api_client/api/search_api.py#L5017-L5037
0.002179
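A sketch of calling the endpoint both ways, mirroring the docstring's own pattern; the client bootstrap and the SortableSearchRequest fields are assumed from the usual swagger-codegen layout:

import wavefront_api_client as wave

config = wave.Configuration()
api = wave.SearchApi(wave.ApiClient(config))

# Synchronous call: returns a ResponseContainerPagedNotificant directly.
result = api.search_web_hook_entities(body=wave.SortableSearchRequest())

# Asynchronous call: returns a thread; block on .get() for the result.
thread = api.search_web_hook_entities(async_req=True)
result = thread.get()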
Dallinger/Dallinger
dallinger/networks.py
FullyConnected.add_node
def add_node(self, node):
    """Add a node, connecting it to everyone and back."""
    other_nodes = [n for n in self.nodes() if n.id != node.id]
    for n in other_nodes:
        if isinstance(n, Source):
            node.connect(direction="from", whom=n)
        else:
            node.connect(direction="both", whom=n)
python
def add_node(self, node):
    """Add a node, connecting it to everyone and back."""
    other_nodes = [n for n in self.nodes() if n.id != node.id]
    for n in other_nodes:
        if isinstance(n, Source):
            node.connect(direction="from", whom=n)
        else:
            node.connect(direction="both", whom=n)
[ "def", "add_node", "(", "self", ",", "node", ")", ":", "other_nodes", "=", "[", "n", "for", "n", "in", "self", ".", "nodes", "(", ")", "if", "n", ".", "id", "!=", "node", ".", "id", "]", "for", "n", "in", "other_nodes", ":", "if", "isinstance", "(", "n", ",", "Source", ")", ":", "node", ".", "connect", "(", "direction", "=", "\"from\"", ",", "whom", "=", "n", ")", "else", ":", "node", ".", "connect", "(", "direction", "=", "\"both\"", ",", "whom", "=", "n", ")" ]
Add a node, connecting it to everyone and back.
[ "Add", "a", "node", "connecting", "it", "to", "everyone", "and", "back", "." ]
train
https://github.com/Dallinger/Dallinger/blob/76ca8217c709989c116d0ebd8fca37bd22f591af/dallinger/networks.py#L60-L68
0.005698
manns/pyspread
pyspread/src/gui/_main_window.py
MainWindowEventHandlers.OnMove
def OnMove(self, event):
    """Main window move event"""

    # Store window position in config
    position = self.main_window.GetScreenPositionTuple()
    config["window_position"] = repr(position)
python
def OnMove(self, event):
    """Main window move event"""

    # Store window position in config
    position = self.main_window.GetScreenPositionTuple()
    config["window_position"] = repr(position)
[ "def", "OnMove", "(", "self", ",", "event", ")", ":", "# Store window position in config", "position", "=", "self", ".", "main_window", ".", "GetScreenPositionTuple", "(", ")", "config", "[", "\"window_position\"", "]", "=", "repr", "(", "position", ")" ]
Main window move event
[ "Main", "window", "move", "event" ]
train
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_main_window.py#L438-L444
0.009217
ethereum/pyethereum
ethereum/experimental/pruning_trie.py
Trie.delete
def delete(self, key):
    """
    :param key: a string with length of [0, 32]
    """
    if not is_string(key):
        raise Exception("Key must be string")

    if len(key) > 32:
        raise Exception("Max key length is 32")

    old_root = copy.deepcopy(self.root_node)
    self.root_node = self._delete_and_delete_storage(
        self.root_node,
        bin_to_nibbles(to_string(key)))
    self.replace_root_hash(old_root, self.root_node)
python
def delete(self, key):
    """
    :param key: a string with length of [0, 32]
    """
    if not is_string(key):
        raise Exception("Key must be string")

    if len(key) > 32:
        raise Exception("Max key length is 32")

    old_root = copy.deepcopy(self.root_node)
    self.root_node = self._delete_and_delete_storage(
        self.root_node,
        bin_to_nibbles(to_string(key)))
    self.replace_root_hash(old_root, self.root_node)
[ "def", "delete", "(", "self", ",", "key", ")", ":", "if", "not", "is_string", "(", "key", ")", ":", "raise", "Exception", "(", "\"Key must be string\"", ")", "if", "len", "(", "key", ")", ">", "32", ":", "raise", "Exception", "(", "\"Max key length is 32\"", ")", "old_root", "=", "copy", ".", "deepcopy", "(", "self", ".", "root_node", ")", "self", ".", "root_node", "=", "self", ".", "_delete_and_delete_storage", "(", "self", ".", "root_node", ",", "bin_to_nibbles", "(", "to_string", "(", "key", ")", ")", ")", "self", ".", "replace_root_hash", "(", "old_root", ",", "self", ".", "root_node", ")" ]
:param key: a string with length of [0, 32]
[ ":", "param", "key", ":", "a", "string", "with", "length", "of", "[", "0", "32", "]" ]
train
https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/experimental/pruning_trie.py#L778-L792
0.00404
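A hedged sketch of exercising delete(); the Trie constructor and the EphemDB helper are assumed from the rest of pyethereum and may differ in this experimental module:

from ethereum.db import EphemDB
from ethereum.experimental.pruning_trie import Trie

t = Trie(EphemDB())
t.update(b'key', b'value')  # keys are strings of length [0, 32]
t.delete(b'key')            # removes the value and prunes dead nodes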
PythonCharmers/python-future
src/future/builtins/newround.py
from_float_26
def from_float_26(f):
    """Converts a float to a decimal number, exactly.

    Note that Decimal.from_float(0.1) is not the same as Decimal('0.1').
    Since 0.1 is not exactly representable in binary floating point, the
    value is stored as the nearest representable value which is
    0x1.999999999999ap-4.  The exact equivalent of the value in decimal
    is 0.1000000000000000055511151231257827021181583404541015625.

    >>> Decimal.from_float(0.1)
    Decimal('0.1000000000000000055511151231257827021181583404541015625')
    >>> Decimal.from_float(float('nan'))
    Decimal('NaN')
    >>> Decimal.from_float(float('inf'))
    Decimal('Infinity')
    >>> Decimal.from_float(-float('inf'))
    Decimal('-Infinity')
    >>> Decimal.from_float(-0.0)
    Decimal('-0')

    """
    import math as _math
    from decimal import _dec_from_triple    # only available on Py2.6 and Py2.7 (not 3.3)

    if isinstance(f, (int, long)):        # handle integer inputs
        return Decimal(f)
    if _math.isinf(f) or _math.isnan(f):  # raises TypeError if not a float
        return Decimal(repr(f))
    if _math.copysign(1.0, f) == 1.0:
        sign = 0
    else:
        sign = 1
    n, d = abs(f).as_integer_ratio()
    # int.bit_length() method doesn't exist on Py2.6:
    def bit_length(d):
        if d != 0:
            return len(bin(abs(d))) - 2
        else:
            return 0
    k = bit_length(d) - 1
    result = _dec_from_triple(sign, str(n*5**k), -k)
    return result
python
def from_float_26(f):
    """Converts a float to a decimal number, exactly.

    Note that Decimal.from_float(0.1) is not the same as Decimal('0.1').
    Since 0.1 is not exactly representable in binary floating point, the
    value is stored as the nearest representable value which is
    0x1.999999999999ap-4.  The exact equivalent of the value in decimal
    is 0.1000000000000000055511151231257827021181583404541015625.

    >>> Decimal.from_float(0.1)
    Decimal('0.1000000000000000055511151231257827021181583404541015625')
    >>> Decimal.from_float(float('nan'))
    Decimal('NaN')
    >>> Decimal.from_float(float('inf'))
    Decimal('Infinity')
    >>> Decimal.from_float(-float('inf'))
    Decimal('-Infinity')
    >>> Decimal.from_float(-0.0)
    Decimal('-0')

    """
    import math as _math
    from decimal import _dec_from_triple    # only available on Py2.6 and Py2.7 (not 3.3)

    if isinstance(f, (int, long)):        # handle integer inputs
        return Decimal(f)
    if _math.isinf(f) or _math.isnan(f):  # raises TypeError if not a float
        return Decimal(repr(f))
    if _math.copysign(1.0, f) == 1.0:
        sign = 0
    else:
        sign = 1
    n, d = abs(f).as_integer_ratio()
    # int.bit_length() method doesn't exist on Py2.6:
    def bit_length(d):
        if d != 0:
            return len(bin(abs(d))) - 2
        else:
            return 0
    k = bit_length(d) - 1
    result = _dec_from_triple(sign, str(n*5**k), -k)
    return result
[ "def", "from_float_26", "(", "f", ")", ":", "import", "math", "as", "_math", "from", "decimal", "import", "_dec_from_triple", "# only available on Py2.6 and Py2.7 (not 3.3)", "if", "isinstance", "(", "f", ",", "(", "int", ",", "long", ")", ")", ":", "# handle integer inputs", "return", "Decimal", "(", "f", ")", "if", "_math", ".", "isinf", "(", "f", ")", "or", "_math", ".", "isnan", "(", "f", ")", ":", "# raises TypeError if not a float", "return", "Decimal", "(", "repr", "(", "f", ")", ")", "if", "_math", ".", "copysign", "(", "1.0", ",", "f", ")", "==", "1.0", ":", "sign", "=", "0", "else", ":", "sign", "=", "1", "n", ",", "d", "=", "abs", "(", "f", ")", ".", "as_integer_ratio", "(", ")", "# int.bit_length() method doesn't exist on Py2.6:", "def", "bit_length", "(", "d", ")", ":", "if", "d", "!=", "0", ":", "return", "len", "(", "bin", "(", "abs", "(", "d", ")", ")", ")", "-", "2", "else", ":", "return", "0", "k", "=", "bit_length", "(", "d", ")", "-", "1", "result", "=", "_dec_from_triple", "(", "sign", ",", "str", "(", "n", "*", "5", "**", "k", ")", ",", "-", "k", ")", "return", "result" ]
Converts a float to a decimal number, exactly.

Note that Decimal.from_float(0.1) is not the same as Decimal('0.1').
Since 0.1 is not exactly representable in binary floating point, the
value is stored as the nearest representable value which is
0x1.999999999999ap-4.  The exact equivalent of the value in decimal
is 0.1000000000000000055511151231257827021181583404541015625.

>>> Decimal.from_float(0.1)
Decimal('0.1000000000000000055511151231257827021181583404541015625')
>>> Decimal.from_float(float('nan'))
Decimal('NaN')
>>> Decimal.from_float(float('inf'))
Decimal('Infinity')
>>> Decimal.from_float(-float('inf'))
Decimal('-Infinity')
>>> Decimal.from_float(-0.0)
Decimal('-0')
[ "Converts", "a", "float", "to", "a", "decimal", "number", "exactly", "." ]
train
https://github.com/PythonCharmers/python-future/blob/c423752879acc05eebc29b0bb9909327bd5c7308/src/future/builtins/newround.py#L55-L96
0.00202
ic-labs/django-icekit
icekit/publishing/admin.py
PublishingFluentPagesParentAdminMixin.get_queryset
def get_queryset(self, request):
    """
    Show only DRAFT Fluent page items in admin.

    NOTE: We rely on the `UrlNode.status` to recognise DRAFT versus
    PUBLISHED objects, since there the top-level `UrlNode` model and
    queryset don't know about ICEKit publishing.
    """
    self.request = request
    qs = super(PublishingFluentPagesParentAdminMixin, self) \
        .get_queryset(request)
    qs = qs.filter(status=UrlNode.DRAFT)
    return qs
python
def get_queryset(self, request):
    """
    Show only DRAFT Fluent page items in admin.

    NOTE: We rely on the `UrlNode.status` to recognise DRAFT versus
    PUBLISHED objects, since there the top-level `UrlNode` model and
    queryset don't know about ICEKit publishing.
    """
    self.request = request
    qs = super(PublishingFluentPagesParentAdminMixin, self) \
        .get_queryset(request)
    qs = qs.filter(status=UrlNode.DRAFT)
    return qs
[ "def", "get_queryset", "(", "self", ",", "request", ")", ":", "self", ".", "request", "=", "request", "qs", "=", "super", "(", "PublishingFluentPagesParentAdminMixin", ",", "self", ")", ".", "get_queryset", "(", "request", ")", "qs", "=", "qs", ".", "filter", "(", "status", "=", "UrlNode", ".", "DRAFT", ")", "return", "qs" ]
Show only DRAFT Fluent page items in admin.

NOTE: We rely on the `UrlNode.status` to recognise DRAFT versus
PUBLISHED objects, since there the top-level `UrlNode` model and
queryset don't know about ICEKit publishing.
[ "Show", "only", "DRAFT", "Fluent", "page", "items", "in", "admin", "." ]
train
https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/publishing/admin.py#L729-L742
0.003976
eallik/spinoff
geventreactor/__init__.py
GeventReactor.removeReader
def removeReader(self, selectable):
    """Remove a FileDescriptor for notification of data available to read."""
    try:
        if selectable.disconnected:
            self._reads[selectable].kill(block=False)
            del self._reads[selectable]
        else:
            self._reads[selectable].pause()
    except KeyError:
        pass
python
def removeReader(self, selectable):
    """Remove a FileDescriptor for notification of data available to read."""
    try:
        if selectable.disconnected:
            self._reads[selectable].kill(block=False)
            del self._reads[selectable]
        else:
            self._reads[selectable].pause()
    except KeyError:
        pass
[ "def", "removeReader", "(", "self", ",", "selectable", ")", ":", "try", ":", "if", "selectable", ".", "disconnected", ":", "self", ".", "_reads", "[", "selectable", "]", ".", "kill", "(", "block", "=", "False", ")", "del", "self", ".", "_reads", "[", "selectable", "]", "else", ":", "self", ".", "_reads", "[", "selectable", "]", ".", "pause", "(", ")", "except", "KeyError", ":", "pass" ]
Remove a FileDescriptor for notification of data available to read.
[ "Remove", "a", "FileDescriptor", "for", "notification", "of", "data", "available", "to", "read", "." ]
train
https://github.com/eallik/spinoff/blob/06b00d6b86c7422c9cb8f9a4b2915906e92b7d52/geventreactor/__init__.py#L435-L444
0.007895
hyperledger/sawtooth-core
validator/sawtooth_validator/state/batch_tracker.py
BatchTracker.get_statuses
def get_statuses(self, batch_ids):
    """Returns a statuses dict for the requested batches.

    Args:
        batch_ids (list of str): The ids of the batches to get statuses for

    Returns:
        dict: A dict with keys of batch ids, and values of status enums
    """
    with self._lock:
        return {b: self.get_status(b) for b in batch_ids}
python
def get_statuses(self, batch_ids):
    """Returns a statuses dict for the requested batches.

    Args:
        batch_ids (list of str): The ids of the batches to get statuses for

    Returns:
        dict: A dict with keys of batch ids, and values of status enums
    """
    with self._lock:
        return {b: self.get_status(b) for b in batch_ids}
[ "def", "get_statuses", "(", "self", ",", "batch_ids", ")", ":", "with", "self", ".", "_lock", ":", "return", "{", "b", ":", "self", ".", "get_status", "(", "b", ")", "for", "b", "in", "batch_ids", "}" ]
Returns a statuses dict for the requested batches.

Args:
    batch_ids (list of str): The ids of the batches to get statuses for

Returns:
    dict: A dict with keys of batch ids, and values of status enums
[ "Returns", "a", "statuses", "dict", "for", "the", "requested", "batches", "." ]
train
https://github.com/hyperledger/sawtooth-core/blob/8cf473bc2207e51f02bd182d825158a57d72b098/validator/sawtooth_validator/state/batch_tracker.py#L130-L140
0.005208
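The method is a lock-guarded fan-out over get_status; a self-contained sketch of the same pattern (the class and statuses below are hypothetical):

import threading

class StatusTracker:
    def __init__(self):
        self._lock = threading.Lock()
        self._statuses = {}

    def get_status(self, batch_id):
        return self._statuses.get(batch_id, 'UNKNOWN')

    def get_statuses(self, batch_ids):
        # Hold the lock for the whole batch so the caller gets one
        # consistent snapshot, not statuses from different moments.
        with self._lock:
            return {b: self.get_status(b) for b in batch_ids}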
heroku/sf-suds
suds/wsdl.py
Binding.resolvesoapbody
def resolvesoapbody(self, definitions, op):
    """
    Resolve soap body I{message} parts
    by cross-referencing with operation defined in port type.
    @param definitions: A definitions object.
    @type definitions: L{Definitions}
    @param op: An I{operation} object.
    @type op: I{operation}
    """
    ptop = self.type.operation(op.name)
    if ptop is None:
        raise Exception, \
            "operation '%s' not defined in portType" % op.name
    soap = op.soap
    parts = soap.input.body.parts
    if len(parts):
        pts = []
        for p in ptop.input.parts:
            if p.name in parts:
                pts.append(p)
        soap.input.body.parts = pts
    else:
        soap.input.body.parts = ptop.input.parts
    parts = soap.output.body.parts
    if len(parts):
        pts = []
        for p in ptop.output.parts:
            if p.name in parts:
                pts.append(p)
        soap.output.body.parts = pts
    else:
        soap.output.body.parts = ptop.output.parts
python
def resolvesoapbody(self, definitions, op):
    """
    Resolve soap body I{message} parts
    by cross-referencing with operation defined in port type.
    @param definitions: A definitions object.
    @type definitions: L{Definitions}
    @param op: An I{operation} object.
    @type op: I{operation}
    """
    ptop = self.type.operation(op.name)
    if ptop is None:
        raise Exception, \
            "operation '%s' not defined in portType" % op.name
    soap = op.soap
    parts = soap.input.body.parts
    if len(parts):
        pts = []
        for p in ptop.input.parts:
            if p.name in parts:
                pts.append(p)
        soap.input.body.parts = pts
    else:
        soap.input.body.parts = ptop.input.parts
    parts = soap.output.body.parts
    if len(parts):
        pts = []
        for p in ptop.output.parts:
            if p.name in parts:
                pts.append(p)
        soap.output.body.parts = pts
    else:
        soap.output.body.parts = ptop.output.parts
[ "def", "resolvesoapbody", "(", "self", ",", "definitions", ",", "op", ")", ":", "ptop", "=", "self", ".", "type", ".", "operation", "(", "op", ".", "name", ")", "if", "ptop", "is", "None", ":", "raise", "Exception", ",", "\"operation '%s' not defined in portType\"", "%", "op", ".", "name", "soap", "=", "op", ".", "soap", "parts", "=", "soap", ".", "input", ".", "body", ".", "parts", "if", "len", "(", "parts", ")", ":", "pts", "=", "[", "]", "for", "p", "in", "ptop", ".", "input", ".", "parts", ":", "if", "p", ".", "name", "in", "parts", ":", "pts", ".", "append", "(", "p", ")", "soap", ".", "input", ".", "body", ".", "parts", "=", "pts", "else", ":", "soap", ".", "input", ".", "body", ".", "parts", "=", "ptop", ".", "input", ".", "parts", "parts", "=", "soap", ".", "output", ".", "body", ".", "parts", "if", "len", "(", "parts", ")", ":", "pts", "=", "[", "]", "for", "p", "in", "ptop", ".", "output", ".", "parts", ":", "if", "p", ".", "name", "in", "parts", ":", "pts", ".", "append", "(", "p", ")", "soap", ".", "output", ".", "body", ".", "parts", "=", "pts", "else", ":", "soap", ".", "output", ".", "body", ".", "parts", "=", "ptop", ".", "output", ".", "parts" ]
Resolve soap body I{message} parts
by cross-referencing with operation defined in port type.
@param definitions: A definitions object.
@type definitions: L{Definitions}
@param op: An I{operation} object.
@type op: I{operation}
[ "Resolve", "soap", "body", "I", "{", "message", "}", "parts", "by", "cross", "-", "referencing", "with", "operation", "defined", "in", "port", "type", "." ]
train
https://github.com/heroku/sf-suds/blob/44b6743a45ff4447157605d6fecc9bf5922ce68a/suds/wsdl.py#L677-L708
0.003527
quantumlib/Cirq
cirq/sim/wave_function.py
_validate_num_qubits
def _validate_num_qubits(state: np.ndarray) -> int:
    """Validates that state's size is a power of 2, returning number of qubits.
    """
    size = state.size
    if size & (size - 1):
        raise ValueError('state.size ({}) is not a power of two.'.format(size))
    return size.bit_length() - 1
python
def _validate_num_qubits(state: np.ndarray) -> int:
    """Validates that state's size is a power of 2, returning number of qubits.
    """
    size = state.size
    if size & (size - 1):
        raise ValueError('state.size ({}) is not a power of two.'.format(size))
    return size.bit_length() - 1
[ "def", "_validate_num_qubits", "(", "state", ":", "np", ".", "ndarray", ")", "->", "int", ":", "size", "=", "state", ".", "size", "if", "size", "&", "(", "size", "-", "1", ")", ":", "raise", "ValueError", "(", "'state.size ({}) is not a power of two.'", ".", "format", "(", "size", ")", ")", "return", "size", ".", "bit_length", "(", ")", "-", "1" ]
Validates that state's size is a power of 2, returning number of qubits.
[ "Validates", "that", "state", "s", "size", "is", "a", "power", "of", "2", "returning", "number", "of", "qubits", "." ]
train
https://github.com/quantumlib/Cirq/blob/0827da80dd7880e5b923eb69407e980ed9bc0bd2/cirq/sim/wave_function.py#L493-L499
0.003333
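size & (size - 1) is the classic power-of-two test: a power of two has a single set bit, so subtracting one flips that bit and the AND comes out zero. A standalone sketch of the same check:

def num_qubits(size):
    # A non-zero result means more than one set bit, i.e. not a power of two.
    if size & (size - 1):
        raise ValueError('{} is not a power of two'.format(size))
    return size.bit_length() - 1  # 2**k has bit_length k + 1

assert num_qubits(8) == 3   # 8 amplitudes -> 3 qubits
num_qubits(6)               # raises ValueError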
swharden/PyOriginTools
PyOriginTools/workbook.py
SHEET.pull
def pull(self,bookName=None,sheetName=None):
    """pull data into this OR.SHEET from a real book/sheet in Origin"""

    # tons of validation
    if bookName is None and self.bookName:
        bookName=self.bookName
    if sheetName is None and self.sheetName:
        sheetName=self.sheetName
    if bookName is None:
        bookName=OR.activeBook()
    if bookName and sheetName is None:
        sheetName=OR.activeSheet()
    if not bookName or not sheetName:
        print("can't figure out where to pull from! [%s]%s"%(bookName,sheetName))
        return

    # finally doing the thing
    poSheet=OR.getSheet(bookName,sheetName)
    self.bookName=bookName
    self.sheetName=sheetName
    self.desc=poSheet.GetLongName()
    self.colNames=[poCol.GetName() for poCol in poSheet.Columns()]
    self.colDesc=[poCol.GetLongName() for poCol in poSheet.Columns()]
    self.colUnits=[poCol.GetUnits() for poCol in poSheet.Columns()]
    self.colComments=[poCol.GetComments() for poCol in poSheet.Columns()]
    self.colTypes=[poCol.GetType() for poCol in poSheet.Columns()]
    self.colData=[poCol.GetData() for poCol in poSheet.Columns()]
python
def pull(self,bookName=None,sheetName=None):
    """pull data into this OR.SHEET from a real book/sheet in Origin"""

    # tons of validation
    if bookName is None and self.bookName:
        bookName=self.bookName
    if sheetName is None and self.sheetName:
        sheetName=self.sheetName
    if bookName is None:
        bookName=OR.activeBook()
    if bookName and sheetName is None:
        sheetName=OR.activeSheet()
    if not bookName or not sheetName:
        print("can't figure out where to pull from! [%s]%s"%(bookName,sheetName))
        return

    # finally doing the thing
    poSheet=OR.getSheet(bookName,sheetName)
    self.bookName=bookName
    self.sheetName=sheetName
    self.desc=poSheet.GetLongName()
    self.colNames=[poCol.GetName() for poCol in poSheet.Columns()]
    self.colDesc=[poCol.GetLongName() for poCol in poSheet.Columns()]
    self.colUnits=[poCol.GetUnits() for poCol in poSheet.Columns()]
    self.colComments=[poCol.GetComments() for poCol in poSheet.Columns()]
    self.colTypes=[poCol.GetType() for poCol in poSheet.Columns()]
    self.colData=[poCol.GetData() for poCol in poSheet.Columns()]
[ "def", "pull", "(", "self", ",", "bookName", "=", "None", ",", "sheetName", "=", "None", ")", ":", "# tons of validation", "if", "bookName", "is", "None", "and", "self", ".", "bookName", ":", "bookName", "=", "self", ".", "bookName", "if", "sheetName", "is", "None", "and", "self", ".", "sheetName", ":", "sheetName", "=", "self", ".", "sheetName", "if", "bookName", "is", "None", ":", "bookName", "=", "OR", ".", "activeBook", "(", ")", "if", "bookName", "and", "sheetName", "is", "None", ":", "sheetName", "=", "OR", ".", "activeSheet", "(", ")", "if", "not", "bookName", "or", "not", "sheetName", ":", "print", "(", "\"can't figure out where to pull from! [%s]%s\"", "%", "(", "bookName", ",", "sheetName", ")", ")", "return", "# finally doing the thing", "poSheet", "=", "OR", ".", "getSheet", "(", "bookName", ",", "sheetName", ")", "self", ".", "bookName", "=", "bookName", "self", ".", "sheetName", "=", "sheetName", "self", ".", "desc", "=", "poSheet", ".", "GetLongName", "(", ")", "self", ".", "colNames", "=", "[", "poCol", ".", "GetName", "(", ")", "for", "poCol", "in", "poSheet", ".", "Columns", "(", ")", "]", "self", ".", "colDesc", "=", "[", "poCol", ".", "GetLongName", "(", ")", "for", "poCol", "in", "poSheet", ".", "Columns", "(", ")", "]", "self", ".", "colUnits", "=", "[", "poCol", ".", "GetUnits", "(", ")", "for", "poCol", "in", "poSheet", ".", "Columns", "(", ")", "]", "self", ".", "colComments", "=", "[", "poCol", ".", "GetComments", "(", ")", "for", "poCol", "in", "poSheet", ".", "Columns", "(", ")", "]", "self", ".", "colTypes", "=", "[", "poCol", ".", "GetType", "(", ")", "for", "poCol", "in", "poSheet", ".", "Columns", "(", ")", "]", "self", ".", "colData", "=", "[", "poCol", ".", "GetData", "(", ")", "for", "poCol", "in", "poSheet", ".", "Columns", "(", ")", "]" ]
pull data into this OR.SHEET from a real book/sheet in Origin
[ "pull", "data", "into", "this", "OR", ".", "SHEET", "from", "a", "real", "book", "/", "sheet", "in", "Origin" ]
train
https://github.com/swharden/PyOriginTools/blob/536fb8e11234ffdc27e26b1800e0358179ca7d26/PyOriginTools/workbook.py#L138-L160
0.021886
HazardDede/dictmentor
dictmentor/utils.py
eval_first_non_none
def eval_first_non_none(eval_list: Iterable[Callable[..., Any]], **kwargs: Any) -> Any:
    """
    Executes a list of functions and returns the first non none result.
    All kwargs will be passed as kwargs to each individual function.
    If all functions return None, None is the overall result.

    Examples:
        >>> eval_first_non_none((lambda: None, lambda: None, lambda: 3))
        3
        >>> print(eval_first_non_none([lambda: None, lambda: None, lambda: None]))
        None
        >>> eval_first_non_none([
        ...     lambda cnt: cnt if cnt == 1 else None,
        ...     lambda cnt: cnt if cnt == 2 else None,
        ...     lambda cnt: cnt if cnt == 3 else None]
        ... , cnt=2)
        2
    """
    Validator.is_real_iterable(raise_ex=True, eval_list=eval_list)
    for eval_fun in eval_list:
        res = eval_fun(**kwargs)
        if res is not None:
            return res
    return None
python
def eval_first_non_none(eval_list: Iterable[Callable[..., Any]], **kwargs: Any) -> Any:
    """
    Executes a list of functions and returns the first non none result.
    All kwargs will be passed as kwargs to each individual function.
    If all functions return None, None is the overall result.

    Examples:
        >>> eval_first_non_none((lambda: None, lambda: None, lambda: 3))
        3
        >>> print(eval_first_non_none([lambda: None, lambda: None, lambda: None]))
        None
        >>> eval_first_non_none([
        ...     lambda cnt: cnt if cnt == 1 else None,
        ...     lambda cnt: cnt if cnt == 2 else None,
        ...     lambda cnt: cnt if cnt == 3 else None]
        ... , cnt=2)
        2
    """
    Validator.is_real_iterable(raise_ex=True, eval_list=eval_list)
    for eval_fun in eval_list:
        res = eval_fun(**kwargs)
        if res is not None:
            return res
    return None
[ "def", "eval_first_non_none", "(", "eval_list", ":", "Iterable", "[", "Callable", "[", "...", ",", "Any", "]", "]", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "Any", ":", "Validator", ".", "is_real_iterable", "(", "raise_ex", "=", "True", ",", "eval_list", "=", "eval_list", ")", "for", "eval_fun", "in", "eval_list", ":", "res", "=", "eval_fun", "(", "*", "*", "kwargs", ")", "if", "res", "is", "not", "None", ":", "return", "res", "return", "None" ]
Executes a list of functions and returns the first non none result.
All kwargs will be passed as kwargs to each individual function.
If all functions return None, None is the overall result.

Examples:
    >>> eval_first_non_none((lambda: None, lambda: None, lambda: 3))
    3
    >>> print(eval_first_non_none([lambda: None, lambda: None, lambda: None]))
    None
    >>> eval_first_non_none([
    ...     lambda cnt: cnt if cnt == 1 else None,
    ...     lambda cnt: cnt if cnt == 2 else None,
    ...     lambda cnt: cnt if cnt == 3 else None]
    ... , cnt=2)
    2
[ "Executes", "a", "list", "of", "functions", "and", "returns", "the", "first", "non", "none", "result", ".", "All", "kwargs", "will", "be", "passed", "as", "kwargs", "to", "each", "individual", "function", ".", "If", "all", "functions", "return", "None", "None", "is", "the", "overall", "result", "." ]
train
https://github.com/HazardDede/dictmentor/blob/f50ca26ed04f7a924cde6e4d464c4f6ccba4e320/dictmentor/utils.py#L112-L136
0.0054
liampauling/betfair
betfairlightweight/filters.py
cancel_instruction
def cancel_instruction(bet_id, size_reduction=None):
    """
    Instruction to fully or partially cancel an order (only applies to
    LIMIT orders)

    :param str bet_id: identifier of the bet to cancel.
    :param float size_reduction: If supplied then this is a partial cancel.

    :returns: cancellation report detailing status, cancellation requested
    and actual cancellation details.
    :rtype: dict
    """
    args = locals()
    return {
        to_camel_case(k): v for k, v in args.items() if v is not None
    }
python
def cancel_instruction(bet_id, size_reduction=None):
    """
    Instruction to fully or partially cancel an order (only applies to
    LIMIT orders)

    :param str bet_id: identifier of the bet to cancel.
    :param float size_reduction: If supplied then this is a partial cancel.

    :returns: cancellation report detailing status, cancellation requested
    and actual cancellation details.
    :rtype: dict
    """
    args = locals()
    return {
        to_camel_case(k): v for k, v in args.items() if v is not None
    }
[ "def", "cancel_instruction", "(", "bet_id", ",", "size_reduction", "=", "None", ")", ":", "args", "=", "locals", "(", ")", "return", "{", "to_camel_case", "(", "k", ")", ":", "v", "for", "k", ",", "v", "in", "args", ".", "items", "(", ")", "if", "v", "is", "not", "None", "}" ]
Instruction to fully or partially cancel an order (only applies to
LIMIT orders)

:param str bet_id: identifier of the bet to cancel.
:param float size_reduction: If supplied then this is a partial cancel.

:returns: cancellation report detailing status, cancellation requested
and actual cancellation details.
:rtype: dict
[ "Instruction", "to", "fully", "or", "partially", "cancel", "an", "order", "(", "only", "applies", "to", "LIMIT", "orders", ")", ":", "param", "str", "bet_id", ":", "identifier", "of", "the", "bet", "to", "cancel", ".", ":", "param", "float", "size_reduction", ":", "If", "supplied", "then", "this", "is", "a", "partial", "cancel", "." ]
train
https://github.com/liampauling/betfair/blob/8479392eb4849c525d78d43497c32c0bb108e977/betfairlightweight/filters.py#L234-L246
0.005769
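Since the function captures locals() and drops None values before camel-casing the keys, the payload only carries what was supplied. Expected output, assuming to_camel_case does the usual snake-to-camel conversion:

cancel_instruction("228302937743")
# -> {'betId': '228302937743'}  (full cancel)

cancel_instruction("228302937743", size_reduction=2.0)
# -> {'betId': '228302937743', 'sizeReduction': 2.0}  (partial cancel)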
quodlibet/mutagen
mutagen/aac.py
_ADTSStream.frequency
def frequency(self):
    """0 means unknown"""

    assert self.parsed_frames, "no frame parsed yet"

    f_index = self._fixed_header_key[4]
    try:
        return _FREQS[f_index]
    except IndexError:
        return 0
python
def frequency(self):
    """0 means unknown"""

    assert self.parsed_frames, "no frame parsed yet"

    f_index = self._fixed_header_key[4]
    try:
        return _FREQS[f_index]
    except IndexError:
        return 0
[ "def", "frequency", "(", "self", ")", ":", "assert", "self", ".", "parsed_frames", ",", "\"no frame parsed yet\"", "f_index", "=", "self", ".", "_fixed_header_key", "[", "4", "]", "try", ":", "return", "_FREQS", "[", "f_index", "]", "except", "IndexError", ":", "return", "0" ]
0 means unknown
[ "0", "means", "unknown" ]
train
https://github.com/quodlibet/mutagen/blob/e393df5971ba41ba5a50de9c2c9e7e5484d82c4e/mutagen/aac.py#L132-L141
0.008032
westonplatter/fast_arrow
fast_arrow/resources/option_chain.py
OptionChain.fetch
def fetch(cls, client, _id, symbol):
    """
    fetch option chain for instrument
    """
    url = "https://api.robinhood.com/options/chains/"
    params = {
        "equity_instrument_ids": _id,
        "state": "active",
        "tradability": "tradable"
    }
    data = client.get(url, params=params)

    def filter_func(x):
        return x["symbol"] == symbol
    results = list(filter(filter_func, data["results"]))
    return results[0]
python
def fetch(cls, client, _id, symbol):
    """
    fetch option chain for instrument
    """
    url = "https://api.robinhood.com/options/chains/"
    params = {
        "equity_instrument_ids": _id,
        "state": "active",
        "tradability": "tradable"
    }
    data = client.get(url, params=params)

    def filter_func(x):
        return x["symbol"] == symbol
    results = list(filter(filter_func, data["results"]))
    return results[0]
[ "def", "fetch", "(", "cls", ",", "client", ",", "_id", ",", "symbol", ")", ":", "url", "=", "\"https://api.robinhood.com/options/chains/\"", "params", "=", "{", "\"equity_instrument_ids\"", ":", "_id", ",", "\"state\"", ":", "\"active\"", ",", "\"tradability\"", ":", "\"tradable\"", "}", "data", "=", "client", ".", "get", "(", "url", ",", "params", "=", "params", ")", "def", "filter_func", "(", "x", ")", ":", "return", "x", "[", "\"symbol\"", "]", "==", "symbol", "results", "=", "list", "(", "filter", "(", "filter_func", ",", "data", "[", "\"results\"", "]", ")", ")", "return", "results", "[", "0", "]" ]
fetch option chain for instrument
[ "fetch", "option", "chain", "for", "instrument" ]
train
https://github.com/westonplatter/fast_arrow/blob/514cbca4994f52a97222058167830a302e313d04/fast_arrow/resources/option_chain.py#L4-L19
0.003976
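A hedged usage sketch; the Client login flow is taken from fast_arrow's README of the era and may have changed, and the instrument id below is a placeholder:

from fast_arrow import Client, OptionChain

client = Client(username="user", password="password")
client.authenticate()

# _id is the Robinhood equity instrument id for the underlying symbol.
chain = OptionChain.fetch(client, "e6a6e495-...", "AAPL")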
jsommers/switchyard
switchyard/lib/topo/topobuild.py
save_graph
def save_graph(cn_topo, filename, showintfs=False, showaddrs=False):
    '''
    Save the topology to an image file
    '''
    __do_draw(cn_topo, showintfs=showintfs, showaddrs=showaddrs)
    pyp.savefig(filename)
python
def save_graph(cn_topo, filename, showintfs=False, showaddrs=False):
    '''
    Save the topology to an image file
    '''
    __do_draw(cn_topo, showintfs=showintfs, showaddrs=showaddrs)
    pyp.savefig(filename)
[ "def", "save_graph", "(", "cn_topo", ",", "filename", ",", "showintfs", "=", "False", ",", "showaddrs", "=", "False", ")", ":", "__do_draw", "(", "cn_topo", ",", "showintfs", "=", "showintfs", ",", "showaddrs", "=", "showaddrs", ")", "pyp", ".", "savefig", "(", "filename", ")" ]
Save the topology to an image file
[ "Save", "the", "topology", "to", "an", "image", "file" ]
train
https://github.com/jsommers/switchyard/blob/fdcb3869c937dcedbd6ea7a7822ebd412bf1e2b0/switchyard/lib/topo/topobuild.py#L443-L448
0.009302
erijo/tellcore-py
tellcore/telldus.py
Device.get_parameter
def get_parameter(self, name):
    """Get a parameter."""
    default_value = "$%!)(INVALID)(!%$"
    value = self.lib.tdGetDeviceParameter(self.id, name, default_value)
    if value == default_value:
        raise AttributeError(name)
    return value
python
def get_parameter(self, name):
    """Get a parameter."""
    default_value = "$%!)(INVALID)(!%$"
    value = self.lib.tdGetDeviceParameter(self.id, name, default_value)
    if value == default_value:
        raise AttributeError(name)
    return value
[ "def", "get_parameter", "(", "self", ",", "name", ")", ":", "default_value", "=", "\"$%!)(INVALID)(!%$\"", "value", "=", "self", ".", "lib", ".", "tdGetDeviceParameter", "(", "self", ".", "id", ",", "name", ",", "default_value", ")", "if", "value", "==", "default_value", ":", "raise", "AttributeError", "(", "name", ")", "return", "value" ]
Get a parameter.
[ "Get", "a", "parameter", "." ]
train
https://github.com/erijo/tellcore-py/blob/7a1eb53e12ef039a2350933e502633df7560f6a8/tellcore/telldus.py#L333-L339
0.007246
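The improbable default string is a sentinel: the underlying C call cannot signal "missing" out of band, so a value that can never legitimately occur goes in and is compared on the way out. The same idiom in plain Python, where an object() identity check replaces the magic string:

_MISSING = object()  # unique sentinel; get_parameter uses a weird string instead

def lookup(mapping, key):
    value = mapping.get(key, _MISSING)
    if value is _MISSING:
        raise AttributeError(key)
    return value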
woolfson-group/isambard
isambard/ampal/base_ampal.py
Polymer.relabel_atoms
def relabel_atoms(self, start=1):
    """Relabels all `Atoms` in numerical order.

    Parameters
    ----------
    start : int, optional
        Offset the labelling by `start` residues.
    """
    counter = start
    for atom in self.get_atoms():
        atom.id = counter
        counter += 1
    return
python
def relabel_atoms(self, start=1):
    """Relabels all `Atoms` in numerical order.

    Parameters
    ----------
    start : int, optional
        Offset the labelling by `start` residues.
    """
    counter = start
    for atom in self.get_atoms():
        atom.id = counter
        counter += 1
    return
[ "def", "relabel_atoms", "(", "self", ",", "start", "=", "1", ")", ":", "counter", "=", "start", "for", "atom", "in", "self", ".", "get_atoms", "(", ")", ":", "atom", ".", "id", "=", "counter", "counter", "+=", "1", "return" ]
Relabels all `Atoms` in numerical order.

Parameters
----------
start : int, optional
    Offset the labelling by `start` residues.
[ "Relabels", "all", "Atoms", "in", "numerical", "order", "." ]
train
https://github.com/woolfson-group/isambard/blob/ebc33b48a28ad217e18f93b910dfba46e6e71e07/isambard/ampal/base_ampal.py#L543-L555
0.005682
pyviz/holoviews
holoviews/element/graphs.py
Graph.nodes
def nodes(self):
    """
    Computes the node positions the first time they are requested
    if no explicit node information was supplied.
    """
    if self._nodes is None:
        self._nodes = layout_nodes(self, only_nodes=True)
    return self._nodes
python
def nodes(self):
    """
    Computes the node positions the first time they are requested
    if no explicit node information was supplied.
    """
    if self._nodes is None:
        self._nodes = layout_nodes(self, only_nodes=True)
    return self._nodes
[ "def", "nodes", "(", "self", ")", ":", "if", "self", ".", "_nodes", "is", "None", ":", "self", ".", "_nodes", "=", "layout_nodes", "(", "self", ",", "only_nodes", "=", "True", ")", "return", "self", ".", "_nodes" ]
Computes the node positions the first time they are requested if no explicit node information was supplied.
[ "Computes", "the", "node", "positions", "the", "first", "time", "they", "are", "requested", "if", "no", "explicit", "node", "information", "was", "supplied", "." ]
train
https://github.com/pyviz/holoviews/blob/ae0dd2f3de448b0ca5e9065aabd6ef8d84c7e655/holoviews/element/graphs.py#L362-L369
0.007018
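This is the standard lazy-initialisation idiom: compute on first access, cache in a private attribute, and serve the cache afterwards. A stripped-down sketch of the pattern (the property decorator is implied by the accessor style but not visible in the snippet):

class LazyGraph:
    def __init__(self):
        self._nodes = None  # layout not computed yet

    @property
    def nodes(self):
        if self._nodes is None:            # first access only
            self._nodes = self._layout()   # expensive layout runs once
        return self._nodes

    def _layout(self):
        return ['n0', 'n1', 'n2']  # stand-in for layout_nodes(...)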