repo
stringlengths
1
29
path
stringlengths
24
332
code
stringlengths
39
579k
openupgradelib-2.0.0
openupgradelib-2.0.0//openupgradelib/openupgrade.pyfile:/openupgradelib/openupgrade.py:function:reactivate_workflow_transitions/reactivate_workflow_transitions
def reactivate_workflow_transitions(cr, transition_conditions):
    """ Reactivate workflow transitions previously deactivated by
    deactivate_workflow_transitions.

    :param cr: database cursor used to run the UPDATE statements.
    :param transition_conditions: a dictionary returned by
        deactivate_workflow_transitions, mapping a transition id to the
        original condition to restore.

    .. versionadded:: 7.0
    .. deprecated:: 11.0
       Workflows were removed from Odoo as of version 11.0
    """
    # BUG FIX: dict.iteritems() only exists on Python 2; items() works on
    # both Python 2 and 3.
    for transition_id, condition in transition_conditions.items():
        cr.execute('update wkf_transition set condition = %s where id = %s',
            (condition, transition_id))
aws-xray-sdk-2.5.0
aws-xray-sdk-2.5.0//aws_xray_sdk/ext/util.pyfile:/aws_xray_sdk/ext/util.py:function:calculate_segment_name/calculate_segment_name
def calculate_segment_name(host_name, recorder):
    """Resolve the X-Ray segment name for an incoming request.

    When the recorder is configured with a dynamic naming rule, that rule
    maps the request's host name to a segment name; otherwise the static
    service name is used. Typically called from web framework middleware
    where the host name comes from the request headers.

    :param host_name: host name from the incoming request.
    :param recorder: recorder whose configuration drives naming.
    :return: the segment name to record.
    """
    dynamic = recorder.dynamic_naming
    if dynamic:
        return dynamic.get_name(host_name)
    return recorder.service
fake-bpy-module-2.79-20200428
fake-bpy-module-2.79-20200428//bpy/props.pyfile:/bpy/props.py:function:StringProperty/StringProperty
def StringProperty(name: str='', description: str='', default: str='',
        maxlen: int=0, options: set={'ANIMATABLE'}, subtype: str='NONE',
        update=None, get=None, set=None):
    """Return a new string property definition.

    This is a fake-module stub: the signature mirrors Blender's API (the
    mutable set default included) and the body intentionally does nothing.

    :param name: name used in the user interface.
    :param description: text for the tooltip and API documentation.
    :param default: initializer string.
    :param maxlen: maximum length of the string (0 = unlimited).
    :param options: subset of {'HIDDEN', 'SKIP_SAVE', 'ANIMATABLE',
        'LIBRARY_EDITABLE', 'PROPORTIONAL', 'TEXTEDIT_UPDATE'}.
    :param subtype: one of 'FILE_PATH', 'DIR_PATH', 'FILE_NAME',
        'BYTE_STRING', 'PASSWORD', 'NONE'.
    :param update: callback (self, context) -> None, run on modification;
        no safety checks against infinite recursion.
    :param get: callback (self) -> value, run when the value is read.
    :param set: callback (self, value) -> None, run when the value is written.
    """
    pass
circ2
circ2//genomic_interval.pyclass:Interval/__map
@staticmethod
def __map(index, interval):
    """ update for CIRCexplorer particularly.

    Merges metadata from sorted `interval` fragments onto the entries of
    `index`, consuming fragments destructively (interval.pop(0)).
    Each fragment / dex looks like [start, end, extra...].
    """
    # Fragments that extend past the current dex are parked here and put
    # back onto `interval` before moving to the next dex.
    tmp_fragment = []
    if not interval:
        return index
    for dex in index:
        while True:
            try:
                fragment = interval.pop(0)
            except IndexError:
                # Ran out of fragments: restore any parked ones and move on,
                # or finish entirely if nothing was parked.
                if tmp_fragment:
                    interval.extend(tmp_fragment)
                    tmp_fragment = []
                    break
                else:
                    return index
            if fragment[0] < dex[0]:
                # Fragment starts before this dex: discard and keep scanning.
                continue
            elif fragment[0] >= dex[1]:
                # Fragment starts after this dex: push it (and parked ones)
                # back for the next dex.
                interval.insert(0, fragment)
                interval[0:0] = tmp_fragment
                tmp_fragment = []
                break
            elif fragment[1] > dex[1]:
                # Overlaps the dex but extends beyond it: park for later.
                tmp_fragment.append(fragment)
                continue
            else:
                # Fully inside the dex: attach the fragment's metadata.
                dex += fragment[2:]
    else:
        # All dex entries processed.
        return index
fake-bpy-module-2.79-20200428
fake-bpy-module-2.79-20200428//bpy/ops/wm.pyfile:/bpy/ops/wm.py:function:keyconfig_export/keyconfig_export
def keyconfig_export(filepath: str='keymap.py', filter_folder: bool=True,
        filter_text: bool=True, filter_python: bool=True):
    """Export the key configuration to a python script (fake-module stub).

    :param filepath: destination file path.
    :type filepath: str
    :param filter_folder: filter folders in the file browser.
    :type filter_folder: bool
    :param filter_text: filter text files.
    :type filter_text: bool
    :param filter_python: filter python files.
    :type filter_python: bool
    """
    pass
dulwichTest-0.18.7
dulwichTest-0.18.7//dulwich/repo.pyfile:/dulwich/repo.py:function:serialize_graftpoints/serialize_graftpoints
def serialize_graftpoints(graftpoints):
    """Serialize a graftpoint mapping into the bytes format git expects.

    Each entry of ``{<commit sha1>: [<parent sha1>, ...]}`` becomes one
    line ``<commit sha1> <parent sha1> [<parent sha1>]*`` (parents omitted
    when the list is empty).

    https://git.wiki.kernel.org/index.php/GraftPoint

    :param graftpoints: dict mapping commit sha (bytes) to parent shas.
    :return: newline-joined bytes, one line per graft.
    """
    lines = []
    for commit, parents in graftpoints.items():
        if parents:
            lines.append(b' '.join([commit] + list(parents)))
        else:
            lines.append(commit)
    return b'\n'.join(lines)
elexio_api
elexio_api//tools.pyfile:/tools.py:function:_parse_names/_parse_names
def _parse_names(last_name_dict):
    """Flatten people grouped by last-name initial into a single list.

    :param last_name_dict: dict keyed by last-name letter; each value is a
        list of person records.
    :return: one list containing every person, in dict iteration order.
    """
    # The original iterated .items() but never used the key, and built the
    # list with a manual append loop; a comprehension over .values() is the
    # idiomatic equivalent.
    return [person for people in last_name_dict.values() for person in people]
PREDICT
PREDICT//imagefeatures/contour_functions.pyfile:/imagefeatures/contour_functions.py:function:cmp/cmp
def cmp(a, b):
    """Three-way comparison: -1 if a < b, 0 if equal, 1 if a > b.

    This was a builtin in Python 2 but was removed in Python 3, so it is
    re-defined here.
    """
    if a > b:
        return 1
    if a < b:
        return -1
    return 0
chemmltoolkit
chemmltoolkit//utils/list_utils.pyfile:/utils/list_utils.py:function:merge_dict/merge_dict
def merge_dict(d: dict, defaults: dict) ->dict:
    """Merge a dictionary with a set of default values.

    Entries from both dictionaries are combined; keys present in *d*
    override the corresponding defaults.

    Args:
        d: The dictionary of specified values (takes preference).
        defaults: The dictionary of default values.

    Returns:
        The merged dictionary.
    """
    merged = dict(defaults)
    merged.update(d)
    return merged
morepath
morepath//traject.pyfile:/traject.py:function:create_path/create_path
def create_path(segments):
    """Builds a path from a list of segments.

    :param segments: a list of path segment strings (no separators),
        e.g. ``['a', 'b']``
    :return: the absolute path, e.g. ``'/a/b'``
    """
    # Doc fix: the original docstring documented a nonexistent ``stack``
    # parameter; the actual parameter is ``segments``.
    return '/' + '/'.join(segments)
pykerberos-1.2.1
pykerberos-1.2.1//pysrc/kerberos.pyfile:/pysrc/kerberos.py:function:authGSSClientUnwrap/authGSSClientUnwrap
def authGSSClientUnwrap(context, challenge):
    """Perform the client-side GSSAPI unwrap step (pure-python stub).

    @param context: the GSSAPI client context.
    @param challenge: a string containing the base64-encoded server data.
    @return: a result code (see module constants).
    """
graph-alchemy-0.1.0
graph-alchemy-0.1.0//graphalchemy/basemodels.pyclass:BaseNode/create
@classmethod
def create(cls, obj=None, attrs=None, id=None, **kwargs):
    """Create an instance of *cls*, copying selected attributes from *obj*.

    :param obj: source object to copy attribute values from; when None the
        instance is built from *kwargs* alone.
    :param attrs: iterable of attribute names to copy; falls back to
        ``cls.attrs`` when not given. Attributes missing on *obj* (or set
        to None) are skipped.
    :param id: primary key for the new instance — pass with care.
    :param kwargs: extra constructor arguments; values taken from *obj*
        override these.
    :return: a new instance of *cls*.
    """
    attrs = attrs or cls.attrs
    # BUG FIX: the original tested the builtin `object` (always truthy)
    # instead of the `obj` parameter — an obvious typo.
    if obj is not None:
        for k in attrs:
            v = getattr(obj, k, None)
            if v is not None:
                kwargs[k] = v
    return cls(id=id, **kwargs)
astropy-4.0.1.post1
astropy-4.0.1.post1//astropy/io/fits/header.pyclass:Header/fromkeys
@classmethod
def fromkeys(cls, iterable, value=None):
    """
    Build a new `Header` from keywords, like :meth:`dict.fromkeys`.

    Mostly useful for testing rather than for creating real-world FITS
    headers.

    Parameters
    ----------
    iterable
        Any iterable yielding strings representing FITS keywords.
    value : optional
        Default value assigned to each keyword; must be a valid type for
        FITS keywords. A tuple is appended as-is (e.g. value + comment).

    Returns
    -------
    header
        A new `Header` instance.
    """
    # Non-tuple values are wrapped so (keyword,) + suffix forms a card.
    suffix = value if isinstance(value, tuple) else (value,)
    header = cls()
    for keyword in iterable:
        header.append((keyword,) + suffix)
    return header
devo-sdk-3.3.0
devo-sdk-3.3.0//devo/common/dates/dateoperations.pyfile:/devo/common/dates/dateoperations.py:function:week/week
def week():
    """Return the number of milliseconds in one week.

    :return: 7 * 24 * 60 * 60 * 1000 (i.e. 604800000)
    """
    # Spelled out so the unit arithmetic is self-evident.
    return 7 * 24 * 60 * 60 * 1000
lalapps
lalapps//power.pyfile:/power.py:function:psds_from_job_length/psds_from_job_length
def psds_from_job_length(timing_params, t):
    """
    Return the number of PSDs that fit into a job of length *t* seconds.
    In general the return value is a non-integer.

    :param timing_params: object providing resample_rate,
        filter_corruption, psd_length and psd_shift attributes.
    :param t: job length in seconds; must be non-negative.
    :raises ValueError: if *t* is negative.
    """
    if t < 0:
        raise ValueError(t)
    # Convert to samples and discard the filter-corrupted edges.
    usable = t * timing_params.resample_rate - 2 * timing_params.filter_corruption
    if usable < timing_params.psd_length:
        return 0
    return (usable - timing_params.psd_length) / timing_params.psd_shift + 1
fake-bpy-module-2.78-20200428
fake-bpy-module-2.78-20200428//bpy/ops/mball.pyfile:/bpy/ops/mball.py:function:duplicate_metaelems/duplicate_metaelems
def duplicate_metaelems():
    """Duplicate the currently selected metaelement(s) (fake-module stub)."""
    pass
alien_invasion_spbe
alien_invasion_spbe//game_functions.pyfile:/game_functions.py:function:get_num_rows/get_num_rows
def get_num_rows(ai_settings, ship_height, alien_height):
    """Determine how many rows of aliens fit on the screen.

    Reserves three alien-heights of space at the top plus room for the
    ship at the bottom; each fleet row occupies two alien-heights.
    """
    reserved = 3 * alien_height + ship_height
    available_space_y = ai_settings.screen_height - reserved
    return int(available_space_y / (2 * alien_height))
compare50-1.2.0
compare50-1.2.0//compare50/_data.pyclass:Submission/get
@classmethod
def get(cls, id):
    """Look up and return the submission stored under *id*."""
    store = cls._store
    return store.objects[id]
nngt-1.3.2
nngt-1.3.2//nngt/core/graph_datastruct.pyclass:NeuralPop/_nest_reset
@classmethod
def _nest_reset(cls):
    """ Reset the _to_nest bool and potential parent networks.

    Walks every registered population (cls.__pops is a weak-value mapping;
    the name is mangled to the defining class) and clears the NEST-related
    flags on the population, its groups, and its parent network.
    """
    for pop in cls.__pops.valuerefs():
        # valuerefs() yields weak references; pop() is None if the
        # population has already been garbage-collected.
        if pop() is not None:
            pop()._to_nest = False
            for g in pop().values():
                g._to_nest = False
            if pop().parent is not None:
                # Drop cached NEST gids on the parent network as well.
                pop().parent._nest_gids = None
wmcore-1.1.19.2
wmcore-1.1.19.2//src/python/WMCore/ReqMgr/CherryPyThreads/HeartbeatMonitor.pyfile:/src/python/WMCore/ReqMgr/CherryPyThreads/HeartbeatMonitor.py:function:_getRequestNumEvents/_getRequestNumEvents
def _getRequestNumEvents(propertyValue):
    """Normalize the WMStats num-events value to a single scalar.

    The server may return this in three shapes: a falsy value (becomes 0),
    a list (collapsed to its first element), or a plain value (returned
    unchanged).
    """
    if not propertyValue:
        return 0
    if isinstance(propertyValue, list):
        return propertyValue[0]
    return propertyValue
parlai
parlai//core/loader.pyfile:/core/loader.py:function:_name_to_agent_class/_name_to_agent_class
def _name_to_agent_class(name: str):
    """
    Convert an agent name to its class name.

    Adds "Agent" to the end of the name, uppercases the first letter and
    the first letter after each underscore, and removes the underscores.

    :param name: name of agent, e.g. local_human
    :return: class name of agent, e.g. LocalHumanAgent.
    """
    # ROBUSTNESS FIX: `w[:1]` instead of `w[0]` tolerates empty segments
    # produced by leading/trailing/double underscores, which previously
    # raised IndexError.
    class_name = ''.join(w[:1].upper() + w[1:] for w in name.split('_'))
    return class_name + 'Agent'
execnet-1.7.1
execnet-1.7.1//execnet/gateway_bootstrap.pyfile:/execnet/gateway_bootstrap.py:function:fix_pid_for_jython_popen/fix_pid_for_jython_popen
def fix_pid_for_jython_popen(gw):
    """Work around jython 2.5.1 leaving the popen pid unset.

    For a direct popen gateway whose io object reports no pid, ask the
    remote side for os.getpid() and record it.
    """
    spec = gw.spec
    io = gw._io
    # Only direct popen gateways (no 'via' hop) are affected.
    if not (spec.popen and not spec.via):
        return
    if io.popen.pid is None:
        io.popen.pid = gw.remote_exec(
            'import os; channel.send(os.getpid())').receive()
header2whatever
header2whatever//_pcpp/preprocessor.pyfile:/_pcpp/preprocessor.py:function:t_CPP_COMMENT1/t_CPP_COMMENT1
def t_CPP_COMMENT1(t):
    """(/\\*(.|\\n)*?\\*/)"""
    # NOTE: the docstring above IS the token regex — PLY's lexer reads it
    # as the pattern for C-style /* ... */ block comments. Do not edit it.
    # Keep the line counter accurate across multi-line comments.
    ncr = t.value.count('\n')
    t.lexer.lineno += ncr
    return t
astropy
astropy//extern/configobj/validate.pyfile:/extern/configobj/validate.py:function:dottedQuadToNum/dottedQuadToNum
def dottedQuadToNum(ip):
    """
    Convert decimal dotted quad string to long integer

    >>> int(dottedQuadToNum('1 '))
    1
    >>> int(dottedQuadToNum(' 1.2'))
    16777218
    >>> int(dottedQuadToNum(' 1.2.3 '))
    16908291
    >>> int(dottedQuadToNum('1.2.3.4'))
    16909060
    >>> dottedQuadToNum('255.255.255.255')
    4294967295
    >>> dottedQuadToNum('255.255.255.256')
    Traceback (most recent call last):
    ValueError: Not a good dotted-quad IP: 255.255.255.256
    """
    import socket, struct
    try:
        # inet_aton tolerates partial quads ('1.2' == 1.0.0.2); whitespace
        # is stripped first.
        return struct.unpack('!L', socket.inet_aton(ip.strip()))[0]
    except socket.error:
        # Re-raise as ValueError, per the documented contract.
        raise ValueError('Not a good dotted-quad IP: %s' % ip)
    # CLEANUP: removed an unreachable bare `return` that followed the
    # try/except (both branches already leave the function).
pydent
pydent//base.pyclass:ModelRegistry/__getattr__
def __getattr__(cls, item):
    """Raise a descriptive AttributeError for unknown class attributes.

    The missing name is likely a model-interface method the user meant to
    call on a session, so the error message points them there.
    """
    message = (
        "'{0}' has no attribute '{1}'. Method may be a ModelInterface "
        "method. Did you mean '<yoursession>.{0}.{1}'?"
    ).format(cls.__name__, item)
    raise AttributeError(message)
zope.app.tree-4.0.0
zope.app.tree-4.0.0//src/zope/app/tree/interfaces.pyclass:IChildObjects/getChildObjects
def getChildObjects():
    """Return a sequence of child objects (interface declaration)."""
mlfinlab
mlfinlab//portfolio_optimization/tic.pyclass:TIC/_get_atoms
@staticmethod
def _get_atoms(linkage, element):
    """
    Getting the atoms included in an element from a linkage object

    Atoms are the basic assets in a portfolio and not clusters.

    :param linkage: (np.array) Global linkage object
    :param element: (int) Element id to get atoms from
    :return: (list) Set of atoms
    """
    element_list = [element]
    # Repeatedly expand the largest id: ids > linkage.shape[0] denote
    # clusters, whose two children live at row (id - shape[0] - 1) of the
    # linkage in fields 'i0' and 'i1'. Ids <= shape[0] are atoms.
    while True:
        item_ = max(element_list)
        if item_ > linkage.shape[0]:
            element_list.remove(item_)
            element_list.append(linkage['i0'][item_ - linkage.shape[0] - 1])
            element_list.append(linkage['i1'][item_ - linkage.shape[0] - 1])
        else:
            # Largest remaining id is an atom, so all entries are atoms.
            break
    return element_list
zope.i18n-4.7.0
zope.i18n-4.7.0//src/zope/i18n/interfaces/locales.pyclass:ILocaleProvider/loadLocale
def loadLocale(language=None, country=None, variant=None):
    """Load the locale matching the given language/country/variant specs.

    Interface declaration: the LocaleProvider implementation must know
    where to get the locales from.
    """
webscaff
webscaff//commands/run/certbot.pyfile:/commands/run/certbot.py:function:get_certificate/get_certificate
def get_certificate(ctx):
    """Obtain certificates from Certbot for HTTPS using the webroot plugin.

    :param ctx: deployment context providing project settings, remote
        paths and a ``sudo`` runner.
    """
    project = ctx.project
    email_opt = ''
    if project.email:
        email_opt = '--email %s' % project.email
    command = (
        'certbot --agree-tos --no-eff-email %s certonly --webroot -d %s -w %s'
        % (email_opt, project.domain, ctx.paths.remote.project.state.certbot))
    ctx.sudo(command)
ccstudiodss-0.2.23
ccstudiodss-0.2.23//versioneer.pyfile:/versioneer.py:function:plus_or_dot/plus_or_dot
def plus_or_dot(pieces):
    """Return the local-version separator for *pieces*.

    '.' when the closest tag already contains a '+', otherwise '+'.
    """
    return '.' if '+' in pieces.get('closest-tag', '') else '+'
ddlgenerator-0.1.9
ddlgenerator-0.1.9//ddlgenerator/ddlgenerator.pyfile:/ddlgenerator/ddlgenerator.py:function:emit_db_sequence_updates/emit_db_sequence_updates
def emit_db_sequence_updates(engine):
    """Set database sequence objects to match the source db

    Relevant only when generated from SQLAlchemy connection.
    Needed to avoid subsequent unique key violations after DB build.

    :param engine: SQLAlchemy engine (may be None/falsy: yields nothing).
    :return: generator of ``ALTER SEQUENCE ... RESTART WITH n;`` statements.
    """
    if engine and engine.name == 'postgresql':
        conn = engine.connect()
        # One query that returns, for every sequence, (a query fetching its
        # last value, the sequence's qualified name).
        seq_qry = """SELECT 'SELECT last_value FROM ' || n.nspname || '.' || c.relname || ';' AS qry, n.nspname || '.' || c.relname AS qual_name FROM pg_namespace n JOIN pg_class c ON (n.oid = c.relnamespace) WHERE c.relkind = 'S'"""
        # CLARITY FIX: the original re-used the name `qry` for both the
        # outer query and each row's per-sequence query, shadowing it
        # mid-loop; distinct names make the two-level querying explicit.
        for lastval_qry, qual_name in list(conn.execute(seq_qry)):
            lastval, = conn.execute(lastval_qry).first()
            nextval = int(lastval) + 1
            yield 'ALTER SEQUENCE %s RESTART WITH %s;' % (qual_name, nextval)
gordo-0.55.1
gordo-0.55.1//gordo/workflow/workflow_generator/workflow_generator.pyfile:/gordo/workflow/workflow_generator/workflow_generator.py:function:_docker_friendly_version/_docker_friendly_version
def _docker_friendly_version(version):
    """Make a version string usable as a docker tag.

    Untagged versions may contain a '+', which is not valid in a docker
    tag; each '+' is replaced with '_'.
    """
    return '_'.join(version.split('+'))
kpa
kpa//iter_utils.pyfile:/iter_utils.py:function:iterlen/iterlen
def iterlen(iterator):
    """Consume *iterator* and return how many items it yielded."""
    count = 0
    for _ in iterator:
        count += 1
    return count
qiskit-aqua-0.7.0
qiskit-aqua-0.7.0//qiskit/aqua/algorithms/minimum_eigen_solvers/cplex/simple_cplex.pyclass:SimpleCPLEX/_convert_coefficients
@staticmethod
def _convert_coefficients(coef):
    """ Convert 'x' and '-x' into ('x', 1) and ('x', -1), respectively.

    Args:
        coef (list[(int, float) or int]): coefficients; each entry is
            either an (index, value) pair or a signed index.

    Returns:
        tuple: ind, val — parallel lists of indices and values.

    Raises:
        RuntimeError: unsupported type
    """
    ind = []
    val = []
    for e in coef:
        if isinstance(e, tuple):
            # ROBUSTNESS FIX: the original used `assert len(e) == 2`, which
            # is stripped under -O; the documented contract is RuntimeError.
            if len(e) != 2:
                raise RuntimeError('unsupported type:' + str(e))
            ind.append(e[0])
            val.append(e[1])
        elif isinstance(e, int):
            if e >= 0:
                ind.append(e)
                val.append(1)
            else:
                # Negative index encodes a -1 coefficient on abs(index).
                ind.append(-e)
                val.append(-1)
        else:
            raise RuntimeError('unsupported type:' + str(e))
    return ind, val
onnx_coreml
onnx_coreml//_operators_nd.pyfile:/_operators_nd.py:function:_convert_matmul/_convert_matmul
def _convert_matmul(builder, node, graph, err):
    """
    convert to CoreML BatchedMatMul Layer:
    https://github.com/apple/coremltools/blob/655b3be5cc0d42c3c4fa49f0f0e4a93a26b3e492/mlmodel/format/NeuralNetwork.proto#L3473

    :param builder: CoreML NeuralNetworkBuilder receiving the layers.
    :param node: ONNX MatMul node being converted.
    :param graph: ONNX graph (unused here).
    :param err: error handler (unused here).
    """
    weight_name = node.inputs[1]
    W = None
    weight_as_layer_parameter = False
    # Only a statically-known weight tensor can be baked into the layer.
    if weight_name in node.input_tensors:
        W = node.input_tensors[weight_name]
    if W is not None:
        if len(W.shape) != 2:
            # Non-2D weights can't be layer parameters; materialize them as
            # a constant so BatchedMatMul can take them as a second input.
            builder.add_load_constant_nd(node.name + '_const_weight_input',
                weight_name, constant_value=W, shape=W.shape)
        else:
            weight_as_layer_parameter = True
    if weight_as_layer_parameter:
        # 2D static weight: embed it directly in the layer.
        builder.add_batched_mat_mul(name=node.name, input_names=[node.inputs[0]],
            output_name=node.outputs[0], weight_matrix_rows=W.shape[0],
            weight_matrix_columns=W.shape[1], W=W)
    else:
        # Dynamic (or constant-materialized) weight: pass it as an input.
        builder.add_batched_mat_mul(name=node.name, input_names=[node.inputs[0],
            weight_name], output_name=node.outputs[0])
yaargh-0.28.0
yaargh-0.28.0//yaargh/assembling.pyfile:/yaargh/assembling.py:function:_guess/_guess
def _guess(kwargs):
    """
    Adds types, actions, etc. to given argument specification.
    For example, ``default=3`` implies ``type=int``.

    :param kwargs: argparse-style keyword specification for one argument.
    :return: a new dict — *kwargs* overlaid with the guessed entries.
    """
    guessed = {}
    # Only these actions accept a `type=` keyword.
    TYPE_AWARE_ACTIONS = 'store', 'append'
    value = kwargs.get('default')
    if value is not None:
        if isinstance(value, bool):
            # Boolean default implies a flag: default True toggles off,
            # default False toggles on.
            if kwargs.get('action') is None:
                guessed['action'] = 'store_false' if value else 'store_true'
        elif kwargs.get('type') is None:
            # Infer the type from the default, but only for actions that
            # can use it.
            if kwargs.get('action', 'store') in TYPE_AWARE_ACTIONS:
                guessed['type'] = type(value)
    # A choices list implies the type of its first element, unless a type
    # was already given or guessed above.
    if kwargs.get('choices') and 'type' not in list(guessed) + list(kwargs):
        guessed['type'] = type(kwargs['choices'][0])
    # Guessed entries take precedence over the originals.
    return dict(kwargs, **guessed)
pyoai-2.5.0
pyoai-2.5.0//src/oaipmh/interfaces.pyclass:IOAI/identify
def identify():
    """Retrieve information about the repository (interface declaration).

    Returns an Identify object describing the repository.
    """
RelStorage-3.0.1
RelStorage-3.0.1//src/relstorage/adapters/interfaces.pyclass:IPackUndo/deleteObject
def deleteObject(cursor, oid_int, tid_int):
    """
    Delete the revision of *oid_int* in transaction *tid_int*
    (interface declaration — see ``IExternalGC``).

    Marks the object as deleted via a new object revision: loading current
    data then fails with a POSKeyError, while loads of non-current data
    still succeed as long as earlier non-delete records exist. Once all
    non-delete records are removed, the object disappears from storage.
    The serial argument must match the object's most recently committed
    serial — this is a seat belt.

    History-free databases have no delete records, so this removes the
    single revision of *oid_int* (which *should* be verified to be at
    *tid_int*), making all future access raise ``POSKeyError``.

    History-preserving databases set the object's state at the transaction
    to NULL to signify deletion; a subsequent pack actually removes the
    deleted items.
    """
cogent-1.9
cogent-1.9//cogent/parse/meme.pyfile:/cogent/parse/meme.py:function:dictFromList/dictFromList
def dictFromList(data_list):
    """Build a dict from a flat list of alternating key, value pairs.

    - ex: [key1, value1, key2, value2] returns {key1: value1, key2: value2}
    - Repeated keys have their values concatenated with a space separator.
    - A trailing unpaired element is ignored.
    """
    data_dict = {}
    for key, value in zip(data_list[0::2], data_list[1::2]):
        if key in data_dict:
            data_dict[key] = data_dict[key] + ' ' + value
        else:
            data_dict[key] = value
    return data_dict
open3SPN2-0.1.2
open3SPN2-0.1.2//open3SPN2/ff3SPN2.pyfile:/open3SPN2/ff3SPN2.py:function:test_DNA_from_seq/test_DNA_from_seq
def test_DNA_from_seq():
    """Check correct DNA initialization from sequence files (placeholder)."""
    pass
pyGeno-2.0.0
pyGeno-2.0.0//pyGeno/tools/UsefulFunctions.pyfile:/pyGeno/tools/UsefulFunctions.py:function:showDifferences/showDifferences
def showDifferences(seq1, seq2):
    """Return a string highlighting the differences between seq1 and seq2:

    * Matches shown as '-'
    * Differences as 'A|T' (seq1 char | seq2 char)
    * Positions beyond a sequence's end as '#'
    """
    out = []
    for pos in range(max(len(seq1), len(seq2))):
        a = seq1[pos] if pos < len(seq1) else '#'
        b = seq2[pos] if pos < len(seq2) else '#'
        out.append('-' if a == b else '%s|%s' % (a, b))
    return ''.join(out)
bardeen
bardeen//code_style.pyfile:/code_style.py:function:_prefix_empty_lines/_prefix_empty_lines
def _prefix_empty_lines(lines, goal_empty_count):
    """ Internal function to make *lines* start with exactly the requested
    number of empty lines (no prefix considerations).

    :param lines: list of line strings.
    :param goal_empty_count: desired number of leading empty lines.
    :return: a new list with leading blank lines added or removed.
    """
    # STYLE FIX: the original used bare string literals as "comments";
    # those are real expression statements evaluated at runtime, so they
    # are replaced with actual comments here.
    # Count the blank lines currently at the top.
    current_empty_count = 0
    for line in lines:
        if line.strip():
            break
        current_empty_count += 1
    # Too few: prepend the difference (a no-op when non-positive).
    lines = [''] * (goal_empty_count - current_empty_count) + lines
    # Too many: drop the surplus from the front.
    if current_empty_count > goal_empty_count:
        lines = lines[current_empty_count - goal_empty_count:]
    return lines
edgy_lines
edgy_lines//Edge2.pyclass:Edge/is_point
@staticmethod def is_point(line): """ Returns true if the specified line is in fact a point. 'line' is a line segment of form [x0,y0,x1,y1]. Edge.is_point(np.arr[float(x4)]) -> bool """ x0, y0, x1, y1 = line return x0 == x1 and y0 == y1
mxnet
mxnet//contrib/onnx/mx2onnx/_op_translations.pyfile:/contrib/onnx/mx2onnx/_op_translations.py:function:transform_padding/transform_padding
def transform_padding(pad_width):
    """Convert padding format for the pad operator.

    MXNet interleaves (begin, end) per axis; ONNX wants all begin values
    first, then all end values.
    """
    total = len(pad_width)
    onnx_pad_width = [0] * total
    begin_pos = 0
    end_pos = int(total / 2)
    for idx, value in enumerate(pad_width):
        if idx % 2 == 0:
            # Even positions are per-axis begin pads.
            onnx_pad_width[begin_pos] = value
            begin_pos += 1
        else:
            # Odd positions are per-axis end pads.
            onnx_pad_width[end_pos] = value
            end_pos += 1
    return onnx_pad_width
dnsdb-0.2.5
dnsdb-0.2.5//dnsdb/utils.pyfile:/dnsdb/utils.py:function:debug/debug
def debug(result):
    """ Print a dnsdb result's fields to stdout for console / ipython use.

    USAGE:::

        from dnsdb.utils import debug
        r = dnsdb.search(name="www.fsi.io")
        debug(r)

    :param result: object with status_code/error/quota/cached/records
    :return: text: stdout
    """
    # Falsy fields are reported as None, matching the original output.
    print('Status Code: {}'.format(result.status_code if result.status_code else None))
    print('Error: {}'.format(result.error if result.error else None))
    print('Quota: {}'.format(result.quota if result.quota else None))
    print('Cached: {}'.format(result.cached if result.cached else None))
    if result.records:
        print('Records exist: True')
        print('Number of records: {}'.format(len(result.records)))
    else:
        print('Records exist: False')
zope.schema-6.0.0
zope.schema-6.0.0//src/zope/schema/interfaces.pyclass:IField/set
def set(object, value):
    """Set the value of the field for the object (interface declaration).

    Raises a type error if the field is a read-only field.
    """
beebird-0.0.1
beebird-0.0.1//beebird/task.pyclass:Task/importAllTasks
@staticmethod
def importAllTasks():
    """ import all available tasks in qbackup/tasks folder

    the ./tasks/__init__.py will dynamically import all sub-packages
    """
    # Side-effect import: loading the package runs its __init__, which
    # registers every task sub-package. The bound name itself is unused.
    from . import tasks
fake-bpy-module-2.78-20200428
fake-bpy-module-2.78-20200428//freestyle/utils/ContextFunctions.pyfile:/freestyle/utils/ContextFunctions.py:function:get_border/get_border
def get_border():
    """Return the border (fake-module stub).

    :return: A tuple of 4 numbers (xmin, ymin, xmax, ymax).
    """
    pass
modoboa
modoboa//core/password_hashers/base.pyclass:MetaHasher/label
@property
def label(cls):
    """Human-readable label of the hasher, tagged '(weak)' when weak."""
    if cls._weak:
        return '{} (weak)'.format(cls.name)
    return cls.name
featuretools-0.14.0
featuretools-0.14.0//featuretools/computational_backends/calculate_feature_matrix.pyfile:/featuretools/computational_backends/calculate_feature_matrix.py:function:_add_approx_entity_index_var/_add_approx_entity_index_var
def _add_approx_entity_index_var(es, target_entity_id, cutoffs, path):
    """
    Add a variable to the cutoff df linking it to the entity at the end of
    the path. Return the updated cutoff df and the name of this variable.
    The name will consist of the variables which were joined through.

    :param es: entityset providing entity dataframes and indexes.
    :param target_entity_id: id of the entity the cutoffs start from.
    :param cutoffs: cutoff dataframe with an 'instance_id' column.
    :param path: iterable of (_, relationship) pairs to walk.
    """
    last_child_var = 'instance_id'
    last_parent_var = es[target_entity_id].index
    # NOTE(review): if `path` is empty, `new_var_name` below is never bound
    # and the final return raises — presumably callers always pass a
    # non-empty path; confirm.
    for _, relationship in path:
        # Slice the child entity down to the join key and the child column.
        child_vars = [last_parent_var, relationship.child_variable.id]
        child_df = es[relationship.child_entity.id].df[child_vars]
        # Accumulate the join trail into the new column's name.
        new_var_name = '%s.%s' % (last_child_var, relationship.child_variable.id)
        to_rename = {relationship.child_variable.id: new_var_name}
        child_df = child_df.rename(columns=to_rename)
        # Join the renamed child column onto the cutoffs.
        cutoffs = cutoffs.merge(child_df, left_on=last_child_var, right_on=
            last_parent_var)
        # Next hop joins through this new column / the parent's index.
        last_child_var = new_var_name
        last_parent_var = relationship.parent_variable.id
    return cutoffs, new_var_name
Qimport-0.1
Qimport-0.1//Downloader.pyfile:/Downloader.py:function:download_tanzil_translations/download_tanzil_translations
def download_tanzil_translations():
    """Download Quran translations from http://tanzil.net/trans/ (stub)."""
    pass
webmake-3.0.0
webmake-3.0.0//webmake/api.pyfile:/webmake/api.py:function:custom_function/custom_function
def custom_function(func, input_files, output_file):
    """
    Calls a custom function which must create the output file.

    The custom function takes 3 parameters: ``input_files``,
    ``output_file`` and a boolean ``release``.
    """
    from .modules import utils
    return {
        'dependencies_fn': utils.no_dependencies,
        'compiler_fn': func,
        'input': input_files,
        'output': output_file,
        'kwargs': {},
    }
opzet-19
opzet-19//opzet/object.pyclass:Object/selector
def selector(zelf, want=None):
    """ See if this object has any of the desired attributes set.

    :param zelf: the object (mapping-like) to inspect.
    :param want: iterable of keys to look for; defaults to nothing.
        (FIX: the original used a mutable default ``want=[]`` — an
        anti-pattern; ``None`` behaves identically for callers.)
    :return: True as soon as one wanted key is present and truthy,
        False otherwise.
    """
    if want is None:
        want = []
    for arg in want:
        if arg in zelf and zelf[arg]:
            return True
    return False
fake-bpy-module-2.78-20200428
fake-bpy-module-2.78-20200428//bpy/ops/time.pyfile:/bpy/ops/time.py:function:end_frame_set/end_frame_set
def end_frame_set():
    """Set the scene's end frame (fake-module stub)."""
    pass
pynwb-1.3.0
pynwb-1.3.0//versioneer.pyfile:/versioneer.py:function:render_git_describe_long/render_git_describe_long
def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always -long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty]  (note: no 'g' prefix)
    """
    tag = pieces['closest-tag']
    if tag:
        rendered = '%s-%d-g%s' % (tag, pieces['distance'], pieces['short'])
    else:
        rendered = pieces['short']
    return rendered + '-dirty' if pieces['dirty'] else rendered
orgminer
orgminer//ResourceProfiler/raw_profiler.pyfile:/ResourceProfiler/raw_profiler.py:function:count_execution_frequency/count_execution_frequency
def count_execution_frequency(rl, scale=None):
    """Build resource profiles based on how frequently resources originated
    events of execution modes.

    Each column in the result profiles corresponds with an execution mode
    captured in the given resource log.

    Parameters
    ----------
    rl : DataFrame
        A resource log.
    scale : {None, 'normalize', 'log'}, optional, default None
        Options for deciding how to scale the values of frequency
        counting. Could be one of the following:

            - ``None``, no scaling will be performed.
            - ``'normalize'``, scale the frequency values by the total
              count of executions by each resource.
            - ``'log'``, scale the frequency values by logarithm.

    Returns
    -------
    DataFrame
        The constructed resource profiles.

    Raises
    ------
    ValueError
        If the specified option for scaling is invalid.
    """
    from collections import defaultdict
    # mat: resource -> (case_type, activity_type, time_type) -> count
    mat = defaultdict(lambda : defaultdict(lambda : 0))
    for res, trace in rl.groupby('resource'):
        for event in trace.itertuples():
            exec_mode = event.case_type, event.activity_type, event.time_type
            mat[res][exec_mode] += 1
    from pandas import DataFrame
    # Rows = resources, columns = execution modes; missing combos become 0.
    df = DataFrame.from_dict(mat, orient='index').fillna(0)
    if scale is None:
        return df
    elif scale == 'normalize':
        # Row-normalize: each resource's counts sum to 1.
        return df.div(df.sum(axis=1), axis=0)
    elif scale == 'log':
        from numpy import log
        # log(x + 1) keeps zero counts at zero.
        return df.apply(lambda x: log(x + 1))
    else:
        raise ValueError('Invalid value for parameter `{}`: {}'.format(
            'scale', scale))
bangtext-0.2.12
bangtext-0.2.12//bang/plugins/feed.pyfile:/bang/plugins/feed.py:function:get_cdata/get_cdata
def get_cdata(text):
    """Wrap *text* in a CDATA section for XML/feed output.

    :param text: value to wrap; non-strings are stringified via format().
    :return: ``<![CDATA[...]]>`` string.
    """
    s = '{}'.format(text)
    # ROBUSTNESS FIX: a literal ']]>' inside the text would terminate the
    # CDATA section early; the standard escape splits it across two
    # adjacent CDATA sections.
    return '<![CDATA[' + s.replace(']]>', ']]]]><![CDATA[>') + ']]>'
iranlowo-0.0.8.3
iranlowo-0.0.8.3//src/torchtext/data/pipeline.pyclass:Pipeline/identity
@staticmethod def identity(x): """Return a copy of the input. This is here for serialization compatibility with pickle. """ return x
swampy-3.0.1
swampy-3.0.1//python3/Gui.pyfile:/python3/Gui.py:function:remove_options/remove_options
def remove_options(options, names):
    """Remove the given keys from the options dictionary, in place.

    Missing names are ignored.

    Args:
        options: dict to modify.
        names: list of keys to remove.
    """
    for name in names:
        options.pop(name, None)
bowtie
bowtie//_cache.pyfile:/_cache.py:function:validate/validate
def validate(key):
    """Ensure the cache key is a string or bytestring.

    Those are the only valid key types; anything else raises KeyError.
    """
    if isinstance(key, (str, bytes)):
        return
    raise KeyError('Key must be of type str or bytes, found type {}'.format(
        type(key)))
pyboto3-1.4.4
pyboto3-1.4.4//pyboto3/sfn.pyfile:/pyboto3/sfn.py:function:send_task_heartbeat/send_task_heartbeat
def send_task_heartbeat(taskToken=None):
    """
    Used by workers to report that the task for the given taskToken is
    still making progress; resets the Heartbeat clock. The Heartbeat
    threshold is specified in the state machine's Amazon States Language
    definition. This call does not itself create an execution-history
    event, but if the task times out the history will contain an
    ActivityTimedOut event.

    See also: AWS API Documentation

    :example: response = client.send_task_heartbeat(taskToken='string')

    :type taskToken: string
    :param taskToken: [REQUIRED] The token that represents this task.
        Task tokens are generated by the service when tasks are assigned
        to a worker (see GetActivityTask::taskToken).

    :rtype: dict
    :return: {}
    """
    pass
statsmodels-0.11.1
statsmodels-0.11.1//statsmodels/graphics/mosaicplot.pyfile:/statsmodels/graphics/mosaicplot.py:function:_normalize_dataframe/_normalize_dataframe
def _normalize_dataframe(dataframe, index):
    """Take a pandas DataFrame and count the element present in the given
    columns, return a hierarchical index on those columns

    :param dataframe: source DataFrame.
    :param index: list of column names to group/count by.
    :return: Series indexed hierarchically by *index* with the counts.
    """
    # Drop rows with missing values in the columns of interest.
    data = dataframe[index].dropna()
    # sort=False keeps the categories in encounter order.
    grouped = data.groupby(index, sort=False)
    # Count per group; after dropna all columns have equal counts, so the
    # row-wise mean collapses them to a single count per group.
    counted = grouped[index].count()
    averaged = counted.mean(axis=1)
    averaged = averaged.fillna(0.0)
    return averaged
gracedb-sdk-0.1.6
gracedb-sdk-0.1.6//versioneer.pyfile:/versioneer.py:function:scan_setup_py/scan_setup_py
def scan_setup_py():
    """Validate the contents of setup.py against Versioneer's expectations.

    Reads ./setup.py, checks for the required versioneer calls, prints
    guidance for anything missing or obsolete, and returns the number of
    problems found (0 means everything looks fine).
    """
    found = set()
    setters = False
    errors = 0
    with open('setup.py', 'r') as f:
        for line in f.readlines():
            # Required pieces of a versioneer-enabled setup.py.
            if 'import versioneer' in line:
                found.add('import')
            if 'versioneer.get_cmdclass()' in line:
                found.add('cmdclass')
            if 'versioneer.get_version()' in line:
                found.add('get_version')
            # Obsolete in-setup.py configuration (now lives in setup.cfg).
            if 'versioneer.VCS' in line:
                setters = True
            if 'versioneer.versionfile_source' in line:
                setters = True
    if len(found) != 3:
        print('')
        print('Your setup.py appears to be missing some important items')
        print('(but I might be wrong). Please make sure it has something')
        print('roughly like the following:')
        print('')
        print(' import versioneer')
        print(' setup( version=versioneer.get_version(),')
        print(' cmdclass=versioneer.get_cmdclass(), ...)')
        print('')
        errors += 1
    if setters:
        print("You should remove lines like 'versioneer.VCS = ' and")
        print("'versioneer.versionfile_source = ' . This configuration")
        print('now lives in setup.cfg, and should be removed from setup.py')
        print('')
        errors += 1
    return errors
ScanLHA
ScanLHA//config.pyfile:/config.py:function:intersect/intersect
def intersect(list1, list2):
    """Return the intersection of two lists (as a list; order and
    duplicates are not preserved)."""
    common = set(list1).intersection(list2)
    return list(common)
seqcluster-1.2.7
seqcluster-1.2.7//seqcluster/libs/peaks.pyfile:/seqcluster/libs/peaks.py:function:_get_locus/_get_locus
def _get_locus(cluster):
    """get the bigger locus"""
    # NOTE(review): placeholder implementation — always returns True and
    # ignores `cluster`; presumably to be replaced with real locus
    # selection logic.
    return True
farmfs
farmfs//transduce.pyfile:/transduce.py:function:transduce/transduce
def transduce(transformer, reducer, seed, iterable):
    """Left-fold *iterable* with a transformed reducer.

    transformer is (a -> b), reducer is (b -> a -> b), seed is b,
    iterable is [a]; returns the final accumulation of type b.
    """
    step = transformer(reducer)
    acc = seed
    for item in iterable:
        acc = step(acc, item)
    return acc
bbarchivist
bbarchivist//compat.pyfile:/compat.py:function:where_which/where_which
def where_which(path):
    """Backwards compatibility wrapper for approximating which/where.

    :param path: Path to (or name of) an executable.
    :type path: str
    :returns: full path to the executable, or None if not found.

    The original used try/except/finally where the ``finally`` block
    called ``which`` unconditionally; if both imports failed, that raised
    NameError while the ImportError was still being handled. The plain
    try/except below keeps the fallback and removes that failure mode.
    """
    try:
        from shutil import which
    except ImportError:
        from shutilwhich import which
    return which(path)
jwst_gtvt
jwst_gtvt//math_extensionsx.pyfile:/math_extensionsx.py:function:output_as_percentage/output_as_percentage
def output_as_percentage(num, fractional_digits=1):
    """Output a percentage neatly.

    fractional_digits = number of digits after the decimal point; if None
    is supplied there is no reduction in precision ('%f', six decimals).
    """
    if fractional_digits is None:
        body = '%f' % num
    else:
        # '%.*f' takes the precision as the first argument.
        body = '%.*f' % (fractional_digits, num)
    return body + '%'
fake-bpy-module-2.79-20200428
fake-bpy-module-2.79-20200428//bpy/ops/clip.pyfile:/bpy/ops/clip.py:function:stabilize_2d_add/stabilize_2d_add
def stabilize_2d_add():
    """Add selected tracks to 2D translation stabilization

    Auto-generated stub mirroring Blender's ``bpy.ops.clip.stabilize_2d_add``
    operator; the real implementation lives inside Blender itself.
    """
    pass
wokkel
wokkel//iwokkel.pyclass:IPubSubService/getDefaultConfiguration
def getDefaultConfiguration(requestor, service, nodeType):
    """
    Called when a default node configuration request has been received.

    Interface declaration (zope.interface style): no ``self`` parameter
    and no implementation body -- implementers provide the behavior.

    @param requestor: The entity the request originated from.
    @type requestor: L{JID<twisted.words.protocols.jabber.jid.JID>}
    @param service: The entity the request was addressed to.
    @type service: L{JID<twisted.words.protocols.jabber.jid.JID>}
    @param nodeType: The type of node for which the configuration is
        retrieved, C{'leaf'} or C{'collection'}.
    @type nodeType: C{str}
    @return: A deferred that fires with a C{dict} representing the default
        node configuration. Keys are C{str}s that represent the field
        name. Values can be of types C{unicode}, C{int} or C{bool}.
    @rtype: L{Deferred<twisted.internet.defer.Deferred>}
    """
gpgmime-0.1
gpgmime-0.1//gpgmime/main.pyfile:/gpgmime/main.py:function:_/_
def _(s):
    """Placeholder for gettext; we may internationalize this library later.

    Returns *s* unchanged -- call sites are already marked for translation
    if real gettext support is wired in.
    """
    return s
miniKanren-1.0.1
miniKanren-1.0.1//versioneer.pyfile:/versioneer.py:function:scan_setup_py/scan_setup_py
def scan_setup_py():
    """Validate the contents of setup.py against Versioneer's expectations.

    Returns the number of problems found (0 means everything looks good).
    """
    found = set()
    setters = False
    errors = 0
    with open('setup.py', 'r') as setup_file:
        content = setup_file.readlines()
    for line in content:
        if 'import versioneer' in line:
            found.add('import')
        if 'versioneer.get_cmdclass()' in line:
            found.add('cmdclass')
        if 'versioneer.get_version()' in line:
            found.add('get_version')
        if 'versioneer.VCS' in line or 'versioneer.versionfile_source' in line:
            setters = True
    if len(found) != 3:
        for msg in ('',
                    'Your setup.py appears to be missing some important items',
                    '(but I might be wrong). Please make sure it has something',
                    'roughly like the following:',
                    '',
                    ' import versioneer',
                    ' setup( version=versioneer.get_version(),',
                    ' cmdclass=versioneer.get_cmdclass(), ...)',
                    ''):
            print(msg)
        errors += 1
    if setters:
        for msg in ("You should remove lines like 'versioneer.VCS = ' and",
                    "'versioneer.versionfile_source = ' . This configuration",
                    'now lives in setup.cfg, and should be removed from setup.py',
                    ''):
            print(msg)
        errors += 1
    return errors
perceval-0.12.24
perceval-0.12.24//perceval/backends/core/dockerhub.pyclass:DockerHub/metadata_updated_on
@staticmethod
def metadata_updated_on(item):
    """Extracts and converts the update time from a Docker Hub item.

    The timestamp is extracted from the 'fetched_on' field. This field is
    not part of the data provided by Docker Hub; it is added by this
    backend when the item is fetched.

    :param item: item generated by the backend
    :returns: a UNIX timestamp
    """
    return item['fetched_on']
idiosync
idiosync//sqlalchemy.pyclass:SqlEntry/find
@classmethod
def find(cls, key):
    """Look up a user database entry by its key column.

    Returns an instance wrapping the matching row, or None when no row
    matches.
    """
    key_column = getattr(cls.model.orm, cls.model.key)
    row = (cls.db.query(cls.model.orm)
           .filter(key_column == key)
           .one_or_none())
    if row is None:
        return None
    return cls(row)
symver-smap-0.2.5
symver-smap-0.2.5//src/smap/symver.pyfile:/src/smap/symver.py:function:bump_version/bump_version
def bump_version(version, abi_break):
    """Bump a version depending on whether the ABI was broken.

    If the ABI was broken, CUR is bumped and AGE/REV are zeroed.
    Otherwise CUR is kept, AGE is bumped, and REV is zeroed. Also works
    with versions missing the REV component (e.g. [1, 4, None]).

    :param version: A list in format [CUR, AGE, REV]
    :param abi_break: A boolean indicating if the ABI was broken
    :returns: A list in format [CUR, AGE, REV]
    """
    bumped = []
    if abi_break:
        if version[0] is not None:
            bumped.append(version[0] + 1)
        bumped.extend(0 for _ in version[1:])
    else:
        if version[0] is not None:
            bumped.append(version[0])
        if version[1] is not None:
            bumped.append(version[1] + 1)
        bumped.extend(0 for _ in version[2:])
    return bumped
borgmatic
borgmatic//config/normalize.pyfile:/config/normalize.py:function:normalize/normalize
def normalize(config):
    """Normalize a configuration dict in place to adhere to the schema.

    Currently: a 'location.exclude_if_present' given as a single string is
    wrapped into a one-element list.
    """
    location = config.get('location', {})
    value = location.get('exclude_if_present')
    if isinstance(value, str):
        location['exclude_if_present'] = [value]
growler-0.8.0
growler-0.8.0//growler/protocol.pyclass:GrowlerProtocol/get_factory
@classmethod
def get_factory(cls, *args, **kw):
    """Return a zero-argument runnable that invokes this class's factory.

    All arguments are captured and forwarded to ``cls.factory`` when the
    returned callable is invoked, making explicit construction binding
    easy.
    """
    def runner():
        return cls.factory(*args, **kw)
    return runner
metadocs
metadocs//include/example_project/classif/inference.pyfile:/include/example_project/classif/inference.py:function:send_batman/send_batman
def send_batman(person, model):
    """Decide whether Batman should be called on this person.

    Args:
        person (str): the target's name
        model: container supporting membership tests

    Returns:
        bool: True when *person* is found in *model*
    """
    print(person, model)
    should_send = person in model
    return should_send
moodleinspire
moodleinspire//processor/estimator.pyclass:Classifier/check_classes_balance
@staticmethod
def check_classes_balance(counts):
    """Check that the dataset contains enough samples of each class.

    Returns a warning message when any class count is more than three
    times another, False otherwise.
    """
    unbalanced = any(big > 3 * small for big in counts for small in counts)
    if unbalanced:
        return (
            'Provided classes are very unbalanced, predictions may not be accurate.'
            )
    return False
matchzoo-py-1.1.1
matchzoo-py-1.1.1//matchzoo/models/bimpm.pyfile:/matchzoo/models/bimpm.py:function:div_with_small_value/div_with_small_value
def div_with_small_value(n, d, eps=1e-08):
    """Element-wise n / d with tiny denominators clamped to *eps*.

    Entries of *d* that are <= eps are replaced by eps to prevent the
    division from exploding.

    :param n: tensor (numerator)
    :param d: tensor (denominator)
    :return: n / d tensor
    """
    keep_mask = (d > eps).float()
    clamp_mask = (d <= eps).float()
    safe_d = d * keep_mask + clamp_mask * eps
    return n / safe_d
seq-qc-2.0.4
seq-qc-2.0.4//seq_qc/pairs.pyfile:/seq_qc/pairs.py:function:verify_paired/verify_paired
def verify_paired(record1, record2):
    """Check if the two sequence records belong to the same fragment.

    In a matching pair the records are the left and right reads of one
    fragment. Handles both Casava formats: 'seq/1' + 'seq/2', and
    identical names with '1:...' / '2:...' descriptions.

    Returns True or False.
    """
    left_id, left_desc = record1.id, record1.description
    right_id, right_desc = record2.id, record2.description
    if left_id.endswith('/1') and right_id.endswith('/2'):
        base_left = left_id.split('/', 1)[0]
        base_right = right_id.split('/', 1)[0]
        return bool(base_left) and base_left == base_right
    if left_id == right_id:
        # Covers both the Casava 1.8 case ('1:...' / '2:...' descriptions)
        # and plain identical identifiers -- the two original elif
        # branches returned True either way.
        return True
    return False
clc-ansible-module-1.1.25
clc-ansible-module-1.1.25//clc_inv.pyfile:/clc_inv.py:function:_build_datacenter_groups/_build_datacenter_groups
def _build_datacenter_groups(hostvars): """ Return a dictionary of groups, one for each datacenter, containing all of the servers in that datacenter :param hostvars: The hostvars dictionary to parse :return: Dictionary of dynamically built Datacenter groups """ result = {} hostvars = hostvars.get('hostvars') for server in hostvars: datacenter = hostvars[server]['clc_data']['locationId'] if datacenter not in result: result[datacenter] = [] result[datacenter] += [server] return result
pipenv_devcheck
pipenv_devcheck//pipenv_setup_comp.pyfile:/pipenv_setup_comp.py:function:name_equality_check/name_equality_check
def name_equality_check(setup_deps, pipfile_deps):
    """Check that both dependency mappings declare the same package names.

    Args:
        setup_deps (dict): setup.py dependency name -> specifier tuples.
        pipfile_deps (dict): Pipfile dependency name -> specifier tuples.

    Returns:
        bool: True when the name sets match (always, since a mismatch
        raises before reaching the return).

    Raises:
        ValueError: If there are discrepancies between dependency names.
    """
    setup_names = set(setup_deps)
    pipfile_names = set(pipfile_deps)
    only_in_setup = setup_names - pipfile_names
    only_in_pipfile = pipfile_names - setup_names
    if only_in_setup or only_in_pipfile:
        err_msg = 'Dependency name mismatch!\n'
        if only_in_setup:
            err_msg += ('Dependencies in setup.py but not in Pipfile: ' +
                        str(only_in_setup) + '\n')
        if only_in_pipfile:
            err_msg += ('Dependencies in Pipfile but not in setup.py: ' +
                        str(only_in_pipfile) + '\n')
        raise ValueError(err_msg)
    return True
loo.py-2017.2
loo.py-2017.2//loopy/transform/iname.pyfile:/loopy/transform/iname.py:function:separate_loop_head_tail_slab/separate_loop_head_tail_slab
def separate_loop_head_tail_slab(kernel, iname, head_it_count, tail_it_count):
    """Mark *iname* so that separate code is generated for the lower
    *head_it_count* and the upper *tail_it_count* iterations of the loop
    on *iname*.

    Returns a new kernel with the updated slab increments; the input
    kernel is not mutated.
    """
    slabs = kernel.iname_slab_increments.copy()
    slabs[iname] = (head_it_count, tail_it_count)
    return kernel.copy(iname_slab_increments=slabs)
weasyprint
weasyprint//layout/blocks.pyfile:/layout/blocks.py:function:block_level_page_name/block_level_page_name
def block_level_page_name(sibling_before, sibling_after):
    """Return the next page name when siblings don't share the same name.

    Returns None when both siblings agree on the page name.
    """
    page_before = sibling_before.page_values()[1]
    page_after = sibling_after.page_values()[0]
    return page_after if page_after != page_before else None
breidablik
breidablik//analysis/read.pyfile:/analysis/read.py:function:_name_add/_name_add
def _name_add(name): """Add '+' to the front of positive values. Also keeps the '-' in front of negative values. """ name = str(float(name)) if name[0] != '-': name = '+' + name return name
dgl_cu102-0.4.3.post2.data
dgl_cu102-0.4.3.post2.data//purelib/dgl/data/citation_graph.pyfile:/purelib/dgl/data/citation_graph.py:function:_parse_index_file/_parse_index_file
def _parse_index_file(filename): """Parse index file.""" index = [] for line in open(filename): index.append(int(line.strip())) return index
pyboto3-1.4.4
pyboto3-1.4.4//pyboto3/s3.pyfile:/pyboto3/s3.py:function:get_bucket_logging/get_bucket_logging
def get_bucket_logging(Bucket=None):
    """
    Returns the logging status of a bucket and the permissions users have
    to view and modify that status. To use GET, you must be the bucket
    owner.

    NOTE(review): auto-generated documentation stub (pyboto3) -- the body
    is ``pass``; the real call is dispatched by boto3 at runtime.

    See also: AWS API Documentation

    :example: response = client.get_bucket_logging( Bucket='string' )
    :type Bucket: string
    :param Bucket: [REQUIRED]
    :rtype: dict
    :return: { 'LoggingEnabled': { 'TargetBucket': 'string', 'TargetGrants': [ { 'Grantee': { 'DisplayName': 'string', 'EmailAddress': 'string', 'ID': 'string', 'Type': 'CanonicalUser'|'AmazonCustomerByEmail'|'Group', 'URI': 'string' }, 'Permission': 'FULL_CONTROL'|'READ'|'WRITE' }, ], 'TargetPrefix': 'string' } }
    """
    pass
nemosis
nemosis//downloader.pyfile:/downloader.py:function:format_aemo_url/format_aemo_url
def format_aemo_url(url, year, month, filename):
    """Fill in the missing parts of an AEMO url template.

    Substitutes year (twice), month, and the file name with its last four
    characters (the extension) stripped, so data for the right month,
    year and file name are downloaded.
    """
    year_text = str(year)
    return url.format(year_text, year_text, month, filename[:-4])
chromewhip-0.3.4
chromewhip-0.3.4//chromewhip/protocol/profiler.pyclass:Profiler/stopPreciseCoverage
@classmethod
def stopPreciseCoverage(cls):
    """Disable precise code coverage.

    Disabling releases unnecessary execution count records and allows
    executing optimized code. Returns the (payload, parser) pair used by
    the protocol dispatcher; this command has no response parser.
    """
    payload = cls.build_send_payload('stopPreciseCoverage', {})
    return payload, None
claripy-8.20.1.7
claripy-8.20.1.7//claripy/simplifications.pyclass:SimplificationManager/rotate_shift_mask_simplifier
@staticmethod
def rotate_shift_mask_simplifier(a, b):
    """
    Handles the following case:

        ((A << a) | (A >> (_N - a))) & mask

    where A is a BVS, a is an integer less than _N, _N is either 32 or
    64, and mask can be evaluated to 0xffffffff (64-bit) or 0xffff
    (32-bit) after reversing the rotate-shift operation.

    It will be simplified to:

        (A & (mask >>> a)) <<< a

    :param a: the AND's left operand (candidate rotate-shift expression)
    :param b: the AND's right operand (the mask; must be a BVV)
    :returns: the simplified AST, or None when the pattern does not match

    Only change from the original: 'not a_00 is a_10' rewritten to the
    idiomatic 'a_00 is not a_10' (identical semantics).
    """
    if b.op != 'BVV':
        return None
    if a.op != '__or__' or len(a.args) != 2:
        return None
    a_0, a_1 = a.args
    if a_0.op != '__lshift__':
        return None
    if a_1.op != 'LShR':
        return None
    a_00, a_01 = a_0.args
    a_10, a_11 = a_1.args
    # Identity comparison: presumably relies on claripy AST interning so
    # structurally equal subtrees are the same object -- TODO confirm.
    if a_00 is not a_10:
        return None
    if a_01.op != 'BVV' or a_11.op != 'BVV':
        return None
    lshift_ = a_01.args[0]
    rshift_ = a_11.args[0]
    bitwidth = lshift_ + rshift_
    if bitwidth not in (32, 64):
        return None
    if bitwidth == 32:
        # Reverse the rotate and check the expected 32-bit mask (0xffff).
        m = b.args[0] << rshift_ & 4294967295 | b.args[0] >> lshift_
        if m != 65535:
            return None
    else:
        # Reverse the rotate and check the expected 64-bit mask (0xffffffff).
        m = b.args[0] << rshift_ & 18446744073709551615 | b.args[0] >> lshift_
        if m != 4294967295:
            return None
    masked_a = a_00 & m
    expr = masked_a << lshift_ | masked_a >> rshift_
    return expr
djangolearn-0.1
djangolearn-0.1//versioneer.pyfile:/versioneer.py:function:render_pep440_old/render_pep440_old
def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]

    (Docstring typo "Eexceptions" fixed; logic unchanged.)
    """
    tag = pieces['closest-tag']
    if tag:
        rendered = tag
        if pieces['distance'] or pieces['dirty']:
            rendered += '.post%d' % pieces['distance']
            if pieces['dirty']:
                rendered += '.dev0'
    else:
        rendered = '0.post%d' % pieces['distance']
        if pieces['dirty']:
            rendered += '.dev0'
    return rendered
vumi_message_store
vumi_message_store//interfaces.pyclass:IMessageStoreBatchManager/batch_start
def batch_start(tags=(), **metadata):
    """
    Create a new message batch.

    Interface declaration (zope.interface style): no ``self`` parameter
    and no implementation body -- implementers provide the behavior.

    :param tags: Sequence of tags to add to the new batch.
    :param **metadata: Keyword parameters containing batch metadata.
    :returns: The batch identifier for the new batch. If async, a
        Deferred is returned instead.
    """
dtale
dtale//utils.pyfile:/utils.py:function:get_str_arg/get_str_arg
def get_str_arg(r, name, default=None):
    """Retrieve an argument from :attr:`flask:flask.request` as a string.

    :param r: :attr:`flask:flask.request`
    :param name: argument name
    :type name: str
    :param default: value returned when the argument is missing, empty,
        or cannot be converted; defaults to None
    :return: string argument value
    """
    value = r.args.get(name)
    if value is None or value == '':
        return default
    try:
        return str(value)
    except BaseException:
        return default
net
net//api.pyfile:/api.py:function:set_config/set_config
def set_config(THREAD_LIMIT=None, PORT=None, PORT_RANGE=None, GROUP=None,
    IS_HUB=None):
    """
    Set a configuration value.

    These are configuration values that can be set at runtime to modify
    your net configuration.

    NOTE(review): stub -- the body is empty, so calling this currently
    has no effect and returns None.

    :return: None
    """
TwitchChatInterface
TwitchChatInterface//lib/twitchMessageHandler/parse.pyclass:parse/emotes
@staticmethod
def emotes(tags) ->list:
    """Parse the raw IRC 'emotes' tag string into a dict.

    NOTE(review): despite the ``-> list`` annotation, this returns the
    mutated *tags* dict on success, or None via the early return / bare
    except -- confirm what callers expect before changing.
    """
    try:
        if 'emotes' in tags and type(tags['emotes']) == str:
            emoticons = tags['emotes'].split('/')
            emotes = {}
            for emoticon in emoticons:
                part = emoticon.split(':')
                # NOTE(review): str.split never yields None, so this test
                # can't be True; a missing ':' raises IndexError instead,
                # which the bare except below swallows (returning None).
                if part[1] == None:
                    return
                emotes[part[0]] = part[1].split(',')
            # Keep the original string under 'emotes-raw' and replace
            # 'emotes' with the parsed id -> position-ranges mapping.
            tags['emotes-raw'] = tags['emotes']
            tags['emotes'] = emotes
        if 'emotes' in tags and type(tags['emotes']) == bool:
            tags['emotes-raw'] = None
        return tags
    except:
        pass
PyEIS
PyEIS//PyEIS_Lin_KK.pyfile:/PyEIS_Lin_KK.py:function:KK_RC67/KK_RC67
def KK_RC67(w, Rs, R_values, t_values):
    """
    Kramers-Kronig Function: -RC-

    Impedance of a series resistance plus 67 parallel RC elements:

        Z(w) = Rs + sum_{i=0}^{66} R_i / (1 + j*w*t_i)

    Replaces the original hand-unrolled 67-term expression with an
    equivalent sum over indices 0..66 (same terms, same values).

    Kristian B. Knudsen (kknu@berkeley.edu / kristianbknudsen@gmail.com)
    """
    return Rs + sum(R_values[i] / (1 + w * 1.0j * t_values[i]) for i in
        range(67))
mayloop-1.0.0
mayloop-1.0.0//mayloop/imported/twisted/internet_interfaces.pyclass:IConsumer/unregisterProducer
def unregisterProducer():
    """
    Stop consuming data from a producer, without disconnecting.

    Interface declaration (zope.interface style, mirrored from Twisted's
    IConsumer): no ``self`` parameter and no implementation body.
    """
fake-bpy-module-2.78-20200428
fake-bpy-module-2.78-20200428//bpy/ops/font.pyfile:/bpy/ops/font.py:function:textbox_add/textbox_add
def textbox_add():
    """Add a new text box

    Auto-generated stub mirroring Blender's ``bpy.ops.font.textbox_add``
    operator; the real implementation lives inside Blender itself.
    """
    pass
Mocha-0.12.1
Mocha-0.12.1//mocha/contrib/auth/models.pyclass:AuthUser/get_by_username
@classmethod
def get_by_username(cls, username):
    """Return a user matching the given username (the original docstring
    said "by email address", but the filter is on the ``username``
    column).

    Presumably SQLAlchemy-style querying: ``.first()`` returns None when
    there is no match -- confirm against the query() implementation.
    """
    return cls.query().filter(cls.username == username).first()