repo
stringlengths 1
29
| path
stringlengths 24
332
| code
stringlengths 39
579k
|
---|---|---|
lexor-0.1.5rc0 | lexor-0.1.5rc0//lexor/core/converter.pyclass:Converter/remove_node | @staticmethod
def remove_node(node):
"""Removes the node from the current document it is in.
Returns the previous sibling is possible, otherwise it
returns the parent node.
"""
parent = node.parent
index = node.index
del parent[index]
if -1 < index < len(parent):
return parent[index]
return parent
|
YODA-Tools-0.2.0a0 | YODA-Tools-0.2.0a0//versioneer.pyfile:/versioneer.py:function:render_git_describe/render_git_describe | def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces['closest-tag']:
rendered = pieces['closest-tag']
if pieces['distance']:
rendered += '-%d-g%s' % (pieces['distance'], pieces['short'])
else:
rendered = pieces['short']
if pieces['dirty']:
rendered += '-dirty'
return rendered
|
bpy | bpy//ops/paint.pyfile:/ops/paint.py:function:vertex_color_hsv/vertex_color_hsv | def vertex_color_hsv(h: float=0.5, s: float=1.0, v: float=1.0):
"""Adjust vertex color HSV values
:param h: Hue
:type h: float
:param s: Saturation
:type s: float
:param v: Value
:type v: float
"""
pass
|
dropbox | dropbox//team_log.pyclass:EventDetails/file_request_create_details | @classmethod
def file_request_create_details(cls, val):
"""
Create an instance of this class set to the
``file_request_create_details`` tag with value ``val``.
:param FileRequestCreateDetails val:
:rtype: EventDetails
"""
return cls('file_request_create_details', val)
|
feedsearch-crawler-0.2.4 | feedsearch-crawler-0.2.4//feedsearch_crawler/crawler/lib.pyfile:/feedsearch_crawler/crawler/lib.py:function:to_bytes/to_bytes | def to_bytes(text, encoding: str='utf-8', errors: str='strict'):
"""Return the binary representation of `text`. If `text`
is already a bytes object, return it as-is."""
if not text:
return b''
if isinstance(text, bytes):
return text
return text.encode(encoding, errors)
|
spfpm-1.4 | spfpm-1.4//FixedPoint.pyclass:FXnum/_rawbuild | @classmethod
def _rawbuild(cls, fam, sv):
"""Shortcut for creating new FXnum instance, for internal use only."""
num = object.__new__(cls)
fam.validate(sv)
num.family = fam
num.scaledval = sv
return num
|
dijitso-2017.1.0 | dijitso-2017.1.0//dijitso/params.pyfile:/dijitso/params.py:function:merge_params/merge_params | def merge_params(default, params):
"""Merge two-level param dicts."""
p = {}
for category in default:
d = default[category].copy()
p[category] = d
v = params.get(category)
if v is not None:
p[category].update(v)
return p
|
ipyaml | ipyaml//convert.pyfile:/convert.py:function:format_source/format_source | def format_source(text):
"""Given source lines of code as a string, strip leading lines and
return a block of text. This is done so the output is valid YAML.
"""
src = text.splitlines()
code = []
first_non_empty_line = 0
for line in src:
if len(line.strip()) == 0:
code.append('')
first_non_empty_line += 1
else:
break
code += src[first_non_empty_line:]
return '\n'.join(code)
|
astroquery-0.4 | astroquery-0.4//astroquery/irsa_dust/utils.pyfile:/astroquery/irsa_dust/utils.py:function:find_result_node/find_result_node | def find_result_node(desc, xml_tree):
"""
Returns the <result> node with a <desc> child matching the given text.
Eg: if desc = "text to match", this function will find the following
result node:
<result>
<desc>text to match</desc>
</result>
Parameters
-----
xmlTree : `xml.etree.ElementTree`
the xml tree to search for the <result> node
desc : string
the text contained in the desc node
Returns
-----
node : the <result> node containing the child with the given desc
"""
result_nodes = xml_tree.findall('result')
for result_node in result_nodes:
result_desc = result_node.find('desc').text.strip()
if result_desc == desc:
return result_node
return None
|
cif2ice-0.2.2 | cif2ice-0.2.2//cif2ice/read_cif.pyfile:/cif2ice/read_cif.py:function:float_with_error/float_with_error | def float_with_error(x):
"""
some value in cif accompanies error like "1.234(5)
"""
if '?' in x:
return 0
pos = x.find('(')
if pos >= 0:
x = x[:pos]
return float(x)
|
breezy-3.0.2 | breezy-3.0.2//profile_imports.pyfile:/profile_imports.py:function:_repr_regexp/_repr_regexp | def _repr_regexp(pattern, max_len=30):
"""Present regexp pattern for logging, truncating if over max_len."""
if len(pattern) > max_len:
return repr(pattern[:max_len - 3]) + '...'
return repr(pattern)
|
ozelot-0.2.4 | ozelot-0.2.4//ozelot/etl/tasks.pyclass:TaskBase/get_name | @classmethod
def get_name(cls):
"""Return the task name (= class name)
"""
return cls.__name__
|
metano-1.3.0 | metano-1.3.0//src/paramparser.pyclass:LinExprParser/_evalGroups | @staticmethod
def _evalGroups(listOfTuples):
""" decode a list of linear terms given as a (sign, number, name) tuple,
where sign is '-' or '+', number is a string or None, and name is a
string; 'number' may itself carry a sign
Returns: list of (coefficient, name) tuples with float coefficients
"""
tuples = []
for sign, number, name in listOfTuples:
if not number:
number = 1.0
if sign == '-':
tuples.append((-float(number), name))
else:
tuples.append((float(number), name))
return tuples
|
logreader | logreader//console.pyfile:/console.py:function:allow_empty/allow_empty | def allow_empty(i):
"""Validates on all entered values"""
return i
|
lightpath-0.5.2 | lightpath-0.5.2//versioneer.pyfile:/versioneer.py:function:render_git_describe/render_git_describe | def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces['closest-tag']:
rendered = pieces['closest-tag']
if pieces['distance']:
rendered += '-%d-g%s' % (pieces['distance'], pieces['short'])
else:
rendered = pieces['short']
if pieces['dirty']:
rendered += '-dirty'
return rendered
|
dropbox-10.1.2 | dropbox-10.1.2//dropbox/team_log.pyclass:EventDetails/paper_published_link_view_details | @classmethod
def paper_published_link_view_details(cls, val):
"""
Create an instance of this class set to the
``paper_published_link_view_details`` tag with value ``val``.
:param PaperPublishedLinkViewDetails val:
:rtype: EventDetails
"""
return cls('paper_published_link_view_details', val)
|
mailman | mailman//interfaces/mailinglist.pyclass:IMailingList/get_roster | def get_roster(role):
"""Return the appropriate roster for the given role.
:param role: The requested roster's role.
:type role: MemberRole
:return: The requested roster.
:rtype: Roster
"""
|
k_combinat_for_sage-1.0.0 | k_combinat_for_sage-1.0.0//k_combinat_for_sage/partition.pyfile:/k_combinat_for_sage/partition.py:function:is_k_bounded/is_k_bounded | def is_k_bounded(ptn, k):
""" Returns ``True`` if and only if the partition ``ptn`` is bounded by ``k``.
EXAMPLES::
sage: is_k_bounded(Partition([4, 3, 1]), 4)
True
sage: is_k_bounded(Partition([4, 3, 1]), 7)
True
sage: is_k_bounded(Partition([4, 3, 1]), 3)
False
"""
if ptn.is_empty():
least_upper_bound = 0
else:
least_upper_bound = max(ptn)
return least_upper_bound <= k
|
ptpulse-0.1.1 | ptpulse-0.1.1//ptpulse/configuration.pyfile:/ptpulse/configuration.py:function:_get_bit_string/_get_bit_string | def _get_bit_string(value):
"""INTERNAL. Get string representation of an int in binary"""
return '{0:b}'.format(value).zfill(8)
|
datarobot | datarobot//utils/pagination.pyfile:/utils/pagination.py:function:unpaginate/unpaginate | def unpaginate(initial_url, initial_params, client):
""" Iterate over a paginated endpoint and get all results
Assumes the endpoint follows the "standard" pagination interface (data stored under "data",
"next" used to link next page, "offset" and "limit" accepted as query parameters).
Yields
------
data : dict
a series of objects from the endpoint's data, as raw server data
"""
resp_data = client.get(initial_url, params=initial_params).json()
for item in resp_data['data']:
yield item
while resp_data['next'] is not None:
next_url = resp_data['next']
resp_data = client.get(next_url).json()
for item in resp_data['data']:
yield item
|
heist | heist//seqtools.pyfile:/seqtools.py:function:cluster/cluster | def cluster(d):
"""
Utility function
"""
clusters = {}
for key, val in d.items():
clusters.setdefault(val, []).append(key)
return clusters
|
phply-1.2.5 | phply-1.2.5//phply/phpparse.pyfile:/phply/phpparse.py:function:p_trait_statement_list/p_trait_statement_list | def p_trait_statement_list(p):
"""trait_statement_list : trait_statement_list trait_statement
| empty"""
if len(p) == 3:
p[0] = p[1] + [p[2]]
else:
p[0] = []
|
pywps-4.2.3 | pywps-4.2.3//pywps/inout/basic.pyfile:/pywps/inout/basic.py:function:extend_instance/extend_instance | def extend_instance(obj, cls):
"""Apply mixins to a class instance after creation."""
base_cls = obj.__class__
base_cls_name = obj.__class__.__name__
obj.__class__ = type(base_cls_name, (cls, base_cls), {})
|
oci | oci//resource_manager/models/job_operation_details.pyclass:JobOperationDetails/get_subtype | @staticmethod
def get_subtype(object_dictionary):
"""
Given the hash representation of a subtype of this class,
use the info in the hash to return the class of the subtype.
"""
type = object_dictionary['operation']
if type == 'IMPORT_TF_STATE':
return 'ImportTfStateJobOperationDetails'
if type == 'PLAN':
return 'PlanJobOperationDetails'
if type == 'APPLY':
return 'ApplyJobOperationDetails'
if type == 'DESTROY':
return 'DestroyJobOperationDetails'
else:
return 'JobOperationDetails'
|
pyflyby-1.6.1 | pyflyby-1.6.1//bin/pyflyby/_py.pyfile:/bin/pyflyby/_py.py:function:_interpret_arg_mode/_interpret_arg_mode | def _interpret_arg_mode(arg, default='auto'):
"""
>>> _interpret_arg_mode("Str")
'string'
"""
if arg is None:
arg = default
if arg == 'auto' or arg == 'eval' or arg == 'string':
return arg
rarg = str(arg).strip().lower()
if rarg in ['eval', 'evaluate', 'exprs', 'expr', 'expressions',
'expression', 'e']:
return 'eval'
elif rarg in ['strings', 'string', 'str', 'strs', 'literal', 'literals',
's']:
return 'string'
elif rarg in ['auto', 'automatic', 'a']:
return 'auto'
elif rarg == 'error':
return 'error'
else:
raise ValueError(
'Invalid arg_mode=%r; expected one of eval/string/auto' % (arg,))
|
adjutant | adjutant//actions/utils.pyfile:/actions/utils.py:function:validate_steps/validate_steps | def validate_steps(validation_steps):
"""Helper function for validation in actions
Takes a list of validation functions or validation function results.
If function, will call it first, otherwise checks if valid. Will break
and return False on first validation failure, or return True if all valid.
It is best to pass in the functions and let this call them so that it
doesn't keep validating after the first invalid result.
"""
for step in validation_steps:
if callable(step):
if not step():
return False
if not step:
return False
return True
|
pydoro | pydoro//pydoro_core/sound.pyfile:/pydoro_core/sound.py:function:_play_sound_osx/_play_sound_osx | def _play_sound_osx(sound, block=True):
"""
Utilizes AppKit.NSSound. Tested and known to work with MP3 and WAVE on
OS X 10.11 with Python 2.7. Probably works with anything QuickTime supports.
Probably works on OS X 10.5 and newer. Probably works with all versions of
Python.
Inspired by (but not copied from) Aaron's Stack Overflow answer here:
http://stackoverflow.com/a/34568298/901641
I never would have tried using AppKit.NSSound without seeing his code.
"""
from AppKit import NSSound
from Foundation import NSURL
from time import sleep
if '://' not in sound:
if not sound.startswith('/'):
from os import getcwd
sound = getcwd() + '/' + sound
sound = 'file://' + sound
url = NSURL.URLWithString_(sound)
nssound = NSSound.alloc().initWithContentsOfURL_byReference_(url, True)
if not nssound:
raise IOError('Unable to load sound named: ' + sound)
nssound.play()
if block:
sleep(nssound.duration())
|
zopkio-0.2.5 | zopkio-0.2.5//zopkio/runtime.pyfile:/zopkio/runtime.py:function:set_active_config/set_active_config | def set_active_config(config):
"""
Private function to set the config mapping should only be called from the main file
:param configs:
:return:
"""
global _active_config
_active_config = config
|
aioauth-client-0.21.0 | aioauth-client-0.21.0//aioauth_client.pyclass:LichessClient/user_parse | @staticmethod
def user_parse(data):
"""Parse information from provider."""
yield 'id', data.get('id')
yield 'username', data.get('username')
yield 'gender', data.get('title')
profile = data.get('profile')
if profile is not None:
yield 'first_name', profile.get('firstName')
yield 'last_name', profile.get('lastName')
yield 'country', profile.get('country')
|
dl-helper-1.0.0 | dl-helper-1.0.0//dl_helper/krs/krs_utils.pyfile:/dl_helper/krs/krs_utils.py:function:model_layers_list/model_layers_list | def model_layers_list(model):
"""
List all layers in a Keras model
"""
print('Model layers:')
for i, layer in enumerate(model.layers):
print(' {:3}: {}'.format(i + 1, layer.name))
for n, w in zip(layer.trainable_weights, layer.get_weights()):
print(' {}: w = {}'.format(n, w.shape))
|
happi-1.3.0 | happi-1.3.0//versioneer.pyfile:/versioneer.py:function:render_pep440_old/render_pep440_old | def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
Eexceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces['closest-tag']:
rendered = pieces['closest-tag']
if pieces['distance'] or pieces['dirty']:
rendered += '.post%d' % pieces['distance']
if pieces['dirty']:
rendered += '.dev0'
else:
rendered = '0.post%d' % pieces['distance']
if pieces['dirty']:
rendered += '.dev0'
return rendered
|
task_forge | task_forge//lists/mongo.pyclass:List/__eval_str_literal | @staticmethod
def __eval_str_literal(expression):
"""Evaluate a string literal query."""
return {'$or': [{'title': {'$regex': expression.value}}, {'body': {
'$regex': expression.value}}, {'notes': {'$regex': expression.value}}]}
|
topology_docker | topology_docker//networks.pyfile:/networks.py:function:create_platform_network/create_platform_network | def create_platform_network(enode, category, config):
"""
Create a Topology managed network with given configuration (netns, prefix).
:param enode: The platform (a.k.a "engine") node to configure.
:param str category: Name of the panel category.
:param dict config: Configuration for the network. A dictionary like:
::
{
'netns': 'mynetns',
'managed_by': 'platform',
'prefix': ''
}
This dictionary is taken from the ``node._get_network_config()`` result.
"""
netns = config.get('netns', None)
if netns is None:
return
enode._docker_exec('ip netns add {}'.format(netns))
enode._docker_exec('ip netns exec {} ip link set dev lo up'.format(netns))
|
Products.CMFCore-2.4.6 | Products.CMFCore-2.4.6//Products/CMFCore/interfaces/_tools.pyclass:ISyndicationTool/getUpdatePeriod | def getUpdatePeriod(obj=None):
""" Return the update period for the syndicated feed
o falls back to the site-wide value if no object is passed in
o raises 'Syndication is not Allowed' if the site-wide policy
does not allow syndication
"""
|
scalegrid_cli-1.0.7 | scalegrid_cli-1.0.7//cli/sg.pyfile:/cli/sg.py:function:mongo_h/mongo_h | def mongo_h():
'\n \r\n MongoDB Help Menu\n\n Usage:\n sg-cli mongo <command> [<args>...]\n\n Options:\n -v, --verbose Increase verbosity\n -h, --help Show this menu\n -V --version Show version\n\n Commands:\n add-firewall-rules\n build-index\n compact\n create-alert-rule\n create-cloud-profile\n create-cluster\n create-follower-cluster\n delete-backup\n delete-cluster\n get-active-alerts\n get-cluster-credentials\n list-alert-rules\n list-backups\n list-cloud-profiles\n list-clusters\n patch-os\n pause-cluster\n peek-at-backup\n resolve-alerts\n restore-backup\n resume-cluster\n scale-up\n set-backup-schedule\n start-backup\n\n Use sg-cli <command> -h to open the help menu for the command.\n '
|
axiom-0.8.0 | axiom-0.8.0//axiom/iaxiom.pyclass:IAtomicFile/abort | def abort():
"""Give up on this file. Discard its contents.
"""
|
tryalgo-1.3.0 | tryalgo-1.3.0//tryalgo/dfs.pyfile:/tryalgo/dfs.py:function:dfs_grid/dfs_grid | def dfs_grid(grid, i, j, mark='X', free='.'):
"""DFS on a grid, mark connected component, iterative version
:param grid: matrix, 4-neighborhood
:param i,j: cell in this matrix, start of DFS exploration
:param free: symbol for walkable cells
:param mark: symbol to overwrite visited vertices
:complexity: linear
"""
height = len(grid)
width = len(grid[0])
to_visit = [(i, j)]
grid[i][j] = mark
while to_visit:
i1, j1 = to_visit.pop()
for i2, j2 in [(i1 + 1, j1), (i1, j1 + 1), (i1 - 1, j1), (i1, j1 - 1)]:
if 0 <= i2 < height and 0 <= j2 < width and grid[i2][j2] == free:
grid[i2][j2] = mark
to_visit.append((i2, j2))
|
Mikado-1.2.4 | Mikado-1.2.4//build/lib.macosx-10.7-x86_64-3.6/Mikado/loci/excluded.pyclass:Excluded/is_intersecting | @classmethod
def is_intersecting(cls):
"""Present to fulfill the contract with Abstractlocus, but
it only raises a NotImplementedError"""
raise NotImplementedError()
|
rig-2.4.1 | rig-2.4.1//rig/place_and_route/place/utils.pyfile:/rig/place_and_route/place/utils.py:function:resources_after_reservation/resources_after_reservation | def resources_after_reservation(res, constraint):
"""Return the resources available after a specified
ReserveResourceConstraint has been applied.
Note: the caller is responsible for testing that the constraint is
applicable to the core whose resources are being constrained.
Note: this function does not pay attention to the specific position of the
reserved regieon, only its magnitude.
"""
res = res.copy()
res[constraint.resource
] -= constraint.reservation.stop - constraint.reservation.start
return res
|
Products.CMFEditions-3.3.4 | Products.CMFEditions-3.3.4//Products/CMFEditions/interfaces/IModifier.pyclass:IConditionalModifier/getModifier | def getModifier():
"""Returns the modifier.
"""
|
wayround_org_http-0.7 | wayround_org_http-0.7//wayround_org/http/message.pyfile:/wayround_org/http/message.py:function:client_format_status_line/client_format_status_line | def client_format_status_line(method, requesttarget, httpversion='HTTP/1.1',
encoding='utf-8'):
"""
No quoting done by this function
"""
if type(method) == bytes:
method = str(method, encoding)
if type(requesttarget) == bytes:
requesttarget = str(requesttarget, encoding)
if type(httpversion) == bytes:
httpversion = str(httpversion, encoding)
return '{} {} {}'.format(method, requesttarget, httpversion)
|
acitoolkit-0.4 | acitoolkit-0.4//acitoolkit/aciphysobject.pyclass:ExternalSwitch/_get_dn_children | @staticmethod
def _get_dn_children(session, dn):
"""
Will get the children of the specified dn
"""
mo_query_url = '/api/mo/' + dn + '.json?query-target=children'
ret = session.get(mo_query_url)
node_data = ret.json()['imdata']
return node_data
|
kidx_nlu | kidx_nlu//featurizers/spacy_featurizer.pyfile:/featurizers/spacy_featurizer.py:function:ndim/ndim | def ndim(spacy_nlp: 'Language') ->int:
"""Number of features used to represent a document / sentence."""
return spacy_nlp.vocab.vectors_length
|
buienradar | buienradar//buienradar_xml.pyfile:/buienradar_xml.py:function:__get_int/__get_int | def __get_int(section, name):
"""Get the forecasted int from xml section."""
try:
return int(section[name])
except (ValueError, TypeError, KeyError):
return 0
|
hmf-3.0.8 | hmf-3.0.8//hmf/_framework.pyclass:Framework/_get_all_parameters | @classmethod
def _get_all_parameters(cls):
"""Yield all parameters as tuples of (name,obj)"""
for name in cls.get_all_parameter_names():
yield name, getattr(cls, name)
|
hanja-0.13.2 | hanja-0.13.2//hanja/hangul.pyfile:/hanja/hangul.py:function:separate/separate | def separate(ch):
"""한글 자모 분리. 주어진 한글 한 글자의 초성, 중성 초성을 반환함."""
uindex = ord(ch) - 44032
jongseong = uindex % 28
joongseong = (uindex - jongseong) // 28 % 21
choseong = (uindex - jongseong) // 28 // 21
return choseong, joongseong, jongseong
|
beren | beren//orthanc.pyclass:Orthanc/clean | @staticmethod
def clean(d):
"""Clean the parameter dict for endpoint semantics"""
n = {}
for k, v in d.items():
if v == None or v == False:
continue
elif v == True:
n[k] = ''
else:
n[k] = v
return n
|
pytypes-1.0b5 | pytypes-1.0b5//pytypes/util.pyfile:/pytypes/util.py:function:fromargskw/fromargskw | def fromargskw(argskw, argspecs, slf_or_clsm=False):
"""Turns a linearized list of args into (args, keywords) form
according to given argspecs (like inspect module provides).
"""
res_args = argskw
try:
kwds = argspecs.keywords
except AttributeError:
kwds = argspecs.varkw
if not kwds is None:
res_kw = argskw[-1]
res_args = argskw[:-1]
else:
res_kw = None
if not argspecs.varargs is None:
vargs_pos = len(argspecs.args) - 1 if slf_or_clsm else len(argspecs
.args)
if vargs_pos > 0:
res_lst = list(argskw[:vargs_pos])
res_lst.extend(argskw[vargs_pos])
res_args = tuple(res_lst)
else:
res_args = argskw[0]
try:
if len(argspecs.kwonlyargs) > 0:
res_kw = {} if res_kw is None else dict(res_kw)
ipos = -len(argspecs.kwonlyargs) - (0 if kwds is None else 1)
for name in argspecs.kwonlyargs:
res_kw[name] = argskw[ipos]
ipos += 1
except AttributeError:
pass
if res_kw is None:
res_kw = {}
return res_args, res_kw
|
agatereports | agatereports//engine/bands/bands.pyfile:/engine/bands/bands.py:function:process_no_pages/process_no_pages | def process_no_pages(report_info):
"""
Do nothing here. Saving to file is skipped in generate_pdf() method.
"""
pass
|
parallel_bilby-0.1.3 | parallel_bilby-0.1.3//src/analysis.pyfile:/src/analysis.py:function:main/main | def main():
""" Do nothing function to play nicely with MPI """
pass
|
CleanAdminDjango-1.5.3.1 | CleanAdminDjango-1.5.3.1//django/contrib/gis/gdal/prototypes/errcheck.pyfile:/django/contrib/gis/gdal/prototypes/errcheck.py:function:check_bool/check_bool | def check_bool(result, func, cargs):
"""Returns the boolean evaluation of the value."""
if bool(result):
return True
else:
return False
|
openfisca_france | openfisca_france//model/prelevements_obligatoires/impot_revenu/reductions_impot.pyclass:domsoc/formula_2017_01_01 | def formula_2017_01_01(foyer_fiscal, period, parameters):
"""
Investissements outre-mer dans le logement social (déclaration n°2042 IOM)
2017
"""
fhkh = foyer_fiscal('fhkh', period)
fhki = foyer_fiscal('fhki', period)
fhqj = foyer_fiscal('fhqj', period)
fhqs = foyer_fiscal('fhqs', period)
fhqw = foyer_fiscal('fhqw', period)
fhqx = foyer_fiscal('fhqx', period)
fhra = foyer_fiscal('fhra', period)
fhrb = foyer_fiscal('fhrb', period)
fhrc = foyer_fiscal('fhrc', period)
fhrd = foyer_fiscal('fhrd', period)
fhxa = foyer_fiscal('fhxa', period)
fhxb = foyer_fiscal('fhxb', period)
fhxc = foyer_fiscal('fhxc', period)
fhxe = foyer_fiscal('fhxe', period)
fhxf = foyer_fiscal('fhxf', period)
fhxg = foyer_fiscal('fhxg', period)
fhxh = foyer_fiscal('fhxh', period)
fhxi = foyer_fiscal('fhxi', period)
fhxk = foyer_fiscal('fhxk', period)
fhxl = foyer_fiscal('fhxl', period)
fhxm = foyer_fiscal('fhxm', period)
fhxn = foyer_fiscal('fhxn', period)
fhxo = foyer_fiscal('fhxo', period)
fhxp = foyer_fiscal('fhxp', period)
fhxq = foyer_fiscal('fhxq', period)
fhxr = foyer_fiscal('fhxr', period)
fhxs = foyer_fiscal('fhxs', period)
fhxt = foyer_fiscal('fhxt', period)
fhxu = foyer_fiscal('fhxu', period)
report_reduc_2012 = fhqj + fhqs + fhqw + fhqx
report_reduc_2013 = fhra + fhrb + fhrc + fhrd
report_reduc_2014 = fhxa + fhxb + fhxc + fhxe
report_reduc_2015 = fhxf + fhxg + fhxh + fhxi + fhxk
report_reduc_2016 = fhxl + fhxm + fhxn + fhxo + fhxp
reduc_invest_2017 = fhxq + fhxr + fhxs + fhxt + fhxu
return (report_reduc_2012 + report_reduc_2013 + report_reduc_2014 +
report_reduc_2015 + report_reduc_2016 + reduc_invest_2017)
|
qcengine-0.14.0 | qcengine-0.14.0//versioneer.pyfile:/versioneer.py:function:render_pep440_old/render_pep440_old | def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
Eexceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces['closest-tag']:
rendered = pieces['closest-tag']
if pieces['distance'] or pieces['dirty']:
rendered += '.post%d' % pieces['distance']
if pieces['dirty']:
rendered += '.dev0'
else:
rendered = '0.post%d' % pieces['distance']
if pieces['dirty']:
rendered += '.dev0'
return rendered
|
mxnet | mxnet//ndarray/gen_op.pyfile:/ndarray/gen_op.py:function:arccos/arccos | def arccos(data=None, out=None, name=None, **kwargs):
"""Returns element-wise inverse cosine of the input array.
The input should be in range `[-1, 1]`.
The output is in the closed interval :math:`[0, \\pi]`
.. math::
arccos([-1, -.707, 0, .707, 1]) = [\\pi, 3\\pi/4, \\pi/2, \\pi/4, 0]
The storage type of ``arccos`` output is always dense
Defined in src/operator/tensor/elemwise_unary_op_trig.cc:L206
Parameters
----------
data : NDArray
The input array.
out : NDArray, optional
The output NDArray to hold the result.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return 0,
|
scipion-pyworkflow-3.0.0a7 | scipion-pyworkflow-3.0.0a7//pyworkflow/plugin.pyclass:Plugin/getVars | @classmethod
def getVars(cls):
""" Return the value of a given variable. """
return cls._vars
|
voyant_indicators | voyant_indicators//overlapping_studies/sma.pyfile:/overlapping_studies/sma.py:function:sma/sma | def sma(df, price, col, n):
"""
Simple Moving Average (SMA) is an arithmetic moving average calculated by
adding recent closing prices then dividing that by the number of time periods
in the calculation average.
SMA = (P_1 + ... + P_n) / n
Parameters:
df (pd.DataFrame): DataFrame which contain the asset price.
price (string): the column name of the price of the asset.
col (string): the column name for the n-day moving average results.
n (int): the total number of periods.
Returns:
df (pd.DataFrame): Dataframe with n-day moving average of the asset calculated.
Example Strategy:
Bullish
AND [Daily SMA(20,Daily Volume) > 40000]
AND [Daily SMA(60,Daily Close) > 20]
AND [Daily SMA(150,Daily Close) > 5 days ago Daily SMA(150,Daily Close)]
AND [Daily EMA(5,Daily Close) > Daily EMA(35,Daily Close)]
AND [Yesterday's Daily EMA(5,Daily Close) < Yesterday's Daily EMA(35,Daily Close)]
AND [Daily Volume > Daily SMA(200,Daily Volume)]
Bearish:
AND [Daily SMA(20,Daily Volume) > 40000]
AND [Daily SMA(60,Daily Close) > 20]
AND [Daily SMA(150,Daily Close) < 5 days ago Daily SMA(150,Daily Close)]
AND [Daily EMA(5,Daily Close) < Daily EMA(35,Daily Close)]
AND [Yesterday's Daily EMA(5,Daily Close) > Yesterday's Daily EMA(35,Daily Close)]
AND [Daily Volume > Daily SMA(200,Daily Volume)]
Citation: https://school.stockcharts.com/doku.php?id=technical_indicators:moving_averages
"""
df = df.copy()
if df.shape[0] < n:
df[col] = None
else:
df[col] = df[price].rolling(window=n).mean()
return df
|
pyDSlib | pyDSlib//ML/_devices.pyfile:/ML/_devices.py:function:list_local_devices/list_local_devices | def list_local_devices():
"""
List local devices (GPUs and CPUs) using tensorflow's tensorflow.python.client.device_lib
Returns:
--------
local_devices: list of 'DeviceAttributes' object. The DeviceAttributes.device_type value for each DeviceAttributes element in the list can be used to determine whether the device is a GPU or CPU.
"""
from tensorflow.python.client import device_lib
local_devices = device_lib.list_local_devices()
return local_devices
|
typeguess | typeguess//guess_core.pyfile:/guess_core.py:function:_try/_try | def _try(f, datum):
"""try to apply f to datum. Return None if ValueError occurs"""
try:
return f(datum)
except ValueError:
return None
|
argser | argser//docstring.pyfile:/docstring.py:function:_leading_ws/_leading_ws | def _leading_ws(t: str):
"""count leading whitespaces"""
return len(t) - len(t.lstrip())
|
wrangle-0.6.7 | wrangle-0.6.7//wrangle/utils/groupby_func.pyfile:/wrangle/utils/groupby_func.py:function:groupby_func/groupby_func | def groupby_func(data, func):
"""Streamlines the process of creating various
groupby elements in Pandas. Takes in a dataframe
and one of the supported functions as a string:
data : pandas groupby
A Pandas groupby object
func : str
The function to be used for grouping by
'median'
'mean'
'first'
'last'
'std'
'max'
'min'
'sum'
'random'
'freq'
'string'
'entropy'
...or you can simply input any custom function.
"""
import numpy as np
import pandas as pd
import scipy as sc
if func == 'median':
out = data.median()
elif func == 'mean':
out = data.mean()
elif func == 'first':
out = data.first()
elif func == 'last':
out = data.last()
elif func == 'std':
out = data.std()
elif func == 'max':
out = data.max()
elif func == 'min':
out = data.min()
elif func == 'sum':
out = data.sum()
elif func == 'random':
out = data.agg(np.random.choice)
elif func == 'freq':
out = data.agg(lambda x: x.value_counts().index[0])
elif func == 'string':
out = data.apply(lambda x: '%s' % ' '.join(x))
out = pd.DataFrame(out).reset_index()
elif func == 'entropy':
out = data.apply(lambda x: sc.stats.entropy(x)[0])
elif callable(func):
out = data.apply(func)
if isinstance(out, type(pd.Series())):
out = pd.DataFrame(out)
out.columns = [1]
out.reset_index(inplace=True)
return out
|
histogrammar | histogrammar//hgawk_grammar.pyfile:/hgawk_grammar.py:function:p_with_stmt_star_1/p_with_stmt_star_1 | def p_with_stmt_star_1(p):
"""with_stmt_star : COMMA with_item"""
p[0] = p[2]
|
igraph | igraph//drawing/utils.pyclass:Point/_make | @classmethod
def _make(cls, iterable, new=tuple.__new__, len=len):
"""Creates a new point from a sequence or iterable"""
result = new(cls, iterable)
if len(result) != 2:
raise TypeError('Expected 2 arguments, got %d' % len(result))
return result
|
flask_jsondash-6.2.3 | flask_jsondash-6.2.3//flask_jsondash/charts_builder.pyfile:/flask_jsondash/charts_builder.py:function:is_consecutive_rows/is_consecutive_rows | def is_consecutive_rows(lst):
"""Check if a list of integers is consecutive.
Args:
lst (list): The list of integers.
Returns:
True/False: If the list contains consecutive integers.
Originally taken from and modified:
http://stackoverflow.com/
questions/40091617/test-for-consecutive-numbers-in-list
"""
assert 0 not in lst, '0th index is invalid!'
lst = list(set(lst))
if not lst:
return True
setl = set(lst)
return len(lst) == len(setl) and setl == set(range(min(lst), max(lst) + 1))
|
eclcli-1.3.5 | eclcli-1.3.5//eclcli/sss/client.pyfile:/eclcli/sss/client.py:function:build_option_parser/build_option_parser | def build_option_parser(parser):
"""Hook to add global options"""
return parser
|
annif-0.46.0 | annif-0.46.0//annif/eval.pyfile:/annif/eval.py:function:false_positives/false_positives | def false_positives(y_true, y_pred):
"""calculate the number of false positives using bitwise operations,
emulating the way sklearn evaluation metric functions work"""
return (~y_true & y_pred).sum()
|
cis_interface | cis_interface//schema.pyfile:/schema.py:function:standardize/standardize | def standardize(instance, keys, is_singular=False, suffixes=None, altkeys=None
):
"""Standardize a component such that each key contains a list of dictionaries.
Args:
instance (dict): Component to standardize.
keys (list): Keys to standardize in the instance.
is_singular (bool, optional): If False, the keys are assumed to be plural
and singular alternatives are also checked. If True, the keys are
assumed to be singular and plural alternatives are also checked.
Defaults to False.
suffixes (list, optional): Suffixes to add to the keys to get a set of
alternate keys that should also be checked. Defaults to None as is
ignored.
altkeys (list, optional): List of lists of alternate keys. Defaults to None.
"""
for k in keys:
if k not in instance:
instance[k] = []
if not isinstance(instance[k], list):
instance[k] = [instance[k]]
if altkeys is None:
altkeys = []
if suffixes is not None:
for s in suffixes:
altkeys.append([('%s%s' % (k, s)) for k in keys])
if is_singular:
altkeys.append([('%ss%s' % (k, s)) for k in keys])
altkeys.append([('%s%ss' % (k, s)) for k in keys])
else:
altkeys.append([('%s%s' % (k[:-1], s)) for k in keys])
altkeys.append([('%s%ss' % (k[:-1], s)) for k in keys])
if is_singular:
altkeys.append([('%ss' % k) for k in keys])
else:
altkeys.append([k[:-1] for k in keys])
for ialtkeys in altkeys:
for k, kalt in zip(keys, ialtkeys):
if kalt in instance:
if isinstance(instance[kalt], list):
instance[k] += instance.pop(kalt)
else:
instance[k].append(instance.pop(kalt))
for k in keys:
for i in range(len(instance[k])):
if isinstance(instance[k][i], str):
instance[k][i] = {'name': instance[k][i]}
|
Kotti-2.0.1 | Kotti-2.0.1//kotti/views/navigation.pyfile:/kotti/views/navigation.py:function:includeme_local_navigation/includeme_local_navigation | def includeme_local_navigation(config):
""" Pyramid includeme hook.
:param config: app config
:type config: :class:`pyramid.config.Configurator`
"""
from kotti.views.slots import assign_slot
config.scan(__name__)
assign_slot('local-navigation', 'right')
|
gtrends-0.2.2 | gtrends-0.2.2//gtrends.pyfile:/gtrends.py:function:_scaleRep/_scaleRep | def _scaleRep(reportData):
"""
Scales reports of different sets of terms.
Using the percent change with the 1 month overlap should take care of the
variation in time of a single report. However, if, at the same moment in
time, a secondary report contains a term which is larger than the constant
term and so causes the constant to have different values, then the scale is
off. To fix this, we select a value for the constant term at the same time
across the new and old reports. factor = old / new, and multiply factor
across the new report to have the same scale as the old one.
"""
baseMonth = reportData[0][0]
for i in range(1, len(reportData)):
testMonth = reportData[i][0]
factor = 0.0
for j in range(len(baseMonth)):
old = baseMonth[j][len(baseMonth[j]) - 1]
new = testMonth[j][len(testMonth[j]) - 1]
if abs(new - old) > 3:
old = 1.0 if old == 0.0 else old
new = 1.0 if new == 0.0 else new
factor = old / float(new)
break
if abs(factor) > 0.0003:
for j in range(len(reportData[i])):
for k in range(len(reportData[i][j])):
for l in range(1, len(reportData[i][j][k])):
reportData[i][j][k][l] = factor * reportData[i][j][k][l
]
return reportData
|
dropbox | dropbox//team_log.pyclass:EventType/shared_content_change_downloads_policy | @classmethod
def shared_content_change_downloads_policy(cls, val):
"""
Create an instance of this class set to the
``shared_content_change_downloads_policy`` tag with value ``val``.
:param SharedContentChangeDownloadsPolicyType val:
:rtype: EventType
"""
return cls('shared_content_change_downloads_policy', val)
|
pywinauto-0.6.8 | pywinauto-0.6.8//pywinauto/controlproperties.pyfile:/pywinauto/controlproperties.py:function:SetReferenceControls/SetReferenceControls | def SetReferenceControls(controls, refControls):
"""Set the reference controls for the controls passed in
This does some minor checking as following:
* test that there are the same number of reference controls as
controls - fails with an exception if there are not
* test if all the ID's are the same or not
"""
if len(controls) != len(refControls):
raise RuntimeError(
'Numbers of controls on ref. dialog does not match Loc. dialog')
for i, ctrl in enumerate(controls):
ctrl.ref = refControls[i]
toRet = 1
allIDsSameFlag = 2
allClassesSameFlag = 4
if [ctrl.control_id() for ctrl in controls] == [ctrl.control_id() for
ctrl in refControls]:
toRet += allIDsSameFlag
if [ctrl.class_name() for ctrl in controls] == [ctrl.class_name() for
ctrl in refControls]:
toRet += allClassesSameFlag
return toRet
|
airflow | airflow//sentry.pyclass:DummySentry/add_tagging | @classmethod
def add_tagging(cls, task_instance):
"""
Blank function for tagging.
"""
|
ragavi-0.3.4 | ragavi-0.3.4//ragavi/ragavi.pyfile:/ragavi/ragavi.py:function:field_selector_callback/field_selector_callback | def field_selector_callback():
"""Return JS callback for field selection checkboxes
Returns
-------
code : :obj:`str`
"""
code = """
/*bsize: total number of items in a batch
bsel: the batch selector group buttons
csel: corr selector group buttons
nants: Number of antennas
ncorrs: number of available correlations
nfields: number of available fields
nbatches: total number of available batches
ax: List containing glyphs for all antennas, fields and correlations
count: keeping a cumulative sum of the traverse number
*/
let count = 0;
let new_bsize;
for (let sp=0; sp<nspws; sp++){
for (let f=0; f<nfields; f++){
for (let c=0; c<ncorrs; c++){
//re-initialise new batch size
new_bsize = bsize;
for(let n=0; n<nbatches; n++){
// Reduce new batch size to the size of the last batch
if (n == nbatches-1 && nants!=bsize){
new_bsize = nants % bsize;
}
for(let b=0; b<new_bsize; b++){
if (cb_obj.active.includes(f) && csel.active.includes(c) &&
ssel.active.includes(sp)){
ax[count].visible = true;
}
else{
ax[count].visible = false;
}
count = count + 1;
}
}
}
}
}
"""
return code
|
dropbox | dropbox//team_log.pyclass:EventType/file_delete | @classmethod
def file_delete(cls, val):
"""
Create an instance of this class set to the ``file_delete`` tag with
value ``val``.
:param FileDeleteType val:
:rtype: EventType
"""
return cls('file_delete', val)
|
pytest_gitcov | pytest_gitcov//plugin.pyfile:/plugin.py:function:get_coverage_from_cov_plugin/get_coverage_from_cov_plugin | def get_coverage_from_cov_plugin(config) ->'CoverageData':
"""Get the coverage from pytest_cov plugin
Args:
config: pytest configuration object
Raises:
AssertionError: raised when coverage data object is empty
Returns:
CoverageData: test coverage from this test run
"""
cov_plugin = config.pluginmanager.getplugin('_cov')
cov_data = cov_plugin.cov_controller.cov.get_data()
assert cov_data, 'CoverageData is loaded'
return cov_data
|
phylonetwork-2.1 | phylonetwork-2.1//versioneer.pyfile:/versioneer.py:function:render_git_describe/render_git_describe | def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces['closest-tag']:
rendered = pieces['closest-tag']
if pieces['distance']:
rendered += '-%d-g%s' % (pieces['distance'], pieces['short'])
else:
rendered = pieces['short']
if pieces['dirty']:
rendered += '-dirty'
return rendered
|
imreg_dft-2.0.0 | imreg_dft-2.0.0//src/imreg_dft/utils.pyfile:/src/imreg_dft/utils.py:function:_get_emslices/_get_emslices | def _get_emslices(shape1, shape2):
"""
Common code used by :func:`embed_to` and :func:`undo_embed`
"""
slices_from = []
slices_to = []
for dim1, dim2 in zip(shape1, shape2):
diff = dim2 - dim1
slice_from = slice(None)
slice_to = slice(None)
if diff > 0:
rem = diff - diff // 2
slice_from = slice(diff // 2, dim2 - rem)
if diff < 0:
diff *= -1
rem = diff - diff // 2
slice_to = slice(diff // 2, dim1 - rem)
slices_from.append(slice_from)
slices_to.append(slice_to)
return slices_from, slices_to
|
motllo-0.2.2 | motllo-0.2.2//motllo/tree_parser.pyfile:/motllo/tree_parser.py:function:find_file_depth/find_file_depth | def find_file_depth(line: str):
"""Find how deep in a hierarchy a file is by checking the tree"""
for idx, item in enumerate(line):
if item not in ['│', '├', '─', '└'] and not item.isspace():
return idx // 4 - 1
return -1
|
nengo-3.0.0 | nengo-3.0.0//nengo/utils/functions.pyfile:/nengo/utils/functions.py:function:function_name/function_name | def function_name(func):
"""Returns the name of a function.
Unlike accessing ``func.__name__``, this function is robust to the
different types of objects that can be considered a function in Nengo.
Parameters
----------
func : callable or array_like
Object used as function argument.
Returns
-------
str
Name of function object.
"""
return getattr(func, '__name__', func.__class__.__name__)
|
zenmake | zenmake//waf/waflib/Tools/vala.pyfile:/waf/waflib/Tools/vala.py:function:options/options | def options(opt):
"""
Load the :py:mod:`waflib.Tools.gnu_dirs` tool and add the ``--vala-target-glib`` command-line option
"""
opt.load('gnu_dirs')
valaopts = opt.add_option_group('Vala Compiler Options')
valaopts.add_option('--vala-target-glib', default=None, dest=
'vala_target_glib', metavar='MAJOR.MINOR', help=
'Target version of glib for Vala GObject code generation')
|
codebraid | codebraid//converters/pandoc.pyclass:PandocConverter/_freeze_raw_node_io_map | @staticmethod
def _freeze_raw_node_io_map(node, source_name, line_number,
type_translation_dict={'RawBlock': 'CodeBlock', 'RawInline': 'Code'}):
"""
Same as `_freeze_raw_node()`, but also store trace info.
"""
node['t'] = type_translation_dict[node['t']]
raw_format, raw_content = node['c']
node['c'] = [['', ['codebraid--temp'], [['format', raw_format], [
'trace', '{0}:{1}'.format(source_name, line_number)]]], raw_content]
|
dns-lexicon-3.3.22 | dns-lexicon-3.3.22//lexicon/providers/gehirn.pyfile:/lexicon/providers/gehirn.py:function:provider_parser/provider_parser | def provider_parser(subparser):
"""Construct subparser for Gehirn"""
subparser.add_argument('--auth-token', help=
'specify access token for authentication')
subparser.add_argument('--auth-secret', help=
'specify access secret for authentication')
|
PYPOWER-5.1.4 | PYPOWER-5.1.4//pypower/ppver.pyfile:/pypower/ppver.py:function:ppver/ppver | def ppver(*args):
""" Returns PYPOWER version info for current installation.
@author: Ray Zimmerman (PSERC Cornell)
"""
ver = {'Name': 'PYPOWER', 'Version': '5.1.4', 'Release': '', 'Date':
'27-June-2018'}
return ver
|
alot | alot//helper.pyfile:/helper.py:function:shell_quote/shell_quote | def shell_quote(text):
"""Escape the given text for passing it to the shell for interpretation.
The resulting string will be parsed into one "word" (in the sense used in
the shell documentation, see sh(1)) by the shell.
:param text: the text to quote
:type text: str
:returns: the quoted text
:rtype: str
"""
return "'%s'" % text.replace("'", '\'"\'"\'')
|
selinon-1.1.0 | selinon-1.1.0//selinon/selective.pyfile:/selinon/selective.py:function:_normalize_path/_normalize_path | def _normalize_path(paths):
"""Normalize multiple graph traversals by edges into one that traverses all edges.
In general we can get to a task by multiple paths. As we would like to ensure that all paths are traversed, we
compound multiple traversals into a single one that traverses all necessary edges.
:param paths: a list of paths that should be compound into a single traversal
:return: a dict representing compound traversal
"""
result = {}
for entry in paths:
for key, value in entry.items():
if key not in result:
result[key] = []
result[key] = list(set(result[key]) | set(value))
return result
|
tensorly-0.4.5 | tensorly-0.4.5//tensorly/backend/core.pyclass:Backend/is_tensor | @staticmethod
def is_tensor(obj):
"""Returns if `obj` is a tensor for the current backend"""
raise NotImplementedError
|
oor-3 | oor-3//oor/object.pyclass:Object/get_cmnd | def get_cmnd(zelf, *args, **kwargs):
""" determine the command in the zelf.txt attribute, if present. """
from oor import kernel
if 'txt' in zelf and zelf.txt:
val = zelf.txt.split()[0]
if not val:
return
if 'cc' in zelf:
if val[0] != zelf.cc:
return
val = val[1:]
try:
cmnd = kernel.cmnds[val]
except KeyError:
try:
cmnd = kernel.ctrl[val]
except KeyError:
cmnd = None
return cmnd
|
dynetx-0.2.2 | dynetx-0.2.2//dynetx/classes/function.pyfile:/dynetx/classes/function.py:function:is_directed/is_directed | def is_directed(G):
""" Return True if graph is directed."""
return G.is_directed()
|
gwgen-1.0.3 | gwgen-1.0.3//gwgen/parameterization.pyclass:CloudParameterizerBase/filter_stations | @classmethod
def filter_stations(cls, stations):
"""Get the GHCN stations that are also in the EECRA dataset
Parameters
----------
stations: np.ndarray
A string array with stations to use
Returns
-------
np.ndarray
The ids in `stations` that can be mapped to the eecra dataset"""
return cls.eecra_ghcn_map().loc[stations].dropna().index.values
|
mxnet-1.6.0.data | mxnet-1.6.0.data//purelib/mxnet/ndarray/gen_op.pyfile:/purelib/mxnet/ndarray/gen_op.py:function:softmax_cross_entropy/softmax_cross_entropy | def softmax_cross_entropy(data=None, label=None, out=None, name=None, **kwargs
):
"""Calculate cross entropy of softmax output and one-hot label.
- This operator computes the cross entropy in two steps:
- Applies softmax function on the input array.
- Computes and returns the cross entropy loss between the softmax output and the labels.
- The softmax function and cross entropy loss is given by:
- Softmax Function:
.. math:: \\text{softmax}(x)_i = \\frac{exp(x_i)}{\\sum_j exp(x_j)}
- Cross Entropy Function:
.. math:: \\text{CE(label, output)} = - \\sum_i \\text{label}_i \\log(\\text{output}_i)
Example::
x = [[1, 2, 3],
[11, 7, 5]]
label = [2, 0]
softmax(x) = [[0.09003057, 0.24472848, 0.66524094],
[0.97962922, 0.01794253, 0.00242826]]
softmax_cross_entropy(data, label) = - log(0.66524084) - log(0.97962922) = 0.4281871
Defined in src/operator/loss_binary_op.cc:L59
Parameters
----------
data : NDArray
Input data
label : NDArray
Input label
out : NDArray, optional
The output NDArray to hold the result.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return 0,
|
fake-bpy-module-2.78-20200428 | fake-bpy-module-2.78-20200428//bpy/ops/object.pyfile:/bpy/ops/object.py:function:vertex_group_mirror/vertex_group_mirror | def vertex_group_mirror(mirror_weights: bool=True, flip_group_names: bool=
True, all_groups: bool=False, use_topology: bool=False):
"""Mirror vertex group, flip weights and/or names, editing only selected vertices, flipping when both sides are selected otherwise copy from unselected
:param mirror_weights: Mirror Weights, Mirror weights
:type mirror_weights: bool
:param flip_group_names: Flip Group Names, Flip vertex group names
:type flip_group_names: bool
:param all_groups: All Groups, Mirror all vertex groups weights
:type all_groups: bool
:param use_topology: Topology Mirror, Use topology based mirroring (for when both sides of mesh have matching, unique topology)
:type use_topology: bool
"""
pass
|
pysmime | pysmime//pkcs11.pyfile:/pkcs11.py:function:pkcs11_login/pkcs11_login | def pkcs11_login(pkcs11, pin):
"""
Performs authentication by PIN on the smart card.
"""
pkcs11.ctrl_cmd_string('PIN', pin)
|
qisys | qisys//ui.pyfile:/ui.py:function:valid_filename/valid_filename | def valid_filename(value):
""" Validate that the string passed as input can safely be used as a valid file name """
if value in ['.', '..']:
raise Exception('Invalid name: %s' % value)
bad_chars = '<>:"/\\|?*'
for bad_char in bad_chars:
if bad_char in value:
mess = "Invalid name: '%s'\n" % value
mess += 'A valid name should not contain any '
mess += 'of the following chars:\n'
mess += ' '.join(bad_chars)
raise Exception(mess)
return value
|
patentmodels | patentmodels//lib/utils_claimset.pyfile:/lib/utils_claimset.py:function:check_last/check_last | def check_last(claimset_list):
"""
Checks claims end with a claim number = length of list.
:param claimset_list: set of (number, text) tuples for claims.
:type claimset_list: tuples (int, str)
:return: true if last claim = length of claims; false if not
"""
try:
if claimset_list[-1][0] == len(claimset_list):
return True
else:
return False
except:
return False
|
tablib | tablib//packages/dbfpy/record.pyclass:DbfRecord/rawFromStream | def rawFromStream(cls, dbf, index):
"""Return raw record contents read from the stream.
Arguments:
dbf:
A `Dbf.Dbf` instance containing the record.
index:
Index of the record in the records' container.
This argument can't be None in this call.
Return value is a string containing record data in DBF format.
"""
dbf.stream.seek(dbf.header.headerLength + index * dbf.header.recordLength)
return dbf.stream.read(dbf.header.recordLength)
|
dask | dask//utils.pyfile:/utils.py:function:partial_by_order/partial_by_order | def partial_by_order(*args, **kwargs):
"""
>>> from operator import add
>>> partial_by_order(5, function=add, other=[(1, 10)])
15
"""
function = kwargs.pop('function')
other = kwargs.pop('other')
args2 = list(args)
for i, arg in other:
args2.insert(i, arg)
return function(*args2, **kwargs)
|
syncano-cli-0.8 | syncano-cli-0.8//syncano_cli/custom_sockets/formatters.pyclass:SocketFormatter/get_script_dependency | @classmethod
def get_script_dependency(cls, data, **kwargs):
"""
Note: when definig new depenency processors use following pattern:
get_{name}_dependency -> where {name} is one of the defined in DepedencyTypeE
this allows to easily extend dependency handling;
:param data:
:param directory:
:return:
"""
directory = kwargs.get('directory')
return {'runtime_name': data['runtime_name'], 'source': cls._get_source
(data['file'], directory)}
|
st2smv | st2smv//utils.pyfile:/utils.py:function:try_incr/try_incr | def try_incr(d, key, delta):
"""Try to increment `d[key]` by `delta`.
If `key` is not in `d`, then add it to `d` with the value `delta`.
"""
if key in d:
d[key] += delta
else:
d[key] = delta
|
combi | combi//_python_toolbox/cute_iter_tools.pyfile:/_python_toolbox/cute_iter_tools.py:function:is_iterable/is_iterable | def is_iterable(thing):
"""Return whether an object is iterable."""
if hasattr(type(thing), '__iter__'):
return True
else:
try:
iter(thing)
except TypeError:
return False
else:
return True
|
snutree | snutree//utilities/voluptuous.pyfile:/utilities/voluptuous.py:function:NonEmptyString/NonEmptyString | def NonEmptyString(s):
"""
Matches a nonempty string and throws a ValueError otherwise.
"""
if isinstance(s, str) and len(s) > 0:
return s
raise ValueError
|
prettyprinter-0.18.0 | prettyprinter-0.18.0//prettyprinter/utils.pyfile:/prettyprinter/utils.py:function:intersperse/intersperse | def intersperse(x, ys):
"""
Returns an iterable where ``x`` is inserted between
each element of ``ys``
:type ys: Iterable
"""
it = iter(ys)
try:
y = next(it)
except StopIteration:
return
yield y
for y in it:
yield x
yield y
|