Dataset columns: repo (string, 1-29 chars), path (string, 24-332 chars), code (string, 39-579k chars)
PICOS-2.0.8
PICOS-2.0.8//picos/solvers/solver_mskfsn.pyclass:MOSEKFusionSolver/names
@classmethod
def names(cls):
    """Implement :meth:`~.solver.Solver.names`."""
    return 'mskfsn', 'MOSEK (Fusion)', 'MOSEK via Fusion API'
SignalIntegrity
SignalIntegrity//Lib/Devices/DirectionalCoupler.pyfile:/Lib/Devices/DirectionalCoupler.py:function:DirectionalCoupler/DirectionalCoupler
def DirectionalCoupler(ports):
    """DirectionalCoupler
    Directional Coupler
    @param ports integer number of ports (3 or 4)
    @return s-parameter matrix of a three or four port directional coupler
    port 1 and 2 are a thru connection.
    port 3 picks off the wave going from port 1 to 2.
    port 4 (optional) picks off the wave going from port 2 to port 1.
    @note the directional coupler is completely ideal and is not passive in
    that the picked off wave is an exact copy of the wave going between the
    ports specified above.
    """
    if ports == 3:
        return [[0, 1, 0], [1, 0, 0], [1, 0, 0]]
    elif ports == 4:
        return [[0, 1, 0, 0], [1, 0, 0, 0], [1, 0, 0, 0], [0, 1, 0, 0]]
graphenelib-1.3.2
graphenelib-1.3.2//graphenebase/transactions.pyfile:/graphenebase/transactions.py:function:getBlockParams/getBlockParams
def getBlockParams(ws, use_head_block=False):
    """ Auxiliary method to obtain ``ref_block_num`` and ``ref_block_prefix``.
        Requires a websocket connection to a witness node!
    """
    raise DeprecationWarning(
        "This method shouldn't be called anymore. It is part of transactionbuilder now"
    )
mercurial-5.4
mercurial-5.4//mercurial/interfaces/repository.pyclass:ipeerconnection/local
def local():
    """Returns a local repository instance.

    If the peer represents a local repository, returns an object that
    can be used to interface with it. Otherwise returns ``None``.
    """
logrus
logrus//utils.pyfile:/utils.py:function:squashDicts/squashDicts
def squashDicts(*dict_args):
    """
    Given any number of dicts, shallow copy and merge into a new dict,
    precedence goes to key value pairs in later dicts.
    """
    result = {}
    for dictionary in dict_args:
        result.update(dictionary)
    return result
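A minimal usage sketch, assuming the squashDicts definition above is in scope (the dicts below are illustrative, not from the source):

# Hypothetical usage: later dicts win on key collisions.
defaults = {'host': 'localhost', 'port': 8080}
overrides = {'port': 9090}
assert squashDicts(defaults, overrides) == {'host': 'localhost', 'port': 9090}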
superset_hand-0.14.1
superset_hand-0.14.1//superset/utils.pyfile:/superset/utils.py:function:error_msg_from_exception/error_msg_from_exception
def error_msg_from_exception(e):
    """Translate exception into error message

    Databases have different ways to handle exceptions. This function attempts
    to make sense of the exception object and construct a human readable
    sentence.

    TODO(bkyryliuk): parse the Presto error message from the connection
        created via create_engine.
        engine = create_engine('presto://localhost:3506/silver') - gives an
        e.message as the str(dict)
        presto.connect("localhost", port=3506, catalog='silver') - as a dict.
        The latter version is parsed correctly by this function.
    """
    msg = ''
    if hasattr(e, 'message'):
        if type(e.message) is dict:
            msg = e.message.get('message')
        elif e.message:
            msg = '{}'.format(e.message)
    return msg or '{}'.format(e)
minegauler
minegauler//shared/utils.pyclass:Grid/from_2d_array
@classmethod
def from_2d_array(cls, array):
    """
    Create an instance using a 2-dimensional array.

    Arguments:
    array ([[object, ...], ...])
        The array to use in creating the grid instance.

    Return: Grid
        The resulting grid.
    """
    x_size = len(array[0])
    y_size = len(array)
    grid = cls(x_size, y_size)
    for coord in grid.all_coords:
        x, y = coord
        grid[coord] = array[y][x]
    return grid
rolabesti
rolabesti//arguments.pyfile:/arguments.py:function:prepare_arguments/prepare_arguments
def prepare_arguments(arguments):
    """Set proper maximum track length. Convert length arguments to seconds."""
    if 'max_tracklist_length' in arguments:
        if 0 < arguments['max_tracklist_length'] < arguments['max']:
            arguments['max'] = arguments['max_tracklist_length']
        arguments['max_tracklist_length'] *= 60
    arguments['max'] *= 60
    arguments['min'] *= 60
dropbox
dropbox//team_log.pyclass:EventDetails/shared_content_relinquish_membership_details
@classmethod
def shared_content_relinquish_membership_details(cls, val):
    """
    Create an instance of this class set to the
    ``shared_content_relinquish_membership_details`` tag with value ``val``.

    :param SharedContentRelinquishMembershipDetails val:
    :rtype: EventDetails
    """
    return cls('shared_content_relinquish_membership_details', val)
pytube
pytube//cipher.pyfile:/cipher.py:function:reverse/reverse
def reverse(arr, b):
    """Reverse elements in a list.

    This function is equivalent to:

    .. code-block:: javascript

        function(a, b) { a.reverse() }

    This method takes an unused ``b`` variable as their transform functions
    universally sent two arguments.

    **Example**:
    >>> reverse([1, 2, 3, 4], None)
    [4, 3, 2, 1]
    """
    return arr[::-1]
spatialist-0.5
spatialist-0.5//spatialist/ancillary.pyfile:/spatialist/ancillary.py:function:dictmerge/dictmerge
def dictmerge(x, y):
    """
    merge two dictionaries
    """
    z = x.copy()
    z.update(y)
    return z
things3-api-2.6.0
things3-api-2.6.0//things3/things3_kanban.pyfile:/things3/things3_kanban.py:function:write_html_column/write_html_column
def write_html_column(cssclass, file, header, rows):
    """Create a column in the output."""
    file.write("<div class='column'><div class=''>" +
               "<h2 class='h2 " + cssclass + "'>" + header +
               "<span class='size'>" + str(len(rows)) + '</span></h2>')
    for row in rows:
        task_uuid = str(row['uuid']) if row['uuid'] is not None else ''
        task_title = str(row['title']) if row['title'] is not None else ''
        context_title = str(row['context']) if row['context'] is not None else ''
        context_uuid = str(row['context_uuid']) if row['context_uuid'] is not None else ''
        deadline = str(row['due']) if row['due'] is not None else ''
        task_link = ('<a href="things:///show?id=' + task_uuid + '">' +
                     task_title + '</a>' if task_uuid != '' else task_title)
        context_link = ('<a href="things:///show?id=' + context_uuid + '">' +
                        context_title + '</a>' if context_uuid != '' else context_title)
        css_class = 'hasProject' if context_title != '' else 'hasNoProject'
        css_class = 'hasDeadline' if deadline != '' else css_class
        file.write('<div class="box">' + task_link +
                   '<div class="deadline">' + deadline + '</div>' +
                   '<div class="area ' + css_class + '">' + context_link +
                   '</div>' + '</div>')
    file.write('</div></div>')
lifx-photons-core-0.25.0
lifx-photons-core-0.25.0//photons_device_finder.pyclass:Filter/empty
@classmethod
def empty(kls, force_refresh=False):
    """Create an empty filter"""
    return kls.from_options({'force_refresh': force_refresh})
django-helcim-0.9.1
django-helcim-0.9.1//helcim/bridge_oscar.pyfile:/helcim/bridge_oscar.py:function:remap_oscar_credit_card/remap_oscar_credit_card
def remap_oscar_credit_card(card):
    """Remaps Oscar credit card object as Helcim dictionary.

    Parameters:
        card (obj): A credit card object provided by Django Oscar.

    Returns:
        dict: Credit card details formatted for django-helcim.
    """
    if card.expiry_date:
        cc_expiry = card.expiry_date.strftime('%m%y')
    else:
        cc_expiry = None
    return {'cc_name': card.name, 'cc_number': card.number,
            'cc_expiry': cc_expiry, 'cc_cvv': card.ccv}
ray
ray//authentication.pyclass:Authentication/authenticate
@classmethod
def authenticate(cls, login_data):
    """ Here you can implement a select in the database to guarantee
        that the username and the password are from the same user.
        This method must return a dict
    """
    raise NotImplementedError()
wx
wx//lib/agw/thumbnailctrl.pyfile:/lib/agw/thumbnailctrl.py:function:GetMondrianData/GetMondrianData
def GetMondrianData(): """ Returns a default image placeholder as a decompressed stream of characters. """ return ( b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00 \x00\x00\x00 \x08\x06\x00\x00\x00szz\xf4\x00\x00\x00\x04sBIT\x08\x08\x08\x08|\x08d\x88\x00\x00\x00qIDATX\x85\xed\xd6;\n\x800\x10E\xd1{\xc5\x8d\xb9r\x97\x16\x0b\xad$\x8a\x82:\x16o\xda\x84pB2\x1f\x81Fa\x8c\x9c\x08\x04Z{\xcf\xa72\xbcv\xfa\xc5\x08 \x80r\x80\xfc\xa2\x0e\x1c\xe4\xba\xfaX\x1d\xd0\xde]S\x07\x02\xd8>\xe1wa-`\x9fQ\xe9\x86\x01\x04\x10\x00\\(Dk\x1b-\x04\xdc\x1d\x07\x14\x98;\x0bS\x7f\x7f\xf9\x13\x04\x10@\xf9X\xbe\x00\xc9 \x14K\xc1<={\x00\x00\x00\x00IEND\xaeB`\x82' )
ks_response-1.2
ks_response-1.2//ks_response/response.pyclass:KSResponse/_format_method_name
@staticmethod
def _format_method_name(request_name: str) -> str:
    """Normalizes the method name to a single form."""
    request_name = request_name if not request_name.startswith('/') else request_name[1:]
    request_name = request_name if request_name.endswith('/') else request_name + '/'
    return request_name
service_framework-0.0.8
service_framework-0.0.8//src/service_framework/connections/out/publisher.pyclass:Publisher/get_compatable_connection_types
@staticmethod
def get_compatable_connection_types():
    """
    This is needed so the service framework knows which connections
    this current connection is compatible with.

    return::['str'] A list of the compatible connections
    """
    return ['subscriber']
os_vm_expire
os_vm_expire//model/repositories.pyfile:/model/repositories.py:function:is_db_connection_error/is_db_connection_error
def is_db_connection_error(args):
    """Return True if error in connecting to db."""
    conn_err_codes = '2002', '2003', '2006'
    for err_code in conn_err_codes:
        if args.find(err_code) != -1:
            return True
    return False
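A brief usage sketch, assuming is_db_connection_error above is in scope (the error strings are illustrative, not from the source):

# Hypothetical usage: the driver error text is made up for illustration.
message = "(2003, \"Can't connect to MySQL server on 'db' (111)\")"
assert is_db_connection_error(message) is True
assert is_db_connection_error('syntax error near SELECT') is False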
msprime-0.7.4
msprime-0.7.4//msprime/cli.pyfile:/msprime/cli.py:function:convert_float/convert_float
def convert_float(value, parser):
    """
    Converts the specified value to a float if possible. If conversion
    fails, exit by calling parser.error.
    """
    try:
        return float(value)
    except ValueError:
        parser.error("invalid float value '{}'".format(value))
cyborgclient
cyborgclient//common/cliutils.pyfile:/common/cliutils.py:function:unauthenticated/unauthenticated
def unauthenticated(func):
    """Adds 'unauthenticated' attribute to decorated function.

    Usage:
        >>> @unauthenticated
        ... def mymethod(f):
        ...     pass
    """
    func.unauthenticated = True
    return func
capsul-2.2.1
capsul-2.2.1//capsul/process/process.pyclass:ProcessMeta/complement_doc
@staticmethod
def complement_doc(name, docstr):
    """ complement the process docstring """
    docstring = docstr.split('\n')
    indent = -1
    for line in docstring[1:]:
        lstrip = line.strip()
        if not lstrip:
            continue
        lindent = line.index(line.strip())
        if indent == -1 or lindent < indent:
            indent = lindent
    if indent < 0:
        indent = 0
    docstring += [(' ' * indent + line) for line in [
        '',
        '.. note::',
        '',
        " * Type '{0}.help()' for a full description of this process parameters.".format(name),
        " * Type '<{0}>.get_input_spec()' for a full description of this process input trait types.".format(name),
        " * Type '<{0}>.get_output_spec()' for a full description of this process output trait types.".format(name),
        '']]
    return '\n'.join(docstring)
pyboto3-1.4.4
pyboto3-1.4.4//pyboto3/ecs.pyfile:/pyboto3/ecs.py:function:list_clusters/list_clusters
def list_clusters(nextToken=None, maxResults=None): """ Returns a list of existing clusters. See also: AWS API Documentation Examples This example lists all of your available clusters in your default region. Expected Output: :example: response = client.list_clusters( nextToken='string', maxResults=123 ) :type nextToken: string :param nextToken: The nextToken value returned from a previous paginated ListClusters request where maxResults was used and the results exceeded the value of that parameter. Pagination continues from the end of the previous results that returned the nextToken value. This value is null when there are no more results to return. Note This token should be treated as an opaque identifier that is only used to retrieve the next items in a list and not for other programmatic purposes. :type maxResults: integer :param maxResults: The maximum number of cluster results returned by ListClusters in paginated output. When this parameter is used, ListClusters only returns maxResults results in a single page along with a nextToken response element. The remaining results of the initial request can be seen by sending another ListClusters request with the returned nextToken value. This value can be between 1 and 100. If this parameter is not used, then ListClusters returns up to 100 results and a nextToken value if applicable. :rtype: dict :return: { 'clusterArns': [ 'string', ], 'nextToken': 'string' } :returns: (string) -- """ pass
simsurvey-0.6.0
simsurvey-0.6.0//simsurvey/utils/tools.pyfile:/simsurvey/utils/tools.py:function:kwargs_extract/kwargs_extract
def kwargs_extract(default, **kwargs):
    """ like kwargs_update but extracts keys of default from kwargs

    Returns:
    k -- dictionary based on default update for kwargs
    l -- kwargs without keys defined in default
    """
    k = default.copy()
    l = {}
    for key, val in kwargs.items():
        if key in k.keys():
            k[key] = val
        else:
            l[key] = val
    return k, l
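A minimal usage sketch, assuming kwargs_extract above is in scope (keyword names are illustrative):

# Hypothetical usage: split keyword arguments into known options and leftovers.
defaults = {'color': 'blue', 'lw': 1}
known, extra = kwargs_extract(defaults, lw=2, label='run A')
assert known == {'color': 'blue', 'lw': 2}
assert extra == {'label': 'run A'}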
dask_cuda
dask_cuda//_version.pyfile:/_version.py:function:render_pep440_old/render_pep440_old
def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if pieces['closest-tag']:
        rendered = pieces['closest-tag']
        if pieces['distance'] or pieces['dirty']:
            rendered += '.post%d' % pieces['distance']
            if pieces['dirty']:
                rendered += '.dev0'
    else:
        rendered = '0.post%d' % pieces['distance']
        if pieces['dirty']:
            rendered += '.dev0'
    return rendered
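A hedged illustration of the pieces dict this renderer expects; the values below are made up, and in versioneer they normally come from parsing git metadata:

# Hypothetical usage with a fabricated pieces dict.
pieces = {'closest-tag': '1.2.0', 'distance': 3, 'dirty': True}
assert render_pep440_old(pieces) == '1.2.0.post3.dev0'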
huey-2.2.0
huey-2.2.0//huey/utils.pyfile:/huey/utils.py:function:is_naive/is_naive
def is_naive(dt):
    """
    Determines if a given datetime.datetime is naive.

    The concept is defined in Python's docs:
    http://docs.python.org/library/datetime.html#datetime.tzinfo

    Assuming value.tzinfo is either None or a proper datetime.tzinfo,
    value.utcoffset() implements the appropriate logic.
    """
    return dt.utcoffset() is None
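A quick usage sketch using only the standard library, assuming is_naive above is in scope:

# Hypothetical usage: naive vs. timezone-aware datetimes.
from datetime import datetime, timezone
assert is_naive(datetime(2020, 1, 1)) is True
assert is_naive(datetime(2020, 1, 1, tzinfo=timezone.utc)) is False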
atlassian_jwt_auth
atlassian_jwt_auth//algorithms.pyfile:/algorithms.py:function:get_permitted_algorithm_names/get_permitted_algorithm_names
def get_permitted_algorithm_names():
    """ returns permitted algorithm names. """
    return ['RS256', 'RS384', 'RS512', 'ES256', 'ES384', 'ES512',
            'PS256', 'PS384', 'PS512']
allmydata
allmydata//interfaces.pyclass:IDirectoryNode/set_nodes
def set_nodes(entries, overwrite=True):
    """Add multiple children to a directory node.

    Takes a dict mapping unicode childname to (child_node, metadata)
    tuples. If metadata=None, the original metadata is left unmodified.

    Returns a Deferred that fires (with this dirnode) when the operation
    finishes. This is equivalent to calling set_node() multiple times, but
    is much more efficient."""
mizani-0.6.0
mizani-0.6.0//mizani/utils.pyfile:/mizani/utils.py:function:identity/identity
def identity(*args):
    """
    Return whatever is passed in
    """
    return args if len(args) > 1 else args[0]
cxgen-3.0.0
cxgen-3.0.0//cxgen/utils.pyfile:/cxgen/utils.py:function:pythonVersion/pythonVersion
def pythonVersion():
    """Returns python version in a string format"""
    from platform import python_version
    return python_version()
dexy-2.0.8
dexy-2.0.8//dexy/filters/id.pyfile:/dexy/filters/id.py:function:p_main/p_main
def p_main(p):
    """entries : entries entry
               | entry"""
    pass
median_voting
median_voting//median_voting.pyclass:MedianStatistics/sort_votes
@staticmethod
def sort_votes(votes):
    """ Sorts the votes in decreasing order and returns the result.

    Usually you don't have to sort the elements by yourself, the __init__
    method will take care of this. But if the elements are already sorted
    from for example a database there is no need to sort them again.

    Args:
        votes (list of MedianVote): The votes to sort.

    Returns:
        list of MedianVote: The sorted votes.
    """
    return sorted(votes, key=lambda vote: vote.value, reverse=True)
zenmake
zenmake//zm/waf/launcher.pyfile:/zm/waf/launcher.py:function:setWscriptVars/setWscriptVars
def setWscriptVars(module, bconf):
    """
    Set wscript vars: top, out, APPNAME, VERSION
    """
    module.top = bconf.confPaths.wscripttop
    module.out = bconf.confPaths.wscriptout
    module.APPNAME = bconf.projectName
    module.VERSION = bconf.projectVersion
eniric
eniric//nIRanalysis.pyfile:/nIRanalysis.py:function:name_assignment/name_assignment
def name_assignment(spectrum: str):
    """ assigns a name to the filename in which the spectrum is going to be saved """
    m0_aces = 'lte03900'
    m3_aces = 'lte03500'
    m6_aces = 'lte02800'
    m9_aces = 'lte02600'
    base = 'PHOENIX-ACES-AGSS-COND-2011-HiRes_wave.dat'
    if m0_aces in spectrum and base in spectrum:
        name = 'M0-PHOENIX-ACES'
    elif m3_aces in spectrum and base in spectrum:
        name = 'M3-PHOENIX-ACES'
    elif m6_aces in spectrum and base in spectrum:
        name = 'M6-PHOENIX-ACES'
    elif m9_aces in spectrum and base in spectrum:
        name = 'M9-PHOENIX-ACES'
    else:
        raise ValueError('Name {0} not found!'.format(spectrum))
    return name
pyspark-2.4.5
pyspark-2.4.5//pyspark/sql/context.pyclass:HiveContext/_createForTesting
@classmethod
def _createForTesting(cls, sparkContext):
    """(Internal use only) Create a new HiveContext for testing.

    All test code that touches HiveContext *must* go through this method.
    Otherwise, you may end up launching multiple derby instances and
    encounter with incredibly confusing error messages.
    """
    jsc = sparkContext._jsc.sc()
    jtestHive = (sparkContext._jvm.org.apache.spark.sql.hive.test.
                 TestHiveContext(jsc, False))
    return cls(sparkContext, jtestHive)
ddtrace
ddtrace//contrib/django/utils.pyfile:/contrib/django/utils.py:function:resource_from_cache_prefix/resource_from_cache_prefix
def resource_from_cache_prefix(resource, cache):
    """
    Combine the resource name with the cache prefix (if any)
    """
    if getattr(cache, 'key_prefix', None):
        name = '{} {}'.format(resource, cache.key_prefix)
    else:
        name = resource
    return name.lower()
fake-bpy-module-2.79-20200428
fake-bpy-module-2.79-20200428//bpy/ops/armature.pyfile:/bpy/ops/armature.py:function:delete/delete
def delete():
    """Remove selected bones from the armature
    """
    pass
lcmap-merlin-2.3.3.1
lcmap-merlin-2.3.3.1//merlin/chips.pyfile:/merlin/chips.py:function:trim/trim
def trim(chips, dates):
    """Eliminates chips that are not from the specified dates

    Args:
        chips: Sequence of chips
        dates: Sequence of dates that should be included in result

    Returns:
        tuple: filtered chips
    """
    return tuple(filter(lambda c: c['acquired'] in dates, chips))
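A minimal usage sketch, assuming trim above is in scope (the chip dicts are illustrative; real chips carry more keys):

# Hypothetical usage: keep only chips acquired on the requested dates.
chips = [{'acquired': '2000-01-01'}, {'acquired': '2001-01-01'}]
assert trim(chips, dates=['2000-01-01']) == ({'acquired': '2000-01-01'},)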
zope.formlib-4.7.1
zope.formlib-4.7.1//src/zope/formlib/interfaces.pyclass:IBrowserWidget/__call__
def __call__(): """Render the widget."""
dimcli
dimcli//core/utils.pyfile:/core/utils.py:function:line_last_three_words/line_last_three_words
def line_last_three_words(line):
    """return last three words"""
    if len(line.split()) > 2:
        return ' '.join([line.split()[-3], line.split()[-2], line.split()[-1]])
    else:
        return ''
epics-sumo-4.0.1
epics-sumo-4.0.1//sumolib/cli.pyclass:Container/normalize_name
@staticmethod
def normalize_name(st):
    """remove dashes from a name."""
    if st.startswith('--'):
        st = st.replace('--', '', 1)
    elif st.startswith('-'):
        st = st.replace('-', '', 1)
    return st.replace('-', '_')
csirtg-spamhaus-0.2
csirtg-spamhaus-0.2//versioneer.pyfile:/versioneer.py:function:render_pep440_old/render_pep440_old
def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if pieces['closest-tag']:
        rendered = pieces['closest-tag']
        if pieces['distance'] or pieces['dirty']:
            rendered += '.post%d' % pieces['distance']
            if pieces['dirty']:
                rendered += '.dev0'
    else:
        rendered = '0.post%d' % pieces['distance']
        if pieces['dirty']:
            rendered += '.dev0'
    return rendered
zun
zun//pci/utils.pyfile:/pci/utils.py:function:_get_sysfs_netdev_path/_get_sysfs_netdev_path
def _get_sysfs_netdev_path(pci_addr, pf_interface):
    """Get the sysfs path based on the PCI address of the device.

    Assumes a networking device - will not check for the existence of the
    path.
    """
    if pf_interface:
        return '/sys/bus/pci/devices/%s/physfn/net' % pci_addr
    return '/sys/bus/pci/devices/%s/net' % pci_addr
wpiformat-2020.28
wpiformat-2020.28//wpiformat/includeorder.pyclass:IncludeOrder/rebuild_include
@staticmethod
def rebuild_include(name_match, group_number):
    """Adds appropriate brackets around include name and "#include" before
    that based on group number.

    Keyword arguments:
    name_match -- include name's regex Match object
    group_number -- include classification index

    Returns include name with appropriate brackets and "#include" prefix.
    """
    if 1 <= group_number <= 3:
        output = '#include <' + name_match.group('name') + '>' + name_match.group('postfix')
    else:
        output = '#include "' + name_match.group('name') + '"' + name_match.group('postfix')
    if name_match.group('comment'):
        return name_match.group('comment') + output
    else:
        return output
CodeReview-1.0.0
CodeReview-1.0.0//CodeReview/Tools/Slice.pyclass:Slice/__and__
def __and__(i1, i2):
    """Return the intersection of i1 and i2."""
    return i1.__class__(*i1._intersection(i1, i2))
GetCCWarc-0.0.1.dev34
GetCCWarc-0.0.1.dev34//GetCCWarc/_version.pyfile:/GetCCWarc/_version.py:function:render_git_describe_long/render_git_describe_long
def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always --long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty]  (note: no 'g' prefix)
    """
    if pieces['closest-tag']:
        rendered = pieces['closest-tag']
        rendered += '-%d-g%s' % (pieces['distance'], pieces['short'])
    else:
        rendered = pieces['short']
    if pieces['dirty']:
        rendered += '-dirty'
    return rendered
cbdd-scopy-1.1.2
cbdd-scopy-1.1.2//scopy/druglikeness/molproperty.pyfile:/scopy/druglikeness/molproperty.py:function:GetProperties/GetProperties
def GetProperties(mol, items=['MW', 'Vol', 'Dense', 'fChar', 'nBond', 'nAtom', 'nHD', 'nHA', 'nHB', 'nHet', 'nStero', 'nHev', 'nRot', 'nRig', 'Flex', 'nRing', 'logP', 'logD', 'pKa', 'logSw', 'ab', 'MR', 'TPSA', 'AP', 'HetRatio', 'Fsp3', 'MaxRing', 'QEDmean', 'QEDmax', 'QEDnone', 'SAscore', 'NPscore', 'nSingle', 'nDouble', 'nTriple', 'nC', 'nB', 'nF', 'nCl', 'nBr', 'nI', 'nP', 'nS', 'nO', 'nN']): """ Get all properties in scopy """ funcl = {'MW': 'CalculateMolWeight(mol)', 'Vol': 'CalculateMolVolume(mol)', 'Dense': 'CalculateMolDensity(mol)', 'fChar': 'CalculateMolFCharge(mol)', 'nBond': 'CalculateNumBonds(mol)', 'nAtom': 'CalculateNumAtoms(mol)', 'nHet': 'CalculateNumHetero(mol)', 'nRot': 'CalculateNumRotatableBonds(mol)', 'nRig': 'CalculateNumRigidBonds(mol)', 'Flex': 'CalculateFlexibility(mol)', 'nRing': 'CalculateNumRing(mol)', 'nHev': 'CalculateNumHeavyAtom(mol)', 'logP': 'CalculateLogP(mol)', 'logD': 'CalculateLogD(mol)', 'pKa': 'CalculatepKa(mol)', 'ab': 'CheckAcid(mol)', 'MR': 'CalculateMolMR(mol)', 'nHD': 'CalculateNumHDonors(mol)', 'nHA': 'CalculateNumHAcceptors(mol)', 'nHB': 'CalculateNumHyBond(mol)', 'AP': 'CalculateAromaticProportion(mol)', 'logSw': 'CalculateLogSw(mol)', 'Fsp3': 'CalculateFsp3(mol)', 'TPSA': 'CalculateTPSA(mol)', 'MaxRing': 'CalculateMaxSizeSystemRing(mol)', 'nStero': 'CalculateNumStereocenters(mol)', 'HetRatio': 'CalculateHetCarbonRatio(mol)', 'QEDmean': 'CalculateQEDmean(mol)', 'QEDmax': 'CalculateQEDmax(mol)', 'QEDnone': 'CalculateQEDnone(mol)', 'SAscore': 'CalculateSAscore(mol)', 'NPscore': 'CalculateNPscore(mol)', 'nSingle': 'CalculateNumSinBond(mol)', 'nDouble': 'CalculateNumDouBond(mol)', 'nTriple': 'CalculateNumTriBond(mol)', 'nC': 'CalculateNumCarbon(mol)', 'nB': 'CalculateNumBoron(mol)', 'nF': 'CalculateNumFluorin(mol)', 'nCl': 'CalculateNumChlorin(mol)', 'nBr': 'CalculateNumBromine(mol)', 'nI': 'CalculateNumIodine(mol)', 'nP': 'CalculateNumPhosphor(mol)', 'nS': 'CalculateNumSulfur(mol)', 'nO': 'CalculateNumOxygen(mol)', 'nN': 'CalculateNumNitrogen(mol)'} vals = [] for item in items: val = eval(funcl[item]) vals.append(val) return dict(zip(items, vals))
MongoFrames-1.3.5
MongoFrames-1.3.5//mongoframes/factory/blueprints.pyclass:Blueprint/assemble
@classmethod
def assemble(cls):
    """Assemble a single document using the blueprint"""
    document = {}
    for field_name, maker in cls._instructions.items():
        with maker.target(document):
            document[field_name] = maker()
    return document
robottelo
robottelo//cli/org.pyclass:Org/remove_compute_resource
@classmethod
def remove_compute_resource(cls, options=None):
    """Removes a computeresource from an org"""
    cls.command_sub = 'remove-compute-resource'
    return cls.execute(cls._construct_command(options))
ikpy
ikpy//utils/geometry.pyfile:/utils/geometry.py:function:homogeneous_to_cartesian_vectors/homogeneous_to_cartesian_vectors
def homogeneous_to_cartesian_vectors(homogeneous_vector):
    """Convert an homogeneous vector to cartesian vector"""
    return homogeneous_vector[:-1]
bout_install-0.1.8
bout_install-0.1.8//bout_install/installer/PETScInstaller.pyclass:PETScInstaller/get_configure_command
@staticmethod
def get_configure_command(config_options=None):
    """
    Get the command to configure the package.

    Notes
    -----
    Configuring happens through python 2
    https://github.com/petsc/petsc/blob/master/configure

    Parameters
    ----------
    config_options : dict
        Configuration options to use with `./configure`.
        The configuration options will be converted to `--key=val` during
        runtime

    Returns
    -------
    config_str : str
        The configuration command
    """
    options = ''
    if config_options is not None:
        for key, val in config_options.items():
            if val is not None:
                options += f' --{key}={val}'
            else:
                options += f' --{key}'
    config_str = f'python2 ./configure{options}'
    return config_str
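A hedged usage sketch, assuming the PETScInstaller class above is importable; the option names are illustrative and not verified against PETSc's configure script:

# Hypothetical usage: a value-less option is emitted without '=val'.
opts = {'prefix': '/opt/petsc', 'with-debugging': None}
cmd = PETScInstaller.get_configure_command(opts)
assert cmd == 'python2 ./configure --prefix=/opt/petsc --with-debugging'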
prysm
prysm//zernike.pyfile:/zernike.py:function:primary_spherical/primary_spherical
def primary_spherical(rho, phi):
    """Zernike primary Spherical."""
    return 6 * rho ** 4 - 6 * rho ** 2 + 1
pyboto3-1.4.4
pyboto3-1.4.4//pyboto3/s3.pyfile:/pyboto3/s3.py:function:delete_object_tagging/delete_object_tagging
def delete_object_tagging(Bucket=None, Key=None, VersionId=None): """ Removes the tag-set from an existing object. See also: AWS API Documentation :example: response = client.delete_object_tagging( Bucket='string', Key='string', VersionId='string' ) :type Bucket: string :param Bucket: [REQUIRED] :type Key: string :param Key: [REQUIRED] :type VersionId: string :param VersionId: The versionId of the object that the tag-set will be removed from. :rtype: dict :return: { 'VersionId': 'string' } :returns: (dict) -- VersionId (string) -- The versionId of the object the tag-set was removed from. """ pass
tuskar-0.4.18
tuskar-0.4.18//tuskar/templates/plan.pyfile:/tuskar/templates/plan.py:function:generate_group_id/generate_group_id
def generate_group_id(namespace):
    """Generates the ID for a resource group wrapper resource around the
    resource with the given namespace.

    :type namespace: str
    :rtype: str
    """
    return namespace.rsplit('-', 1)[0]
ambition
ambition//rest.pyclass:RESTClient/POST
@classmethod
def POST(cls, *n, **kw):
    """ Perform a POST request using `RESTClient.request()` """
    return cls.IMPL.POST(*n, **kw)
WsgiDAV-3.0.3
WsgiDAV-3.0.3//wsgidav/util.pyfile:/wsgidav/util.py:function:is_equal_or_child_uri/is_equal_or_child_uri
def is_equal_or_child_uri(parentUri, childUri):
    """Return True, if childUri is a child of parentUri or maps to the same resource.

    Similar to <util.is_child_uri>_ , but this method also returns True, if
    parent equals child. ('/a/b' is considered identical with '/a/b/').
    """
    return parentUri and childUri and (childUri.rstrip('/') + '/').startswith(
        parentUri.rstrip('/') + '/')
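A short usage sketch, assuming is_equal_or_child_uri above is in scope:

# Hypothetical usage: trailing slashes and nesting are accepted, plain prefixes are not.
assert is_equal_or_child_uri('/a/b', '/a/b/')
assert is_equal_or_child_uri('/a/b', '/a/b/c')
assert not is_equal_or_child_uri('/a/b', '/a/bc')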
emulsion
emulsion//agent/core/abstract_agent.pyclass:AbstractAgent/from_dict
@classmethod
def from_dict(cls, dct):
    """Instantiate an agent using the specified dictionary.

    TAG: USER
    """
    return cls(**dct)
solprimer
solprimer//solprim/solartimeposition.pyfile:/solprim/solartimeposition.py:function:geo_coordinates/geo_coordinates
def geo_coordinates(longitude=0, latitude=0):
    """
    transform geographical coordinates given as longitude [-180..+180]
    and latitude [-90..+90] in a string with absolute indications
    and 'N', 'S', 'E', 'W'

    input:
    - longitude, decimal degrees, float
    - latitude, decimal degrees, float

    output:
    - string in format '27.34N 14.82E'
    """
    if latitude >= 0:
        lat_dir = 'N'
        lat_val = str(round(latitude, 2))
    else:
        lat_dir = 'S'
        lat_val = str(round(-latitude, 2))
    if longitude >= 0:
        lon_dir = 'E'
        lon_val = str(round(longitude, 2))
    else:
        lon_dir = 'W'
        lon_val = str(round(-longitude, 2))
    coord = lat_val + '' + lat_dir + ' ' + lon_val + '' + lon_dir
    return coord
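A minimal usage sketch, assuming geo_coordinates above is in scope (Dublin sits at roughly 53.35 N, 6.26 W):

# Hypothetical usage: western longitudes come out with a 'W' suffix.
assert geo_coordinates(longitude=-6.26, latitude=53.35) == '53.35N 6.26W'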
dataflake.cache-1.12
dataflake.cache-1.12//dataflake/cache/interfaces.pyclass:ICache/invalidate
def invalidate(key=None): """ Invalidate the given key, or all key/values if no key is passed. """
polyglotdb-1.0.0
polyglotdb-1.0.0//polyglotdb/syllabification/probabilistic.pyfile:/polyglotdb/syllabification/probabilistic.py:function:split_nonsyllabic_prob/split_nonsyllabic_prob
def split_nonsyllabic_prob(string, onsets, codas):
    """
    Guesses split between onset and coda in list with no found syllabic segments

    Parameters
    ----------
    string : iterable
        the phones to search through
    onsets : iterable
        an iterable of possible onsets
    codas : iterable
        an iterable of possible codas

    Returns
    -------
    int
        best guess for the index in the string where the onset ends and
        coda begins
    """
    if len(string) == 0:
        return None
    max_prob = -10000
    best = None
    for i in range(len(string) + 1):
        prob = 0
        ons = tuple(string[:i])
        cod = tuple(string[i:])
        if ons not in onsets:
            prob += onsets[None]
        else:
            prob += onsets[ons]
        if cod not in codas:
            prob += codas[None]
        else:
            prob += codas[cod]
        if prob > max_prob:
            max_prob = prob
            best = i
    return best
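A hedged sketch of the expected inputs: onsets and codas behave like log-probability tables keyed by phone tuples, with a None key as the fallback (all numbers below are made up):

# Hypothetical usage: the split lands between 's' and 't' for these scores.
onsets = {(): -3.0, ('s',): -1.0, ('s', 't'): -2.5, None: -10.0}
codas = {(): -3.0, ('t',): -1.2, ('s', 't'): -2.0, None: -10.0}
assert split_nonsyllabic_prob(['s', 't'], onsets, codas) == 1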
ftw.blueprints-1.1.1
ftw.blueprints-1.1.1//ftw/blueprints/interfaces.pyclass:IXMLHandler/parse_xml_string
def parse_xml_string(xml_string): """Parses a xml string with minidom """
python_toolbox
python_toolbox//sequence_tools/misc.pyfile:/sequence_tools/misc.py:function:are_equal_regardless_of_order/are_equal_regardless_of_order
def are_equal_regardless_of_order(seq1, seq2):
    """
    Do `seq1` and `seq2` contain the same elements, same number of times?

    Disregards order of elements.

    Currently will fail for items that have problems with comparing.
    """
    from python_toolbox import nifty_collections
    return nifty_collections.Bag(seq1) == nifty_collections.Bag(seq2)
suzu-0.4.1
suzu-0.4.1//suzu/atomtbl.pyclass:AtomTbl/validate_wunit
@staticmethod
def validate_wunit(w):
    """
    @param w widget mapping
    """
    err = []
    try:
        v = w['symbol'].get()
        if len(v) < 1 or len(v) > 2:
            stat = False
            err.append(('symbol', 'char length is not valid'))
            w['symbol'].config(bg='red')
        else:
            w['symbol'].config(bg='white')
    except:
        err.append(('symbol', 'exception occurred'))
    for name in ['z', 'w', 'stoich', 'disp']:
        e = w[name].validate()
        if e:
            err.append((name, e))
    return err if err else None
pytorch_pretrained_bert
pytorch_pretrained_bert//modeling_transfo_xl.pyfile:/modeling_transfo_xl.py:function:build_tf_to_pytorch_map/build_tf_to_pytorch_map
def build_tf_to_pytorch_map(model, config): """ A map of modules from TF to PyTorch. This time I use a map to keep the PyTorch model as identical to the original PyTorch model as possible. """ tf_to_pt_map = {} if hasattr(model, 'transformer'): tf_to_pt_map.update({ 'transformer/adaptive_softmax/cutoff_0/cluster_W': model.crit. cluster_weight, 'transformer/adaptive_softmax/cutoff_0/cluster_b': model.crit. cluster_bias}) for i, (out_l, proj_l, tie_proj) in enumerate(zip(model.crit. out_layers, model.crit.out_projs, config.tie_projs)): layer_str = 'transformer/adaptive_softmax/cutoff_%d/' % i if config.tie_weight: tf_to_pt_map.update({(layer_str + 'b'): out_l.bias}) else: raise NotImplementedError tf_to_pt_map.update({(layer_str + 'lookup_table'): out_l. weight, (layer_str + 'b'): out_l.bias}) if not tie_proj: tf_to_pt_map.update({(layer_str + 'proj'): proj_l}) model = model.transformer for i, (embed_l, proj_l) in enumerate(zip(model.word_emb.emb_layers, model.word_emb.emb_projs)): layer_str = 'transformer/adaptive_embed/cutoff_%d/' % i tf_to_pt_map.update({(layer_str + 'lookup_table'): embed_l.weight, (layer_str + 'proj_W'): proj_l}) for i, b in enumerate(model.layers): layer_str = 'transformer/layer_%d/' % i tf_to_pt_map.update({(layer_str + 'rel_attn/LayerNorm/gamma'): b. dec_attn.layer_norm.weight, (layer_str + 'rel_attn/LayerNorm/beta'): b.dec_attn.layer_norm.bias, ( layer_str + 'rel_attn/o/kernel'): b.dec_attn.o_net.weight, ( layer_str + 'rel_attn/qkv/kernel'): b.dec_attn.qkv_net.weight, (layer_str + 'rel_attn/r/kernel'): b.dec_attn.r_net.weight, ( layer_str + 'ff/LayerNorm/gamma'): b.pos_ff.layer_norm.weight, (layer_str + 'ff/LayerNorm/beta'): b.pos_ff.layer_norm.bias, ( layer_str + 'ff/layer_1/kernel'): b.pos_ff.CoreNet[0].weight, ( layer_str + 'ff/layer_1/bias'): b.pos_ff.CoreNet[0].bias, ( layer_str + 'ff/layer_2/kernel'): b.pos_ff.CoreNet[3].weight, ( layer_str + 'ff/layer_2/bias'): b.pos_ff.CoreNet[3].bias}) if config.untie_r: r_r_list = [] r_w_list = [] for b in model.layers: r_r_list.append(b.dec_attn.r_r_bias) r_w_list.append(b.dec_attn.r_w_bias) else: r_r_list = [model.r_r_bias] r_w_list = [model.r_w_bias] tf_to_pt_map.update({'transformer/r_r_bias': r_r_list, 'transformer/r_w_bias': r_w_list}) return tf_to_pt_map
dk-1.1.2
dk-1.1.2//dk/utils.pyfile:/dk/utils.py:function:hour_minute/hour_minute
def hour_minute(v):
    """Convert 7.5 (hours) to (7, 30) i.e. 7 hours and 30 minutes."""
    h = int(v)
    m = int((v - h) * 60)
    return h, m
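A quick usage sketch, assuming hour_minute above is in scope:

# Hypothetical usage: fractional hours become (hours, minutes) tuples.
assert hour_minute(7.5) == (7, 30)
assert hour_minute(2.25) == (2, 15)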
cameo-0.11.15
cameo-0.11.15//versioneer.pyfile:/versioneer.py:function:render_git_describe/render_git_describe
def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty]  (note: no 'g' prefix)
    """
    if pieces['closest-tag']:
        rendered = pieces['closest-tag']
        if pieces['distance']:
            rendered += '-%d-g%s' % (pieces['distance'], pieces['short'])
    else:
        rendered = pieces['short']
    if pieces['dirty']:
        rendered += '-dirty'
    return rendered
remofile
remofile//algorithm.pyfile:/algorithm.py:function:download_files/download_files
def download_files(client, source, destination, relative_directory):
    """ Download files from the remote directory.

    This method downloads one file, an entire directory or a set of
    files (specified by shell glob pattern) to a given directory in the
    local filesystem. Additional parameters are there to refine the
    process.

    The source parameter refers to the remote file(s) to be transferred
    to the local filesystem and is expected to be a path-like object,
    unless it's a shell glob pattern in which case a string is expected.
    The path must be relative and is by default relative to the root
    directory. It can be altered with the relative_directory parameter
    which itself must be a path-like object referring to an absolute
    directory.

    The destination parameter refers to the local directory in which the
    file(s) must be transferred to. It must be a path-like object and
    must be absolute.

    Long description.

    :param source: Foobar.
    :param destination: Foobar.
    :param chunk_size: Foobar.
    :type chunk_size: int
    :param process_chunk: Foobar.
    :param timeout: How many milliseconds to wait before giving up
    :type timeout: int
    """
    pass
parl-1.3
parl-1.3//parl/core/fluid/plutils/common.pyfile:/parl/core/fluid/plutils/common.py:function:inverse/inverse
def inverse(x):
    """ Inverse 0/1 variable

    Args:
        x: variable with float32 dtype

    Returns:
        inverse_x: variable with float32 dtype
    """
    inverse_x = -1.0 * x + 1.0
    return inverse_x
ovation-1.26.0
ovation-1.26.0//ovation/core.pyfile:/ovation/core.py:function:delete_entity/delete_entity
def delete_entity(session, entity):
    """
    Deletes an entity. Deleted entities are put in the "trash" and are no
    longer visible or returned by GET operations. Trashed entities can be
    restored from the trash by calling `undelete_entity`.

    :param session: ovation.session.Session
    :param entity: entity dict or ID
    :return: deleted entity Dict
    """
    try:
        id = entity['_id']
    except TypeError:
        id = entity
    return session.delete(session.path(entity_id=id))
music21-5.7.2
music21-5.7.2//music21/search/base.pyfile:/music21/search/base.py:function:translateNoteTieToByte/translateNoteTieToByte
def translateNoteTieToByte(n):
    """
    takes a note.Note object and returns a one-byte representation
    of its tie status.
    's' if start tie, 'e' if stop tie, 'c' if continue tie, and '' if no tie

    >>> n = note.Note('E')
    >>> search.translateNoteTieToByte(n)
    ''

    >>> n.tie = tie.Tie('start')
    >>> search.translateNoteTieToByte(n)
    's'

    >>> n.tie.type = 'continue'
    >>> search.translateNoteTieToByte(n)
    'c'

    >>> n.tie.type = 'stop'
    >>> search.translateNoteTieToByte(n)
    'e'
    """
    if n.tie is None:
        return ''
    elif n.tie.type == 'start':
        return 's'
    elif n.tie.type == 'continue':
        return 'c'
    elif n.tie.type == 'stop':
        return 'e'
    else:
        return ''
PyTrack-NTU-1.0.3
PyTrack-NTU-1.0.3//PyTrack/formatBridge.pyfile:/PyTrack/formatBridge.py:function:getColHeaders/getColHeaders
def getColHeaders():
    """Function to return the column headers for the *PyTrack* base format
    data representation.
    """
    return ['Timestamp', 'StimulusName', 'EventSource', 'GazeLeftx',
            'GazeRightx', 'GazeLefty', 'GazeRighty', 'PupilLeft',
            'PupilRight', 'FixationSeq', 'SaccadeSeq', 'Blink', 'GazeAOI']
pyfacebook-0.3
pyfacebook-0.3//pyfacebook/me.pyclass:Me/get_token_uri
def get_token_uri(APP_ID, scope):
    """ Returns URL for getting tokens """
    uri = 'https://www.facebook.com/dialog/oauth?client_id=%s' % APP_ID
    uri += ('&scope=%s&redirect_uri=https://localhost&response_type=token' % scope)
    return uri
soma-base-4.6.4
soma-base-4.6.4//python/soma/qt_gui/controls/Enum.pyclass:EnumControlWidget/is_valid
@staticmethod
def is_valid(control_instance, *args, **kwargs):
    """ Method to check if the new control value is correct.

    Parameters
    ----------
    control_instance: QComboBox (mandatory)
        the control widget we want to validate

    Returns
    -------
    out: bool
        always True since the control value is always valid
    """
    return True
natcap
natcap//rios/porter_core.pyfile:/rios/porter_core.py:function:write_csv/write_csv
def write_csv(column_headers, rows, output_uri):
    """Given a list of column headers and contents, write to a csv file

    column_headers - a list of strings defining the column names
    rows - a list of lists where an inner list index corresponds to the
        column header in column_headers
    output_uri - the location to save the output .csv file

    returns nothing"""
    pass
superset-dywx-0.26.3
superset-dywx-0.26.3//superset/db_engine_specs.pyclass:SqliteEngineSpec/get_table_names
@classmethod
def get_table_names(cls, schema, inspector):
    """Need to disregard the schema for Sqlite"""
    return sorted(inspector.get_table_names())
aiortc-dc-0.5.5
aiortc-dc-0.5.5//examples/datachannel-filexfer/wssignaling.pyfile:/examples/datachannel-filexfer/wssignaling.py:function:add_signaling_arguments/add_signaling_arguments
def add_signaling_arguments(parser):
    """
    Add signaling method arguments to an argparse.ArgumentParser.
    """
    parser.add_argument('--signaling-host', default='127.0.0.1',
                        help='Signaling host (websocket server host)')
    parser.add_argument('--signaling-port', default=1234,
                        help='Signaling port (websocket server port)')
fake-bpy-module-2.79-20200428
fake-bpy-module-2.79-20200428//bpy/ops/action.pyfile:/bpy/ops/action.py:function:view_all/view_all
def view_all():
    """Reset viewable area to show full keyframe range
    """
    pass
APLpy-2.0.3
APLpy-2.0.3//astropy_helpers/astropy_helpers/utils.pyfile:/astropy_helpers/astropy_helpers/utils.py:function:get_numpy_include_path/get_numpy_include_path
def get_numpy_include_path():
    """
    Gets the path to the numpy headers.
    """
    import builtins
    if hasattr(builtins, '__NUMPY_SETUP__'):
        del builtins.__NUMPY_SETUP__
    import imp
    import numpy
    imp.reload(numpy)
    try:
        numpy_include = numpy.get_include()
    except AttributeError:
        numpy_include = numpy.get_numpy_include()
    return numpy_include
JapaneseTokenizer
JapaneseTokenizer//datamodels.pyfile:/datamodels.py:function:__is_sotpwords/__is_sotpwords
def __is_sotpwords(token, stopwords):
    """This function filters out stopwords. If token is in stopwords list,
    return True; else return False
    """
    if token in stopwords:
        return True
    else:
        return False
zope.formlib-4.7.1
zope.formlib-4.7.1//src/zope/formlib/interfaces.pyclass:IAddFormCustomization/create
def create(data):
    """Create and return an object to be added to the context.

    The data argument is a dictionary with values supplied by the form.

    If any user errors occur, they should be collected into a list and
    raised as a `WidgetsError`.
    """
cptac
cptac//dataframe_tools.pyfile:/dataframe_tools.py:function:standardize_axes_names/standardize_axes_names
def standardize_axes_names(data_dict):
    """For all dataframes in the given dictionary, sets the name of the
    index axes to "Patient_ID", because that's what they all are by that
    point, and sets the name of the column axes to "Name".

    Parameters:
        data_dict (dict): The dataframe dictionary of the dataset.

    Returns:
        dict: The dataframe dictionary, with the dataframe axes' names
        standardized. Keys are str of dataframe names, values are pandas
        DataFrames
    """
    for name in data_dict.keys():
        df = data_dict[name]
        df.index.name = 'Patient_ID'
        df.columns.name = 'Name'
        data_dict[name] = df
    return data_dict
cleave-0.28
cleave-0.28//cleave/server.pyclass:BaseServer/_read
@staticmethod
def _read(sock, chunk_len=2048):
    """
    Read all chunks from socket connection

    :param sock:
    :param chunk_len:
    :return:
    """
    result = sock.recv(chunk_len)
    if len(result) == chunk_len:
        while True:
            tmp = sock.recv(chunk_len)
            result += tmp
            if len(tmp) < chunk_len:
                break
    return result
numba
numba//core/types/misc.pyfile:/core/types/misc.py:function:unliteral/unliteral
def unliteral(lit_type):
    """
    Get base type from Literal type.
    """
    if hasattr(lit_type, '__unliteral__'):
        return lit_type.__unliteral__()
    return getattr(lit_type, 'literal_type', lit_type)
simple_format-0.2
simple_format-0.2//simple_format/_scan.pyfile:/simple_format/_scan.py:function:scan3/scan3
def scan3(alines):
    """deal with indents in text lines"""
    outer_indent = [0]
    for i, aline in enumerate(alines):
        if aline.type in ['ordered', 'unordered']:
            while outer_indent[-1] >= aline.outer_indent:
                outer_indent.pop()
            outer_indent.append(aline.outer_indent)
        elif aline.type == 'text':
            while outer_indent[-1] > aline.content_start:
                outer_indent.pop()
            aline.indent = outer_indent[-1]
    return alines
fdict
fdict//fdict.pyclass:fdict/_get_root_parent_node
@staticmethod
def _get_root_parent_node(path, delimiter='/', rootpath=None):
    """Get path to the root parent of current leaf"""
    if rootpath:
        startpos = len(rootpath) + 1
    else:
        startpos = 0
    m = path.find(delimiter, startpos)
    return path[:m] if m >= 0 else None
m01.grid-3.0.0
m01.grid-3.0.0//src/m01/grid/interfaces.pyclass:IChunkReader/read
def read(size=-1):
    """Read at most `size` bytes from the file

    If size is negative or omitted all data is read
    """
colcol
colcol//colcol.pyfile:/colcol.py:function:is_rgb/is_rgb
def is_rgb(in_col):
    """
    Check whether input is a valid RGB color.
    Return True if it is, otherwise False.
    """
    if len(in_col) == 3 and type(in_col) == tuple:
        if 0 <= in_col[0] <= 255 and 0 <= in_col[1] <= 255 and 0 <= in_col[2] <= 255:
            return True
        else:
            return False
    else:
        return False
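A short usage sketch, assuming is_rgb above is in scope:

# Hypothetical usage: only 3-tuples with channels in 0..255 pass.
assert is_rgb((255, 128, 0)) is True
assert is_rgb((256, 0, 0)) is False
assert is_rgb([255, 128, 0]) is False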
nibetaseries-0.6.0
nibetaseries-0.6.0//versioneer.pyfile:/versioneer.py:function:render_pep440_old/render_pep440_old
def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if pieces['closest-tag']:
        rendered = pieces['closest-tag']
        if pieces['distance'] or pieces['dirty']:
            rendered += '.post%d' % pieces['distance']
            if pieces['dirty']:
                rendered += '.dev0'
    else:
        rendered = '0.post%d' % pieces['distance']
        if pieces['dirty']:
            rendered += '.dev0'
    return rendered
bpy_nibbler-0.1
bpy_nibbler-0.1//bpy_lambda/2.78/scripts/addons/io_scene_fbx/fbx_utils.pyfile:/bpy_lambda/2.78/scripts/addons/io_scene_fbx/fbx_utils.py:function:similar_values/similar_values
def similar_values(v1, v2, e=1e-06):
    """Return True if v1 and v2 are nearly the same."""
    if v1 == v2:
        return True
    return abs(v1 - v2) / max(abs(v1), abs(v2)) <= e
longling-1.3.12
longling-1.3.12//longling/lib/candylib.pyfile:/longling/lib/candylib.py:function:list2dict/list2dict
def list2dict(list_obj, value=None, dict_obj=None):
    """
    >>> list_obj = ["a", 2, "c"]
    >>> list2dict(list_obj, 10)
    {'a': {2: {'c': 10}}}
    """
    dict_obj = {} if dict_obj is None else dict_obj
    _dict_obj = dict_obj
    for e in list_obj[:-1]:
        if e not in _dict_obj:
            _dict_obj[e] = {}
        _dict_obj = _dict_obj[e]
    _dict_obj[list_obj[-1]] = value
    return dict_obj
ipwb-0.2020.4.24.1847
ipwb-0.2020.4.24.1847//ipwb/replay.pyfile:/ipwb/replay.py:function:compile_target_uri/compile_target_uri
def compile_target_uri(url: str, query_string: bytes) -> str:
    """Append GET query string to the page path, to get full URI."""
    if query_string:
        return '{}?{}'.format(url, query_string.decode('utf-8'))
    else:
        return url
alnitak
alnitak//parser.pyfile:/parser.py:function:version_message/version_message
def version_message(prog):
    """Program version message.

    Args:
        prog (State): program state.

    Returns:
        str: the message to print.
    """
    return '{} {}\n{}'.format(prog.name, prog.version, prog.copyright)
fake-bpy-module-2.79-20200428
fake-bpy-module-2.79-20200428//bpy/ops/anim.pyfile:/bpy/ops/anim.py:function:paste_driver_button/paste_driver_button
def paste_driver_button():
    """Paste the driver in the copy/paste buffer for the highlighted button
    """
    pass
bpy
bpy//ops/outliner.pyfile:/ops/outliner.py:function:collection_enable/collection_enable
def collection_enable():
    """Enable viewport drawing in the view layers
    """
    pass
polaris
polaris//fetch/data_fetch_decoder.pyfile:/fetch/data_fetch_decoder.py:function:build_decode_cmd/build_decode_cmd
def build_decode_cmd(src, dest, decoder):
    """ Build command to decode downloaded into JSON """
    decode_multiple = 'decode_multiple'
    decoder_module = decoder
    input_format = 'csv'
    decode_cmd = (
        '{decode_multiple} --filename {src} --format {input_format} {decoder_module} > {dest}'
        .format(decode_multiple=decode_multiple, decoder_module=decoder_module,
                src=src, input_format=input_format, dest=dest))
    return decode_cmd
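A minimal usage sketch, assuming build_decode_cmd above is in scope (the file names and decoder module below are placeholders, not verified names):

# Hypothetical usage: the decoder name is a placeholder.
cmd = build_decode_cmd('frames.csv', 'frames.json', 'MyDecoder')
assert cmd == 'decode_multiple --filename frames.csv --format csv MyDecoder > frames.json'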
mpu-0.23.0
mpu-0.23.0//mpu/string.pyfile:/mpu/string.py:function:is_int/is_int
def is_int(potential_int: str) -> bool:
    """
    Check if potential_int is a valid integer.

    Parameters
    ----------
    potential_int : str

    Returns
    -------
    is_int : bool

    Examples
    --------
    >>> is_int('123')
    True
    >>> is_int('1234567890123456789')
    True
    >>> is_int('0')
    True
    >>> is_int('-123')
    True
    >>> is_int('123.45')
    False
    >>> is_int('a')
    False
    >>> is_int('0x8')
    False
    """
    try:
        int(potential_int)
        return True
    except ValueError:
        return False
class_only_design-0.3.0
class_only_design-0.3.0//class_only_design/util.pyfile:/class_only_design/util.py:function:_is_sunder/_is_sunder
def _is_sunder(name):
    """Returns True if a _sunder_ name, False otherwise."""
    return (name[0] == name[-1] == '_' and
            name[1:2] != '_' and
            name[-2:-1] != '_' and
            len(name) > 2)
wrk-0.3.2
wrk-0.3.2//kale/task.pyclass:Task/_clean_task_environment
@staticmethod
def _clean_task_environment(task_id=None, task_name=None, exc=None):
    """Cleans the environment for this task.

    Args:
        task_id: string of task id.
        task_name: string of task name.
        exc: The exception raised by the task, None if the task succeeded.
    """
    pass
torrentpy
torrentpy//models/river/inca.pyclass:INCAr/_get_out
@staticmethod
def _get_out(waterbody, datetime_time_step, dict_data_frame,
             r_out_c_no3, r_out_c_nh4, r_out_c_dph, r_out_c_pph, r_out_c_sed,
             r_s_m_no3, r_s_m_nh4, r_s_m_dph, r_s_m_pph, r_s_m_sed):
    """
    This function is the interface between the model and the data models of
    the simulator. It stores the outputs, and updated states in the data frame.
    """
    dict_data_frame[waterbody][datetime_time_step]['r_out_c_no3'] = r_out_c_no3
    dict_data_frame[waterbody][datetime_time_step]['r_out_c_nh4'] = r_out_c_nh4
    dict_data_frame[waterbody][datetime_time_step]['r_out_c_dph'] = r_out_c_dph
    dict_data_frame[waterbody][datetime_time_step]['r_out_c_pph'] = r_out_c_pph
    dict_data_frame[waterbody][datetime_time_step]['r_out_c_sed'] = r_out_c_sed
    dict_data_frame[waterbody][datetime_time_step]['r_s_m_no3'] = r_s_m_no3
    dict_data_frame[waterbody][datetime_time_step]['r_s_m_nh4'] = r_s_m_nh4
    dict_data_frame[waterbody][datetime_time_step]['r_s_m_dph'] = r_s_m_dph
    dict_data_frame[waterbody][datetime_time_step]['r_s_m_pph'] = r_s_m_pph
    dict_data_frame[waterbody][datetime_time_step]['r_s_m_sed'] = r_s_m_sed
pmxutils
pmxutils//mathtools.pyfile:/mathtools.py:function:advConstruct/advConstruct
def advConstruct(expression, *args, constants={}):
    """Returns a function computing the given expression.
    The variable names need to be listed as individual string arguments.
    Constants is an optional argument with the name and value of constants
    in the expression"""
    string = """def func({}):
    return {}""".format(', '.join(args), expression.replace('^', '**'))
    exec(string, constants)
    return constants['func']
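A minimal usage sketch, assuming advConstruct above is in scope (the expression and constant name are illustrative):

# Hypothetical usage: build f(x) = a*x^2 + 1 with the constant a bound to 3.
f = advConstruct('a*x^2 + 1', 'x', constants={'a': 3})
assert f(2) == 13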