| Unnamed: 0 (int64, 0–10k) | function (string, 79–138k chars) | label (string, 20 classes) | info (string, 42–261 chars) |
---|---|---|---|
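Each row below pairs a Python `function` containing a `__HOLE__` placeholder with the exception class that fills it (`label`) and the path of the file it was mined from (`info`). As a minimal illustration of how the columns fit together — not an extra dataset row — here is row 5,100 with its hole resolved to its label, `ImportError`:

```python
def has_c():
    """Is the C extension installed?"""
    try:
        # Importing the C extension succeeds only when it is built and installed.
        from pymongo import _cmessage
        return True
    except ImportError:  # the row's label is substituted for __HOLE__
        return False
```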
5,100 | def has_c():
    """Is the C extension installed?"""
    try:
        from pymongo import _cmessage
        return True
    except __HOLE__:
        return False | ImportError | dataset/ETHPy150Open mongodb/mongo-python-driver/pymongo/__init__.py/has_c |
5,101 | def get_queryset(self):
    """
    Fixes get_query_set vs get_queryset for Django <1.6
    """
    try:
        qs = super(UserManager, self).get_queryset()
    except __HOLE__:  # pragma: no cover
        qs = super(UserManager, self).get_query_set()
    return qs | AttributeError | dataset/ETHPy150Open mishbahr/django-users2/users/managers.py/UserManager.get_queryset |
5,102 | def testUnhashableKeys(self):
    try:
        a = {[1]:2}
    except TypeError:
        pass
    else:
        self.fail("list as dict key should raise TypeError")
    try:
        a = {{1:2}:3}
    except __HOLE__:
        pass
    else:
        self.fail("dict as dict key should raise TypeError") | TypeError | dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_dict_jy.py/DictInitTest.testUnhashableKeys |
5,103 | def get_objects(self):
    """ Returns a set of objects set for deletion. The list of objects is
    retrieved from an HTTP POST list called ``objects``.

    Returns
    -------
    set
        Set of objects to delete
    """
    try:
        return self._objs
    except AttributeError:
        objects = set()
        model = self.get_model()
        # Values to use in object lookup; could be an id, for example, or a slug, etc.
        vals = request.values.getlist('objects')
        try:
            field = getattr(model, self.get_lookup_field())
        except __HOLE__:
            raise AttributeError('Lookup field does not exist')
        for obj in model.query.filter(field.in_(vals)).all():
            objects.add(obj)
        self._objs = objects
        return objects | AttributeError | dataset/ETHPy150Open thisissoon/Flask-Velox/flask_velox/mixins/sqla/delete.py/MultiDeleteObjectMixin.get_objects |
5,104 | def clean_rename_format(self):
    try:
        self.cleaned_data['rename_format'] % {
            'original_filename': 'filename',
            'original_basename': 'basename',
            'original_extension': 'ext',
            'current_filename': 'filename',
            'current_basename': 'basename',
            'current_extension': 'ext',
            'current_folder': 'folder',
            'counter': 42,
            'global_counter': 42,
        }
    except __HOLE__ as e:
        raise forms.ValidationError(_('Unknown rename format value key "%(key)s".') % {'key': e.args[0]})
    except Exception as e:
        raise forms.ValidationError(_('Invalid rename format: %(error)s.') % {'error': e})
    return self.cleaned_data['rename_format'] | KeyError | dataset/ETHPy150Open divio/django-filer/filer/admin/forms.py/RenameFilesForm.clean_rename_format |
5,105 | def __contains__(self, item):
    """
    Checks if the layer is inside the packet.

    :param item: name of the layer
    """
    try:
        self[item]
        return True
    except __HOLE__:
        return False | KeyError | dataset/ETHPy150Open KimiNewt/pyshark/src/pyshark/packet/packet.py/Packet.__contains__ |
5,106 | @property
def sniff_time(self):
    try:
        timestamp = float(self.sniff_timestamp)
    except __HOLE__:
        # If the value after the decimal point is negative, discard it
        # Google: wireshark fractional second
        timestamp = float(self.sniff_timestamp.split(".")[0])
    return datetime.datetime.fromtimestamp(timestamp) | ValueError | dataset/ETHPy150Open KimiNewt/pyshark/src/pyshark/packet/packet.py/Packet.sniff_time |
5,107 | def __contains__(self, key):
    try:
        self[key]
        return True
    except __HOLE__:
        return False | KeyError | dataset/ETHPy150Open brutasse/graphite-api/graphite_api/utils.py/RequestParams.__contains__ |
5,108 | def get(self, key, default=None):
    try:
        return self[key]
    except __HOLE__:
        return default | KeyError | dataset/ETHPy150Open brutasse/graphite-api/graphite_api/utils.py/RequestParams.get |
5,109 | def _handle_server(self):
    server = self._server_socket
    data, r_addr = server.recvfrom(BUF_SIZE)
    if not data:
        logging.debug('UDP handle_server: data is empty')
    if self._stat_callback:
        self._stat_callback(self._listen_port, len(data))
    if self._is_local:
        frag = common.ord(data[2])
        if frag != 0:
            logging.warn('drop a message since frag is not 0')
            return
        else:
            data = data[3:]
    else:
        data = encrypt.encrypt_all(self._password, self._method, 0, data)
        # decrypt data
        if not data:
            logging.debug('UDP handle_server: data is empty after decrypt')
            return
    header_result = parse_header(data)
    if header_result is None:
        return
    addrtype, dest_addr, dest_port, header_length = header_result
    if self._is_local:
        server_addr, server_port = self._get_a_server()
    else:
        server_addr, server_port = dest_addr, dest_port
    addrs = self._dns_cache.get(server_addr, None)
    if addrs is None:
        addrs = socket.getaddrinfo(server_addr, server_port, 0,
                                   socket.SOCK_DGRAM, socket.SOL_UDP)
        if not addrs:
            # drop
            return
        else:
            self._dns_cache[server_addr] = addrs
    af, socktype, proto, canonname, sa = addrs[0]
    key = client_key(r_addr, af)
    client = self._cache.get(key, None)
    if not client:
        # TODO async getaddrinfo
        if self._forbidden_iplist:
            if common.to_str(sa[0])) if False else common.to_str(sa[0]) in self._forbidden_iplist:
                logging.debug('IP %s is in forbidden list, drop' %
                              common.to_str(sa[0]))
                # drop
                return
        client = socket.socket(af, socktype, proto)
        client.setblocking(False)
        self._cache[key] = client
        self._client_fd_to_server_addr[client.fileno()] = r_addr
        self._sockets.add(client.fileno())
        self._eventloop.add(client, eventloop.POLL_IN, self)
    if self._is_local:
        data = encrypt.encrypt_all(self._password, self._method, 1, data)
        if not data:
            return
    else:
        data = data[header_length:]
    if not data:
        return
    try:
        client.sendto(data, (server_addr, server_port))
    except __HOLE__ as e:
        err = eventloop.errno_from_exception(e)
        if err in (errno.EINPROGRESS, errno.EAGAIN):
            pass
        else:
            shell.print_exception(e) | IOError | dataset/ETHPy150Open ziggear/shadowsocks/shadowsocks/udprelay.py/UDPRelay._handle_server |
5,110 | def with_errorcheck_client(*features):
    def inner(f):
        def run():
            try:
                return with_client(*features, connected=False)(f)()
            except __HOLE__ as e:
                assert False, e
        run.__name__ = f.__name__
        return run
    return inner | TypeError | dataset/ETHPy150Open Shizmob/pydle/tests/test_featurize.py/with_errorcheck_client |
5,111 | def __init__(self, *args, **kwds):
    '''Initialize an ordered dictionary. Signature is the same as for
    regular dictionaries, but keyword arguments are not recommended
    because their insertion order is arbitrary.
    '''
    if len(args) > 1:
        raise TypeError('expected at most 1 arguments, got %d' % len(args))
    try:
        self.__root
    except __HOLE__:
        self.__root = root = []  # sentinel node
        root[:] = [root, root, None]
        self.__map = {}
    self.__update(*args, **kwds) | AttributeError | dataset/ETHPy150Open xraypy/xraylarch/doc/sphinx/ext/backports.py/OrderedDict.__init__ |
5,112 | def clear(self):
    'od.clear() -> None. Remove all items from od.'
    try:
        for node in self.__map.itervalues():
            del node[:]
        root = self.__root
        root[:] = [root, root, None]
        self.__map.clear()
    except __HOLE__:
        pass
    dict.clear(self) | AttributeError | dataset/ETHPy150Open xraypy/xraylarch/doc/sphinx/ext/backports.py/OrderedDict.clear |
5,113 | def is_active(self, view_or_path):
    assert (isinstance(view_or_path, sublime.View) or
            isinstance(view_or_path, str)), "bad parameter"
    try:
        return is_active(view_or_path)
    except __HOLE__:
        return is_active_path(view_or_path) | AttributeError | dataset/ETHPy150Open guillermooo/dart-sublime-bundle/lib/analyzer/queue.py/AnalyzerQueue.is_active |
5,114 | def read_status(self):
    request = Connection()
    request.write_varint(0)  # Request status
    self.connection.write_buffer(request)
    response = self.connection.read_buffer()
    if response.read_varint() != 0:
        raise IOError("Received invalid status response packet.")
    try:
        raw = json.loads(response.read_utf())
    except ValueError:
        raise IOError("Received invalid JSON")
    try:
        return PingResponse(raw)
    except __HOLE__ as e:
        raise IOError("Received invalid status response: %s" % e) | ValueError | dataset/ETHPy150Open Dinnerbone/mcstatus/mcstatus/pinger.py/ServerPinger.read_status |
5,115 | def tearDown(self):
    try:
        self.proxyServerFactory.protoInstance.transport.loseConnection()
    except __HOLE__:
        pass
    try:
        pi = self.proxyServerFactory.clientFactoryInstance.protoInstance
        pi.transport.loseConnection()
    except AttributeError:
        pass
    try:
        self.clientProtocol.transport.loseConnection()
    except AttributeError:
        pass
    try:
        self.serverProtocol.transport.loseConnection()
    except AttributeError:
        pass
    return defer.gatherResults(
        [defer.maybeDeferred(p.stopListening) for p in self.openPorts]) | AttributeError | dataset/ETHPy150Open nlloyd/SubliminalCollaborator/libs/twisted/test/test_protocols.py/Portforwarding.tearDown |
5,116 | def close(self):
    for fd in self._fd_to_chan:
        try:
            self.poller.unregister(fd)
        except __HOLE__:
            pass
    self._channels.clear()
    self._fd_to_chan.clear()
    self.poller = None | KeyError | dataset/ETHPy150Open celery/kombu/kombu/transport/zmq.py/MultiChannelPoller.close |
5,117 | def __init__(self, uri='tcp://127.0.0.1', port=DEFAULT_PORT,
             hwm=DEFAULT_HWM, swap_size=None, enable_sink=True,
             context=None):
    try:
        scheme, parts = uri.split('://')
    except __HOLE__:
        scheme = 'tcp'
        parts = uri
    endpoints = parts.split(';')
    self.port = port
    if scheme != 'tcp':
        raise NotImplementedError('Currently only TCP can be used')
    self.context = context or zmq.Context.instance()
    if enable_sink:
        self.sink = self.context.socket(zmq.PULL)
        self.sink.bind('tcp://*:{0.port}'.format(self))
    else:
        self.sink = None
    self.vent = self.context.socket(zmq.PUSH)
    if hasattr(zmq, 'SNDHWM'):
        self.vent.setsockopt(zmq.SNDHWM, hwm)
    else:
        self.vent.setsockopt(zmq.HWM, hwm)
    if swap_size:
        self.vent.setsockopt(zmq.SWAP, swap_size)
    for endpoint in endpoints:
        if scheme == 'tcp' and ':' not in endpoint:
            endpoint += ':' + str(DEFAULT_PORT)
        endpoint = ''.join([scheme, '://', endpoint])
        self.connect(endpoint) | ValueError | dataset/ETHPy150Open celery/kombu/kombu/transport/zmq.py/Client.__init__ |
5,118 | def close(self):
    if not self.closed:
        self.connection.cycle.discard(self)
        try:
            self.__dict__['client'].close()
        except __HOLE__:
            pass
    super(Channel, self).close() | KeyError | dataset/ETHPy150Open celery/kombu/kombu/transport/zmq.py/Channel.close |
5,119 | def close_connection(self, connection):
    super(Transport, self).close_connection(connection)
    try:
        connection.__dict__['context'].term()
    except __HOLE__:
        pass | KeyError | dataset/ETHPy150Open celery/kombu/kombu/transport/zmq.py/Transport.close_connection |
5,120 | def handle_noargs(self, **kwargs):
    try:
        verbosity = int(kwargs['verbosity'])
    except (__HOLE__, TypeError, ValueError):
        verbosity = 1
    feeds = Feed.objects.filter(approval_status=PENDING_FEED)
    to_email = [x.email for x in User.objects.filter(groups__name=settings.FEED_APPROVERS_GROUP_NAME)]
    if len(feeds) == 0:
        if verbosity >= 1:
            print "There are no pending feeds. Skipping the email."
        return
    site = Site.objects.get(pk=1)
    email = """The following feeds are pending approval:
{% regroup feeds by feed_type as feed_grouping %}{% for group in feed_grouping %}
{{ group.grouper }} {% for feed in group.list %}
 - {{ feed.title }} ( {{ feed.feed_url }} ) {% endfor %}
{% endfor %}
To approve them, visit: http://{{ site.domain }}{% url admin:aggregator_feed_changelist %}
"""
    message = Template(email).render(Context({'feeds': feeds, 'site': site}))
    if verbosity >= 2:
        print "Pending approval email:\n"
        print message
    mail.send_mail("django community feeds pending approval", message,
                   'nobody@%s' % site.domain,
                   to_email,
                   fail_silently=False)
    if verbosity >= 1:
        print "Sent pending approval email to: %s" % (', '.join(to_email)) | KeyError | dataset/ETHPy150Open python/raspberryio/raspberryio/aggregator/management/commands/send_pending_approval_email.py/Command.handle_noargs |
5,121 | def _to_node(self, compute):
    try:
        state = self.NODE_STATE_MAP[compute.findtext("STATE")]
    except __HOLE__:
        state = NodeState.UNKNOWN
    networks = []
    for element in compute.findall("NIC"):
        networks.append(element.attrib["ip"])
    return Node(id=compute.findtext("ID"),
                name=compute.findtext("NAME"),
                state=state,
                public_ip=networks,
                private_ip=[],
                driver=self.connection.driver) | KeyError | dataset/ETHPy150Open cloudkick/libcloud/libcloud/compute/drivers/opennebula.py/OpenNebulaNodeDriver._to_node |
5,122 | def verifyCryptedPassword(crypted, pw):
    if crypted[0] == '$':  # md5_crypt encrypted
        salt = '$1$' + crypted.split('$')[2]
    else:
        salt = crypted[:2]
    try:
        import crypt
    except __HOLE__:
        crypt = None
    if crypt is None:
        raise NotImplementedError("cred_unix not supported on this platform")
    return crypt.crypt(pw, salt) == crypted | ImportError | dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/plugins/cred_unix.py/verifyCryptedPassword |
5,123 | def checkPwd(self, pwd, username, password):
    try:
        cryptedPass = pwd.getpwnam(username)[1]
    except __HOLE__:
        return defer.fail(UnauthorizedLogin())
    else:
        if cryptedPass in ('*', 'x'):
            # Allow checkSpwd to take over
            return None
        elif verifyCryptedPassword(cryptedPass, password):
            return defer.succeed(username) | KeyError | dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/plugins/cred_unix.py/UNIXChecker.checkPwd |
5,124 | def checkSpwd(self, spwd, username, password):
    try:
        cryptedPass = spwd.getspnam(username)[1]
    except __HOLE__:
        return defer.fail(UnauthorizedLogin())
    else:
        if verifyCryptedPassword(cryptedPass, password):
            return defer.succeed(username) | KeyError | dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/plugins/cred_unix.py/UNIXChecker.checkSpwd |
5,125 | def requestAvatarId(self, credentials):
    username, password = credentials.username, credentials.password
    try:
        import pwd
    except __HOLE__:
        pwd = None
    if pwd is not None:
        checked = self.checkPwd(pwd, username, password)
        if checked is not None:
            return checked
    try:
        import spwd
    except ImportError:
        spwd = None
    if spwd is not None:
        checked = self.checkSpwd(spwd, username, password)
        if checked is not None:
            return checked
    # TODO: check_pam?
    # TODO: check_shadow?
    return defer.fail(UnauthorizedLogin()) | ImportError | dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/plugins/cred_unix.py/UNIXChecker.requestAvatarId |
5,126 | @task
def coverage():
    """Run tests and show test coverage report."""
    try:
        import pytest_cov  # NOQA
    except __HOLE__:
        print_failure_message(
            'Install the pytest coverage plugin to use this task, '
            "i.e., `pip install pytest-cov'.")
        raise SystemExit(1)
    import pytest
    pytest.main(PYTEST_FLAGS + [
        '--cov', CODE_DIRECTORY,
        '--cov-report', 'term-missing',
        TESTS_DIRECTORY]) | ImportError | dataset/ETHPy150Open calvinschmdt/EasyTensorflow/pavement.py/coverage |
5,127 | @task  # NOQA
def doc_watch():
    """Watch for changes in the docs and rebuild HTML docs when changed."""
    try:
        from watchdog.events import FileSystemEventHandler
        from watchdog.observers import Observer
    except ImportError:
        print_failure_message('Install the watchdog package to use this task, '
                              "i.e., `pip install watchdog'.")
        raise SystemExit(1)

    class RebuildDocsEventHandler(FileSystemEventHandler):
        def __init__(self, base_paths):
            self.base_paths = base_paths

        def dispatch(self, event):
            """Dispatches events to the appropriate methods.

            :param event: The event object representing the file system event.
            :type event: :class:`watchdog.events.FileSystemEvent`
            """
            for base_path in self.base_paths:
                if event.src_path.endswith(base_path):
                    super(RebuildDocsEventHandler, self).dispatch(event)
                    # We found one that matches. We're done.
                    return

        def on_modified(self, event):
            print_failure_message('Modification detected. Rebuilding docs.')
            # # Strip off the path prefix.
            # import os
            # if event.src_path[len(os.getcwd()) + 1:].startswith(
            #         CODE_DIRECTORY):
            # # sphinx-build doesn't always pick up changes on code files,
            # # even though they are used to generate the documentation. As
            # # a workaround, just clean before building.
            doc_html()
            print_success_message('Docs have been rebuilt.')

    print_success_message(
        'Watching for changes in project files, press Ctrl-C to cancel...')
    handler = RebuildDocsEventHandler(get_project_files())
    observer = Observer()
    observer.schedule(handler, path='.', recursive=True)
    observer.start()
    try:
        while True:
            time.sleep(1)
    except __HOLE__:
        observer.stop()
    observer.join() | KeyboardInterrupt | dataset/ETHPy150Open calvinschmdt/EasyTensorflow/pavement.py/doc_watch |
5,128 | def visit_field(self, node):
    # Remove the field from the tree.
    node.parent.remove(node)
    # Extract the field name & optional argument
    tag = node[0].astext().split(None, 1)
    tagname = tag[0]
    if len(tag) > 1: arg = tag[1]
    else: arg = None
    # Handle special fields:
    fbody = node[1]
    if arg is None:
        for (list_tag, entry_tag) in CONSOLIDATED_FIELDS.items():
            if tagname.lower() == list_tag:
                try:
                    self.handle_consolidated_field(fbody, entry_tag)
                    return
                except __HOLE__, e:
                    estr = 'Unable to split consolidated field '
                    estr += '"%s" - %s' % (tagname, e)
                    self._errors.append(ParseError(estr, node.line,
                                                   is_fatal=0))
                    # Use a @newfield to let it be displayed as-is.
                    if tagname.lower() not in self._newfields:
                        newfield = Field('newfield', tagname.lower(),
                                         parse(tagname, 'plaintext'))
                        self.fields.append(newfield)
                        self._newfields[tagname.lower()] = 1
    self._add_field(tagname, arg, fbody) | ValueError | dataset/ETHPy150Open CollabQ/CollabQ/vendor/epydoc/markup/restructuredtext.py/_SplitFieldsTranslator.visit_field |
5,129 | def write_outstream(stream, *text):
    encoding = getattr(stream, 'encoding', 'ascii') or 'ascii'
    for t in text:
        if not isinstance(t, binary_type):
            t = t.encode(encoding, 'replace')
        t = t.decode(encoding)
        try:
            stream.write(t)
        except __HOLE__:
            # suppress "broken pipe" errors.
            # no known way to handle this on Python 3 however
            # as the exception is "ignored" (noisily) in TextIOWrapper.
            break | IOError | dataset/ETHPy150Open goFrendiAsgard/kokoropy/kokoropy/packages/alembic/util.py/write_outstream |
5,130 | def shapeup(self, ob):
    if isinstance(ob, BaseGeometry):
        return ob
    else:
        try:
            return asShape(ob)
        except __HOLE__:
            return asLineString(ob) | ValueError | dataset/ETHPy150Open Toblerity/Shapely/shapely/ops.py/CollectionOperator.shapeup |
5,131 | def polygonize(self, lines):
    """Creates polygons from a source of lines

    The source may be a MultiLineString, a sequence of LineString objects,
    or a sequence of objects that can be adapted to LineStrings.
    """
    source = getattr(lines, 'geoms', None) or lines
    try:
        source = iter(source)
    except __HOLE__:
        source = [source]
    finally:
        obs = [self.shapeup(l) for l in source]
    geom_array_type = c_void_p * len(obs)
    geom_array = geom_array_type()
    for i, line in enumerate(obs):
        geom_array[i] = line._geom
    product = lgeos.GEOSPolygonize(byref(geom_array), len(obs))
    collection = geom_factory(product)
    for g in collection.geoms:
        clone = lgeos.GEOSGeom_clone(g._geom)
        g = geom_factory(clone)
        g._other_owned = False
        yield g | TypeError | dataset/ETHPy150Open Toblerity/Shapely/shapely/ops.py/CollectionOperator.polygonize |
5,132 | def polygonize_full(self, lines):
    """Creates polygons from a source of lines, returning the polygons
    and leftover geometries.

    The source may be a MultiLineString, a sequence of LineString objects,
    or a sequence of objects that can be adapted to LineStrings.

    Returns a tuple of objects: (polygons, dangles, cut edges, invalid ring
    lines). Each is a geometry collection.

    Dangles are edges which have one or both ends which are not incident on
    another edge endpoint. Cut edges are connected at both ends but do not
    form part of a polygon. Invalid ring lines form rings which are invalid
    (bowties, etc).
    """
    source = getattr(lines, 'geoms', None) or lines
    try:
        source = iter(source)
    except __HOLE__:
        source = [source]
    finally:
        obs = [self.shapeup(l) for l in source]
    L = len(obs)
    subs = (c_void_p * L)()
    for i, g in enumerate(obs):
        subs[i] = g._geom
    collection = lgeos.GEOSGeom_createCollection(5, subs, L)
    dangles = c_void_p()
    cuts = c_void_p()
    invalids = c_void_p()
    product = lgeos.GEOSPolygonize_full(
        collection, byref(dangles), byref(cuts), byref(invalids))
    return (
        geom_factory(product),
        geom_factory(dangles),
        geom_factory(cuts),
        geom_factory(invalids)
    ) | TypeError | dataset/ETHPy150Open Toblerity/Shapely/shapely/ops.py/CollectionOperator.polygonize_full |
5,133 | def linemerge(self, lines):
    """Merges all connected lines from a source

    The source may be a MultiLineString, a sequence of LineString objects,
    or a sequence of objects that can be adapted to LineStrings. Returns a
    LineString or MultiLineString when lines are not contiguous.
    """
    source = None
    if hasattr(lines, 'type') and lines.type == 'MultiLineString':
        source = lines
    elif hasattr(lines, '__iter__'):
        try:
            source = asMultiLineString([ls.coords for ls in lines])
        except __HOLE__:
            source = asMultiLineString(lines)
    if source is None:
        raise ValueError("Cannot linemerge %s" % lines)
    result = lgeos.GEOSLineMerge(source._geom)
    return geom_factory(result) | AttributeError | dataset/ETHPy150Open Toblerity/Shapely/shapely/ops.py/CollectionOperator.linemerge |
5,134 | def cascaded_union(self, geoms):
    """Returns the union of a sequence of geometries

    This is the most efficient method of dissolving many polygons.
    """
    try:
        L = len(geoms)
    except __HOLE__:
        geoms = [geoms]
        L = 1
    subs = (c_void_p * L)()
    for i, g in enumerate(geoms):
        subs[i] = g._geom
    collection = lgeos.GEOSGeom_createCollection(6, subs, L)
    return geom_factory(lgeos.methods['cascaded_union'](collection)) | TypeError | dataset/ETHPy150Open Toblerity/Shapely/shapely/ops.py/CollectionOperator.cascaded_union |
5,135 | def unary_union(self, geoms):
    """Returns the union of a sequence of geometries

    This method replaces :meth:`cascaded_union` as the
    preferred method for dissolving many polygons.
    """
    try:
        L = len(geoms)
    except __HOLE__:
        geoms = [geoms]
        L = 1
    subs = (c_void_p * L)()
    for i, g in enumerate(geoms):
        subs[i] = g._geom
    collection = lgeos.GEOSGeom_createCollection(6, subs, L)
    return geom_factory(lgeos.methods['unary_union'](collection)) | TypeError | dataset/ETHPy150Open Toblerity/Shapely/shapely/ops.py/CollectionOperator.unary_union |
5,136 | def transform(func, geom):
    """Applies `func` to all coordinates of `geom` and returns a new
    geometry of the same type from the transformed coordinates.

    `func` maps x, y, and optionally z to output xp, yp, zp. The input
    parameters may be iterable types like lists or arrays or single values.
    The output shall be of the same type. Scalars in, scalars out.
    Lists in, lists out.

    For example, here is an identity function applicable to both types
    of input.

        def id_func(x, y, z=None):
            return tuple(filter(None, [x, y, z]))

        g2 = transform(id_func, g1)

    A partially applied transform function from pyproj satisfies the
    requirements for `func`.

        from functools import partial
        import pyproj

        project = partial(
            pyproj.transform,
            pyproj.Proj(init='epsg:4326'),
            pyproj.Proj(init='epsg:26913'))

        g2 = transform(project, g1)

    Lambda expressions such as the one in

        g2 = transform(lambda x, y, z=None: (x+1.0, y+1.0), g1)

    also satisfy the requirements for `func`.
    """
    if geom.is_empty:
        return geom
    if geom.type in ('Point', 'LineString', 'LinearRing', 'Polygon'):
        # First we try to apply func to x, y, z sequences. When func is
        # optimized for sequences, this is the fastest, though zipping
        # the results up to go back into the geometry constructors adds
        # extra cost.
        try:
            if geom.type in ('Point', 'LineString', 'LinearRing'):
                return type(geom)(zip(*func(*izip(*geom.coords))))
            elif geom.type == 'Polygon':
                shell = type(geom.exterior)(
                    zip(*func(*izip(*geom.exterior.coords))))
                holes = list(type(ring)(zip(*func(*izip(*ring.coords))))
                             for ring in geom.interiors)
                return type(geom)(shell, holes)
        # A func that assumes x, y, z are single values will likely raise a
        # TypeError, in which case we'll try again.
        except __HOLE__:
            if geom.type in ('Point', 'LineString', 'LinearRing'):
                return type(geom)([func(*c) for c in geom.coords])
            elif geom.type == 'Polygon':
                shell = type(geom.exterior)(
                    [func(*c) for c in geom.exterior.coords])
                holes = list(type(ring)([func(*c) for c in ring.coords])
                             for ring in geom.interiors)
                return type(geom)(shell, holes)
    elif geom.type.startswith('Multi') or geom.type == 'GeometryCollection':
        return type(geom)([transform(func, part) for part in geom.geoms])
    else:
        raise ValueError('Type %r not recognized' % geom.type) | TypeError | dataset/ETHPy150Open Toblerity/Shapely/shapely/ops.py/transform |
5,137 | def main():
    p = OptionParser(usage=__doc__.strip())
    p.add_option("--clean", "-c", action="store_true",
                 help="clean source directory")
    options, args = p.parse_args()
    if not args:
        p.error('no submodules given')
    else:
        dirs = ['numpy/%s' % x for x in map(os.path.basename, args)]
    # Prepare
    if not os.path.isdir(TEMP):
        os.makedirs(TEMP)
    # Set up dummy files (for building only submodules)
    dummy_files = {
        '__init__.py': 'from numpy.version import version as __version__',
        'version.py': 'version = "1.4.0.dev"'
    }
    for fn, content in dummy_files.items():
        fn = os.path.join(TEMP, 'numpy', fn)
        if not os.path.isfile(fn):
            try:
                os.makedirs(os.path.dirname(fn))
            except __HOLE__:
                pass
            f = open(fn, 'wb+')
            f.write(content.encode('ascii'))
            f.close()
    # Environment
    pp = [os.path.abspath(TEMP)]
    def getenv():
        env = dict(os.environ)
        env.update({'PYTHONPATH': ':'.join(pp)})
        return env
    # Copy
    for d in dirs:
        src = os.path.join(BASE, d)
        dst = os.path.join(TEMP, d)
        # Run 2to3
        sync_2to3(dst=dst,
                  src=src,
                  patchfile=os.path.join(TEMP, os.path.basename(d) + '.patch'),
                  clean=options.clean)
        # Run setup.py, falling back to Pdb post-mortem on exceptions
        setup_py = os.path.join(dst, 'setup.py')
        if os.path.isfile(setup_py):
            code = """\
import pdb, sys, traceback
p = pdb.Pdb()
try:
    import __main__
    __main__.__dict__.update({
        "__name__": "__main__", "__file__": "setup.py",
        "__builtins__": __builtins__})
    fp = open("setup.py", "rb")
    try:
        exec(compile(fp.read(), "setup.py", 'exec'))
    finally:
        fp.close()
except SystemExit:
    raise
except:
    traceback.print_exc()
    t = sys.exc_info()[2]
    p.interaction(None, t)
"""
            ret = subprocess.call([sys.executable, '-c', code,
                                   'build_ext', '-i'],
                                  cwd=dst,
                                  env=getenv())
            if ret != 0:
                raise RuntimeError("Build failed.")
        # Run nosetests
        subprocess.call(['nosetests3', '-v', d], cwd=TEMP) | OSError | dataset/ETHPy150Open cournape/Bento/bento/private/_yaku/tools/py3tool.py/main |
5,138 | def sync_2to3(src, dst, patchfile=None, clean=False):
    import lib2to3.main
    from io import StringIO
    to_convert = []
    for src_dir, dst_dir, dirs, files in walk_sync(src, dst):
        for fn in dirs + files:
            src_fn = os.path.join(src_dir, fn)
            dst_fn = os.path.join(dst_dir, fn)
            # skip temporary etc. files
            if fn.startswith('.#') or fn.endswith('~'):
                continue
            # remove non-existing
            if os.path.exists(dst_fn) and not os.path.exists(src_fn):
                if clean:
                    if os.path.isdir(dst_fn):
                        shutil.rmtree(dst_fn)
                    else:
                        os.unlink(dst_fn)
                continue
            # make directories
            if os.path.isdir(src_fn):
                if not os.path.isdir(dst_fn):
                    os.makedirs(dst_fn)
                continue
            dst_dir = os.path.dirname(dst_fn)
            if os.path.isfile(dst_fn) and not os.path.isdir(dst_dir):
                os.makedirs(dst_dir)
            # don't replace up-to-date files
            try:
                if os.path.isfile(dst_fn) and \
                        os.stat(dst_fn).st_mtime >= os.stat(src_fn).st_mtime:
                    continue
            except __HOLE__:
                pass
            # copy file
            shutil.copyfile(src_fn, dst_fn)
            # add .py files to 2to3 list
            if dst_fn.endswith('.py'):
                to_convert.append((src_fn, dst_fn))
    # run 2to3
    flag_sets = {}
    for fn, dst_fn in to_convert:
        flag = ''
        for pat, opt in EXTRA_2TO3_FLAGS.items():
            if fnmatch.fnmatch(fn, pat):
                flag = opt
                break
        flag_sets.setdefault(flag, []).append(dst_fn)
    if patchfile:
        p = open(patchfile, 'wb+')
    else:
        p = open(os.devnull, 'wb')
    for flags, filenames in flag_sets.items():
        if flags == 'skip':
            continue
        _old_stdout = sys.stdout
        try:
            sys.stdout = StringIO()
            lib2to3.main.main("lib2to3.fixes", ['-w'] + flags.split() + filenames)
        finally:
            sys.stdout = _old_stdout
    for fn, dst_fn in to_convert:
        # perform custom mangling
        custom_mangling(dst_fn)
    p.close() | OSError | dataset/ETHPy150Open cournape/Bento/bento/private/_yaku/tools/py3tool.py/sync_2to3 |
5,139 | @register.tag
def gravatar_url(parser, token):
    try:
        tag_name, email = token.split_contents()
    except __HOLE__:
        raise template.TemplateSyntaxError, "%r tag requires a single argument" % token.contents.split()[0]
    return GravatarUrlNode(email) | ValueError | dataset/ETHPy150Open darcyliu/storyboard/home/templatetags/filter.py/gravatar_url |
5,140 | @register.filter(name='limit')
def limit(value, arg):
    """
    Returns a slice of the list.

    Uses the same syntax as Python's list slicing; see
    http://diveintopython.org/native_data_types/lists.html#odbchelper.list.slice
    for an introduction.
    """
    try:
        bits = []
        for x in arg.split(u':'):
            if len(x) == 0:
                bits.append(None)
            else:
                bits.append(int(x))
        return value[slice(*bits)]
    except (ValueError, __HOLE__):
        return value  # Fail silently. | TypeError | dataset/ETHPy150Open darcyliu/storyboard/home/templatetags/filter.py/limit |
5,141 | def _cursor(self):
    cursor = None
    if not self._valid_connection():
        conn_string = convert_unicode(self._connect_string())
        self.connection = Database.connect(conn_string, **self.settings_dict['DATABASE_OPTIONS'])
        cursor = FormatStylePlaceholderCursor(self.connection)
        # Set oracle date to ansi date format. This only needs to execute
        # once when we create a new connection. We also set the Territory
        # to 'AMERICA' which forces Sunday to evaluate to a '1' in TO_CHAR().
        cursor.execute("ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS' "
                       "NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS.FF' "
                       "NLS_TERRITORY = 'AMERICA'")
        try:
            self.oracle_version = int(self.connection.version.split('.')[0])
            # There's no way for the DatabaseOperations class to know the
            # currently active Oracle version, so we do some setups here.
            # TODO: Multi-db support will need a better solution (a way to
            # communicate the current version).
            if self.oracle_version <= 9:
                self.ops.regex_lookup = self.ops.regex_lookup_9
            else:
                self.ops.regex_lookup = self.ops.regex_lookup_10
        except __HOLE__:
            pass
        try:
            self.connection.stmtcachesize = 20
        except:
            # Django docs specify cx_Oracle version 4.3.1 or higher, but
            # stmtcachesize is available only in 4.3.2 and up.
            pass
        connection_created.send(sender=self.__class__)
    if not cursor:
        cursor = FormatStylePlaceholderCursor(self.connection)
    return cursor

# Oracle doesn't support savepoint commits. Ignore them. | ValueError | dataset/ETHPy150Open CollabQ/CollabQ/vendor/django/db/backends/oracle/base.py/DatabaseWrapper._cursor |
5,142 | def executemany(self, query, params=None):
    try:
        args = [(':arg%d' % i) for i in range(len(params[0]))]
    except (IndexError, __HOLE__):
        # No params given, nothing to do
        return None
    # cx_Oracle wants no trailing ';' for SQL statements. For PL/SQL,
    # it does want a trailing ';' but not a trailing '/'. However, these
    # characters must be included in the original query in case the query
    # is being passed to SQL*Plus.
    if query.endswith(';') or query.endswith('/'):
        query = query[:-1]
    query = convert_unicode(query % tuple(args), self.charset)
    formatted = [self._format_params(i) for i in params]
    self._guess_input_sizes(formatted)
    try:
        return self.cursor.executemany(query,
                                       [self._param_generator(p) for p in formatted])
    except DatabaseError, e:
        # cx_Oracle <= 4.4.0 wrongly raises a DatabaseError for ORA-01400.
        if e.args[0].code == 1400 and not isinstance(e, IntegrityError):
            e = IntegrityError(e.args[0])
        raise e | TypeError | dataset/ETHPy150Open CollabQ/CollabQ/vendor/django/db/backends/oracle/base.py/FormatStylePlaceholderCursor.executemany |
5,143 | def init_thrift_server(self, socket, processor):
    """
    Creates a thrift server that listens on the given socket and
    uses the given processor
    """
    try:
        os.unlink(socket)
    except __HOLE__ as oerr:
        if oerr.errno != errno.ENOENT:
            raise
    transport = TSocket.TServerSocket(unix_socket=socket)
    tfactory = TTransport.TBufferedTransportFactory()
    pfactory = TBinaryProtocol.TBinaryProtocolAcceleratedFactory()
    listener_server = TSimpleServer(self.name, processor, transport, tfactory, pfactory)
    return listener_server | OSError | dataset/ETHPy150Open alticelabs/meocloud-cli/meocloud/client/linux/thrift_utils.py/ThriftListener.init_thrift_server |
5,144 | def __getattr__(self, identifier):
    """
    Allows creating sub OptionTree instances using attribute
    access, inheriting the group options.
    """
    try:
        return super(AttrTree, self).__getattr__(identifier)
    except __HOLE__: pass
    if identifier.startswith('_'): raise AttributeError(str(identifier))
    elif self.fixed==True: raise AttributeError(self._fixed_error % identifier)
    valid_id = sanitize_identifier(identifier, escape=False)
    if valid_id in self.children:
        return self.__dict__[valid_id]
    self.__setattr__(identifier, self.groups)
    return self[identifier] | AttributeError | dataset/ETHPy150Open ioam/holoviews/holoviews/core/options.py/OptionTree.__getattr__ |
5,145 | def _IsProcessRunnable(name):
    try:
        with open(os.devnull, 'w') as devnull:
            found = subprocess.call(['/usr/bin/which', name],
                                    stdout=devnull, stderr=devnull) == 0
        return True
    except __HOLE__:
        return False | OSError | dataset/ETHPy150Open natduca/quickopen/src/find_based_db_indexer.py/_IsProcessRunnable |
5,146 | def extract_features(self, text):
    '''Extracts features from a body of text.

    :rtype: dictionary of features
    '''
    # Feature extractor may take one or two arguments
    try:
        return self.feature_extractor(text, self.train_set)
    except (__HOLE__, AttributeError):
        return self.feature_extractor(text) | TypeError | dataset/ETHPy150Open sloria/TextBlob/textblob/classifiers.py/BaseClassifier.extract_features |
5,147 | @cached_property
def classifier(self):
    """The classifier."""
    try:
        return self.train()
    except __HOLE__:  # nltk_class has not been defined
        raise ValueError("NLTKClassifier must have a nltk_class"
                         " variable that is not None.") | AttributeError | dataset/ETHPy150Open sloria/TextBlob/textblob/classifiers.py/NLTKClassifier.classifier |
5,148 | def train(self, *args, **kwargs):
    """Train the classifier with a labeled feature set and return
    the classifier. Takes the same arguments as the wrapped NLTK class.
    This method is implicitly called when calling ``classify`` or
    ``accuracy`` methods and is included only to allow passing in arguments
    to the ``train`` method of the wrapped NLTK class.

    .. versionadded:: 0.6.2

    :rtype: A classifier
    """
    try:
        self.classifier = self.nltk_class.train(self.train_features,
                                                *args, **kwargs)
        return self.classifier
    except __HOLE__:
        raise ValueError("NLTKClassifier must have a nltk_class"
                         " variable that is not None.") | AttributeError | dataset/ETHPy150Open sloria/TextBlob/textblob/classifiers.py/NLTKClassifier.train |
5,149 | def update(self, new_data, *args, **kwargs):
    """Update the classifier with new training data and re-trains the
    classifier.

    :param new_data: New data as a list of tuples of the form
        ``(text, label)``.
    """
    self.train_set += new_data
    self.train_features = [(self.extract_features(d), c)
                           for d, c in self.train_set]
    try:
        self.classifier = self.nltk_class.train(self.train_features,
                                                *args, **kwargs)
    except __HOLE__:  # Descendant has not defined nltk_class
        raise ValueError("NLTKClassifier must have a nltk_class"
                         " variable that is not None.")
    return True | AttributeError | dataset/ETHPy150Open sloria/TextBlob/textblob/classifiers.py/NLTKClassifier.update |
5,150 | def _get_live_streams(self, params, swf_url):
    for key, quality in QUALITY_MAP.items():
        key_url = "{0}URL".format(key)
        url = params.get(key_url)
        if not url:
            continue
        try:
            res = http.get(url, exception=IOError)
        except __HOLE__:
            continue
        if quality == "hds":
            streams = HDSStream.parse_manifest(self.session, res.url)
            for name, stream in streams.items():
                if key == "source":
                    name += "+"
                yield name, stream
        elif res.text.startswith("rtmp"):
            match = _rtmp_re.match(res.text)
            if not match:
                continue
            stream = RTMPStream(self.session, {
                "rtmp": match.group("host"),
                "app": match.group("app"),
                "playpath": match.group("playpath"),
                "swfVfy": swf_url,
                "live": True
            })
            yield quality, stream | IOError | dataset/ETHPy150Open chrippa/livestreamer/src/livestreamer/plugins/dailymotion.py/DailyMotion._get_live_streams |
5,151 | def check_xml_line_by_line(test_case, expected, actual):
    """Does what it's called; hopefully the parameters are self-explanatory"""
    # this is totally wacky, but elementtree strips needless
    # whitespace that minidom will preserve in the original string
    parser = etree.XMLParser(remove_blank_text=True)
    parsed_expected = etree.tostring(etree.XML(expected, parser), pretty_print=True)
    parsed_actual = etree.tostring(etree.XML(actual, parser), pretty_print=True)
    if parsed_expected == parsed_actual:
        return
    try:
        expected_lines = parsed_expected.split("\n")
        actual_lines = parsed_actual.split("\n")
        test_case.assertEqual(
            len(expected_lines),
            len(actual_lines),
            "Parsed xml files are different lengths\n" +
            "Expected: \n%s\nActual:\n%s" % (parsed_expected, parsed_actual))
        for i in range(len(expected_lines)):
            test_case.assertEqual(expected_lines[i], actual_lines[i])
    except __HOLE__:
        import logging
        logging.error("Failure in xml comparison\nExpected:\n%s\nActual:\n%s" % (parsed_expected, parsed_actual))
        raise | AssertionError | dataset/ETHPy150Open dimagi/commcare-hq/corehq/ex-submodules/casexml/apps/case/tests/util.py/check_xml_line_by_line |
5,152 | def normalize_asg_success(success):
    count = "1"
    duration = "PT15M"
    # if it's falsy, return defaults
    if not success:
        return [count, duration]
    # if it's an int, use it as the instance count
    if isinstance(success, int):
        return [str(success), duration]
    try:
        # try to parse as int
        count = int(success)
        # if it works, use as instance count
        return [success, duration]
    except __HOLE__:
        # ok, did not work; try to parse
        if "within" in success:
            instance, time = success.split("within")
            return [instance.strip(), to_iso8601_duration(time.strip())]
        else:
            msg = 'Unknown ASG success requirement "{}". Use something like "1 within 10m".'
            raise click.UsageError(msg.format(success)) | ValueError | dataset/ETHPy150Open zalando-stups/senza/senza/components/auto_scaling_group.py/normalize_asg_success |
5,153 | def normalize_network_threshold(threshold):
    unit = "Bytes"
    if threshold is None:
        return []
    if isinstance(threshold, int):
        return [str(threshold), unit]
    amount = 1024
    shortcuts = {
        "B": "Bytes",
        "KB": "Kilobytes",
        "MB": "Megabytes",
        "GB": "Gigabytes",
        "TB": "Terabytes"
    }
    try:
        # if someone writes just Threshold: 10
        amount = int(threshold)
        return [threshold, unit]
    except __HOLE__:
        # check if there is a space, as if somebody wrote Threshold: 20 GB
        if " " in threshold:
            # okay, so split it
            amount, unit = threshold.split()
            if unit in shortcuts:
                unit = shortcuts[unit]
            allowed_units = shortcuts.values()
            if unit not in allowed_units:
                raise click.UsageError("Network threshold unit must be one of {}".format(list(allowed_units)))
        else:
            raise click.UsageError('Unknown network threshold "{}". Use something like "20 GB".'.format(threshold))
    return [amount, unit] | ValueError | dataset/ETHPy150Open zalando-stups/senza/senza/components/auto_scaling_group.py/normalize_network_threshold |
5,154 | def create(self, validated_data):
    request = self.context['request']
    user = request.user
    auth = Auth(user)
    node = self.context['view'].get_node()
    target_node_id = validated_data['_id']
    pointer_node = Node.load(target_node_id)
    if not pointer_node or pointer_node.is_collection:
        raise InvalidModelValueError(
            source={'pointer': '/data/relationships/node_links/data/id'},
            detail='Target Node \'{}\' not found.'.format(target_node_id)
        )
    try:
        pointer = node.add_pointer(pointer_node, auth, save=True)
        return pointer
    except __HOLE__:
        raise InvalidModelValueError(
            source={'pointer': '/data/relationships/node_links/data/id'},
            detail='Target Node \'{}\' already pointed to by \'{}\'.'.format(target_node_id, node._id)
        ) | ValueError | dataset/ETHPy150Open CenterForOpenScience/osf.io/api/nodes/serializers.py/NodeLinksSerializer.create |
5,155 | def run(self, result=None):
    orig_result = result
    if result is None:
        result = self.defaultTestResult()
        startTestRun = getattr(result, 'startTestRun', None)
        if startTestRun is not None:
            startTestRun()
    self._resultForDoCleanups = result
    result.startTest(self)
    testMethod = getattr(self, self._testMethodName)
    if (getattr(self.__class__, "__unittest_skip__", False) or
            getattr(testMethod, "__unittest_skip__", False)):
        # If the class or method was skipped.
        try:
            skip_why = getattr(self.__class__, '__unittest_skip_why__', '')
            skip_why = (
                skip_why if skip_why != '' else
                getattr(testMethod, '__unittest_skip_why__', '')
            )
            self._addSkip(result, skip_why)
        finally:
            result.stopTest(self)
        return
    try:
        success = False
        try:
            deferred = self.setUp()
            if deferred is not None:
                for x in deferred:
                    yield x
        except SkipTest as e:
            self._addSkip(result, str(e))
        except KeyboardInterrupt:
            raise
        except:
            result.addError(self, sys.exc_info())
        else:
            try:
                deferred = testMethod()
                if deferred is not None:
                    for x in deferred:
                        yield x
            except __HOLE__:
                raise
            except self.failureException:
                result.addFailure(self, sys.exc_info())
            except _ExpectedFailure as e:
                addExpectedFailure = getattr(
                    result, 'addExpectedFailure', None)
                if addExpectedFailure is not None:
                    addExpectedFailure(self, e.exc_info)
                else:
                    warnings.warn(
                        "TestResult has no addExpectedFailure method, "
                        "reporting as passes", RuntimeWarning
                    )
                    result.addSuccess(self)
            except _UnexpectedSuccess:
                addUnexpectedSuccess = getattr(
                    result, 'addUnexpectedSuccess', None)
                if addUnexpectedSuccess is not None:
                    addUnexpectedSuccess(self)
                else:
                    warnings.warn(
                        "TestResult has no addUnexpectedSuccess method, "
                        "reporting as failures", RuntimeWarning
                    )
                    result.addFailure(self, sys.exc_info())
            except SkipTest as e:
                self._addSkip(result, str(e))
            except:
                result.addError(self, sys.exc_info())
            else:
                success = True
            try:
                deferred = self.tearDown()
                if deferred is not None:
                    for x in deferred:
                        yield x
            except KeyboardInterrupt:
                raise
            except:
                result.addError(self, sys.exc_info())
                success = False
        cleanUpSuccess = self.doCleanups()
        success = success and cleanUpSuccess
        if success:
            result.addSuccess(self)
    finally:
        result.stopTest(self)
        if orig_result is None:
            stopTestRun = getattr(result, 'stopTestRun', None)
            if stopTestRun is not None:
                stopTestRun() | KeyboardInterrupt | dataset/ETHPy150Open randy3k/UnitTesting/unittesting/core/st3/case.py/DeferrableTestCase.run |
5,156 | def test_gevent_version(self):
    try:
        import gevent
    except __HOLE__:
        raise SkipTest('gevent not available.')
    env_version = os.environ.get('GEVENT_VERSION')
    if env_version:
        self.assertEqual(env_version, gevent.__version__) | ImportError | dataset/ETHPy150Open python-zk/kazoo/kazoo/tests/test_build.py/TestBuildEnvironment.test_gevent_version |
5,157 | def assertSubscriptionInTopic(self, subscription, topic_name):
    ret = self.run_function(
        'boto_sns.get_all_subscriptions_by_topic',
        name=topic_name
    )
    for _subscription in ret:
        try:
            self.assertDictContainsSubset(subscription, _subscription)
            return True
        except __HOLE__:
            continue
    raise self.failureException(
        'Subscription {0} not found in topic {1} subscriptions: {2}'
        .format(subscription, topic_name, ret)
    ) | AssertionError | dataset/ETHPy150Open saltstack/salt/tests/integration/states/boto_sns.py/BotoSNSTest.assertSubscriptionInTopic |
5,158 | def ignore_not_implemented(func):
    def _inner(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except __HOLE__:
            return None
    update_wrapper(_inner, func)
    return _inner | NotImplementedError | dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/tests/regressiontests/introspection/tests.py/ignore_not_implemented |
5,159 | def render_GET(self, request):
    if self.representation is None:
        # we haven't been given a representation yet
        request.setResponseCode(500)
        return 'Resource has not yet been created/updated.'
    # check for the if-modified-since header, and send a 304 back if the
    # resource has not been modified
    msd_header = request.getHeader(IF_MODIFIED_SINCE)
    if msd_header:
        try:
            msd = datetime.datetime.strptime(msd_header, RFC850_FORMAT)
            if msd >= self.last_update_time:
                request.setResponseCode(304)
                return ''
        except __HOLE__:
            pass  # error parsing timestamp
    request.setHeader(LAST_MODIFIED, self.last_modified_timestamp)
    if self.mime_type:
        request.setHeader(CONTENT_TYPE, self.mime_type)
    return self.representation | ValueError | dataset/ETHPy150Open NORDUnet/opennsa/opennsa/shared/modifiableresource.py/ModifiableResource.render_GET |
5,160 | def _RegistryQuery(key, value=None):
    r"""Use reg.exe to read a particular key through _RegistryQueryBase.

    First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
    that fails, it falls back to System32. Sysnative is available on Vista and
    up and available on Windows Server 2003 and XP through KB patch 942589. Note
    that Sysnative will always fail if using 64-bit python due to it being a
    virtual directory and System32 will work correctly in the first place.

    KB 942589 - http://support.microsoft.com/kb/942589/en-us.

    Arguments:
      key: The registry key.
      value: The particular registry value to read (optional).
    Return:
      stdout from reg.exe, or None for failure.
    """
    text = None
    try:
        text = _RegistryQueryBase('Sysnative', key, value)
    except __HOLE__, e:
        if e.errno == errno.ENOENT:
            text = _RegistryQueryBase('System32', key, value)
        else:
            raise
    return text | OSError | dataset/ETHPy150Open adblockplus/gyp/pylib/gyp/MSVSVersion.py/_RegistryQuery |
5,161 | def _RegistryGetValue(key, value):
    """Use _winreg or reg.exe to obtain the value of a registry key.

    Using _winreg is preferable because it solves an issue on some corporate
    environments where access to reg.exe is locked down. However, we still need
    to fallback to reg.exe for the case where the _winreg module is not available
    (for example in cygwin python).

    Args:
      key: The registry key.
      value: The particular registry value to read.
    Return:
      contents of the registry key's value, or None on failure.
    """
    try:
        return _RegistryGetValueUsingWinReg(key, value)
    except __HOLE__:
        pass
    # Fallback to reg.exe if we fail to import _winreg.
    text = _RegistryQuery(key, value)
    if not text:
        return None
    # Extract value.
    match = re.search(r'REG_\w+\s+([^\r]+)\r\n', text)
    if not match:
        return None
    return match.group(1) | ImportError | dataset/ETHPy150Open adblockplus/gyp/pylib/gyp/MSVSVersion.py/_RegistryGetValue |
5,162 | def clean(self, value):
    if isinstance(value, decimal.Decimal):
        # Value is already a decimal.
        return value
    if isinstance(value, six.string_types):
        # Strip the string of whitespace
        value = value.strip()
    if not value:
        # Value is nothing; return nothing.
        return None
    try:
        # Attempt to coerce whatever we have as a decimal.
        return decimal.Decimal(value)
    except (__HOLE__, decimal.InvalidOperation):
        # Failed to do so.
        raise ValueError('Not a valid decimal value.') | ValueError | dataset/ETHPy150Open armet/python-armet/armet/attributes/decimal.py/DecimalAttribute.clean |
5,163 | def test_custom_type_mongotype_dict_index_not_checked(self):
    class CustomObject(CustomType):
        mongo_type = dict
        python_type = float
        def to_bson(self, value):
            return {'f': unicode(value)}
        def to_python(self, value):
            return float(value['f'])
    failed = False
    try:
        class MyDocument(Document):
            structure = {'amount': CustomObject()}
            required_fields = ['amount']
            indexes = [{'fields': ['amount.f']}]
    except __HOLE__, e:
        self.assertEqual(str(e), "Error in indexes: can't find amount.f in structure")
        failed = True
    self.assertEqual(failed, True) | ValueError | dataset/ETHPy150Open namlook/mongokit/tests/test_custom_types.py/CustomTypesTestCase.test_custom_type_mongotype_dict_index_not_checked |
5,164 | @property
def num_prev(self):
    try:
        return int(self.request.GET.get('num_prev'))
    except (__HOLE__, TypeError):
        pass
    return 12 | ValueError | dataset/ETHPy150Open dimagi/commcare-hq/custom/_legacy/mvp/reports/va.py/VerbalAutopsyReport.num_prev |
5,165 | @property
def current_month(self):
    try:
        return dateutil.parser.parse(self.request.GET.get('current_month'))
    except (__HOLE__, ValueError):
        pass | AttributeError | dataset/ETHPy150Open dimagi/commcare-hq/custom/_legacy/mvp/reports/va.py/VerbalAutopsyReport.current_month |
5,166 | @property
def report_context(self):
    report_matrix = []
    month_headers = None
    for category_group in self.indicator_slugs:
        category_indicators = []
        total_rowspan = 0
        for slug in category_group['indicator_slugs']:
            try:
                indicator = DynamicIndicatorDefinition.get_current(
                    MVP.NAMESPACE, self.domain, slug,
                    wrap_correctly=True,
                )
                if self.is_rendered_as_email:
                    retrospective = indicator.get_monthly_retrospective(user_ids=self.user_ids)
                else:
                    retrospective = indicator.get_monthly_retrospective(return_only_dates=True)
                if not month_headers:
                    month_headers = self.get_month_headers(retrospective)
                if isinstance(indicator, CombinedCouchViewIndicatorDefinition):
                    table = self.get_indicator_table(retrospective)
                    indicator_rowspan = 3
                else:
                    table = self.get_indicator_row(retrospective)
                    indicator_rowspan = 1
                total_rowspan += indicator_rowspan + 1
                category_indicators.append(dict(
                    title=indicator.description,
                    table=table,
                    load_url="%s?indicator=%s" % (self.get_url(self.domain,
                                                  render_as='partial'), indicator.slug),
                    rowspan=indicator_rowspan
                ))
            except (__HOLE__, ResourceNotFound):
                logging.info("Could not grab indicator %s in domain %s" % (slug, self.domain))
        report_matrix.append(dict(
            category_title=category_group['category_title'],
            category_slug=category_group['category_slug'],
            rowspan=total_rowspan,
            indicators=category_indicators,
        ))
    return dict(
        months=month_headers,
        report=report_matrix,
    ) | AttributeError | dataset/ETHPy150Open dimagi/commcare-hq/custom/_legacy/mvp/reports/va.py/VerbalAutopsyReport.report_context |
5,167 | def norm(x, ord=None, axis=None):
    """
    Norm of a sparse matrix

    This function is able to return one of seven different matrix norms,
    depending on the value of the ``ord`` parameter.

    Parameters
    ----------
    x : a sparse matrix
        Input sparse matrix.
    ord : {non-zero int, inf, -inf, 'fro'}, optional
        Order of the norm (see table under ``Notes``). inf means numpy's
        `inf` object.
    axis : {int, 2-tuple of ints, None}, optional
        If `axis` is an integer, it specifies the axis of `x` along which to
        compute the vector norms. If `axis` is a 2-tuple, it specifies the
        axes that hold 2-D matrices, and the matrix norms of these matrices
        are computed. If `axis` is None then either a vector norm (when `x`
        is 1-D) or a matrix norm (when `x` is 2-D) is returned.

    Returns
    -------
    n : float or ndarray

    Notes
    -----
    Some of the ord values are not implemented because some associated
    functions, like `_multi_svd_norm`, are not yet available for sparse
    matrices.

    This docstring is modified based on numpy.linalg.norm.
    https://github.com/numpy/numpy/blob/master/numpy/linalg/linalg.py

    The following norms can be calculated:

    =====  ============================
    ord    norm for sparse matrices
    =====  ============================
    None   Frobenius norm
    'fro'  Frobenius norm
    inf    max(sum(abs(x), axis=1))
    -inf   min(sum(abs(x), axis=1))
    0      abs(x).sum(axis=axis)
    1      max(sum(abs(x), axis=0))
    -1     min(sum(abs(x), axis=0))
    2      Not implemented
    -2     Not implemented
    other  Not implemented
    =====  ============================

    The Frobenius norm is given by [1]_:

        :math:`||A||_F = [\\sum_{i,j} abs(a_{i,j})^2]^{1/2}`

    References
    ----------
    .. [1] G. H. Golub and C. F. Van Loan, *Matrix Computations*,
           Baltimore, MD, Johns Hopkins University Press, 1985, pg. 15

    Examples
    --------
    >>> from scipy.sparse import *
    >>> import numpy as np
    >>> from scipy.sparse.linalg import norm
    >>> a = np.arange(9) - 4
    >>> a
    array([-4, -3, -2, -1, 0, 1, 2, 3, 4])
    >>> b = a.reshape((3, 3))
    >>> b
    array([[-4, -3, -2],
           [-1, 0, 1],
           [ 2, 3, 4]])
    >>> b = csr_matrix(b)
    >>> norm(b)
    7.745966692414834
    >>> norm(b, 'fro')
    7.745966692414834
    >>> norm(b, np.inf)
    9
    >>> norm(b, -np.inf)
    2
    >>> norm(b, 1)
    7
    >>> norm(b, -1)
    6
    """
    if not issparse(x):
        raise TypeError("input is not sparse. use numpy.linalg.norm")
    # Check the default case first and handle it immediately.
    if axis is None and ord in (None, 'fro', 'f'):
        return _sparse_frobenius_norm(x)
    # Some norms require functions that are not implemented for all types.
    x = x.tocsr()
    if axis is None:
        axis = (0, 1)
    elif not isinstance(axis, tuple):
        msg = "'axis' must be None, an integer or a tuple of integers"
        try:
            int_axis = int(axis)
        except __HOLE__:
            raise TypeError(msg)
        if axis != int_axis:
            raise TypeError(msg)
        axis = (int_axis,)
    nd = 2
    if len(axis) == 2:
        row_axis, col_axis = axis
        if not (-nd <= row_axis < nd and -nd <= col_axis < nd):
            raise ValueError('Invalid axis %r for an array with shape %r' %
                             (axis, x.shape))
        if row_axis % nd == col_axis % nd:
            raise ValueError('Duplicate axes given.')
        if ord == 2:
            raise NotImplementedError
            #return _multi_svd_norm(x, row_axis, col_axis, amax)
        elif ord == -2:
            raise NotImplementedError
            #return _multi_svd_norm(x, row_axis, col_axis, amin)
        elif ord == 1:
            return abs(x).sum(axis=row_axis).max(axis=col_axis)[0,0]
        elif ord == Inf:
            return abs(x).sum(axis=col_axis).max(axis=row_axis)[0,0]
        elif ord == -1:
            return abs(x).sum(axis=row_axis).min(axis=col_axis)[0,0]
        elif ord == -Inf:
            return abs(x).sum(axis=col_axis).min(axis=row_axis)[0,0]
        elif ord in (None, 'f', 'fro'):
            # The axis order does not matter for this norm.
            return _sparse_frobenius_norm(x)
        else:
            raise ValueError("Invalid norm order for matrices.")
    elif len(axis) == 1:
        a, = axis
        if not (-nd <= a < nd):
            raise ValueError('Invalid axis %r for an array with shape %r' %
                             (axis, x.shape))
        if ord == Inf:
            M = abs(x).max(axis=a)
        elif ord == -Inf:
            M = abs(x).min(axis=a)
        elif ord == 0:
            # Zero norm
            M = (x != 0).sum(axis=a)
        elif ord == 1:
            # special case for speedup
            M = abs(x).sum(axis=a)
        elif ord in (2, None):
            M = sqrt(abs(x).power(2).sum(axis=a))
        else:
            try:
                ord + 1
            except TypeError:
                raise ValueError('Invalid norm order for vectors.')
            M = np.power(abs(x).power(ord).sum(axis=a), 1 / ord)
        return M.A.ravel()
    else:
        raise ValueError("Improper number of dimensions to norm.") | TypeError | dataset/ETHPy150Open scipy/scipy/scipy/sparse/linalg/_norm.py/norm |
5,168 | def __init__(self, userdict):
    self.username = userdict["username"]
    self.password = userdict["password"]
    self.access = {}
    for realm, level in userdict.get("access", {}).items():
        try:
            self.access[realm] = LEVELS.index(level)
        except __HOLE__:
            raise ValueError("Invalid record for user {user}. {level} is not a proper access level. \
Choose one of: {levels}.".format(
                user=self.username,
                level=level,
                levels=", ".join(LEVELS)
            )) | ValueError | dataset/ETHPy150Open marchon/Flask-API-Server/apiserver/authentication.py/User.__init__ |
5,169 | def test_support(file_list, source_tag):
    events = {
        'MediaStarted': 0,
        'MediaEnded': 0,
        'MediaCompleted': 0,
        'MediaFailed': 0,
        'MediaQualityChange': 0,
        'MediaQualityChangeAuto': 0,
        'MediaQualityChangeProgramatically': 0,
        'MediaInitialBufferStart': 0,
        'MediaInitialBufferEnd': 0,
        'MediaBufferingStart': 0,
        'MediaBufferingEnd': 0,
        'MediaScrub': 0
    }
    custom_fields = {
        'x_useragent': 0,
        'x_tpmid': 0,
        'x_episode_title': 0,
        'x_program_title': 0,
        'x_producer': 0,
        'x_video_length': 0,
        'x_client_id': 0,
        'x_session_id': 0,
        'x_tracking_id': 0,
        'x_video_location': 0,
        'x_stream_size': 0,
        'x_flash_player': 0,
        'x_buffering_length': 0,
        'x_encoding_name': 0,
        'x_auto': 0,
        'x_bandwidth': 0,
        'x_after_seek': 0,
        'x_start_time': 0,
        'x_previous_quality': 0,
        'x_new_quality': 0
    }
    # Iterate over passed-in files
    for filename in file_list:
        try:
            with open(filename, 'r') as f:
                print 'processing file...'
                # Read file line by line and aggregate all fields encountered
                for line in f:
                    # Parse line into dict
                    fields = GoonHillyLog.parse_log_line(line)
                    # Skip line if event source tag is not what we are looking for
                    if fields.get('source_tag') != source_tag:
                        continue
                    # Iterate over all keys in the event
                    for key in fields.iterkeys():
                        # If the key is an event, increment counter for that event
                        if key == 'event_type' and fields.get(key) in events:
                            events[fields.get(key)] += 1
                        # If key is a custom field, increment counter for that field
                        elif key in custom_fields:
                            custom_fields[key] += 1
        except __HOLE__:
            # Catch error if passed-in file does not exist
            pass
    print_results(events, custom_fields, source_tag) | IOError | dataset/ETHPy150Open pbs/agora-proc/tests/goonhilly_support.py/test_support |
5,170 | def search_noteitems(self, search_object):
    for nw in self.all_items:
        if not search_object.search_notemodel(nw.notemodel):
            nw.setHidden(True)
        else:
            nw.setHidden(False)
    try:
        self.setCurrentItem(self.all_visible_items[0])
        return True
    except __HOLE__:
        # no items found
        self.setCurrentRow(-1)
        return False | IndexError | dataset/ETHPy150Open akehrer/Motome/Motome/Models/NoteListWidget.py/NoteListWidget.search_noteitems |
5,171 | def get_config(path):
    try:
        with open(path) as f:
            data = f.read()
    except __HOLE__ as e:
        if e.errno == errno.ENOENT:
            data = None
        else:
            raise e
    if data is None:
        return {}
    else:
        # assume config is a dict
        return yaml.safe_load(data) | IOError | dataset/ETHPy150Open rcbops/rpc-openstack/scripts/update-yaml.py/get_config |
5,172 | def __place_template_folder(group, src, dst, gbp=False):
    template_files = pkg_resources.resource_listdir(group, src)
    # For each template, place
    for template_file in template_files:
        if not gbp and os.path.basename(template_file) == 'gbp.conf.em':
            debug("Skipping template '{0}'".format(template_file))
            continue
        template_path = os.path.join(src, template_file)
        template_dst = os.path.join(dst, template_file)
        if pkg_resources.resource_isdir(group, template_path):
            debug("Recursing on folder '{0}'".format(template_path))
            __place_template_folder(group, template_path, template_dst, gbp)
        else:
            try:
                debug("Placing template '{0}'".format(template_path))
                template = pkg_resources.resource_string(group, template_path)
                template_abs_path = pkg_resources.resource_filename(group, template_path)
            except __HOLE__ as err:
                error("Failed to load template "
                      "'{0}': {1}".format(template_file, str(err)), exit=True)
            if not os.path.exists(dst):
                os.makedirs(dst)
            if os.path.exists(template_dst):
                debug("Removing existing file '{0}'".format(template_dst))
                os.remove(template_dst)
            with open(template_dst, 'w') as f:
                if not isinstance(template, str):
                    template = template.decode('utf-8')
                f.write(template)
            shutil.copystat(template_abs_path, template_dst) | IOError | dataset/ETHPy150Open ros-infrastructure/bloom/bloom/generators/debian/generator.py/__place_template_folder |
5,173 | def get_package_from_branch(branch):
    with inbranch(branch):
        try:
            package_data = get_package_data(branch)
        except __HOLE__:
            return None
    if type(package_data) not in [list, tuple]:
        # It is a ret code
        DebianGenerator.exit(package_data)
    names, version, packages = package_data
    if type(names) is list and len(names) > 1:
        DebianGenerator.exit(
            "Debian generator does not support generating "
            "from branches with multiple packages in them, use "
            "the release generator first to split packages into "
            "individual branches.")
    if type(packages) is dict:
        return list(packages.values())[0] | SystemExit | dataset/ETHPy150Open ros-infrastructure/bloom/bloom/generators/debian/generator.py/get_package_from_branch |
5,174 | def _check_all_keys_are_valid(self, peer_packages):
    keys_to_resolve = []
    key_to_packages_which_depends_on = collections.defaultdict(list)
    keys_to_ignore = set()
    for package in self.packages.values():
        depends = package.run_depends + package.buildtool_export_depends
        build_depends = package.build_depends + package.buildtool_depends + package.test_depends
        unresolved_keys = depends + build_depends + package.replaces + package.conflicts
        keys_to_ignore = keys_to_ignore.union(package.replaces + package.conflicts)
        keys = [d.name for d in unresolved_keys]
        keys_to_resolve.extend(keys)
        for key in keys:
            key_to_packages_which_depends_on[key].append(package.name)
    os_name = self.os_name
    rosdistro = self.rosdistro
    all_keys_valid = True
    for key in sorted(set(keys_to_resolve)):
        for os_version in self.distros:
            try:
                extended_peer_packages = peer_packages + [d.name for d in keys_to_ignore]
                rule, installer_key, default_installer_key = \
                    resolve_rosdep_key(key, os_name, os_version, rosdistro, extended_peer_packages,
                                       retry=False)
                if rule is None:
                    continue
                if installer_key != default_installer_key:
                    error("Key '{0}' resolved to '{1}' with installer '{2}', "
                          "which does not match the default installer '{3}'."
                          .format(key, rule, installer_key, default_installer_key))
                    BloomGenerator.exit(
                        "The Debian generator does not support dependencies "
                        "which are installed with the '{0}' installer."
                        .format(installer_key),
                        returncode=code.GENERATOR_INVALID_INSTALLER_KEY)
            except (GeneratorError, __HOLE__) as e:
                print(fmt("Failed to resolve @{cf}@!{key}@| on @{bf}{os_name}@|:@{cf}@!{os_version}@| with: {e}")
                      .format(**locals()))
                print(fmt("@{cf}@!{0}@| is depended on by these packages: ").format(key) +
                      str(list(set(key_to_packages_which_depends_on[key]))))
                print(fmt("@{kf}@!<== @{rf}@!Failed@|"))
                all_keys_valid = False
    return all_keys_valid | RuntimeError | dataset/ETHPy150Open ros-infrastructure/bloom/bloom/generators/debian/generator.py/DebianGenerator._check_all_keys_are_valid |
5,175 | def pre_modify(self):
    info("\nPre-verifying Debian dependency keys...")
    # Run rosdep update is needed
    if not self.has_run_rosdep:
        self.update_rosdep()
    peer_packages = [p.name for p in self.packages.values()]
    while not self._check_all_keys_are_valid(peer_packages):
        error("Some of the dependencies for packages in this repository could not be resolved by rosdep.")
        error("You can try to address the issues which appear above and try again if you wish.")
        try:
            if not maybe_continue(msg="Would you like to try again?"):
                error("User aborted after rosdep keys were not resolved.")
                sys.exit(code.GENERATOR_NO_ROSDEP_KEY_FOR_DISTRO)
        except (__HOLE__, EOFError):
            error("\nUser quit.", exit=True)
        update_rosdep()
        invalidate_view_cache()
    info("All keys are " + ansi('greenf') + "OK" + ansi('reset') + "\n") | KeyboardInterrupt | dataset/ETHPy150Open ros-infrastructure/bloom/bloom/generators/debian/generator.py/DebianGenerator.pre_modify |
5,176 | def __init__(self, to, chained_field=None, chained_model_field=None,
             auto_choose=False, **kwargs):
    """
    examples:
    class Publication(models.Model):
        name = models.CharField(max_length=255)
    class Writer(models.Model):
        name = models.CharField(max_length=255)
        publications = models.ManyToManyField('Publication', blank=True, null=True)
    class Book(models.Model):
        publication = models.ForeignKey(Publication)
        writer = ChainedManyToManyField(
            Writer,
            chained_field="publication",
            chained_model_field="publications",
        )
        name = models.CharField(max_length=255)
    ``chained_field`` is the name of the ForeignKey field referenced by ChainedManyToManyField of the same Model.
    in the examples, chained_field is the name of field publication in Model Book.
    ``chained_model_field`` is the name of the ManyToMany field referenced in the 'to' Model.
    in the examples, chained_model_field is the name of field publications in Model Writer.
    ``auto_choose`` controls whether auto select the choice when there is only one available choice.
    """
    try:
        isbasestring = isinstance(to, basestring)
    except __HOLE__:
        isbasestring = isinstance(to, str)
    if isbasestring:
        self.to_app_name, self.to_model_name = to.split('.')
    else:
        self.to_app_name = to._meta.app_label
        self.to_model_name = to._meta.object_name
    self.chain_field = chained_field
    self.chained_model_field = chained_model_field
    self.auto_choose = auto_choose
    ManyToManyField.__init__(self, to, **kwargs) | NameError | dataset/ETHPy150Open digi604/django-smart-selects/smart_selects/db_fields.py/ChainedManyToManyField.__init__ |
5,177 | def __getattr__(self, name):
    # I only over-ride portions of the os module
    try:
        return object.__getattr__(self, name)
    except __HOLE__:
        return getattr(os, name) | AttributeError | dataset/ETHPy150Open openstack/swift/test/unit/common/test_manager.py/MockOs.__getattr__ |
5,178 | def test_setup_env(self):
    class MockResource(object):
        def __init__(self, error=None):
            self.error = error
            self.called_with_args = []
        def setrlimit(self, resource, limits):
            if self.error:
                raise self.error
            self.called_with_args.append((resource, limits))
        def __getattr__(self, name):
            # I only over-ride portions of the resource module
            try:
                return object.__getattr__(self, name)
            except __HOLE__:
                return getattr(resource, name)
    _orig_resource = manager.resource
    _orig_environ = os.environ
    try:
        manager.resource = MockResource()
        manager.os.environ = {}
        manager.setup_env()
        expected = [
            (resource.RLIMIT_NOFILE, (manager.MAX_DESCRIPTORS,
                                      manager.MAX_DESCRIPTORS)),
            (resource.RLIMIT_DATA, (manager.MAX_MEMORY,
                                    manager.MAX_MEMORY)),
            (resource.RLIMIT_NPROC, (manager.MAX_PROCS,
                                     manager.MAX_PROCS)),
        ]
        self.assertEqual(manager.resource.called_with_args, expected)
        self.assertTrue(
            manager.os.environ['PYTHON_EGG_CACHE'].startswith('/tmp'))
        # test error condition
        manager.resource = MockResource(error=ValueError())
        manager.os.environ = {}
        manager.setup_env()
        self.assertEqual(manager.resource.called_with_args, [])
        self.assertTrue(
            manager.os.environ['PYTHON_EGG_CACHE'].startswith('/tmp'))
        manager.resource = MockResource(error=OSError())
        manager.os.environ = {}
        self.assertRaises(OSError, manager.setup_env)
        self.assertEqual(manager.os.environ.get('PYTHON_EGG_CACHE'), None)
    finally:
        manager.resource = _orig_resource
        os.environ = _orig_environ | AttributeError | dataset/ETHPy150Open openstack/swift/test/unit/common/test_manager.py/TestManagerModule.test_setup_env |
5,179 | def test_watch_server_pids(self):
    class MockOs(object):
        WNOHANG = os.WNOHANG
        def __init__(self, pid_map=None):
            if pid_map is None:
                pid_map = {}
            self.pid_map = {}
            for pid, v in pid_map.items():
                self.pid_map[pid] = (x for x in v)
        def waitpid(self, pid, options):
            try:
                rv = next(self.pid_map[pid])
            except StopIteration:
                raise OSError(errno.ECHILD, os.strerror(errno.ECHILD))
            except KeyError:
                raise OSError(errno.ESRCH, os.strerror(errno.ESRCH))
            if isinstance(rv, Exception):
                raise rv
            else:
                return rv
    class MockTime(object):
        def __init__(self, ticks=None):
            self.tock = time()
            if not ticks:
                ticks = []
            self.ticks = (t for t in ticks)
        def time(self):
            try:
                self.tock += next(self.ticks)
            except StopIteration:
                self.tock += 1
            return self.tock
        def sleep(*args):
            return
    class MockServer(object):
        def __init__(self, pids, run_dir=manager.RUN_DIR, zombie=0):
            self.heartbeat = (pids for _ in range(zombie))
        def get_running_pids(self):
            try:
                rv = next(self.heartbeat)
                return rv
            except __HOLE__:
                return {}
    _orig_os = manager.os
    _orig_time = manager.time
    _orig_server = manager.Server
    try:
        manager.time = MockTime()
        manager.os = MockOs()
        # this server always says it's dead when you ask for running pids
        server = MockServer([1])
        # list of pids keyed on servers to watch
        server_pids = {
            server: [1],
        }
        # basic test, server dies
        gen = manager.watch_server_pids(server_pids)
        expected = [(server, 1)]
        self.assertEqual([x for x in gen], expected)
        # start long running server and short interval
        server = MockServer([1], zombie=15)
        server_pids = {
            server: [1],
        }
        gen = manager.watch_server_pids(server_pids)
        self.assertEqual([x for x in gen], [])
        # wait a little longer
        gen = manager.watch_server_pids(server_pids, interval=15)
        self.assertEqual([x for x in gen], [(server, 1)])
        # zombie process
        server = MockServer([1], zombie=200)
        server_pids = {
            server: [1],
        }
        # test weird os error
        manager.os = MockOs({1: [OSError()]})
        gen = manager.watch_server_pids(server_pids)
        self.assertRaises(OSError, lambda: [x for x in gen])
        # test multi-server
        server1 = MockServer([1, 10], zombie=200)
        server2 = MockServer([2, 20], zombie=8)
        server_pids = {
            server1: [1, 10],
            server2: [2, 20],
        }
        pid_map = {
            1: [None for _ in range(10)],
            2: [None for _ in range(8)],
            20: [None for _ in range(4)],
        }
        manager.os = MockOs(pid_map)
        gen = manager.watch_server_pids(server_pids,
                                        interval=manager.KILL_WAIT)
        expected = [
            (server2, 2),
            (server2, 20),
        ]
        self.assertEqual([x for x in gen], expected)
    finally:
        manager.os = _orig_os
        manager.time = _orig_time
        manager.Server = _orig_server | StopIteration | dataset/ETHPy150Open openstack/swift/test/unit/common/test_manager.py/TestManagerModule.test_watch_server_pids |
5,180 | def test_wait(self):
    server = manager.Server('test')
    self.assertEqual(server.wait(), 0)
    class MockProcess(threading.Thread):
        def __init__(self, delay=0.1, fail_to_start=False):
            threading.Thread.__init__(self)
            # setup pipe
            rfd, wfd = os.pipe()
            # subprocess connection to read stdout
            self.stdout = os.fdopen(rfd)
            # real process connection to write stdout
            self._stdout = os.fdopen(wfd, 'w')
            self.delay = delay
            self.finished = False
            self.returncode = None
            if fail_to_start:
                self._returncode = 1
                self.run = self.fail
            else:
                self._returncode = 0
        def __enter__(self):
            self.start()
            return self
        def __exit__(self, *args):
            if self.isAlive():
                self.join()
        def close_stdout(self):
            self._stdout.flush()
            with open(os.devnull, 'wb') as nullfile:
                try:
                    os.dup2(nullfile.fileno(), self._stdout.fileno())
                except __HOLE__:
                    pass
        def fail(self):
            print('mock process started', file=self._stdout)
            sleep(self.delay)  # perform setup processing
            print('mock process failed to start', file=self._stdout)
            self.close_stdout()
        def poll(self):
            self.returncode = self._returncode
            return self.returncode or None
        def run(self):
            print('mock process started', file=self._stdout)
            sleep(self.delay)  # perform setup processing
            print('setup complete!', file=self._stdout)
            self.close_stdout()
            sleep(self.delay)  # do some more processing
            print('mock process finished', file=self._stdout)
            self.finished = True
    class MockTime(object):
        def time(self):
            return time()
        def sleep(self, *args, **kwargs):
            pass
    with temptree([]) as t:
        old_stdout = sys.stdout
        old_wait = manager.WARNING_WAIT
        old_time = manager.time
        try:
            manager.WARNING_WAIT = 0.01
            manager.time = MockTime()
            with open(os.path.join(t, 'output'), 'w+') as f:
                # actually capture the read stdout (for prints)
                sys.stdout = f
                # test closing pipe in subprocess unblocks read
                with MockProcess() as proc:
                    server.procs = [proc]
                    status = server.wait()
                    self.assertEqual(status, 0)
                    # wait should return before process exits
                    self.assertTrue(proc.isAlive())
                    self.assertFalse(proc.finished)
                self.assertTrue(proc.finished)  # make sure it did finish
                # test output kwarg prints subprocess output
                with MockProcess() as proc:
                    server.procs = [proc]
                    status = server.wait(output=True)
                output = pop_stream(f)
                self.assertTrue('mock process started' in output)
                self.assertTrue('setup complete' in output)
                # make sure we don't get prints after stdout was closed
                self.assertTrue('mock process finished' not in output)
                # test process which fails to start
                with MockProcess(fail_to_start=True) as proc:
                    server.procs = [proc]
                    status = server.wait()
                self.assertEqual(status, 1)
                self.assertTrue('failed' in pop_stream(f))
                # test multiple procs
                procs = [MockProcess(delay=.5) for i in range(3)]
                for proc in procs:
                    proc.start()
                server.procs = procs
                status = server.wait()
                self.assertEqual(status, 0)
                for proc in procs:
                    self.assertTrue(proc.isAlive())
                for proc in procs:
                    proc.join()
        finally:
            sys.stdout = old_stdout
            manager.WARNING_WAIT = old_wait
            manager.time = old_time | OSError | dataset/ETHPy150Open openstack/swift/test/unit/common/test_manager.py/TestServer.test_wait |
5,181 | def main():
    args = docopt.docopt('\n'.join(__doc__.split('\n')[2:]),
                         version=const.VERSION)
    logging.basicConfig(
        level=logging.DEBUG if args['--verbose'] else logging.INFO,
        stream=sys.stdout,
    )
    connect_conf = config.new_context_from_file(args['--config-file'],
                                                section='imap')
    if connect_conf is None:
        return 1
    display_conf = config.new_context_from_file(args['--config-file'],
                                                section='display')
    if args['--format'] is not None:
        config_key = 'format_thread' if args['--thread'] else 'format_list'
        display_conf[config_key] = args['--format']
    if args['--limit'] is not None:
        try:
            limit = int(args['--limit'])
            if limit < 1:
                raise ValueError
        except __HOLE__:
            log.error('Invalid argument limit : {}'.format(args['--limit']))
            return 1
    else:
        limit = None
    try:
        imap_account = imap_cli.connect(**connect_conf)
        imap_cli.change_dir(
            imap_account,
            directory=args['<directory>'] or const.DEFAULT_DIRECTORY)
        if args['--thread'] is False:
            for mail_info in search.fetch_mails_info(imap_account,
                                                     limit=limit):
                sys.stdout.write(
                    display_conf['format_list'].format(**mail_info))
                sys.stdout.write('\n')
        else:
            threads = search.fetch_threads(imap_account, limit=limit)
            mail_tree = search.threads_to_mail_tree(threads)
            for output in search.display_mail_tree(
                    imap_account,
                    mail_tree,
                    format_thread=display_conf['format_thread']):
                sys.stdout.write(output)
                sys.stdout.write('\n')
        imap_cli.disconnect(imap_account)
    except KeyboardInterrupt:
        log.info('Interrupt by user, exiting')
    return 0 | ValueError | dataset/ETHPy150Open Gentux/imap-cli/imap_cli/list_mail.py/main |
5,182 | def connect(self, username, password):
    # Try to use oauth2 first. It's much safer
    try:
        self._connect_oauth(username)
    except (TypeError, __HOLE__) as e:
        logger.warning("Couldn't do oauth2 because %s" % e)
        self.server = self.transport(self.hostname, self.port)
        typ, msg = self.server.login(username, password)
    self.server.select() | ValueError | dataset/ETHPy150Open coddingtonbear/django-mailbox/django_mailbox/transports/gmail.py/GmailImapTransport.connect |
5,183 | def _connect_oauth(self, username):
    # username should be an email address that has already been authorized
    # for gmail access
    try:
        from django_mailbox.google_utils import (
            get_google_access_token,
            fetch_user_info,
            AccessTokenNotFound,
        )
    except __HOLE__:
        raise ValueError(
            "Install python-social-auth to use oauth2 auth for gmail"
        )
    access_token = None
    while access_token is None:
        try:
            access_token = get_google_access_token(username)
            google_email_address = fetch_user_info(username)['email']
        except TypeError:
            # This means that the google process took too long
            # Trying again is the right thing to do
            pass
        except AccessTokenNotFound:
            raise ValueError(
                "No Token available in python-social-auth for %s" % (
                    username
                )
            )
    auth_string = 'user=%s\1auth=Bearer %s\1\1' % (
        google_email_address,
        access_token
    )
    self.server = self.transport(self.hostname, self.port)
    self.server.authenticate('XOAUTH2', lambda x: auth_string)
    self.server.select() | ImportError | dataset/ETHPy150Open coddingtonbear/django-mailbox/django_mailbox/transports/gmail.py/GmailImapTransport._connect_oauth |
5,184 | def format_currency(number, currency, format, locale=babel.numbers.LC_NUMERIC,
                    force_frac=None, format_type='standard'):
    """Same as ``babel.numbers.format_currency``, but has ``force_frac``
    argument instead of ``currency_digits``.
    If the ``force_frac`` argument is given, the argument is passed down to
    ``pattern.apply``.
    """
    locale = babel.core.Locale.parse(locale)
    if format:
        pattern = babel.numbers.parse_pattern(format)
    else:
        try:
            pattern = locale.currency_formats[format_type]
        except __HOLE__:
            raise babel.numbers.UnknownCurrencyFormatError(
                "%r is not a known currency format type" % format_type)
    if force_frac is None:
        fractions = babel.core.get_global('currency_fractions')
        try:
            digits = fractions[currency][0]
        except KeyError:
            digits = fractions['DEFAULT'][0]
        frac = (digits, digits)
    else:
        frac = force_frac
    return pattern.apply(number, locale, currency=currency, force_frac=frac)
# Data class | KeyError | dataset/ETHPy150Open Suor/django-easymoney/easymoney.py/format_currency |
5,185 | @staticmethod
def load(collada, localscope, node):
    colornode = node.find( '%s/%s/%s'%(tag('technique_common'),tag('directional'),
                           tag('color') ) )
    if colornode is None:
        raise DaeIncompleteError('Missing color for directional light')
    try:
        color = tuple([float(v) for v in colornode.text.split()])
    except __HOLE__ as ex:
        raise DaeMalformedError('Corrupted color values in light definition')
    return DirectionalLight(node.get('id'), color, xmlnode = node) | ValueError | dataset/ETHPy150Open pycollada/pycollada/collada/light.py/DirectionalLight.load |
5,186 | @staticmethod
def load(collada, localscope, node):
    colornode = node.find('%s/%s/%s' % (tag('technique_common'),
                                        tag('ambient'), tag('color')))
    if colornode is None:
        raise DaeIncompleteError('Missing color for ambient light')
    try:
        color = tuple( [ float(v) for v in colornode.text.split() ] )
    except __HOLE__ as ex:
        raise DaeMalformedError('Corrupted color values in light definition')
    return AmbientLight(node.get('id'), color, xmlnode = node) | ValueError | dataset/ETHPy150Open pycollada/pycollada/collada/light.py/AmbientLight.load |
5,187 | @staticmethod
def load(collada, localscope, node):
    pnode = node.find('%s/%s' % (tag('technique_common'), tag('point')))
    colornode = pnode.find( tag('color') )
    if colornode is None:
        raise DaeIncompleteError('Missing color for point light')
    try:
        color = tuple([float(v) for v in colornode.text.split()])
    except __HOLE__ as ex:
        raise DaeMalformedError('Corrupted color values in light definition')
    constant_att = linear_att = quad_att = zfar = None
    qattnode = pnode.find( tag('quadratic_attenuation') )
    cattnode = pnode.find( tag('constant_attenuation') )
    lattnode = pnode.find( tag('linear_attenuation') )
    zfarnode = pnode.find( tag('zfar') )
    try:
        if cattnode is not None:
            constant_att = float(cattnode.text)
        if lattnode is not None:
            linear_att = float(lattnode.text)
        if qattnode is not None:
            quad_att = float(qattnode.text)
        if zfarnode is not None:
            zfar = float(zfarnode.text)
    except ValueError as ex:
        raise DaeMalformedError('Corrupted values in light definition')
    return PointLight(node.get('id'), color, constant_att, linear_att,
                      quad_att, zfar, xmlnode = node) | ValueError | dataset/ETHPy150Open pycollada/pycollada/collada/light.py/PointLight.load |
5,188 | @staticmethod
def load(collada, localscope, node):
    pnode = node.find( '%s/%s'%(tag('technique_common'),tag('spot')) )
    colornode = pnode.find( tag('color') )
    if colornode is None:
        raise DaeIncompleteError('Missing color for spot light')
    try:
        color = tuple([float(v) for v in colornode.text.split()])
    except ValueError as ex:
        raise DaeMalformedError('Corrupted color values in spot light definition')
    constant_att = linear_att = quad_att = falloff_ang = falloff_exp = None
    cattnode = pnode.find( tag('constant_attenuation') )
    lattnode = pnode.find( tag('linear_attenuation') )
    qattnode = pnode.find( tag('quadratic_attenuation') )
    fangnode = pnode.find( tag('falloff_angle') )
    fexpnode = pnode.find( tag('falloff_exponent') )
    try:
        if cattnode is not None:
            constant_att = float(cattnode.text)
        if lattnode is not None:
            linear_att = float(lattnode.text)
        if qattnode is not None:
            quad_att = float(qattnode.text)
        if fangnode is not None:
            falloff_ang = float(fangnode.text)
        if fexpnode is not None:
            falloff_exp = float(fexpnode.text)
    except __HOLE__ as ex:
        raise DaeMalformedError('Corrupted values in spot light definition')
    return SpotLight(node.get('id'), color, constant_att, linear_att,
                     quad_att, falloff_ang, falloff_exp, xmlnode = node) | ValueError | dataset/ETHPy150Open pycollada/pycollada/collada/light.py/SpotLight.load |
5,189 | def main(argv=None):
    """script main.
    parses command line options in sys.argv, unless *argv* is given.
    """
    if not argv:
        argv = sys.argv
    # setup command line parser
    parser = E.OptionParser(
        version="%prog version: $Id: bed2graph.py 2861 2010-02-23 17:36:32Z andreas $", usage=globals()["__doc__"])
    parser.add_option("-o", "--output-section", dest="output", type="choice",
                      choices=("full", "name"),
                      help="output either ``full`` overlapping entries, only the ``name``s. [default=%default].")
    parser.set_defaults(
        output="full",
    )
    # add common options (-h/--help, ...) and parse command line
    (options, args) = E.Start(parser, argv=argv)
    if len(args) != 2:
        raise ValueError("two arguments required")
    if args[0] == "-":
        infile1 = options.stdin
    else:
        infile1 = IOTools.openFile(args[0], "r")
    infile2 = IOTools.openFile(args[1], "r")
    idx = Bed.readAndIndex(infile2, with_values=True)
    output = options.output
    outfile = options.stdout
    if output == "name":
        outfile.write("name1\tname2\n")
        outf = lambda x: x.fields[0]
    else:
        outf = str
    for bed in Bed.iterator(infile1):
        try:
            overlaps = idx[bed.contig].find(bed.start, bed.end)
        except (KeyError, __HOLE__):
            # ignore missing contig and zero length intervals
            continue
        for o in overlaps:
            outfile.write("\t".join((outf(bed), outf(o[2]))) + "\n")
    E.Stop() | IndexError | dataset/ETHPy150Open CGATOxford/cgat/scripts/bed2graph.py/main |
5,190 | def __init__(self, config):
    self._config = config
    self._relays = {}  # (tcprelay, udprelay)
    self._loop = eventloop.EventLoop()
    self._dns_resolver = asyncdns.DNSResolver()
    self._dns_resolver.add_to_loop(self._loop)
    self._statistics = collections.defaultdict(int)
    self._control_client_addr = None
    try:
        manager_address = config['manager_address']
        if ':' in manager_address:
            addr = manager_address.rsplit(':', 1)
            addr = addr[0], int(addr[1])
            addrs = socket.getaddrinfo(addr[0], addr[1])
            if addrs:
                family = addrs[0][0]
            else:
                logging.error('invalid address: %s', manager_address)
                exit(1)
        else:
            addr = manager_address
            family = socket.AF_UNIX
        self._control_socket = socket.socket(family,
                                             socket.SOCK_DGRAM)
        self._control_socket.bind(addr)
        self._control_socket.setblocking(False)
    except (OSError, __HOLE__) as e:
        logging.error(e)
        logging.error('can not bind to manager address')
        exit(1)
    self._loop.add(self._control_socket,
                   eventloop.POLL_IN, self)
    self._loop.add_periodic(self.handle_periodic)
    port_password = config['port_password']
    del config['port_password']
    for port, password in port_password.items():
        a_config = config.copy()
        a_config['server_port'] = int(port)
        a_config['password'] = password
        self.add_port(a_config) | IOError | dataset/ETHPy150Open ziggear/shadowsocks/shadowsocks/manager.py/Manager.__init__ |
5,191 | def _send_control_data(self, data):
    if self._control_client_addr:
        try:
            self._control_socket.sendto(data, self._control_client_addr)
        except (socket.error, OSError, __HOLE__) as e:
            error_no = eventloop.errno_from_exception(e)
            if error_no in (errno.EAGAIN, errno.EINPROGRESS,
                            errno.EWOULDBLOCK):
                return
            else:
                shell.print_exception(e)
                if self._config['verbose']:
                    traceback.print_exc() | IOError | dataset/ETHPy150Open ziggear/shadowsocks/shadowsocks/manager.py/Manager._send_control_data |
5,192 | def __init__(self):
    try:
        settings_module = os.environ[ENVIRONMENT_VARIABLE]
        if not settings_module:
            raise KeyError
    except __HOLE__:
        raise ImportError("Flexisettings cannot be imported, " \
            "because environment variable %s is undefined." \
            % ENVIRONMENT_VARIABLE
        )
    self._settings_module = settings_module
    self._settings_path = self._get_mod_dir(settings_module)
    self._import_settings()
    if self._globals['FLEXI_LAYOUT_DISCOVERY']:
        self._layout_discovery()
    if self._globals['FLEXI_AUTORELOAD']:
        for module, modfile in self._wrapped_modules.iteritems():
            # add this module to sys.modules
            sys.modules['flexisettings.wrapped.%s' % module] = MockModule(module, modfile)
# old-style class attribute lookup | KeyError | dataset/ETHPy150Open hrbonz/django-flexisettings/flexisettings/settings.py/FlexiSettingsProxy.__init__ |
5,193 | def verify_json(filename):
    """ Checks that a JSON file is valid JSON """
    try:
        with open(filename) as jsonfile:
            json.loads(jsonfile.read())
        return True
    except __HOLE__:
        return False | ValueError | dataset/ETHPy150Open gosquadron/squadron/squadron/autotest.py/verify_json |
5,194 | def _detect_parse_error(self, date_formats, languages):
    """
    Check following cases:
    * 2nd month in Hijri calendar can be 29 or 30 days whilst this is
      not possible for Gregorian calendar.
    """
    for lang_shortname in languages:
        language = default_language_loader.get_language(lang_shortname)
        translated = language.translate(self.source, settings=settings)
        for date_format in date_formats:
            try:
                datetime.strptime(date_format, translated)
            except __HOLE__:
                sep = find_date_separator(date_format)
                m = re.search(
                    r'(?<!\d)(?:(?:(0?2){sep}(29|30))|(?:(29|30){sep}(0?2)))'.format(sep=sep),
                    translated)
                if m:
                    raise HijriGregorianFebruaryMismatch() | ValueError | dataset/ETHPy150Open scrapinghub/dateparser/dateparser/calendars/hijri.py/HijriCalendar._detect_parse_error |
5,195 | def __setstate__(self, d):
    if "restore_design_info" in d:
        # NOTE: there may be a more performant way to do this
        from patsy import dmatrices, PatsyError
        exc = []
        try:
            data = d['frame']
        except __HOLE__:
            data = d['orig_endog'].join(d['orig_exog'])
        for depth in [2, 3, 1, 0, 4]:  # sequence is a guess where to likely find it
            try:
                _, design = dmatrices(d['formula'], data, eval_env=depth,
                                      return_type='dataframe')
                break
            except (NameError, PatsyError) as e:
                print('not in depth %d' % depth)
                exc.append(e)  # why do I need a reference from outside except block
                pass
        else:
            raise exc[-1]
        self.design_info = design.design_info
        del d["restore_design_info"]
    self.__dict__.update(d) | KeyError | dataset/ETHPy150Open statsmodels/statsmodels/statsmodels/base/data.py/ModelData.__setstate__ |
5,196 | def _get_names(self, arr):
    if isinstance(arr, DataFrame):
        return list(arr.columns)
    elif isinstance(arr, Series):
        if arr.name:
            return [arr.name]
        else:
            return
    else:
        try:
            return arr.dtype.names
        except __HOLE__:
            pass
    return None | AttributeError | dataset/ETHPy150Open statsmodels/statsmodels/statsmodels/base/data.py/ModelData._get_names |
5,197 | def _get_row_labels(self, arr):
    try:
        return arr.index
    except __HOLE__:
        # if we've gotten here it's because endog is pandas and
        # exog is not, so just return the row labels from endog
        return self.orig_endog.index | AttributeError | dataset/ETHPy150Open statsmodels/statsmodels/statsmodels/base/data.py/PandasData._get_row_labels |
5,198 | def main(argv=None):
    if argv is None:
        argv = sys.argv
    try:
        args = vroom.args.Parse(argv[1:])
    except __HOLE__ as e:
        sys.stderr.write('%s\n' % ', '.join(e.args))
        return 1
    if args.murder:
        try:
            output = subprocess.check_output(['ps', '-A']).decode('utf-8')
        except subprocess.CalledProcessError:
            sys.stdout.write("Can't find running processes.\n")
            return 1
        for line in output.splitlines():
            if line.endswith('vroom'):
                pid = int(line.split(None, 1)[0])
                # ARE YOU SUICIDAL?!
                if pid != os.getpid():
                    sys.stdout.write('Killing a vroom: %s\n' % line)
                    os.kill(pid, signal.SIGKILL)
                break
        else:
            sys.stdout.write('No running vrooms found.\n')
            return 0
        end = 'VroomEnd()'
        kill = ['vim', '--servername', args.servername, '--remote-expr', end]
        sys.stdout.write("I hope you're happy.\n")
        return subprocess.call(kill)
    dirty = False
    writers = []
    try:
        for filename in args.filenames:
            with open(filename) as f:
                runner = vroom.runner.Vroom(filename, args)
                writers.append(runner(f))
                if runner.dirty:
                    dirty = True
    except vroom.vim.ServerQuit as e:
        # If the vim server process fails, the details are probably on stderr, so hope
        # for the best and exit without shell reset.
        sys.stderr.write('Exception: {}\n'.format(e))
        return 2
    if dirty:
        # Running vim in a process can screw with shell line endings. Reset terminal.
        subprocess.call(['reset'])
    for writer in writers:
        writer.Write()
    vroom.output.WriteBackmatter(writers, args)
    failed_tests = [w for w in writers if w.Status() != vroom.output.STATUS.PASS]
    if failed_tests:
        return 3 | ValueError | dataset/ETHPy150Open google/vroom/vroom/__main__.py/main |
5,199 | @must_be_logged_in
def watched_logs_get(**kwargs):
    user = kwargs['auth'].user
    try:
        page = int(request.args.get('page', 0))
    except __HOLE__:
        raise HTTPError(http.BAD_REQUEST, data=dict(
            message_long='Invalid value for "page".'
        ))
    try:
        size = int(request.args.get('size', 10))
    except ValueError:
        raise HTTPError(http.BAD_REQUEST, data=dict(
            message_long='Invalid value for "size".'
        ))
    total = sum(1 for x in user.get_recent_log_ids())
    paginated_logs, pages = paginate(user.get_recent_log_ids(), total, page, size)
    logs = (model.NodeLog.load(id) for id in paginated_logs)
    return {
        "logs": [serialize_log(log) for log in logs],
        "total": total,
        "pages": pages,
        "page": page
    } | ValueError | dataset/ETHPy150Open CenterForOpenScience/osf.io/website/views.py/watched_logs_get |