Unnamed: 0 (int64, 0-10k) | function (string, 79-138k chars) | label (string, 20 classes) | info (string, 42-261 chars)
---|---|---|---|
5,600 | def _add_details(self, info):
for (k, v) in six.iteritems(info):
try:
if k == 'requirements':
v = self._add_requirements_details(v)
setattr(self, k, v)
self._info[k] = v
except __HOLE__:
# In this case we already defined the attribute on the class
pass | AttributeError | dataset/ETHPy150Open openstack/python-solumclient/solumclient/v1/plan.py/Artifact._add_details |
5,601 | def _add_details(self, info):
for (k, v) in six.iteritems(info):
try:
if k == 'artifacts':
v = self._add_artifact_details(v)
elif k == 'services':
v = self._add_services_details(v)
setattr(self, k, v)
self._info[k] = v
except __HOLE__:
# In this case we already defined the attribute on the class
pass | AttributeError | dataset/ETHPy150Open openstack/python-solumclient/solumclient/v1/plan.py/Plan._add_details |
5,602 | def list(self, **kwargs):
kwargs = self._filter_kwargs(kwargs)
kwargs.setdefault("headers", kwargs.get("headers", {}))
kwargs['headers']['Content-Type'] = 'x-application/yaml'
resp = self.client.get(
self.build_url(base_url="/v1", **kwargs), **kwargs)
try:
resp_plan = yamlutils.load(resp.content)
except __HOLE__ as e:
raise exc.CommandException(message='Could not load Plan. '
'Reason: %s' % e.message)
return [Plan(self, res, loaded=True) for res in resp_plan if res] | ValueError | dataset/ETHPy150Open openstack/python-solumclient/solumclient/v1/plan.py/PlanManager.list |
5,603 | def create(self, plan, **kwargs):
kwargs = self._filter_kwargs(kwargs)
kwargs['data'] = plan
kwargs.setdefault("headers", kwargs.get("headers", {}))
kwargs['headers']['Content-Type'] = 'x-application/yaml'
try:
resp = self.client.post(
self.build_url(base_url="/v1", **kwargs), **kwargs)
except Exception as e:
message = vars(e).get('details', str(e))
raise exceptions.BadRequest(message=message)
try:
resp_plan = yamlutils.load(resp.content)
except __HOLE__ as e:
raise exc.CommandException(message='Could not load Plan. '
'Reason: %s' % e.message)
return Plan(self, resp_plan) | ValueError | dataset/ETHPy150Open openstack/python-solumclient/solumclient/v1/plan.py/PlanManager.create |
5,604 | def _get(self, url, response_key=None):
kwargs = {'headers': {}}
kwargs['headers']['Content-Type'] = 'x-application/yaml'
resp = self.client.get(url, **kwargs)
try:
resp_plan = yamlutils.load(resp.content)
except __HOLE__ as e:
raise exc.CommandException(message='Could not load Plan. '
'Reason: %s' % e.message)
return Plan(self, resp_plan, loaded=True) | ValueError | dataset/ETHPy150Open openstack/python-solumclient/solumclient/v1/plan.py/PlanManager._get |
5,605 | def update(self, plan, **kwargs):
kwargs = self._filter_kwargs(kwargs)
kwargs['data'] = plan
kwargs.setdefault("headers", kwargs.get("headers", {}))
kwargs['headers']['Content-Type'] = 'x-application/yaml'
resp = self.client.put(self.build_url(base_url="/v1", **kwargs),
**kwargs)
try:
resp_plan = yamlutils.load(resp.content)
except __HOLE__ as e:
raise exc.CommandException(message='Could not load Plan. '
'Reason: %s' % e.message)
return self.resource_class(self, resp_plan) | ValueError | dataset/ETHPy150Open openstack/python-solumclient/solumclient/v1/plan.py/PlanManager.update |
5,606 | def is_duplicate_page(link):
try:
ConfiguratorPage.objects.get(link=link)
return True
except (ConfiguratorPage.DoesNotExist, __HOLE__):
return False | AssertionError | dataset/ETHPy150Open marineam/nagcat/railroad/railroad/permalink/views.py/is_duplicate_page |
5,607 | @conf
def cmd_and_log(self, cmd, kw):
Logs.debug('runner: %s\n' % cmd)
if self.log:
self.log.write('%s\n' % cmd)
try:
p = Utils.pproc.Popen(cmd, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE, shell=True)
(out, err) = p.communicate()
except __HOLE__, e:
self.log.write('error %r' % e)
self.fatal(str(e))
out = str(out)
err = str(err)
if self.log:
self.log.write(out)
self.log.write(err)
if p.returncode:
if not kw.get('errmsg', ''):
if kw.get('mandatory', False):
kw['errmsg'] = out.strip()
else:
kw['errmsg'] = 'no'
self.fatal('fail')
return out | OSError | dataset/ETHPy150Open appcelerator-archive/poc-nodejs-desktop/Resources/nodejs/builds/linux/node/lib/node/wafadmin/Tools/config_c.py/cmd_and_log |
5,608 | @conf
def validate_c(self, kw):
"""validate the parameters for the test method"""
if not 'env' in kw:
kw['env'] = self.env.copy()
env = kw['env']
if not 'compiler' in kw:
kw['compiler'] = 'cc'
if env['CXX_NAME'] and Task.TaskBase.classes.get('cxx', None):
kw['compiler'] = 'cxx'
if not self.env['CXX']:
self.fatal('a c++ compiler is required')
else:
if not self.env['CC']:
self.fatal('a c compiler is required')
if not 'type' in kw:
kw['type'] = 'cprogram'
assert not(kw['type'] != 'cprogram' and kw.get('execute', 0)), 'can only execute programs'
#if kw['type'] != 'program' and kw.get('execute', 0):
# raise ValueError, 'can only execute programs'
def to_header(dct):
if 'header_name' in dct:
dct = Utils.to_list(dct['header_name'])
return ''.join(['#include <%s>\n' % x for x in dct])
return ''
# set the file name
if not 'compile_mode' in kw:
kw['compile_mode'] = (kw['compiler'] == 'cxx') and 'cxx' or 'cc'
if not 'compile_filename' in kw:
kw['compile_filename'] = 'test.c' + ((kw['compile_mode'] == 'cxx') and 'pp' or '')
#OSX
if 'framework_name' in kw:
try: TaskGen.task_gen.create_task_macapp
except __HOLE__: self.fatal('frameworks require the osx tool')
fwkname = kw['framework_name']
if not 'uselib_store' in kw:
kw['uselib_store'] = fwkname.upper()
if not kw.get('no_header', False):
if not 'header_name' in kw:
kw['header_name'] = []
fwk = '%s/%s.h' % (fwkname, fwkname)
if kw.get('remove_dot_h', None):
fwk = fwk[:-2]
kw['header_name'] = Utils.to_list(kw['header_name']) + [fwk]
kw['msg'] = 'Checking for framework %s' % fwkname
kw['framework'] = fwkname
#kw['frameworkpath'] = set it yourself
if 'function_name' in kw:
fu = kw['function_name']
if not 'msg' in kw:
kw['msg'] = 'Checking for function %s' % fu
kw['code'] = to_header(kw) + SNIP1 % fu
if not 'uselib_store' in kw:
kw['uselib_store'] = fu.upper()
if not 'define_name' in kw:
kw['define_name'] = self.have_define(fu)
elif 'type_name' in kw:
tu = kw['type_name']
if not 'msg' in kw:
kw['msg'] = 'Checking for type %s' % tu
if not 'header_name' in kw:
kw['header_name'] = 'stdint.h'
kw['code'] = to_header(kw) + SNIP2 % {'type_name' : tu}
if not 'define_name' in kw:
kw['define_name'] = self.have_define(tu.upper())
elif 'header_name' in kw:
if not 'msg' in kw:
kw['msg'] = 'Checking for header %s' % kw['header_name']
l = Utils.to_list(kw['header_name'])
assert len(l)>0, 'list of headers in header_name is empty'
kw['code'] = to_header(kw) + SNIP3
if not 'uselib_store' in kw:
kw['uselib_store'] = l[0].upper()
if not 'define_name' in kw:
kw['define_name'] = self.have_define(l[0])
if 'lib' in kw:
if not 'msg' in kw:
kw['msg'] = 'Checking for library %s' % kw['lib']
if not 'uselib_store' in kw:
kw['uselib_store'] = kw['lib'].upper()
if 'staticlib' in kw:
if not 'msg' in kw:
kw['msg'] = 'Checking for static library %s' % kw['staticlib']
if not 'uselib_store' in kw:
kw['uselib_store'] = kw['staticlib'].upper()
if 'fragment' in kw:
# an additional code fragment may be provided to replace the predefined code
# in custom headers
kw['code'] = kw['fragment']
if not 'msg' in kw:
kw['msg'] = 'Checking for custom code'
if not 'errmsg' in kw:
kw['errmsg'] = 'no'
for (flagsname,flagstype) in [('cxxflags','compiler'), ('cflags','compiler'), ('linkflags','linker')]:
if flagsname in kw:
if not 'msg' in kw:
kw['msg'] = 'Checking for %s flags %s' % (flagstype, kw[flagsname])
if not 'errmsg' in kw:
kw['errmsg'] = 'no'
if not 'execute' in kw:
kw['execute'] = False
if not 'errmsg' in kw:
kw['errmsg'] = 'not found'
if not 'okmsg' in kw:
kw['okmsg'] = 'yes'
if not 'code' in kw:
kw['code'] = SNIP3
if not kw.get('success'): kw['success'] = None
assert 'msg' in kw, 'invalid parameters, read http://freehackers.org/~tnagy/wafbook/single.html#config_helpers_c' | AttributeError | dataset/ETHPy150Open appcelerator-archive/poc-nodejs-desktop/Resources/nodejs/builds/linux/node/lib/node/wafadmin/Tools/config_c.py/validate_c |
5,609 | @conf
def run_c_code(self, *k, **kw):
test_f_name = kw['compile_filename']
k = 0
while k < 10000:
# make certain to use a fresh folder - necessary for win32
dir = os.path.join(self.blddir, '.conf_check_%d' % k)
# if the folder already exists, remove it
try:
shutil.rmtree(dir)
except __HOLE__:
pass
try:
os.stat(dir)
except OSError:
break
k += 1
try:
os.makedirs(dir)
except:
self.fatal('cannot create a configuration test folder %r' % dir)
try:
os.stat(dir)
except:
self.fatal('cannot use the configuration test folder %r' % dir)
bdir = os.path.join(dir, 'testbuild')
if not os.path.exists(bdir):
os.makedirs(bdir)
env = kw['env']
dest = open(os.path.join(dir, test_f_name), 'w')
dest.write(kw['code'])
dest.close()
back = os.path.abspath('.')
bld = Build.BuildContext()
bld.log = self.log
bld.all_envs.update(self.all_envs)
bld.all_envs['default'] = env
bld.lst_variants = bld.all_envs.keys()
bld.load_dirs(dir, bdir)
os.chdir(dir)
bld.rescan(bld.srcnode)
if not 'features' in kw:
# conf.check(features='cc cprogram pyext', ...)
kw['features'] = [kw['compile_mode'], kw['type']] # "cprogram cc"
o = bld(features=kw['features'], source=test_f_name, target='testprog')
for k, v in kw.iteritems():
setattr(o, k, v)
self.log.write("==>\n%s\n<==\n" % kw['code'])
# compile the program
try:
bld.compile()
except Utils.WafError:
ret = Utils.ex_stack()
else:
ret = 0
# chdir before returning
os.chdir(back)
if ret:
self.log.write('command returned %r' % ret)
self.fatal(str(ret))
# if we need to run the program, try to get its result
# keep the name of the program to execute
if kw['execute']:
lastprog = o.link_task.outputs[0].abspath(env)
args = Utils.to_list(kw.get('exec_args', []))
proc = Utils.pproc.Popen([lastprog] + args, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE)
(out, err) = proc.communicate()
w = self.log.write
w(str(out))
w('\n')
w(str(err))
w('\n')
w('returncode %r' % proc.returncode)
w('\n')
if proc.returncode:
self.fatal(Utils.ex_stack())
ret = out
return ret | OSError | dataset/ETHPy150Open appcelerator-archive/poc-nodejs-desktop/Resources/nodejs/builds/linux/node/lib/node/wafadmin/Tools/config_c.py/run_c_code |
5,610 | @conf
def is_defined(self, key):
defines = self.env[DEFINES]
if not defines:
return False
try:
value = defines[key]
except __HOLE__:
return False
else:
return value != UNDEFINED | KeyError | dataset/ETHPy150Open appcelerator-archive/poc-nodejs-desktop/Resources/nodejs/builds/linux/node/lib/node/wafadmin/Tools/config_c.py/is_defined |
5,611 | @conf
def get_define(self, define):
"get the value of a previously stored define"
try: return self.env[DEFINES][define]
except __HOLE__: return None | KeyError | dataset/ETHPy150Open appcelerator-archive/poc-nodejs-desktop/Resources/nodejs/builds/linux/node/lib/node/wafadmin/Tools/config_c.py/get_define |
5,612 | def wrap_code(self, routine, helpers=[]):
workdir = self.filepath or tempfile.mkdtemp("_sympy_compile")
if not os.access(workdir, os.F_OK):
os.mkdir(workdir)
oldwork = os.getcwd()
os.chdir(workdir)
try:
sys.path.append(workdir)
self._generate_code(routine, helpers)
self._prepare_files(routine)
self._process_files(routine)
mod = __import__(self.module_name)
finally:
sys.path.remove(workdir)
CodeWrapper._module_counter += 1
os.chdir(oldwork)
if not self.filepath:
try:
shutil.rmtree(workdir)
except __HOLE__:
# Could be some issues on Windows
pass
return self._get_wrapped_function(mod, routine.name) | OSError | dataset/ETHPy150Open sympy/sympy/sympy/utilities/autowrap.py/CodeWrapper.wrap_code |
5,613 | def AnalyzeFileObject(self, file_object):
"""Retrieves the format specification.
Args:
file_object: a file-like object (instance of file_io.FileIO).
Returns:
The type indicator if the file-like object contains a supported format
or None otherwise.
"""
tsk_image_object = tsk_image.TSKFileSystemImage(file_object)
try:
_ = pytsk3.Volume_Info(tsk_image_object)
except __HOLE__:
return
return self.type_indicator
# Register the analyzer helpers with the analyzer. | IOError | dataset/ETHPy150Open log2timeline/dfvfs/dfvfs/analyzer/tsk_partition_analyzer_helper.py/TSKPartitionAnalyzerHelper.AnalyzeFileObject |
5,614 | def load_manifest(filename):
rv = {}
try:
with open(filename) as f:
for line in f:
if line[:1] == '@':
rv[line.strip()] = None
continue
line = line.strip().split('=', 1)
if len(line) == 2:
key = line[0].strip()
value = line[1].strip()
rv[key] = value
except __HOLE__ as e:
if e.errno != errno.ENOENT:
raise
return rv | IOError | dataset/ETHPy150Open lektor/lektor/lektor/packages.py/load_manifest |
5,615 | def list_local_packages(path):
"""Lists all local packages below a path that could be installed."""
rv = []
try:
for filename in os.listdir(path):
if os.path.isfile(os.path.join(path, filename, 'setup.py')):
rv.append('@' + filename)
except __HOLE__:
pass
return rv | OSError | dataset/ETHPy150Open lektor/lektor/lektor/packages.py/list_local_packages |
5,616 | def update_cache(package_root, remote_packages, local_package_path,
refresh=False):
"""Updates the package cache at package_root for the given dictionary
of packages as well as packages in the given local package path.
"""
requires_wipe = False
if refresh:
click.echo('Force package cache refresh.')
requires_wipe = True
manifest_file = os.path.join(package_root, 'lektor-packages.manifest')
local_packages = list_local_packages(local_package_path)
old_manifest = load_manifest(manifest_file)
to_install = []
all_packages = dict(remote_packages)
all_packages.update((x, None) for x in local_packages)
# step 1: figure out which remote packages to install.
for package, version in iteritems(remote_packages):
old_version = old_manifest.pop(package, None)
if old_version is None:
to_install.append((package, version))
elif old_version != version:
requires_wipe = True
# step 2: figure out which local packages to install
for package in local_packages:
if old_manifest.pop(package, False) is False:
to_install.append((package, None))
# Bad news, we need to wipe everything
if requires_wipe or old_manifest:
try:
shutil.rmtree(package_root)
except OSError:
pass
to_install = iteritems(all_packages)
if to_install:
click.echo('Updating packages in %s for project' % package_root)
try:
os.makedirs(package_root)
except __HOLE__:
pass
for package, version in to_install:
if package[:1] == '@':
install_local_package(package_root,
os.path.join(local_package_path, package[1:]))
else:
download_and_install_package(package_root, package, version)
write_manifest(manifest_file, all_packages) | OSError | dataset/ETHPy150Open lektor/lektor/lektor/packages.py/update_cache |
5,617 | def wipe_package_cache(env):
"""Wipes the entire package cache."""
package_root = env.project.get_package_cache_path()
try:
shutil.rmtree(package_root)
except (__HOLE__, IOError):
pass | OSError | dataset/ETHPy150Open lektor/lektor/lektor/packages.py/wipe_package_cache |
5,618 | def is_installed(name):
ret = exec_cmd("/usr/bin/dpkg-query -l '{0}'".format(name))
if ret['returncode'] != 0:
return False
# There's no way to use return code of any of the dpkg-query options.
# Instead we use the "state" column of dpkg-query -l
# So programmaticaly here:
# 1. Get stdout
# 2. Split on new line
# 3. Get the last but one line (last is blank, in any case?)
# 4. Get first character (i=installed)
try:
return ret['stdout'].split('\n')[-2][0] == 'i'
except __HOLE__ as err:
log.error(err)
return False | IndexError | dataset/ETHPy150Open comodit/synapse-agent/synapse/resources/packages-plugin/apt.py/is_installed |
5,619 | def InsertPhoto(self, album_or_uri, photo, filename_or_handle,
content_type='image/jpeg'):
"""Add a PhotoEntry
Needs authentication, see self.ClientLogin()
Arguments:
album_or_uri: AlbumFeed or uri of the album where the photo should go
photo: PhotoEntry to add
filename_or_handle: A file-like object or file name where the image/video
will be read from
content_type (optional): Internet media type (a.k.a. mime type) of
media object. Currently Google Photos supports these types:
o image/bmp
o image/gif
o image/jpeg
o image/png
Images will be converted to jpeg on upload. Defaults to `image/jpeg'
"""
try:
assert(isinstance(photo, gdata.photos.PhotoEntry))
except AssertionError:
raise GooglePhotosException({'status':GPHOTOS_INVALID_ARGUMENT,
'body':'`photo` must be a gdata.photos.PhotoEntry instance',
'reason':'Found %s, not PhotoEntry' % type(photo)
})
try:
majtype, mintype = content_type.split('/')
assert(mintype in SUPPORTED_UPLOAD_TYPES)
except (__HOLE__, AssertionError):
raise GooglePhotosException({'status':GPHOTOS_INVALID_CONTENT_TYPE,
'body':'This is not a valid content type: %s' % content_type,
'reason':'Accepted content types: %s' % \
['image/'+t for t in SUPPORTED_UPLOAD_TYPES]
})
if isinstance(filename_or_handle, (str, unicode)) and \
os.path.exists(filename_or_handle): # it's a file name
mediasource = gdata.MediaSource()
mediasource.setFile(filename_or_handle, content_type)
elif hasattr(filename_or_handle, 'read'):# it's a file-like resource
if hasattr(filename_or_handle, 'seek'):
filename_or_handle.seek(0) # rewind pointer to the start of the file
# gdata.MediaSource needs the content length, so read the whole image
file_handle = StringIO.StringIO(filename_or_handle.read())
name = 'image'
if hasattr(filename_or_handle, 'name'):
name = filename_or_handle.name
mediasource = gdata.MediaSource(file_handle, content_type,
content_length=file_handle.len, file_name=name)
else: #filename_or_handle is not valid
raise GooglePhotosException({'status':GPHOTOS_INVALID_ARGUMENT,
'body':'`filename_or_handle` must be a path name or a file-like object',
'reason':'Found %s, not path name or object with a .read() method' % \
filename_or_handle
})
if isinstance(album_or_uri, (str, unicode)): # it's a uri
feed_uri = album_or_uri
elif hasattr(album_or_uri, 'GetFeedLink'): # it's a AlbumFeed object
feed_uri = album_or_uri.GetFeedLink().href
try:
return self.Post(photo, uri=feed_uri, media_source=mediasource,
converter=gdata.photos.PhotoEntryFromString)
except gdata.service.RequestError, e:
raise GooglePhotosException(e.args[0]) | ValueError | dataset/ETHPy150Open acil-bwh/SlicerCIP/Scripted/attic/PicasaSnap/gdata/photos/service.py/PhotosService.InsertPhoto |
5,620 | def UpdatePhotoBlob(self, photo_or_uri, filename_or_handle,
content_type = 'image/jpeg'):
"""Update a photo's binary data.
Needs authentication, see self.ClientLogin()
Arguments:
photo_or_uri: a gdata.photos.PhotoEntry that will be updated, or a
`edit-media' uri pointing to it
filename_or_handle: A file-like object or file name where the image/video
will be read from
content_type (optional): Internet media type (a.k.a. mime type) of
media object. Currently Google Photos supports these types:
o image/bmp
o image/gif
o image/jpeg
o image/png
Images will be converted to jpeg on upload. Defaults to `image/jpeg'
Returns:
The modified gdata.photos.PhotoEntry
Example:
p = GetFeed(PhotoUri)
p = UpdatePhotoBlob(p, '/tmp/newPic.jpg')
It is important that you don't keep the old object around, once
it has been updated. See
http://code.google.com/apis/gdata/reference.html#Optimistic-concurrency
"""
try:
majtype, mintype = content_type.split('/')
assert(mintype in SUPPORTED_UPLOAD_TYPES)
except (__HOLE__, AssertionError):
raise GooglePhotosException({'status':GPHOTOS_INVALID_CONTENT_TYPE,
'body':'This is not a valid content type: %s' % content_type,
'reason':'Accepted content types: %s' % \
['image/'+t for t in SUPPORTED_UPLOAD_TYPES]
})
if isinstance(filename_or_handle, (str, unicode)) and \
os.path.exists(filename_or_handle): # it's a file name
photoblob = gdata.MediaSource()
photoblob.setFile(filename_or_handle, content_type)
elif hasattr(filename_or_handle, 'read'):# it's a file-like resource
if hasattr(filename_or_handle, 'seek'):
filename_or_handle.seek(0) # rewind pointer to the start of the file
# gdata.MediaSource needs the content length, so read the whole image
file_handle = StringIO.StringIO(filename_or_handle.read())
name = 'image'
if hasattr(filename_or_handle, 'name'):
name = filename_or_handle.name
mediasource = gdata.MediaSource(file_handle, content_type,
content_length=file_handle.len, file_name=name)
else: #filename_or_handle is not valid
raise GooglePhotosException({'status':GPHOTOS_INVALID_ARGUMENT,
'body':'`filename_or_handle` must be a path name or a file-like object',
'reason':'Found %s, not path name or an object with .read() method' % \
type(filename_or_handle)
})
if isinstance(photo_or_uri, (str, unicode)):
entry_uri = photo_or_uri # it's a uri
elif hasattr(photo_or_uri, 'GetEditMediaLink'):
entry_uri = photo_or_uri.GetEditMediaLink().href
try:
return self.Put(photoblob, entry_uri,
converter=gdata.photos.PhotoEntryFromString)
except gdata.service.RequestError, e:
raise GooglePhotosException(e.args[0]) | ValueError | dataset/ETHPy150Open acil-bwh/SlicerCIP/Scripted/attic/PicasaSnap/gdata/photos/service.py/PhotosService.UpdatePhotoBlob |
5,621 | def Delete(self, object_or_uri, *args, **kwargs):
"""Delete an object.
Re-implementing the GDataService.Delete method, to add some
convenience.
Arguments:
object_or_uri: Any object that has a GetEditLink() method that
returns a link, or a uri to that object.
Returns:
? or GooglePhotosException on errors
"""
try:
uri = object_or_uri.GetEditLink().href
except __HOLE__:
uri = object_or_uri
try:
return gdata.service.GDataService.Delete(self, uri, *args, **kwargs)
except gdata.service.RequestError, e:
raise GooglePhotosException(e.args[0]) | AttributeError | dataset/ETHPy150Open acil-bwh/SlicerCIP/Scripted/attic/PicasaSnap/gdata/photos/service.py/PhotosService.Delete |
5,622 | def __init__(self, class_name, bases, namespace):
self.errors = []
self.filename = None
try:
self._checks.append(self())
except __HOLE__:
self._checks = [] | AttributeError | dataset/ETHPy150Open nbessi/openerp-conventions/common_checker/base_checker.py/BaseCheckerMeta.__init__ |
5,623 | def load_plugin(self, plugin_name):
""" Loads a single plugin given its name """
if not plugin_name in __all__:
raise KeyError("Plugin " + plugin_name + " not found")
try:
plugin = self.__plugins[plugin_name]
except __HOLE__:
# Load the plugin only if not loaded yet
log.debug("Loading plugin: %s" % plugin_name)
module = __import__("plugins." + plugin_name, fromlist=["plugins"])
plugin = module.load()
self.__plugins[plugin_name] = plugin
return plugin | KeyError | dataset/ETHPy150Open nacx/kahuna/kahuna/pluginmanager.py/PluginManager.load_plugin |
5,624 | def call(self, plugin_name, command_name, args):
""" Encapsulate the call into a context already loaded. """
try:
plugin = self.load_plugin(plugin_name)
except KeyError:
# Plugin not found, pring generic help
self.help_all()
if not command_name:
self.help(plugin)
else:
try:
command = plugin._commands()[command_name]
except __HOLE__:
# Command not found in plugin. Print only plugin help
self.help(plugin)
with opencontext(plugin):
return command(args) | KeyError | dataset/ETHPy150Open nacx/kahuna/kahuna/pluginmanager.py/PluginManager.call |
5,625 | def find_credentials():
"""
Look in the current environment for Twilio credentails
"""
try:
account = os.environ["TWILIO_ACCOUNT_SID"]
token = os.environ["TWILIO_AUTH_TOKEN"]
return account, token
except __HOLE__:
return None, None | KeyError | dataset/ETHPy150Open balanced/status.balancedpayments.com/venv/lib/python2.7/site-packages/twilio/rest/__init__.py/find_credentials |
5,626 | def serialize(self, data, content_type):
try:
return json.dumps(data)
except __HOLE__:
pass
return json.dumps(to_primitive(data)) | TypeError | dataset/ETHPy150Open nii-cloud/dodai-compute/nova/network/quantum/client.py/JSONSerializer.serialize |
5,627 | def do_request(self, method, action, body=None,
headers=None, params=None):
"""Connects to the server and issues a request.
Returns the result data, or raises an appropriate exception if
HTTP status code is not 2xx
:param method: HTTP method ("GET", "POST", "PUT", etc...)
:param body: string of data to send, or None (default)
:param headers: mapping of key/value pairs to add as headers
:param params: dictionary of key/value pairs to add to append
to action
"""
# Ensure we have a tenant id
if not self.tenant:
raise Exception(_("Tenant ID not set"))
# Add format and tenant_id
action += ".%s" % self.format
action = Client.action_prefix + action
action = action.replace('{tenant_id}', self.tenant)
if type(params) is dict:
action += '?' + urllib.urlencode(params)
try:
connection_type = self.get_connection_type()
headers = headers or {"Content-Type":
"application/%s" % self.format}
# Open connection and send request, handling SSL certs
certs = {'key_file': self.key_file, 'cert_file': self.cert_file}
certs = dict((x, certs[x]) for x in certs if certs[x] != None)
if self.use_ssl and len(certs):
c = connection_type(self.host, self.port, **certs)
else:
c = connection_type(self.host, self.port)
if self.logger:
self.logger.debug(
_("Quantum Client Request:\n%(method)s %(action)s\n" %
locals()))
if body:
self.logger.debug(body)
c.request(method, action, body, headers)
res = c.getresponse()
status_code = self.get_status_code(res)
data = res.read()
if self.logger:
self.logger.debug("Quantum Client Reply (code = %s) :\n %s" \
% (str(status_code), data))
if status_code == httplib.NOT_FOUND:
raise QuantumNotFoundException(
_("Quantum entity not found: %s" % data))
if status_code in (httplib.OK,
httplib.CREATED,
httplib.ACCEPTED,
httplib.NO_CONTENT):
if data is not None and len(data):
return self.deserialize(data, status_code)
else:
raise QuantumServerException(
_("Server %(status_code)s error: %(data)s"
% locals()))
except (socket.error, __HOLE__), e:
raise QuantumIOException(_("Unable to connect to "
"server. Got error: %s" % e)) | IOError | dataset/ETHPy150Open nii-cloud/dodai-compute/nova/network/quantum/client.py/Client.do_request |
5,628 | def consume_all(self, max_loops=None):
"""Consume the streamed responses until there are no more.
This simply calls :meth:`consume_next` until there are no
more to consume.
:type max_loops: int
:param max_loops: (Optional) Maximum number of times to try to consume
an additional ``ReadRowsResponse``. You can use this
to avoid long wait times.
"""
curr_loop = 0
if max_loops is None:
max_loops = float('inf')
while curr_loop < max_loops:
curr_loop += 1
try:
self.consume_next()
except __HOLE__:
break | StopIteration | dataset/ETHPy150Open GoogleCloudPlatform/gcloud-python/gcloud/bigtable/row_data.py/PartialRowsData.consume_all |
5,629 | def parse(seq):
"""Sequence(Token) -> object"""
const = lambda x: lambda _: x
tokval = lambda x: x.value
toktype = lambda t: some(lambda x: x.type == t) >> tokval
op = lambda s: a(Token(u'Op', s)) >> tokval
op_ = lambda s: skip(op(s))
n = lambda s: a(Token(u'Name', s)) >> tokval
def make_array(n):
if n is None:
return []
else:
return [n[0]] + n[1]
def make_object(n):
return dict(make_array(n))
def make_number(n):
try:
return int(n)
except __HOLE__:
return float(n)
def unescape(s):
std = {
u'"': u'"', u'\\': u'\\', u'/': u'/', u'b': u'\b', u'f': u'\f',
u'n': u'\n', u'r': u'\r', u't': u'\t',
}
def sub(m):
if m.group(u'standard') is not None:
return std[m.group(u'standard')]
else:
return unichr(int(m.group(u'unicode'), 16))
return re_esc.sub(sub, s)
def make_string(n):
return unescape(n[1:-1])
null = n(u'null') >> const(None)
true = n(u'true') >> const(True)
false = n(u'false') >> const(False)
number = toktype(u'Number') >> make_number
string = toktype(u'String') >> make_string
value = forward_decl()
member = string + op_(u':') + value >> tuple
object = (
op_(u'{') +
maybe(member + many(op_(u',') + member)) +
op_(u'}')
>> make_object)
array = (
op_(u'[') +
maybe(value + many(op_(u',') + value)) +
op_(u']')
>> make_array)
value.define(
null
| true
| false
| object
| array
| number
| string)
json_text = object | array
json_file = json_text + skip(finished)
return json_file.parse(seq) | ValueError | dataset/ETHPy150Open vlasovskikh/funcparserlib/funcparserlib/tests/json.py/parse |
5,630 | def update_price_estimate_on_resource_spl_change(sender, instance, created=False, **kwargs):
try:
# XXX: drop support of IaaS app
is_changed = not created and instance.service_project_link_id != instance._old_values['service_project_link']
except __HOLE__:
is_changed = False
if is_changed:
spl_model = SupportedServices.get_related_models(instance)['service_project_link']
spl_old = spl_model.objects.get(pk=instance._old_values['service_project_link'])
old_family_scope = [spl_old] + spl_old.get_ancestors()
for estimate in models.PriceEstimate.filter(scope=instance, is_manually_input=False):
qs = models.PriceEstimate.objects.filter(
scope__in=old_family_scope, month=estimate.month, year=estimate.year)
for parent_estimate in qs:
parent_estimate.leaf_estimates.remove(estimate)
parent_estimate.update_from_leaf()
models.PriceEstimate.update_ancestors_for_resource(instance, force=True) | AttributeError | dataset/ETHPy150Open opennode/nodeconductor/nodeconductor/cost_tracking/handlers.py/update_price_estimate_on_resource_spl_change |
5,631 | def decode(self, obj, *args, **kwargs):
if not kwargs.get('recurse', False):
obj = super(JSONDecoder, self).decode(obj, *args, **kwargs)
if isinstance(obj, list):
for i in six.moves.xrange(len(obj)):
item = obj[i]
if self._is_recursive(item):
obj[i] = self.decode(item, recurse=True)
elif isinstance(obj, dict):
for key, value in obj.items():
if self._is_recursive(value):
obj[key] = self.decode(value, recurse=True)
elif isinstance(obj, six.string_types):
if TIME_RE.match(obj):
try:
return date_parser.parse(obj).time()
except __HOLE__:
pass
if DATE_RE.match(obj):
try:
return date_parser.parse(obj).date()
except ValueError:
pass
if DATETIME_RE.match(obj):
try:
return date_parser.parse(obj)
except ValueError:
pass
return obj | ValueError | dataset/ETHPy150Open derek-schaefer/django-json-field/json_field/fields.py/JSONDecoder.decode |
5,632 | @wsgi.action('os-getVNCConsole')
def get_vnc_console(self, req, id, body):
"""Get vnc connection information to access a server."""
context = req.environ['nova.context']
authorize(context)
# If type is not supplied or unknown, get_vnc_console below will cope
console_type = body['os-getVNCConsole'].get('type')
instance = common.get_instance(self.compute_api, context, id)
try:
output = self.compute_api.get_vnc_console(context,
instance,
console_type)
except exception.InstanceNotReady:
raise webob.exc.HTTPConflict(
explanation=_('Instance not yet ready'))
except exception.InstanceNotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.format_message())
except (exception.ConsoleTypeUnavailable,
exception.ConsoleTypeInvalid) as e:
raise webob.exc.HTTPBadRequest(explanation=e.format_message())
except __HOLE__:
msg = _("Unable to get vnc console, functionality not implemented")
raise webob.exc.HTTPNotImplemented(explanation=msg)
return {'console': {'type': console_type, 'url': output['url']}} | NotImplementedError | dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/api/openstack/compute/legacy_v2/contrib/consoles.py/ConsolesController.get_vnc_console |
5,633 | @wsgi.action('os-getSPICEConsole')
def get_spice_console(self, req, id, body):
"""Get spice connection information to access a server."""
context = req.environ['nova.context']
authorize(context)
# If type is not supplied or unknown, get_spice_console below will cope
console_type = body['os-getSPICEConsole'].get('type')
instance = common.get_instance(self.compute_api, context, id)
try:
output = self.compute_api.get_spice_console(context,
instance,
console_type)
except (exception.ConsoleTypeUnavailable,
exception.ConsoleTypeInvalid) as e:
raise webob.exc.HTTPBadRequest(explanation=e.format_message())
except exception.InstanceNotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.format_message())
except exception.InstanceNotReady as e:
raise webob.exc.HTTPConflict(explanation=e.format_message())
except __HOLE__:
msg = _("Unable to get spice console, "
"functionality not implemented")
raise webob.exc.HTTPNotImplemented(explanation=msg)
return {'console': {'type': console_type, 'url': output['url']}} | NotImplementedError | dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/api/openstack/compute/legacy_v2/contrib/consoles.py/ConsolesController.get_spice_console |
5,634 | @wsgi.action('os-getRDPConsole')
def get_rdp_console(self, req, id, body):
"""Get text console output."""
context = req.environ['nova.context']
authorize(context)
# If type is not supplied or unknown, get_rdp_console below will cope
console_type = body['os-getRDPConsole'].get('type')
instance = common.get_instance(self.compute_api, context, id)
try:
output = self.compute_api.get_rdp_console(context,
instance,
console_type)
except (exception.ConsoleTypeUnavailable,
exception.ConsoleTypeInvalid) as e:
raise webob.exc.HTTPBadRequest(explanation=e.format_message())
except exception.InstanceNotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.format_message())
except exception.InstanceNotReady as e:
raise webob.exc.HTTPConflict(explanation=e.format_message())
except __HOLE__:
msg = _("Unable to get rdp console, functionality not implemented")
raise webob.exc.HTTPNotImplemented(explanation=msg)
return {'console': {'type': console_type, 'url': output['url']}} | NotImplementedError | dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/api/openstack/compute/legacy_v2/contrib/consoles.py/ConsolesController.get_rdp_console |
5,635 | @wsgi.action('os-getSerialConsole')
def get_serial_console(self, req, id, body):
"""Get connection to a serial console."""
context = req.environ['nova.context']
authorize(context)
# If type is not supplied or unknown get_serial_console below will cope
console_type = body['os-getSerialConsole'].get('type')
instance = common.get_instance(self.compute_api, context, id)
try:
output = self.compute_api.get_serial_console(context,
instance,
console_type)
except exception.InstanceNotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.format_message())
except exception.InstanceNotReady as e:
raise webob.exc.HTTPConflict(explanation=e.format_message())
except (exception.ConsoleTypeUnavailable,
exception.ConsoleTypeInvalid,
exception.ImageSerialPortNumberInvalid,
exception.ImageSerialPortNumberExceedFlavorValue,
exception.SocketPortRangeExhaustedException) as e:
raise webob.exc.HTTPBadRequest(explanation=e.format_message())
except __HOLE__:
msg = _("Unable to get serial console, "
"functionality not implemented")
raise webob.exc.HTTPNotImplemented(explanation=msg)
return {'console': {'type': console_type, 'url': output['url']}} | NotImplementedError | dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/api/openstack/compute/legacy_v2/contrib/consoles.py/ConsolesController.get_serial_console |
5,636 | def __setattr__(self, prop, value):
# Add validity check for self.change
if (prop == 'change' and Resource.CHANGE_TYPES and
value is not None and not value in Resource.CHANGE_TYPES):
raise ChangeTypeError(value)
else:
try:
object.__setattr__(self, prop, value)
except __HOLE__:
# assume an extra one...
self._set_extra(prop, value) | AttributeError | dataset/ETHPy150Open resync/resync/resync/resource.py/Resource.__setattr__ |
5,637 | def seek(self, pos, whence=0):
"""Change stream position.
Change the stream position to byte offset offset. offset is
interpreted relative to the position indicated by whence. Values
for whence are:
* 0 -- start of stream (the default); offset should be zero or positive
* 1 -- current stream position; offset may be negative
* 2 -- end of stream; offset is usually negative
Return the new absolute position.
"""
self._checkClosed()
try:
pos = pos.__index__()
#except AttributeError as err:
#raise TypeError("an integer is required") from err
except __HOLE__:
raise TypeError("an integer is required")
if whence == 0:
if pos < 0:
raise ValueError("negative seek position %r" % (pos,))
self._pos = pos
elif whence == 1:
self._pos = max(0, self._pos + pos)
elif whence == 2:
self._pos = max(0, self._node.nrows + pos)
else:
raise ValueError("invalid whence value")
return self._pos
#def seekable(self) -> bool: | AttributeError | dataset/ETHPy150Open PyTables/PyTables/tables/nodes/filenode.py/RawPyTablesIO.seek |
5,638 | def __init__(self, node, h5file, **kwargs):
if node is not None:
# Open an existing node and get its version.
self._check_attributes(node)
self._version = node.attrs.NODE_TYPE_VERSION
elif h5file is not None:
# Check for allowed keyword arguments,
# to avoid unwanted arguments falling through to array constructor.
for kwarg in kwargs:
if kwarg not in self.__allowed_init_kwargs:
raise TypeError(
"%s keyword argument is not allowed" % repr(kwarg))
# Turn 'expectedsize' into 'expectedrows'.
if 'expectedsize' in kwargs:
# These match since one byte is stored per row.
expectedrows = kwargs['expectedsize']
kwargs = kwargs.copy()
del kwargs['expectedsize']
kwargs['expectedrows'] = expectedrows
# Create a new array in the specified PyTables file.
self._version = NodeTypeVersions[-1]
shape = self._byte_shape[self._version]
node = h5file.create_earray(
atom=tables.UInt8Atom(), shape=shape, **kwargs)
# Set the node attributes, else remove the array itself.
try:
self._set_attributes(node)
except __HOLE__:
h5file.remove_node(kwargs['where'], kwargs['name'])
raise
RawPyTablesIO.__init__(self, node, 'a+')
self._checkReadable()
self._checkWritable() | RuntimeError | dataset/ETHPy150Open PyTables/PyTables/tables/nodes/filenode.py/RAFileNode.__init__ |
5,639 | def expand_to_semantic_unit(string, startIndex, endIndex):
symbols = "([{)]}"
breakSymbols = ",;=&|\n"
lookBackBreakSymbols = breakSymbols + "([{"
lookForwardBreakSymbols = breakSymbols + ")]}"
symbolsRe = re.compile(r'(['+re.escape(symbols)+re.escape(breakSymbols)+'])')
counterparts = {
"(":")",
"{":"}",
"[":"]",
")":"(",
"}":"{",
"]":"["
}
symbolStack = []
searchIndex = startIndex - 1;
while True:
if(searchIndex < 0):
newStartIndex = searchIndex + 1
break
char = string[searchIndex:searchIndex+1]
result = symbolsRe.match(char)
if result:
symbol = result.group()
if(symbol in lookBackBreakSymbols and len(symbolStack) == 0):
newStartIndex = searchIndex + 1
break
if symbol in symbols:
if len(symbolStack) > 0 and symbolStack[len(symbolStack) - 1] == counterparts[symbol]:
symbolStack.pop()
else:
symbolStack.append(symbol)
# print(char, symbolStack)
searchIndex -= 1
searchIndex = endIndex;
while True:
char = string[searchIndex:searchIndex+1]
result = symbolsRe.match(char)
if result:
symbol = result.group()
if len(symbolStack) == 0 and symbol in lookForwardBreakSymbols:
newEndIndex = searchIndex;
break
if symbol in symbols:
if len(symbolStack) > 0 and symbolStack[len(symbolStack) - 1] == counterparts[symbol]:
symbolStack.pop()
else:
symbolStack.append(symbol)
if searchIndex >= len(string) - 1:
return None
# print(char, symbolStack, searchIndex)
searchIndex += 1
s = string[newStartIndex:newEndIndex]
trimResult = utils.trim(s)
if trimResult:
newStartIndex = newStartIndex + trimResult["start"];
newEndIndex = newEndIndex - (len(s) - trimResult["end"]);
try:
if newStartIndex == startIndex and newEndIndex == endIndex:
return None
if newStartIndex > startIndex or newEndIndex < endIndex:
return None
return utils.create_return_obj(newStartIndex, newEndIndex, string, "semantic_unit")
except __HOLE__:
return None | NameError | dataset/ETHPy150Open aronwoost/sublime-expand-region/expand_to_semantic_unit.py/expand_to_semantic_unit |
5,640 | def get_target(self, addon_short_name):
try:
return [addon for addon in self.target_addons if addon.name == addon_short_name][0]
except __HOLE__:
return None | IndexError | dataset/ETHPy150Open CenterForOpenScience/osf.io/website/archiver/model.py/ArchiveJob.get_target |
5,641 | def logistic_regression_path(X, y, pos_class=None, Cs=10, fit_intercept=True,
max_iter=100, tol=1e-4, verbose=0,
solver='lbfgs', coef=None, copy=False,
class_weight=None, dual=False, penalty='l2',
intercept_scaling=1., multi_class='ovr',
random_state=None, check_input=True,
max_squared_sum=None, sample_weight=None):
"""Compute a Logistic Regression model for a list of regularization
parameters.
This is an implementation that uses the result of the previous model
to speed up computations along the set of solutions, making it faster
than sequentially calling LogisticRegression for the different parameters.
Read more in the :ref:`User Guide <logistic_regression>`.
Parameters
----------
X : array-like or sparse matrix, shape (n_samples, n_features)
Input data.
y : array-like, shape (n_samples,)
Input data, target values.
Cs : int | array-like, shape (n_cs,)
List of values for the regularization parameter or integer specifying
the number of regularization parameters that should be used. In this
case, the parameters will be chosen in a logarithmic scale between
1e-4 and 1e4.
pos_class : int, None
The class with respect to which we perform a one-vs-all fit.
If None, then it is assumed that the given problem is binary.
fit_intercept : bool
Whether to fit an intercept for the model. In this case the shape of
the returned array is (n_cs, n_features + 1).
max_iter : int
Maximum number of iterations for the solver.
tol : float
Stopping criterion. For the newton-cg and lbfgs solvers, the iteration
will stop when ``max{|g_i | i = 1, ..., n} <= tol``
where ``g_i`` is the i-th component of the gradient.
verbose : int
For the liblinear and lbfgs solvers set verbose to any positive
number for verbosity.
solver : {'lbfgs', 'newton-cg', 'liblinear', 'sag'}
Numerical solver to use.
coef : array-like, shape (n_features,), default None
Initialization value for coefficients of logistic regression.
Useless for liblinear solver.
copy : bool, default False
Whether or not to produce a copy of the data. A copy is not required
anymore. This parameter is deprecated and will be removed in 0.19.
class_weight : dict or 'balanced', optional
Weights associated with classes in the form ``{class_label: weight}``.
If not given, all classes are supposed to have weight one.
The "balanced" mode uses the values of y to automatically adjust
weights inversely proportional to class frequencies in the input data
as ``n_samples / (n_classes * np.bincount(y))``.
Note that these weights will be multiplied with sample_weight (passed
through the fit method) if sample_weight is specified.
dual : bool
Dual or primal formulation. Dual formulation is only implemented for
l2 penalty with liblinear solver. Prefer dual=False when
n_samples > n_features.
penalty : str, 'l1' or 'l2'
Used to specify the norm used in the penalization. The 'newton-cg',
'sag' and 'lbfgs' solvers support only l2 penalties.
intercept_scaling : float, default 1.
Useful only when the solver 'liblinear' is used
and self.fit_intercept is set to True. In this case, x becomes
[x, self.intercept_scaling],
i.e. a "synthetic" feature with constant value equal to
intercept_scaling is appended to the instance vector.
The intercept becomes ``intercept_scaling * synthetic_feature_weight``.
Note! the synthetic feature weight is subject to l1/l2 regularization
as all other features.
To lessen the effect of regularization on synthetic feature weight
(and therefore on the intercept) intercept_scaling has to be increased.
multi_class : str, {'ovr', 'multinomial'}
Multiclass option can be either 'ovr' or 'multinomial'. If the option
chosen is 'ovr', then a binary problem is fit for each label. Else
the loss minimised is the multinomial loss fit across
the entire probability distribution. Works only for the 'lbfgs' and
'newton-cg' solvers.
random_state : int seed, RandomState instance, or None (default)
The seed of the pseudo random number generator to use when
shuffling the data. Used only in solvers 'sag' and 'liblinear'.
check_input : bool, default True
If False, the input arrays X and y will not be checked.
max_squared_sum : float, default None
Maximum squared sum of X over samples. Used only in SAG solver.
If None, it will be computed, going through all the samples.
The value should be precomputed to speed up cross validation.
sample_weight : array-like, shape(n_samples,) optional
Array of weights that are assigned to individual samples.
If not provided, then each sample is given unit weight.
Returns
-------
coefs : ndarray, shape (n_cs, n_features) or (n_cs, n_features + 1)
List of coefficients for the Logistic Regression model. If
fit_intercept is set to True then the second dimension will be
n_features + 1, where the last item represents the intercept.
Cs : ndarray
Grid of Cs used for cross-validation.
n_iter : array, shape (n_cs,)
Actual number of iteration for each Cs.
Notes
-----
You might get slightly different results with the solver liblinear than
with the others since this uses LIBLINEAR which penalizes the intercept.
"""
if copy:
warnings.warn("A copy is not required anymore. The 'copy' parameter "
"is deprecated and will be removed in 0.19.",
DeprecationWarning)
if isinstance(Cs, numbers.Integral):
Cs = np.logspace(-4, 4, Cs)
_check_solver_option(solver, multi_class, penalty, dual)
# Preprocessing.
if check_input or copy:
X = check_array(X, accept_sparse='csr', dtype=np.float64)
y = check_array(y, ensure_2d=False, copy=copy, dtype=None)
check_consistent_length(X, y)
_, n_features = X.shape
classes = np.unique(y)
random_state = check_random_state(random_state)
if pos_class is None and multi_class != 'multinomial':
if (classes.size > 2):
raise ValueError('To fit OvR, use the pos_class argument')
# np.unique(y) gives labels in sorted order.
pos_class = classes[1]
# If sample weights exist, convert them to array (support for lists)
# and check length
# Otherwise set them to 1 for all examples
if sample_weight is not None:
sample_weight = np.array(sample_weight, dtype=np.float64, order='C')
check_consistent_length(y, sample_weight)
else:
sample_weight = np.ones(X.shape[0])
# If class_weights is a dict (provided by the user), the weights
# are assigned to the original labels. If it is "balanced", then
# the class_weights are assigned after masking the labels with a OvR.
le = LabelEncoder()
if isinstance(class_weight, dict) or multi_class == 'multinomial':
if solver == "liblinear":
if classes.size == 2:
# Reconstruct the weights with keys 1 and -1
temp = {1: class_weight[pos_class],
-1: class_weight[classes[0]]}
class_weight = temp.copy()
else:
raise ValueError("In LogisticRegressionCV the liblinear "
"solver cannot handle multiclass with "
"class_weight of type dict. Use the lbfgs, "
"newton-cg or sag solvers or set "
"class_weight='balanced'")
else:
class_weight_ = compute_class_weight(class_weight, classes, y)
sample_weight *= class_weight_[le.fit_transform(y)]
# For doing a ovr, we need to mask the labels first. for the
# multinomial case this is not necessary.
if multi_class == 'ovr':
w0 = np.zeros(n_features + int(fit_intercept))
mask_classes = np.array([-1, 1])
mask = (y == pos_class)
y_bin = np.ones(y.shape, dtype=np.float64)
y_bin[~mask] = -1.
# for compute_class_weight
# 'auto' is deprecated and will be removed in 0.19
if class_weight in ("auto", "balanced"):
class_weight_ = compute_class_weight(class_weight, mask_classes,
y_bin)
sample_weight *= class_weight_[le.fit_transform(y_bin)]
else:
if solver != 'sag':
lbin = LabelBinarizer()
Y_multi = lbin.fit_transform(y)
if Y_multi.shape[1] == 1:
Y_multi = np.hstack([1 - Y_multi, Y_multi])
else:
# SAG multinomial solver needs LabelEncoder, not LabelBinarizer
le = LabelEncoder()
Y_multi = le.fit_transform(y)
w0 = np.zeros((classes.size, n_features + int(fit_intercept)),
order='F')
if coef is not None:
# it must work both giving the bias term and not
if multi_class == 'ovr':
if coef.size not in (n_features, w0.size):
raise ValueError(
'Initialization coef is of shape %d, expected shape '
'%d or %d' % (coef.size, n_features, w0.size))
w0[:coef.size] = coef
else:
# For binary problems coef.shape[0] should be 1, otherwise it
# should be classes.size.
n_classes = classes.size
if n_classes == 2:
n_classes = 1
if (coef.shape[0] != n_classes or
coef.shape[1] not in (n_features, n_features + 1)):
raise ValueError(
'Initialization coef is of shape (%d, %d), expected '
'shape (%d, %d) or (%d, %d)' % (
coef.shape[0], coef.shape[1], classes.size,
n_features, classes.size, n_features + 1))
w0[:, :coef.shape[1]] = coef
if multi_class == 'multinomial':
# fmin_l_bfgs_b and newton-cg accepts only ravelled parameters.
if solver in ['lbfgs', 'newton-cg']:
w0 = w0.ravel()
target = Y_multi
if solver == 'lbfgs':
func = lambda x, *args: _multinomial_loss_grad(x, *args)[0:2]
elif solver == 'newton-cg':
func = lambda x, *args: _multinomial_loss(x, *args)[0]
grad = lambda x, *args: _multinomial_loss_grad(x, *args)[1]
hess = _multinomial_grad_hess
warm_start_sag = {'coef': w0.T}
else:
target = y_bin
if solver == 'lbfgs':
func = _logistic_loss_and_grad
elif solver == 'newton-cg':
func = _logistic_loss
grad = lambda x, *args: _logistic_loss_and_grad(x, *args)[1]
hess = _logistic_grad_hess
warm_start_sag = {'coef': np.expand_dims(w0, axis=1)}
coefs = list()
n_iter = np.zeros(len(Cs), dtype=np.int32)
for i, C in enumerate(Cs):
if solver == 'lbfgs':
try:
w0, loss, info = optimize.fmin_l_bfgs_b(
func, w0, fprime=None,
args=(X, target, 1. / C, sample_weight),
iprint=(verbose > 0) - 1, pgtol=tol, maxiter=max_iter)
except __HOLE__:
# old scipy doesn't have maxiter
w0, loss, info = optimize.fmin_l_bfgs_b(
func, w0, fprime=None,
args=(X, target, 1. / C, sample_weight),
iprint=(verbose > 0) - 1, pgtol=tol)
if info["warnflag"] == 1 and verbose > 0:
warnings.warn("lbfgs failed to converge. Increase the number "
"of iterations.")
try:
n_iter_i = info['nit'] - 1
except:
n_iter_i = info['funcalls'] - 1
elif solver == 'newton-cg':
args = (X, target, 1. / C, sample_weight)
w0, n_iter_i = newton_cg(hess, func, grad, w0, args=args,
maxiter=max_iter, tol=tol)
elif solver == 'liblinear':
coef_, intercept_, n_iter_i, = _fit_liblinear(
X, target, C, fit_intercept, intercept_scaling, class_weight,
penalty, dual, verbose, max_iter, tol, random_state,
sample_weight=sample_weight)
if fit_intercept:
w0 = np.concatenate([coef_.ravel(), intercept_])
else:
w0 = coef_.ravel()
elif solver == 'sag':
if multi_class == 'multinomial':
target = target.astype(np.float64)
loss = 'multinomial'
else:
loss = 'log'
w0, n_iter_i, warm_start_sag = sag_solver(
X, target, sample_weight, loss, 1. / C, max_iter, tol,
verbose, random_state, False, max_squared_sum, warm_start_sag)
else:
raise ValueError("solver must be one of {'liblinear', 'lbfgs', "
"'newton-cg', 'sag'}, got '%s' instead" % solver)
if multi_class == 'multinomial':
multi_w0 = np.reshape(w0, (classes.size, -1))
if classes.size == 2:
multi_w0 = multi_w0[1][np.newaxis, :]
coefs.append(multi_w0)
else:
coefs.append(w0.copy())
n_iter[i] = n_iter_i
return coefs, np.array(Cs), n_iter
# helper function for LogisticCV | TypeError | dataset/ETHPy150Open scikit-learn/scikit-learn/sklearn/linear_model/logistic.py/logistic_regression_path |
5,642 | def _import_speedups():
try:
from simplejson import _speedups
return _speedups.encode_basestring_ascii, _speedups.make_encoder
except __HOLE__:
return None, None | ImportError | dataset/ETHPy150Open kennethreitz/tablib/tablib/packages/omnijson/packages/simplejson/encoder.py/_import_speedups |
5,643 | def py_encode_basestring_ascii(s):
"""Return an ASCII-only JSON representation of a Python string
"""
if isinstance(s, str) and HAS_UTF8.search(s) is not None:
s = s.decode('utf-8')
def replace(match):
s = match.group(0)
try:
return ESCAPE_DCT[s]
except __HOLE__:
n = ord(s)
if n < 0x10000:
#return '\\u{0:04x}'.format(n)
return '\\u%04x' % (n,)
else:
# surrogate pair
n -= 0x10000
s1 = 0xd800 | ((n >> 10) & 0x3ff)
s2 = 0xdc00 | (n & 0x3ff)
#return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
return '\\u%04x\\u%04x' % (s1, s2)
return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"' | KeyError | dataset/ETHPy150Open kennethreitz/tablib/tablib/packages/omnijson/packages/simplejson/encoder.py/py_encode_basestring_ascii |
5,644 | def run(self, edit):
# You can change PLUGIN_VERSION according to your needs.
region = self.view.find("^#define\s+(?:PLUGIN_)?VERSION\s+\"\d{1,2}\.\d{1,2}\.\d{1,3}\"", 0)
if region != None:
strLine = self.view.substr(region)
rIndex1 = strLine.rfind(".")
rIndex2 = strLine.rfind("\"")
sBuild = strLine[rIndex1+1:rIndex2]
try:
iBuild = int(sBuild)
iBuild += 1
self.view.replace(edit, region, strLine[:rIndex1+1] + str(iBuild) + "\"")
except __HOLE__:
pass | ValueError | dataset/ETHPy150Open austinwagner/sublime-sourcepawn/sm_version_auto_increment.py/VerIncCommand.run |
5,645 | def publishToNewObserver(observer, eventDict, textFromEventDict):
"""
Publish an old-style (L{twisted.python.log}) event to a new-style
(L{twisted.logger}) observer.
@note: It's possible that a new-style event was sent to a
L{LegacyLogObserverWrapper}, and may now be getting sent back to a
new-style observer. In this case, it's already a new-style event,
adapted to also look like an old-style event, and we don't need to
tweak it again to be a new-style event, hence the checks for
already-defined new-style keys.
@param observer: A new-style observer to handle this event.
@type observer: L{ILogObserver}
@param eventDict: An L{old-style <twisted.python.log>}, log event.
@type eventDict: L{dict}
@param textFromEventDict: callable that can format an old-style event as a
string. Passed here rather than imported to avoid circular dependency.
@type textFromEventDict: 1-arg L{callable} taking L{dict} returning L{str}
@return: L{None}
"""
if "log_time" not in eventDict:
eventDict["log_time"] = eventDict["time"]
if "log_format" not in eventDict:
text = textFromEventDict(eventDict)
if text is not None:
eventDict["log_text"] = text
eventDict["log_format"] = u"{log_text}"
if "log_level" not in eventDict:
if "logLevel" in eventDict:
try:
level = fromStdlibLogLevelMapping[eventDict["logLevel"]]
except __HOLE__:
level = None
elif "isError" in eventDict:
if eventDict["isError"]:
level = LogLevel.critical
else:
level = LogLevel.info
else:
level = LogLevel.info
if level is not None:
eventDict["log_level"] = level
if "log_namespace" not in eventDict:
eventDict["log_namespace"] = u"log_legacy"
if "log_system" not in eventDict and "system" in eventDict:
eventDict["log_system"] = eventDict["system"]
observer(eventDict) | KeyError | dataset/ETHPy150Open twisted/twisted/twisted/logger/_legacy.py/publishToNewObserver |
5,646 | def kill_members(members, sig, hosts=nodes):
for member in sorted(members):
try:
if ha_tools_debug:
print('killing %s' % member)
proc = hosts[member]['proc']
# Not sure if cygwin makes sense here...
if sys.platform in ('win32', 'cygwin'):
os.kill(proc.pid, signal.CTRL_C_EVENT)
else:
os.kill(proc.pid, sig)
except __HOLE__:
if ha_tools_debug:
print('%s already dead?' % member) | OSError | dataset/ETHPy150Open mongodb/motor/test/high_availability/ha_tools.py/kill_members |
5,647 | def wait_for(proc, port_num):
trys = 0
while proc.poll() is None and trys < 160:
trys += 1
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
try:
s.connect((hostname, port_num))
return True
except (__HOLE__, socket.error):
time.sleep(0.25)
finally:
s.close()
kill_all_members()
return False | IOError | dataset/ETHPy150Open mongodb/motor/test/high_availability/ha_tools.py/wait_for |
5,648 | def start_replica_set(members, auth=False, fresh=True):
global cur_port, key_file
if fresh:
if os.path.exists(dbpath):
try:
shutil.rmtree(dbpath)
except OSError:
pass
try:
os.makedirs(dbpath)
except __HOLE__ as e:
print(e)
print('\tWhile creating %s' % dbpath)
if auth:
key_file = os.path.join(dbpath, 'key.txt')
if not os.path.exists(key_file):
f = open(key_file, 'wb')
try:
f.write(b'my super secret system password')
finally:
f.close()
os.chmod(key_file, S_IRUSR)
for i in range(len(members)):
host = '%s:%d' % (hostname, cur_port)
members[i].update({'_id': i, 'host': host})
path = os.path.join(dbpath, 'db' + str(i))
if not os.path.exists(path):
os.makedirs(path)
member_logpath = os.path.join(logpath, 'db' + str(i) + '.log')
if not os.path.exists(os.path.dirname(member_logpath)):
os.makedirs(os.path.dirname(member_logpath))
cmd = [mongod,
'--dbpath', path,
'--port', str(cur_port),
'--replSet', set_name,
'--nojournal', '--oplogSize', '64',
'--logappend', '--logpath', member_logpath]
if auth:
cmd += ['--keyFile', key_file]
if ha_tools_debug:
print('starting %s' % ' '.join(cmd))
proc = start_subprocess(cmd)
nodes[host] = {'proc': proc, 'cmd': cmd}
assert wait_for(proc, cur_port)
cur_port += 1
config = {'_id': set_name, 'members': members}
primary = members[0]['host']
c = pymongo.MongoClient(primary, use_greenlets=use_greenlets)
try:
if ha_tools_debug:
pprint.pprint({'replSetInitiate': config})
c.admin.command('replSetInitiate', config)
except pymongo.errors.OperationFailure as e:
# Already initialized from a previous run?
if ha_tools_debug:
print(e)
expected_arbiters = 0
for member in members:
if member.get('arbiterOnly'):
expected_arbiters += 1
expected_secondaries = len(members) - expected_arbiters - 1
# Wait for 4 minutes for replica set to come up.
patience = 4
for i in range(int(patience * 60 / 1)):
time.sleep(1)
try:
if (get_primary() and
len(get_secondaries()) == expected_secondaries and
len(get_arbiters()) == expected_arbiters):
break
except pymongo.errors.ConnectionFailure:
# Keep waiting
pass
if ha_tools_debug:
print('waiting for RS %s' % i)
else:
kill_all_members()
raise Exception(
"Replica set still not initalized after %s minutes" % patience)
return primary, set_name | OSError | dataset/ETHPy150Open mongodb/motor/test/high_availability/ha_tools.py/start_replica_set |

5,649 | def get_hidden_members():
# Both 'hidden' and 'slaveDelay'
secondaries = get_secondaries()
readers = get_hosts() + get_passives()
for member in readers:
try:
secondaries.remove(member)
except __HOLE__:
# Skip primary
pass
return secondaries | KeyError | dataset/ETHPy150Open mongodb/motor/test/high_availability/ha_tools.py/get_hidden_members |
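The masked KeyError comes from set.remove(). A tiny illustration of why the try/except is needed, and of set.discard() as the exception-free alternative:

    secondaries = {"h1:27017", "h2:27017", "h3:27017"}
    readers = ["h1:27017", "h4:27017"]      # "h4" is not a secondary

    for member in readers:
        try:
            secondaries.remove(member)      # raises KeyError for "h4:27017"
        except KeyError:
            pass

    leftover = {"h2:27017", "h3:27017"}
    leftover.discard("h4:27017")            # discard() silently does nothing
    assert secondaries == leftover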
5,650 | def sync_table(model):
"""
Inspects the model and creates / updates the corresponding table and columns.
Any User Defined Types used in the table are implicitly synchronized.
This function can only add fields that are not part of the primary key.
Note that the attributes removed from the model are not deleted on the database.
They become effectively ignored by (will not show up on) the model.
**This function should be used with caution, especially in production environments.
Take care to execute schema modifications in a single context (i.e. not concurrently with other clients).**
*There are plans to guard schema-modifying functions with an environment-driven conditional.*
"""
if not _allow_schema_modification():
return
if not issubclass(model, Model):
raise CQLEngineException("Models must be derived from base Model.")
if model.__abstract__:
raise CQLEngineException("cannot create table from abstract model")
cf_name = model.column_family_name()
raw_cf_name = model._raw_column_family_name()
ks_name = model._get_keyspace()
cluster = get_cluster()
try:
keyspace = cluster.metadata.keyspaces[ks_name]
except __HOLE__:
raise CQLEngineException("Keyspace '{0}' for model {1} does not exist.".format(ks_name, model))
tables = keyspace.tables
syncd_types = set()
for col in model._columns.values():
udts = []
columns.resolve_udts(col, udts)
for udt in [u for u in udts if u not in syncd_types]:
_sync_type(ks_name, udt, syncd_types)
if raw_cf_name not in tables:
log.debug("sync_table creating new table %s", cf_name)
qs = _get_create_table(model)
try:
execute(qs)
except CQLEngineException as ex:
# 1.2 doesn't return cf names, so we have to examine the exception
# and ignore if it says the column family already exists
if "Cannot add already existing column family" not in unicode(ex):
raise
else:
log.debug("sync_table checking existing table %s", cf_name)
table_meta = tables[raw_cf_name]
_validate_pk(model, table_meta)
table_columns = table_meta.columns
model_fields = set()
for model_name, col in model._columns.items():
db_name = col.db_field_name
model_fields.add(db_name)
if db_name in table_columns:
col_meta = table_columns[db_name]
if col_meta.cql_type != col.db_type:
msg = 'Existing table {0} has column "{1}" with a type ({2}) differing from the model type ({3}).' \
' Model should be updated.'.format(cf_name, db_name, col_meta.cql_type, col.db_type)
warnings.warn(msg)
log.warning(msg)
continue
if col.primary_key or col.partition_key:
raise CQLEngineException("Cannot add primary key '{0}' (with db_field '{1}') to existing table {2}".format(model_name, db_name, cf_name))
query = "ALTER TABLE {0} add {1}".format(cf_name, col.get_column_def())
execute(query)
db_fields_not_in_model = model_fields.symmetric_difference(table_columns)
if db_fields_not_in_model:
log.info("Table {0} has fields not referenced by model: {1}".format(cf_name, db_fields_not_in_model))
_update_options(model)
table = cluster.metadata.keyspaces[ks_name].tables[raw_cf_name]
indexes = [c for n, c in model._columns.items() if c.index]
# TODO: support multiple indexes in C* 3.0+
for column in indexes:
index_name = _get_index_name_by_column(table, column.db_field_name)
if index_name:
continue
qs = ['CREATE INDEX']
qs += ['ON {0}'.format(cf_name)]
qs += ['("{0}")'.format(column.db_field_name)]
qs = ' '.join(qs)
execute(qs) | KeyError | dataset/ETHPy150Open datastax/python-driver/cassandra/cqlengine/management.py/sync_table |
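For orientation, a hedged sketch of how sync_table is usually driven from application code. The host list, keyspace and model fields are invented, a running Cassandra is assumed, and recent driver versions may additionally require the CQLENG_ALLOW_SCHEMA_MANAGEMENT environment flag before schema changes are allowed:

    import uuid

    from cassandra.cqlengine import columns, connection, management
    from cassandra.cqlengine.models import Model

    class ExampleUser(Model):
        __keyspace__ = "example_ks"          # assumed keyspace name
        id = columns.UUID(primary_key=True, default=uuid.uuid4)
        name = columns.Text(index=True)

    # Point cqlengine at a cluster, then let sync_table create/alter the table.
    connection.setup(["127.0.0.1"], "example_ks")
    management.create_keyspace_simple("example_ks", replication_factor=1)
    management.sync_table(ExampleUser)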
5,651 | def _update_options(model):
"""Updates the table options for the given model if necessary.
:param model: The model to update.
:return: `True`, if the options were modified in Cassandra,
`False` otherwise.
:rtype: bool
"""
log.debug("Checking %s for option differences", model)
model_options = model.__options__ or {}
table_meta = _get_table_metadata(model)
# go to CQL string first to normalize meta from different versions
existing_option_strings = set(table_meta._make_option_strings(table_meta.options))
existing_options = _options_map_from_strings(existing_option_strings)
model_option_strings = metadata.TableMetadataV3._make_option_strings(model_options)
model_options = _options_map_from_strings(model_option_strings)
update_options = {}
for name, value in model_options.items():
try:
existing_value = existing_options[name]
except KeyError:
raise KeyError("Invalid table option: '%s'; known options: %s" % (name, existing_options.keys()))
if isinstance(existing_value, six.string_types):
if value != existing_value:
update_options[name] = value
else:
try:
for k, v in value.items():
if existing_value[k] != v:
update_options[name] = value
break
except __HOLE__:
update_options[name] = value
if update_options:
options = ' AND '.join(metadata.TableMetadataV3._make_option_strings(update_options))
query = "ALTER TABLE {0} WITH {1}".format(model.column_family_name(), options)
execute(query)
return True
return False | KeyError | dataset/ETHPy150Open datastax/python-driver/cassandra/cqlengine/management.py/_update_options |
5,652 | def drop_table(model):
"""
Drops the table indicated by the model, if it exists.
**This function should be used with caution, especially in production environments.
Take care to execute schema modifications in a single context (i.e. not concurrently with other clients).**
*There are plans to guard schema-modifying functions with an environment-driven conditional.*
"""
if not _allow_schema_modification():
return
# don't try to delete non-existent tables
meta = get_cluster().metadata
ks_name = model._get_keyspace()
raw_cf_name = model._raw_column_family_name()
try:
meta.keyspaces[ks_name].tables[raw_cf_name]
execute('DROP TABLE {0};'.format(model.column_family_name()))
except __HOLE__:
pass | KeyError | dataset/ETHPy150Open datastax/python-driver/cassandra/cqlengine/management.py/drop_table |
5,653 | @property
def _core_plugin(self):
try:
return self._plugin
except __HOLE__:
self._plugin = manager.NeutronManager.get_plugin()
return self._plugin | AttributeError | dataset/ETHPy150Open openstack/networking-cisco/networking_cisco/plugins/cisco/service_plugins/cisco_router_plugin.py/CiscoRouterPlugin._core_plugin |
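The property above memoizes the plugin lookup by catching AttributeError on the first access. The same lazy-cache idiom in isolation, with a hypothetical expensive loader standing in for NeutronManager.get_plugin():

    def load_plugin():
        # stand-in for the expensive lookup being cached
        return object()

    class Service(object):
        @property
        def core_plugin(self):
            try:
                return self._plugin            # hit: attribute already cached
            except AttributeError:
                self._plugin = load_plugin()   # miss: compute once and cache
                return self._plugin

    svc = Service()
    assert svc.core_plugin is svc.core_plugin  # second access reuses the cache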
5,654 | def test_package_import__semantics(self):
# Generate a couple of broken modules to try importing.
# ...try loading the module when there's a SyntaxError
self.rewrite_file('for')
try: __import__(self.module_name)
except SyntaxError: pass
else: raise RuntimeError('Failed to induce SyntaxError') # self.fail()?
self.assertNotIn(self.module_name, sys.modules)
self.assertFalse(hasattr(sys.modules[self.package_name], 'foo'))
# ...make up a variable name that isn't bound in __builtins__
var = 'a'
while var in dir(__builtins__):
var += random.choice(string.ascii_letters)
# ...make a module that just contains that
self.rewrite_file(var)
try: __import__(self.module_name)
except __HOLE__: pass
else: raise RuntimeError('Failed to induce NameError.')
# ...now change the module so that the NameError doesn't
# happen
self.rewrite_file('%s = 1' % var)
module = __import__(self.module_name).foo
self.assertEqual(getattr(module, var), 1) | NameError | dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_pkgimport.py/TestImport.test_package_import__semantics |
5,655 | @classmethod
def from_path(cls, path, extension_required=False):
"""Locates the project for a path."""
path = os.path.abspath(path)
if os.path.isfile(path) and (not extension_required or
path.endswith('.lektorproject')):
return cls.from_file(path)
try:
files = [x for x in os.listdir(path)
if x.lower().endswith('.lektorproject')]
except __HOLE__:
return None
if len(files) == 1:
return cls.from_file(os.path.join(path, files[0]))
if os.path.isdir(path) and \
os.path.isfile(os.path.join(path, 'content/contents.lr')):
return cls(
name=os.path.basename(path),
project_file=None,
tree=path,
) | OSError | dataset/ETHPy150Open lektor/lektor-archive/lektor/project.py/Project.from_path |
5,656 | def number(self, num):
"""Parse a string containing a label or number into an address.
"""
try:
if num.startswith('$'):
# hexadecimal
return self._constrain(int(num[1:], 16))
elif num.startswith('+'):
# decimal
return self._constrain(int(num[1:], 10))
elif num.startswith('%'):
# binary
return self._constrain(int(num[1:], 2))
elif num in self.labels:
# label name
return self.labels[num]
else:
matches = re.match('^([^\s+-]+)\s*([+\-])\s*([$+%]?\d+)$', num)
if matches:
label, sign, offset = matches.groups()
if label not in self.labels:
raise KeyError("Label not found: %s" % label)
base = self.labels[label]
offset = self.number(offset)
if sign == '+':
address = base + offset
else:
address = base - offset
return self._constrain(address)
else:
return self._constrain(int(num, self.radix))
except __HOLE__:
raise KeyError("Label not found: %s" % num) | ValueError | dataset/ETHPy150Open mnaberez/py65/py65/utils/addressing.py/AddressParser.number |
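The prefix conventions the parser accepts reduce to plain int() conversions plus optional label arithmetic; a quick illustration (the label table below is made up):

    # "$c000" -> hexadecimal, "+128" -> decimal, "%1010" -> binary
    assert int("c000", 16) == 0xC000
    assert int("128", 10) == 128
    assert int("1010", 2) == 10

    # Label arithmetic such as "start+$10" resolves the label first,
    # then applies the signed offset.
    labels = {"start": 0xC000}
    label, sign, offset = "start", "+", 0x10   # as parsed from "start+$10"
    value = labels[label] + offset if sign == "+" else labels[label] - offset
    assert value == 0xC010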
5,657 | def load_template_source(self, template_name, template_dirs=None):
filepath = self.get_template_path(template_name, template_dirs)
try:
with io.open(filepath, encoding=self.engine.file_charset) as fp:
return fp.read(), filepath
except __HOLE__:
raise TemplateDoesNotExist(template_name) | IOError | dataset/ETHPy150Open allegro/ralph/src/ralph/lib/template/loaders.py/AppTemplateLoader.load_template_source |
5,658 | def handle_short_request():
"""
Handle short requests such as passing keystrokes to the application
or sending the initial html page. If returns True, then this
function recognised and handled a short request, and the calling
script should immediately exit.
web_display.set_preferences(..) should be called before calling this
function for the preferences to take effect
"""
global _prefs
if not is_web_request():
return False
if os.environ['REQUEST_METHOD'] == "GET":
# Initial request, send the HTML and javascript.
sys.stdout.write("Content-type: text/html\r\n\r\n" +
html_escape(_prefs.app_name).join(_html_page))
return True
if os.environ['REQUEST_METHOD'] != "POST":
# Don't know what to do with head requests etc.
return False
if not os.environ.has_key('HTTP_X_URWID_ID'):
# If no urwid id, then the application should be started.
return False
urwid_id = os.environ['HTTP_X_URWID_ID']
if len(urwid_id)>20:
#invalid. handle by ignoring
#assert 0, "urwid id too long!"
sys.stdout.write("Status: 414 URI Too Long\r\n\r\n")
return True
for c in urwid_id:
if c not in "0123456789":
# invalid. handle by ignoring
#assert 0, "invalid chars in id!"
sys.stdout.write("Status: 403 Forbidden\r\n\r\n")
return True
if os.environ.get('HTTP_X_URWID_METHOD',None) == "polling":
# this is a screen update request
s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
try:
s.connect( os.path.join(_prefs.pipe_dir,
"urwid"+urwid_id+".update") )
data = "Content-type: text/plain\r\n\r\n"+s.recv(BUF_SZ)
while data:
sys.stdout.write(data)
data = s.recv(BUF_SZ)
return True
except socket.error:
sys.stdout.write("Status: 404 Not Found\r\n\r\n")
return True
# this is a keyboard input request
try:
fd = os.open((os.path.join(_prefs.pipe_dir,
"urwid"+urwid_id+".in")), os.O_WRONLY)
except __HOLE__:
sys.stdout.write("Status: 404 Not Found\r\n\r\n")
return True
# FIXME: use the correct encoding based on the request
keydata = sys.stdin.read(MAX_READ)
os.write(fd,keydata.encode('ascii'))
os.close(fd)
sys.stdout.write("Content-type: text/plain\r\n\r\n")
return True | OSError | dataset/ETHPy150Open AnyMesh/anyMesh-Python/example/urwid/web_display.py/handle_short_request |
5,659 | def daemonize( errfile ):
"""
Detach process and become a daemon.
"""
pid = os.fork()
if pid:
os._exit(0)
os.setsid()
signal.signal(signal.SIGHUP, signal.SIG_IGN)
os.umask(0)
pid = os.fork()
if pid:
os._exit(0)
os.chdir("/")
for fd in range(0,20):
try:
os.close(fd)
except __HOLE__:
pass
sys.stdin = open("/dev/null","r")
sys.stdout = open("/dev/null","w")
sys.stderr = ErrorLog( errfile ) | OSError | dataset/ETHPy150Open AnyMesh/anyMesh-Python/example/urwid/web_display.py/daemonize |
5,660 | def lookup(self, line, column):
try:
# Let's hope for a direct match first
return self.index[(line, column)]
except __HOLE__:
pass
# Figure out which line to search through
line_index = self.line_index[line]
# Find the closest column token
line_index
i = bisect_right(line_index, column)
if not i:
# You're gonna have a bad time
i = len(line_index)-2
# raise IndexError
# We actually want the one less than current
column = line_index[i - 1]
# Return from the main index, based on the (line, column) tuple
return self.index[(line, column)] | KeyError | dataset/ETHPy150Open lavrton/sublime-better-typescript/sourcemap/objects.py/SourceMapIndex.lookup |
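The fallback path relies on bisect_right returning the insertion point just past the nearest smaller entry. A self-contained illustration of that nearest-not-exceeding lookup (edge handling at the front differs slightly from the method above):

    from bisect import bisect_right

    col_starts = [0, 8, 15, 42]          # sorted column starts for one line

    def nearest_column(col):
        # right-most entry that is <= col, or the first entry if col precedes it
        i = bisect_right(col_starts, col)
        return col_starts[i - 1] if i else col_starts[0]

    assert nearest_column(8) == 8        # exact hit
    assert nearest_column(20) == 15      # falls back to the previous token
    assert nearest_column(100) == 42     # past the end -> last token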
5,661 | def validate(self, request):
""" Checks a request for proper authentication details.
Returns a tuple of ``(access_token, error_response_arguments)``, which are
designed to be passed to the :py:meth:`make_error_response` method.
For example, to restrict access to a given endpoint:
.. code-block:: python
def foo_bar_resource(request, *args, **kwargs):
authenticator = AccessTokenAuthenticator(
required_scope_names=('foo', 'bar'))
access_token, error_args = authenticator.validate(request)
if not access_token:
return authenticator.make_error_response(*error_args)
# ... can now return use access_token
:rtype: When the request validates successfully, returns a
a tuple of (:py:class:`djoauth2.models.AccessToken`, ``None``). If the
request fails to validate, returns a tuple of (``None``,
``error_details_tuple``). The ``error_details_tuple`` is a tuple of
arguments to use to call the :py:func:`make_error_response` method.
"""
# Ensure that all of the scopes that are being checked against exist.
# Otherwise, raise a ValueError.
for name in self.required_scope_names:
if not Scope.objects.filter(name=name).exists():
raise ValueError('Scope with name "{}" does not exist.'.format(name))
# From http://tools.ietf.org/html/rfc6750#section-3.1 :
#
# If the request lacks any authentication information (e.g., the
# client was unaware that authentication is necessary or attempted
# using an unsupported authentication method), the resource server
# SHOULD NOT include an error code or other error information.
#
# In the case that the request fails to validate, this flag will
# be returned and should be passed to the 'make_error_response' method
# in order to comply with the specification and restrict error information.
expose_errors = False
try:
# From http://tools.ietf.org/html/rfc6750#section-1 :
#
# This specification defines the use of bearer tokens over HTTP/1.1
# [RFC2616] using Transport Layer Security (TLS) [RFC5246] to access
# protected resources. TLS is mandatory to implement and use with
# this specification; other specifications may extend this
# specification for use with other protocols. While designed for use
# with access tokens
#
# and later, from http://tools.ietf.org/html/rfc6750#section-5.3 :
#
# Always use TLS (https): Clients MUST always use TLS [RFC5246]
# (https) or equivalent transport security when making requests with
# bearer tokens. Failing to do so exposes the token to numerous
# attacks that could give attackers unintended access.
#
if settings.DJOAUTH2_SSL_ONLY and not request.is_secure():
raise InvalidRequest('insecure request: must use TLS')
http_authorization = request.META.get('HTTP_AUTHORIZATION', '')
if not http_authorization:
raise InvalidRequest('missing HTTP_AUTHORIZATION header')
try:
auth_method, auth_value = http_authorization.strip().split(' ', 1)
except __HOLE__:
raise InvalidRequest('malformed HTTP_AUTHORIZATION header')
if auth_method != 'Bearer':
raise InvalidRequest('authentication method is not "Bearer"')
# Used in the case that the request does not validate. See comment above.
# At this point in the validation, it is certain that the Client
# attempted to authenticate via the 'Bearer' method.
expose_errors = True
try:
access_token = AccessToken.objects.get(value=auth_value)
except AccessToken.DoesNotExist:
raise InvalidToken('access token does not exist')
if access_token.is_expired():
raise InvalidToken('access token is expired')
if not access_token.has_scope(*self.required_scope_names):
raise InsufficientScope('access token has insufficient scope')
return (access_token, None)
except AuthenticationError as validation_error:
return (None, (validation_error, expose_errors)) | ValueError | dataset/ETHPy150Open Locu/djoauth2/djoauth2/access_token.py/AccessTokenAuthenticator.validate |
5,662 | def get(self, asset_name):
"""Serve out the contents of a file to self.response.
Args:
asset_name: The name of the static asset to serve. Must be in ASSETS_PATH.
"""
with self._asset_name_to_path_lock:
if self._asset_name_to_path is None:
self._initialize_asset_map()
if asset_name in self._asset_name_to_path:
asset_path = self._asset_name_to_path[asset_name]
try:
with open(asset_path, 'rb') as f:
data = f.read()
except (OSError, __HOLE__):
logging.exception('Error reading file %s', asset_path)
self.response.set_status(500)
else:
content_type, _ = mimetypes.guess_type(asset_path)
assert content_type, (
'cannot determine content-type for %r' % asset_path
)
self.response.headers['Content-Type'] = content_type
self.response.out.write(data)
else:
self.response.set_status(404) | IOError | dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/devappserver2/admin/static_file_handler.py/StaticFileHandler.get |
5,663 | def __call__(self, environ, start_response):
"""Respond to a request when called in the usual WSGI way."""
path_info = environ.get('PATH_INFO', '')
full_path = self._full_path(path_info)
if not self._is_under_root(full_path):
return self.not_found(environ, start_response)
if path.isdir(full_path):
if full_path[-1] != '/' or full_path == self.root:
location = util.request_uri(environ, include_query=False) + '/'
if environ.get('QUERY_STRING'):
location += '?' + environ.get('QUERY_STRING')
headers = [('Location', location)]
return self.moved_permanently(environ, start_response, headers)
else:
full_path = self._full_path(path_info + self.index_file)
try:
sz = int(environ['CONTENT_LENGTH'])
except:
sz = 0
if environ['REQUEST_METHOD'] == 'PUT' and sz > 0:
for putglob in self.puttable:
if fnmatch(path_info, putglob):
data = environ['wsgi.input'].read(sz)
try:
with open(full_path, "wb") as f: f.write(data)
return self.success_no_content(environ, start_response)
except:
print sys.exc_info()[1]
return self.server_error(environ, start_response)
if environ['REQUEST_METHOD'] not in ('GET', 'HEAD'):
headers = [('Allow', 'GET, HEAD')]
return self.method_not_allowed(environ, start_response, headers)
content_type = self._guess_type(full_path)
try:
etag, last_modified = self._conditions(full_path, environ)
headers = [('Date', rfc822.formatdate(time.time())),
('Last-Modified', last_modified),
('ETag', etag)]
if_modified = environ.get('HTTP_IF_MODIFIED_SINCE')
if if_modified and (rfc822.parsedate(if_modified)
>= rfc822.parsedate(last_modified)):
return self.not_modified(environ, start_response, headers)
if_none = environ.get('HTTP_IF_NONE_MATCH')
if if_none and (if_none == '*' or etag in if_none):
return self.not_modified(environ, start_response, headers)
file_like = self._file_like(full_path)
headers.append(('Content-Type', content_type))
start_response("200 OK", headers)
if environ['REQUEST_METHOD'] == 'GET':
return self._body(full_path, environ, file_like)
else:
return ['']
except (IOError, __HOLE__), e:
print e
return self.not_found(environ, start_response) | OSError | dataset/ETHPy150Open cloudera/hue/tools/ace-editor/static.py/Cling.__call__ |
5,664 | def iter_and_close(file_like, block_size):
"""Yield file contents by block then close the file."""
while 1:
try:
block = file_like.read(block_size)
if block: yield block
else: raise StopIteration
except __HOLE__, si:
file_like.close()
return | StopIteration | dataset/ETHPy150Open cloudera/hue/tools/ace-editor/static.py/iter_and_close |
5,665 | def command():
usage = "%prog [--help] [-d DIR] [-l [HOST][:PORT]] [-p GLOB[,GLOB...]]"
parser = OptionParser(usage=usage, version="static 0.3.6")
parser.add_option("-d", "--dir", dest="rootdir", default=".",
help="Root directory to serve. Defaults to '.' .", metavar="DIR")
parser.add_option("-l", "--listen", dest="listen", default="127.0.0.1:8888",
help="Listen on this interface (given by its hostname or IP) and port."+
" HOST defaults to 127.0.0.1. PORT defaults to 8888. "+
"Leave HOST empty to listen on all interfaces (INSECURE!).",
metavar="[HOST][:PORT]")
parser.add_option("-p", "--puttable", dest="puttable", default="",
help="Comma or space-separated list of request paths for which to"+
" permit PUT requests. Each path is a glob pattern that may "+
"contain wildcard characters '*' and/or '?'. "+
"'*' matches any sequence of characters, including the empty"+
" string. '?' matches exactly 1 arbitrary character. "+
"NOTE: Both '*' and '?' match slashes and dots. "+
"I.e. --puttable=* makes every file under DIR writable!",
metavar="GLOB[,GLOB...]")
parser.add_option("--validate", dest="validate", action="store_true",
default=False,
help="Enable HTTP validation. You don't need this unless "+
"you're developing static.py itself.")
options, args = parser.parse_args()
if len(args) > 0:
parser.print_help(sys.stderr)
sys.exit(1)
parts = options.listen.split(":")
if len(parts) == 1:
try: # if the the listen argument consists only of a port number
port = int(parts[0])
host = None
except: # could not parse as port number => must be a host IP or name
host = parts[0]
port = None
elif len(parts) == 2:
host, port = parts
else:
sys.exit("Invalid host:port specification.")
if not host:
host = '0.0.0.0'
if not port:
port = 8888
try:
port = int(port)
if port <= 0 or port > 65535: raise ValueError
except:
sys.exit("Invalid host:port specification.")
puttable = set(path.abspath(p) for p in
options.puttable.replace(","," ").split())
if puttable and host not in ('127.0.0.1', 'localhost'):
print("Permitting PUT access for non-localhost connections may be unwise.")
options.rootdir = path.abspath(options.rootdir)
for p in puttable:
if not p.startswith(options.rootdir):
sys.exit("puttable path '%s' not under root '%s'" % (p, options.rootdir))
# cut off root prefix from puttable paths
puttable = set(p[len(options.rootdir):] for p in puttable)
app = Cling(options.rootdir, puttable=puttable)
if options.validate:
app = validator(app)
try:
print "Serving %s to http://%s:%d" % (options.rootdir, host, port)
if puttable:
print("The following paths (relative to server root) may be "+
"OVERWRITTEN via HTTP PUT.")
for p in puttable:
print p
make_server(host, port, app).serve_forever()
except __HOLE__, ki:
print "Ciao, baby!"
except:
sys.exit("Problem initializing server: %s" % sys.exc_info()[1]) | KeyboardInterrupt | dataset/ETHPy150Open cloudera/hue/tools/ace-editor/static.py/command |
5,666 | def safe_fork(self):
try:
return os.fork()
except __HOLE__, e:
if e.errno == errno.EWOULDBLOCK:
time.sleep(5)
self.safe_fork() | OSError | dataset/ETHPy150Open cyberdelia/peafowl/peafowl/runner.py/ProcessHelper.safe_fork |
5,667 | def is_running(self):
if not self.pid_file:
return False
try:
pid_file = open(self.pid_file, 'r')
pid = int(pid_file.read())
pid_file.close()
if pid == 0:
return False
except __HOLE__, e:
return False
try:
os.kill(pid, 0)
return pid
except OSError, e:
if e.errno == errno.ESRCH:
return None
elif e.errno == errno.EPERM:
return pid | IOError | dataset/ETHPy150Open cyberdelia/peafowl/peafowl/runner.py/ProcessHelper.is_running |
5,668 | def __getattr__(self, attr):
try:
return self._items[attr]
except __HOLE__:
raise AttributeError, attr | KeyError | dataset/ETHPy150Open tjguk/winsys/winsys/experimental/change_journal.py/Data.__getattr__ |
5,669 | def define_field(conn, table, field, pks):
"Determine field type, default value, references, etc."
f = {}
ref = references(conn, table, field['column_name'])
if ref:
f.update(ref)
elif field['column_default'] and \
field['column_default'].startswith("nextval") and \
field['column_name'] in pks:
# postgresql sequence (SERIAL) and primary key!
f['type'] = "'id'"
elif field['data_type'].startswith('character'):
f['type'] = "'string'"
if field['character_maximum_length']:
f['length'] = field['character_maximum_length']
elif field['data_type'] in ('text', ):
f['type'] = "'text'"
elif field['data_type'] in ('boolean', 'bit'):
f['type'] = "'boolean'"
elif field['data_type'] in ('integer', 'smallint', 'bigint'):
f['type'] = "'integer'"
elif field['data_type'] in ('double precision', 'real'):
f['type'] = "'double'"
elif field['data_type'] in ('timestamp', 'timestamp without time zone'):
f['type'] = "'datetime'"
elif field['data_type'] in ('date', ):
f['type'] = "'date'"
elif field['data_type'] in ('time', 'time without time zone'):
f['type'] = "'time'"
elif field['data_type'] in ('numeric', 'currency'):
f['type'] = "'decimal'"
f['precision'] = field['numeric_precision']
f['scale'] = field['numeric_scale'] or 0
elif field['data_type'] in ('bytea', ):
f['type'] = "'blob'"
elif field['data_type'] in ('point', 'lseg', 'polygon', 'unknown', 'USER-DEFINED'):
f['type'] = "" # unsupported?
else:
raise RuntimeError("Data Type not supported: %s " % str(field))
try:
if field['column_default']:
if field['column_default'] == "now()":
d = "request.now"
elif field['column_default'] == "true":
d = "True"
elif field['column_default'] == "false":
d = "False"
else:
d = repr(eval(field['column_default']))
f['default'] = str(d)
except (__HOLE__, SyntaxError):
pass
except Exception, e:
raise RuntimeError(
"Default unsupported '%s'" % field['column_default'])
if not field['is_nullable']:
f['notnull'] = "True"
comment = get_comment(conn, table, field)
if comment is not None:
f['comment'] = repr(comment)
return f | ValueError | dataset/ETHPy150Open uwdata/termite-data-server/web2py/scripts/extract_pgsql_models.py/define_field |
5,670 | def prompt_for_ip():
"""
Prompt the user to enter an IP address or "quit" to terminate the sample.
Any IP address the user enters is validated using the ipaddress module. If
the user entered an IP address the IP address string is returned, otherwise
None is returned.
"""
ip = None
while ip is None:
ip = input(
"Enter an IP address, or quit to terminate the sample: ").strip()
if ip == "quit":
return None
# Use Python's ipaddress module to validate the IP address.
try:
parsed_ipaddress = ipaddress.ip_address(ip)
ip = str(parsed_ipaddress)
except __HOLE__ as e:
print(str(e))
ip = None
return ip | ValueError | dataset/ETHPy150Open ibm-security-intelligence/api-samples/siem/09_GetOffensesForIp.py/prompt_for_ip |
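The validation step is just ipaddress.ip_address() plus exception handling; isolated, the idiom looks like this and works for both IPv4 and IPv6 strings:

    import ipaddress

    def normalize_ip(text):
        # canonical string form, or None if the input is not an IP address
        try:
            return str(ipaddress.ip_address(text.strip()))
        except ValueError:
            return None

    assert normalize_ip("192.168.0.1") == "192.168.0.1"
    assert normalize_ip("::1") == "::1"
    assert normalize_ip("not-an-ip") is None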
5,671 | @require_POST
def add_tag(request):
response = {'status': -1, 'message': ''}
try:
validstatus = valid_project(name=request.POST['name'])
if validstatus:
tag = DocumentTag.objects.create_tag(request.user, request.POST['name'])
response['name'] = request.POST['name']
response['id'] = tag.id
response['docs'] = []
response['owner'] = request.user.username
response['status'] = 0
else:
response['status'] = -1
except __HOLE__, e:
response['message'] = _('Form is missing %s field') % e.message
except Exception, e:
response['message'] = force_unicode(e)
return JsonResponse(response) | KeyError | dataset/ETHPy150Open cloudera/hue/desktop/core/src/desktop/api.py/add_tag |
5,672 | @require_POST
def tag(request):
response = {'status': -1, 'message': ''}
request_json = json.loads(request.POST['data'])
try:
tag = DocumentTag.objects.tag(request.user, request_json['doc_id'], request_json.get('tag'), request_json.get('tag_id'))
response['tag_id'] = tag.id
response['status'] = 0
except __HOLE__, e:
response['message'] = _('Form is missing %s field') % e.message
except Exception, e:
response['message'] = force_unicode(e)
return JsonResponse(response) | KeyError | dataset/ETHPy150Open cloudera/hue/desktop/core/src/desktop/api.py/tag |
5,673 | @require_POST
def update_tags(request):
response = {'status': -1, 'message': ''}
request_json = json.loads(request.POST['data'])
try:
doc = DocumentTag.objects.update_tags(request.user, request_json['doc_id'], request_json['tag_ids'])
response['doc'] = massage_doc_for_json(doc, request.user)
response['status'] = 0
except __HOLE__, e:
response['message'] = _('Form is missing %s field') % e.message
except Exception, e:
response['message'] = force_unicode(e)
return JsonResponse(response) | KeyError | dataset/ETHPy150Open cloudera/hue/desktop/core/src/desktop/api.py/update_tags |
5,674 | @require_POST
def remove_tag(request):
response = {'status': -1, 'message': _('Error')}
try:
DocumentTag.objects.delete_tag(request.POST['tag_id'], request.user)
response['message'] = _('Project removed!')
response['status'] = 0
except __HOLE__, e:
response['message'] = _('Form is missing %s field') % e.message
except Exception, e:
response['message'] = force_unicode(e)
return JsonResponse(response) | KeyError | dataset/ETHPy150Open cloudera/hue/desktop/core/src/desktop/api.py/remove_tag |
5,675 | @require_POST
def update_permissions(request):
response = {'status': -1, 'message': _('Error')}
data = json.loads(request.POST['data'])
doc_id = request.POST['doc_id']
try:
doc = Document.objects.get_doc_for_writing(doc_id, request.user)
doc.sync_permissions(data)
response['message'] = _('Permissions updated!')
response['status'] = 0
response['doc'] = massage_doc_for_json(doc, request.user)
except __HOLE__, e:
response['message'] = _('Form is missing %s field') % e.message
except Exception, e:
LOG.exception(e.message)
response['message'] = force_unicode(e)
return JsonResponse(response) | KeyError | dataset/ETHPy150Open cloudera/hue/desktop/core/src/desktop/api.py/update_permissions |
5,676 | @react.nosync
@react.input
def plot(plot):
""" The Bokeh plot object to display. In JS, this signal
provides the corresponding backbone model.
"""
try:
from bokeh.models import Plot
except __HOLE__:
from bokeh.models import PlotObject as Plot
if not isinstance(plot, Plot):
raise ValueError('Plot must be a Bokeh plot object.')
plot.responsive = False # Flexx handles responsiveness
return plot | ImportError | dataset/ETHPy150Open zoofIO/flexx/flexx/ui/widgets/_bokeh.py/BokehWidget.plot |
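The try/except ImportError above is the usual way to follow a class that was renamed between library versions. The same idiom with a standard-library example so it runs anywhere:

    # Prefer the modern name, fall back to the older location.
    try:
        from collections.abc import Mapping    # Python 3.3+
    except ImportError:
        from collections import Mapping        # older interpreters

    assert isinstance({}, Mapping)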
5,677 | def _build_line_mappings(self, start, finish):
forward = {}
backward = {}
# Get information about blank lines: The git diff porcelain format
# (which we use for everything else) doesn't distinguish between
# additions and removals, so this is a very dirty hack to get around
# the problem.
p = os.popen('git diff %s %s -- %s | grep -E "^[+-]$"' % (
start,
finish,
self.path,
))
blank_lines = [l.strip() for l in p.readlines()]
p = os.popen('git diff --word-diff=porcelain %s %s -- %s' % (
start,
finish,
self.path,
))
# The diff output is in sections: A header line (indicating the
# range of lines this section covers) and then a number of
# content lines.
sections = []
# Skip initial headers: They don't interest us.
line = ''
while not line.startswith('@@'):
line = p.readline()
while line:
header_line = line
content_lines = []
line = p.readline()
while line and not line.startswith('@@'):
content_lines.append(line)
line = p.readline()
sections.append((header_line, content_lines, ))
start_ln = finish_ln = 0
for header_line, content_lines in sections:
# The headers line has the format '@@ +a,b -c,d @@[ e]' where
# a is the first line number shown from start and b is the
# number of lines shown from start, and c is the first line
# number show from finish and d is the number of lines show
# from from finish, and e is Git's guess at the name of the
# context (and is not always present)
headers = header_line.strip('@ \n').split(' ')
headers = map(lambda x: x.strip('+-').split(','), headers)
start_range = map(int, headers[0])
finish_range = map(int, headers[1])
while start_ln < start_range[0] - 1 and \
finish_ln < finish_range[0] - 1:
forward[start_ln] = finish_ln
backward[finish_ln] = start_ln
start_ln += 1
finish_ln += 1
# Now we're into the diff itself. Individual lines of input
# are separated by a line containing only a '~', this helps
# to distinguish between an addition, a removal, and a change.
line_iter = iter(content_lines)
try:
while True:
group_size = -1
line_delta = 0
line = ' '
while line != '~':
if line.startswith('+'):
line_delta += 1
elif line.startswith('-'):
line_delta -= 1
group_size += 1
line = line_iter.next().rstrip()
if group_size == 0:
# Two '~' lines next to each other means a blank
# line has been either added or removed. Git
# doesn't tell us which. This is all crazy.
if blank_lines.pop(0) == '+':
line_delta += 1
else:
line_delta -= 1
if line_delta == 1:
backward[finish_ln] = None
finish_ln += 1
elif line_delta == -1:
forward[start_ln] = None
start_ln += 1
else:
forward[start_ln] = finish_ln
backward[finish_ln] = start_ln
start_ln += 1
finish_ln += 1
except __HOLE__:
pass
# Make sure the mappings stretch the the beginning and end of
# the files.
p = os.popen('git show %s:%s' % (start, self.path))
start_len = len(p.readlines())
p = os.popen('git show %s:%s' % (finish, self.path))
finish_len = len(p.readlines())
while start_ln <= start_len and finish_ln <= finish_len:
forward[start_ln] = finish_ln
backward[finish_ln] = start_ln
start_ln += 1
finish_ln += 1
return forward, backward | StopIteration | dataset/ETHPy150Open georgebrock/git-browse/gitbrowse/git.py/GitFileHistory._build_line_mappings |
5,678 | def _sign(secret, to_sign):
def portable_bytes(s):
try:
return bytes(s, 'utf-8')
except __HOLE__:
return bytes(s)
return _encode(hmac.new(portable_bytes(secret), portable_bytes(to_sign), hashlib.sha256).digest()) | TypeError | dataset/ETHPy150Open firebase/firebase-token-generator-python/firebase_token_generator.py/_sign |
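A compact sketch of the same signing step with the encoding made explicit, assuming _encode produces unpadded URL-safe base64 (as JWT-style tokens do); the secret and payload are invented:

    import base64
    import hashlib
    import hmac

    def sign(secret, to_sign):
        # HMAC-SHA256 over UTF-8 bytes, then URL-safe base64 with padding stripped
        digest = hmac.new(secret.encode("utf-8"),
                          to_sign.encode("utf-8"),
                          hashlib.sha256).digest()
        return base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")

    signature = sign("my-secret", "header.claims")
    assert len(signature) == 43   # 32-byte digest -> 43 unpadded base64 chars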
5,679 | def closed(self, code, reason):
self.closed_code, self.closed_reason = code, reason
if not (self.closed_code == 1000 or getattr(self.stream.closing, 'code', None) == 1000):
try:
error = json.loads(self.closed_reason)
raise DXJobLogStreamingException("Error while streaming job logs: {type}: {message}\n".format(**error))
except (__HOLE__, ValueError):
error = "Error while streaming job logs: {code} {reason}\n".format(code=self.closed_code,
reason=self.closed_reason)
raise DXJobLogStreamingException(error)
elif self.print_job_info:
if self.job_id not in self.seen_jobs:
self.seen_jobs[self.job_id] = {}
for job_id in self.seen_jobs.keys():
self.seen_jobs[job_id] = dxpy.describe(job_id)
print(get_find_executions_string(self.seen_jobs[job_id], has_children=False, show_outputs=True))
else:
self.seen_jobs[self.job_id] = dxpy.describe(self.job_id)
if self.seen_jobs[self.job_id].get('state') in ['failed', 'terminated']:
err_exit(code=3) | KeyError | dataset/ETHPy150Open dnanexus/dx-toolkit/src/python/dxpy/utils/job_log_client.py/DXJobLogStreamClient.closed |
5,680 | def _unpickle_method(func_name, obj, cls):
"""
Author: Steven Bethard
http://bytes.com/topic/python/answers/552476-why-cant-you-pickle-instancemethods
"""
for cls in cls.mro():
try:
func = cls.__dict__[func_name]
except __HOLE__:
pass
else:
break
return func.__get__(obj, cls) | KeyError | dataset/ETHPy150Open neuropoly/spinalcordtoolbox/dev/straightening/sct_straighten_spinalcord_LargeFOVOutput.py/_unpickle_method |
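This helper is one half of the classic recipe for pickling bound methods; the matching _pickle_method and the copy_reg registration complete it. A hedged Python 2 sketch (the surrounding file is Python 2), relying on the _unpickle_method defined in the entry above:

    import copy_reg
    import types

    def _pickle_method(method):
        # Decompose a bound method into (name, instance, class) so that
        # _unpickle_method can rebuild it after unpickling.
        func_name = method.im_func.__name__
        obj = method.im_self
        cls = method.im_class
        return _unpickle_method, (func_name, obj, cls)

    # Tell pickle how to handle instancemethod objects.
    copy_reg.pickle(types.MethodType, _pickle_method, _unpickle_method)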
5,681 | def worker_landmarks_curved(self, arguments_worker):
try:
iz = arguments_worker[0]
iz_curved, x_centerline_deriv, y_centerline_deriv, z_centerline_deriv, x_centerline_fit, y_centerline_fit, \
z_centerline = arguments_worker[1]
temp_results = []
if iz in iz_curved:
# calculate d using formula: ax + by + cz + d = 0
a = x_centerline_deriv[iz]
b = y_centerline_deriv[iz]
c = z_centerline_deriv[iz]
x = x_centerline_fit[iz]
y = y_centerline_fit[iz]
z = z_centerline[iz]
d = -(a * x + b * y + c * z)
# set coordinates for landmark at the center of the cross
coord = Coordinate([0, 0, 0, 0])
coord.x, coord.y, coord.z = x_centerline_fit[iz], y_centerline_fit[iz], z_centerline[iz]
temp_results.append(coord)
# set y coordinate to y_centerline_fit[iz] for elements 1 and 2 of the cross
cross_coordinates = [Coordinate(), Coordinate(), Coordinate(), Coordinate(),
Coordinate(), Coordinate(), Coordinate(), Coordinate(),
Coordinate(), Coordinate(), Coordinate(), Coordinate(),
Coordinate(), Coordinate(), Coordinate(), Coordinate()]
cross_coordinates[0].y = y_centerline_fit[iz]
cross_coordinates[1].y = y_centerline_fit[iz]
# set x and z coordinates for landmarks +x and -x, forcing the landmark to be in the orthogonal plane
# and the distance landmark/curve to be gapxy
x_n = Symbol('x_n')
cross_coordinates[1].x, cross_coordinates[0].x = solve((x_n - x) ** 2 +
((-1 / c) * (a * x_n + b * y + d) - z) ** 2 -
self.gapxy ** 2, x_n) # x for -x and +x
cross_coordinates[0].z = (-1 / c) * (a * cross_coordinates[0].x + b * y + d) # z for +x
cross_coordinates[1].z = (-1 / c) * (a * cross_coordinates[1].x + b * y + d) # z for -x
# set x coordinate to x_centerline_fit[iz] for elements 3 and 4 of the cross
cross_coordinates[2].x = x_centerline_fit[iz]
cross_coordinates[3].x = x_centerline_fit[iz]
# set coordinates for landmarks +y and -y. Here, x coordinate is 0 (already initialized).
y_n = Symbol('y_n')
cross_coordinates[3].y, cross_coordinates[2].y = solve((y_n - y) ** 2 +
((-1 / c) * (a * x + b * y_n + d) - z) ** 2 -
self.gapxy ** 2, y_n) # y for -y and +y
cross_coordinates[2].z = (-1 / c) * (a * x + b * cross_coordinates[2].y + d) # z for +y
cross_coordinates[3].z = (-1 / c) * (a * x + b * cross_coordinates[3].y + d) # z for -y
# set the first corner
cross_coordinates[4].y = y_centerline_fit[iz] + self.gapxy
cross_coordinates[5].y = y_centerline_fit[iz] + self.gapxy
# set x and z coordinates for landmarks +x and -x, forcing the landmark to be in the orthogonal plane
# and the distance landmark/curve to be gapxy
cross_coordinates[5].x, cross_coordinates[4].x = solve((x_n - x) ** 2 +
((-1 / c) * (a * x_n + b * (y + self.gapxy) + d)
- z) ** 2 - self.gapxy ** 2, x_n)
cross_coordinates[4].z = (-1 / c) * (a * cross_coordinates[4].x + b * (y + self.gapxy) + d) # z for +x
cross_coordinates[5].z = (-1 / c) * (a * cross_coordinates[5].x + b * (y + self.gapxy) + d) # z for -x
# set the other corner
cross_coordinates[6].y = y_centerline_fit[iz] - self.gapxy
cross_coordinates[7].y = y_centerline_fit[iz] - self.gapxy
# set x and z coordinates for landmarks +x and -x, forcing the landmark to be in the orthogonal plane
# and the distance landmark/curve to be gapxy
cross_coordinates[7].x, cross_coordinates[6].x = solve((x_n - x) ** 2 +
((-1 / c) * (a * x_n + b * (y - self.gapxy) + d)
- z) ** 2 - self.gapxy ** 2, x_n)
cross_coordinates[6].z = (-1 / c) * (a * cross_coordinates[6].x + b * (y - self.gapxy) + d) # z for +x
cross_coordinates[7].z = (-1 / c) * (a * cross_coordinates[7].x + b * (y - self.gapxy) + d) # z for -x
gapxy = self.gapxy/2
cross_coordinates[8].y = y_centerline_fit[iz]
cross_coordinates[9].y = y_centerline_fit[iz]
# set x and z coordinates for landmarks +x and -x, forcing the landmark to be in the orthogonal plane
# and the distance landmark/curve to be gapxy
x_n = Symbol('x_n')
cross_coordinates[9].x, cross_coordinates[8].x = solve((x_n - x) ** 2 +
((-1 / c) * (a * x_n + b * y + d) - z) ** 2 -
gapxy ** 2, x_n) # x for -x and +x
cross_coordinates[8].z = (-1 / c) * (a * cross_coordinates[8].x + b * y + d) # z for +x
cross_coordinates[9].z = (-1 / c) * (a * cross_coordinates[9].x + b * y + d) # z for -x
# set x coordinate to x_centerline_fit[iz] for elements 3 and 4 of the cross
cross_coordinates[10].x = x_centerline_fit[iz]
cross_coordinates[11].x = x_centerline_fit[iz]
# set coordinates for landmarks +y and -y. Here, x coordinate is 0 (already initialized).
y_n = Symbol('y_n')
cross_coordinates[11].y, cross_coordinates[10].y = solve((y_n - y) ** 2 +
((-1 / c) * (a * x + b * y_n + d) - z) ** 2 -
gapxy ** 2, y_n) # y for -y and +y
cross_coordinates[10].z = (-1 / c) * (a * x + b * cross_coordinates[10].y + d) # z for +y
cross_coordinates[11].z = (-1 / c) * (a * x + b * cross_coordinates[11].y + d) # z for -y
# set the first corner
cross_coordinates[12].y = y_centerline_fit[iz] + gapxy
cross_coordinates[13].y = y_centerline_fit[iz] + gapxy
# set x and z coordinates for landmarks +x and -x, forcing the landmark to be in the orthogonal plane
# and the distance landmark/curve to be gapxy
cross_coordinates[13].x, cross_coordinates[12].x = solve((x_n - x) ** 2 +
((-1 / c) * (a * x_n + b * (y + gapxy) + d)
- z) ** 2 - gapxy ** 2, x_n)
cross_coordinates[12].z = (-1 / c) * (a * cross_coordinates[12].x + b * (y + gapxy) + d) # z for +x
cross_coordinates[13].z = (-1 / c) * (a * cross_coordinates[13].x + b * (y + gapxy) + d) # z for -x
# set the other corner
cross_coordinates[14].y = y_centerline_fit[iz] - gapxy
cross_coordinates[15].y = y_centerline_fit[iz] - gapxy
# set x and z coordinates for landmarks +x and -x, forcing the landmark to be in the orthogonal plane
# and the distance landmark/curve to be gapxy
cross_coordinates[15].x, cross_coordinates[14].x = solve((x_n - x) ** 2 +
((-1 / c) * (a * x_n + b * (y - gapxy) + d)
- z) ** 2 - gapxy ** 2, x_n)
cross_coordinates[14].z = (-1 / c) * (a * cross_coordinates[14].x + b * (y - gapxy) + d) # z for +x
cross_coordinates[15].z = (-1 / c) * (a * cross_coordinates[15].x + b * (y - gapxy) + d) # z for -x
for coord in cross_coordinates:
temp_results.append(coord)
else:
if self.all_labels >= 1:
temp_results.append(Coordinate([x_centerline_fit[iz], y_centerline_fit[iz],
z_centerline[iz], 0], mode='continuous'))
return iz, temp_results
except __HOLE__:
return
except Exception as e:
raise e | KeyboardInterrupt | dataset/ETHPy150Open neuropoly/spinalcordtoolbox/dev/straightening/sct_straighten_spinalcord_LargeFOVOutput.py/SpinalCordStraightener.worker_landmarks_curved |
5,682 | def straighten(self):
# Initialization
fname_anat = self.input_filename
fname_centerline = self.centerline_filename
fname_output = self.output_filename
gapxy = self.gapxy
gapz = self.gapz
padding = self.padding
leftright_width = self.leftright_width
remove_temp_files = self.remove_temp_files
verbose = self.verbose
interpolation_warp = self.interpolation_warp
algo_fitting = self.algo_fitting
window_length = self.window_length
type_window = self.type_window
crop = self.crop
# start timer
start_time = time.time()
# get path of the toolbox
status, path_sct = commands.getstatusoutput('echo $SCT_DIR')
sct.printv(path_sct, verbose)
if self.debug == 1:
print "\n*** WARNING: DEBUG MODE ON ***\n"
fname_anat = path_sct + "/testing/sct_testing_data/data/t2/t2.nii.gz"
fname_centerline = path_sct + "/testing/sct_testing_data/data/t2/t2_seg.nii.gz"
remove_temp_files = 0
type_window = "hanning"
verbose = 2
# check existence of input files
sct.check_file_exist(fname_anat, verbose)
sct.check_file_exist(fname_centerline, verbose)
# Display arguments
sct.printv("\nCheck input arguments...", verbose)
sct.printv(" Input volume ...................... " + fname_anat, verbose)
sct.printv(" Centerline ........................ " + fname_centerline, verbose)
sct.printv(" Final interpolation ............... " + interpolation_warp, verbose)
sct.printv(" Verbose ........................... " + str(verbose), verbose)
sct.printv("", verbose)
# Extract path/file/extension
path_anat, file_anat, ext_anat = sct.extract_fname(fname_anat)
path_centerline, file_centerline, ext_centerline = sct.extract_fname(fname_centerline)
# create temporary folder
path_tmp = "tmp." + time.strftime("%y%m%d%H%M%S")
sct.run("mkdir " + path_tmp, verbose)
# copy files into tmp folder
sct.run("cp " + fname_anat + " " + path_tmp, verbose)
sct.run("cp " + fname_centerline + " " + path_tmp, verbose)
# go to tmp folder
os.chdir(path_tmp)
try:
# Change orientation of the input centerline into RPI
sct.printv("\nOrient centerline to RPI orientation...", verbose)
fname_centerline_orient = file_centerline + "_rpi.nii.gz"
set_orientation(file_centerline+ext_centerline, "RPI", fname_centerline_orient)
# Get dimension
sct.printv('\nGet dimensions...', verbose)
from msct_image import Image
image_centerline = Image(fname_centerline_orient)
nx, ny, nz, nt, px, py, pz, pt = image_centerline.dim
# nx, ny, nz, nt, px, py, pz, pt = sct.get_dimension(fname_centerline_orient)
sct.printv('.. matrix size: '+str(nx)+' x '+str(ny)+' x '+str(nz), verbose)
sct.printv('.. voxel size: '+str(px)+'mm x '+str(py)+'mm x '+str(pz)+'mm', verbose)
# smooth centerline
x_centerline_fit, y_centerline_fit, z_centerline, x_centerline_deriv, y_centerline_deriv, \
z_centerline_deriv = smooth_centerline(fname_centerline_orient, algo_fitting=algo_fitting,
type_window=type_window, window_length=window_length,
verbose=verbose)
# Get coordinates of landmarks along curved centerline
# ==========================================================================================
sct.printv("\nGet coordinates of landmarks along curved centerline...", verbose)
# landmarks are created along the curved centerline every z=gapz. They consist of a "cross" of size gapx
# and gapy. In voxel space!!!
# compute the length of the spinal cord based on fitted centerline and size of centerline in z direction
length_centerline, size_z_centerline = 0.0, 0.0
from math import sqrt
for iz in range(0, len(z_centerline) - 1):
length_centerline += sqrt(((x_centerline_fit[iz] - x_centerline_fit[iz + 1]) * px) ** 2 +
((y_centerline_fit[iz] - y_centerline_fit[iz + 1]) * py) ** 2 +
((z_centerline[iz] - z_centerline[iz + 1]) * pz) ** 2)
size_z_centerline += abs((z_centerline[iz] - z_centerline[iz + 1]) * pz)
# compute the size factor between initial centerline and straight bended centerline
factor_curved_straight = length_centerline / size_z_centerline
middle_slice = (z_centerline[0] + z_centerline[-1]) / 2.0
if verbose == 2:
print "Length of spinal cord = ", str(length_centerline)
print "Size of spinal cord in z direction = ", str(size_z_centerline)
print "Ratio length/size = ", str(factor_curved_straight)
# find z indices along centerline given a specific gap: iz_curved
nz_nonz = len(z_centerline)
nb_landmark = int(round(length_centerline/gapz))
if nb_landmark == 0:
nb_landmark = 1
if nb_landmark == 1:
iz_curved = [0]
iz_straight = [0]
else:
iz_curved = [0]
iz_straight = [(z_centerline[0] - middle_slice) * factor_curved_straight + middle_slice]
temp_length_centerline = iz_straight[0]
temp_previous_length = iz_straight[0]
for iz in range(1, len(z_centerline) - 1):
temp_length_centerline += sqrt(((x_centerline_fit[iz] - x_centerline_fit[iz + 1]) * px) ** 2 +
((y_centerline_fit[iz] - y_centerline_fit[iz + 1]) * py) ** 2 +
((z_centerline[iz] - z_centerline[iz + 1]) * pz) ** 2)
if temp_length_centerline >= temp_previous_length + gapz:
iz_curved.append(iz)
iz_straight.append(temp_length_centerline)
temp_previous_length = temp_length_centerline
iz_curved.append(nz_nonz - 1)
iz_straight.append((z_centerline[-1] - middle_slice) * factor_curved_straight + middle_slice)
# computing curved landmarks
landmark_curved = []
worker_arguments = (iz_curved, x_centerline_deriv, y_centerline_deriv, z_centerline_deriv,
x_centerline_fit, y_centerline_fit, z_centerline)
if self.cpu_number != 0:
from multiprocessing import Pool
arguments_landmarks = [(iz, worker_arguments) for iz in range(min(iz_curved), max(iz_curved) + 1, 1)]
pool = Pool(processes=self.cpu_number)
pool.map_async(self.worker_landmarks_curved, arguments_landmarks,
callback=self.worker_landmarks_curved_results)
pool.close()
try:
pool.join() # waiting for all the jobs to be done
if self.results_landmarks_curved:
landmark_curved = self.results_landmarks_curved
else:
raise ValueError("ERROR: no curved landmarks constructed...")
except __HOLE__:
print "\nWarning: Caught KeyboardInterrupt, terminating workers"
pool.terminate()
sys.exit(2)
except Exception as e:
print "Error during straightening on line {}".format(sys.exc_info()[-1].tb_lineno)
print e
sys.exit(2)
else:
landmark_curved_temp = [self.worker_landmarks_curved((iz, worker_arguments))
for iz in range(min(iz_curved), max(iz_curved) + 1, 1)]
landmark_curved_value = 1
for iz, l_curved in landmark_curved_temp:
for landmark in l_curved:
landmark.value = landmark_curved_value
landmark_curved.append(landmark)
landmark_curved_value += 1
# Get coordinates of landmarks along straight centerline
# ==========================================================================================
sct.printv("\nGet coordinates of landmarks along straight centerline...", verbose)
landmark_straight = []
# calculate the z indices corresponding to the Euclidean distance between two consecutive points on the
# curved centerline (approximation curve --> line)
# TODO: DO NOT APPROXIMATE CURVE --> LINE
# initialize x0 and y0 to be at the center of the FOV
x0 = int(round(nx/2))
y0 = int(round(ny/2))
landmark_curved_value = 1
for iz in range(min(iz_curved), max(iz_curved)+1, 1):
# compute new z-coordinate based on iz, middle slice and factor_curved_straight
if iz in iz_curved:
z0 = iz_straight[iz_curved.index(iz)]
# set coordinates for landmark at the center of the cross
landmark_straight.append(Coordinate([x0, y0, z0, landmark_curved_value]))
# set x, y and z coordinates for landmarks +x
landmark_straight.append(Coordinate([x0 + gapxy, y0, z0, landmark_curved_value+1]))
# set x, y and z coordinates for landmarks -x
landmark_straight.append(Coordinate([x0 - gapxy, y0, z0, landmark_curved_value+2]))
# set x, y and z coordinates for landmarks +y
landmark_straight.append(Coordinate([x0, y0 + gapxy, z0, landmark_curved_value+3]))
# set x, y and z coordinates for landmarks -y
landmark_straight.append(Coordinate([x0, y0 - gapxy, z0, landmark_curved_value+4]))
# set x, y and z coordinates for landmarks +x+y
landmark_straight.append(Coordinate([x0 + gapxy, y0 + gapxy, z0, landmark_curved_value + 5]))
# set x, y and z coordinates for landmarks -x+y
landmark_straight.append(Coordinate([x0 - gapxy, y0 + gapxy, z0, landmark_curved_value + 6]))
# set x, y and z coordinates for landmarks +x-y
landmark_straight.append(Coordinate([x0 + gapxy, y0 - gapxy, z0, landmark_curved_value + 7]))
# set x, y and z coordinates for landmarks -x-y
landmark_straight.append(Coordinate([x0 - gapxy, y0 - gapxy, z0, landmark_curved_value + 8]))
# internal crosses
gapxy_internal = gapxy/2
# set x, y and z coordinates for landmarks +x
landmark_straight.append(Coordinate([x0 + gapxy_internal, y0, z0, landmark_curved_value + 9]))
# set x, y and z coordinates for landmarks -x
landmark_straight.append(Coordinate([x0 - gapxy_internal, y0, z0, landmark_curved_value + 10]))
# set x, y and z coordinates for landmarks +y
landmark_straight.append(Coordinate([x0, y0 + gapxy_internal, z0, landmark_curved_value + 11]))
# set x, y and z coordinates for landmarks -y
landmark_straight.append(Coordinate([x0, y0 - gapxy_internal, z0, landmark_curved_value + 12]))
# set x, y and z coordinates for landmarks +x+y
landmark_straight.append(Coordinate([x0 + gapxy_internal, y0 + gapxy_internal, z0, landmark_curved_value + 13]))
# set x, y and z coordinates for landmarks -x+y
landmark_straight.append(Coordinate([x0 - gapxy_internal, y0 + gapxy_internal, z0, landmark_curved_value + 14]))
# set x, y and z coordinates for landmarks +x-y
landmark_straight.append(Coordinate([x0 + gapxy_internal, y0 - gapxy_internal, z0, landmark_curved_value + 15]))
# set x, y and z coordinates for landmarks -x-y
landmark_straight.append(Coordinate([x0 - gapxy_internal, y0 - gapxy_internal, z0, landmark_curved_value + 16]))
landmark_curved_value += 17
elif self.all_labels >= 1:
z0 = (z_centerline[iz] - middle_slice) * factor_curved_straight + middle_slice
landmark_straight.append(Coordinate([x0, y0, z0, landmark_curved_value]))
landmark_curved_value += 1
# Create NIFTI volumes with landmarks
# ==========================================================================================
# Pad input volume to deal with the fact that some landmarks on the curved centerline might be
# outside the FOV
# N.B. IT IS VERY IMPORTANT TO PAD ALSO ALONG X and Y, OTHERWISE SOME LANDMARKS MIGHT GET OUT OF THE FOV!!!
sct.printv('\nPad input volume to account for landmarks that fall outside the FOV...', verbose)
padding_x, padding_y, padding_z = padding, padding, padding
if nx + padding <= leftright_width:
padding_x = leftright_width - padding - nx
sct.run("isct_c3d " + fname_centerline_orient + " -pad " + str(padding_x) + "x" + str(padding_y) + "x" +
str(padding_z) + "vox " + str(padding_x) + "x" + str(padding_y) + "x" + str(padding_z) +
"vox 0 -o tmp.centerline_pad.nii.gz", verbose)
# Open padded centerline for reading
sct.printv('\nOpen padded centerline for reading...', verbose)
file_image = load('tmp.centerline_pad.nii.gz')
data = file_image.get_data()
hdr = file_image.get_header()
landmark_curved_rigid = []
if self.algo_landmark_rigid is not None and self.algo_landmark_rigid != 'None' and self.algo_landmark_rigid != 'euler':
# converting landmarks straight and curved to physical coordinates
from msct_image import Image
image_curved = Image(fname_centerline_orient, verbose=verbose)
# Reorganize landmarks
points_fixed, points_moving = [], []
for coord in landmark_straight:
point_straight = image_curved.transfo_pix2phys([[coord.x, coord.y, coord.z]])
points_fixed.append([point_straight[0][0], point_straight[0][1], point_straight[0][2]])
for coord in landmark_curved:
point_curved = image_curved.transfo_pix2phys([[coord.x, coord.y, coord.z]])
points_moving.append([point_curved[0][0], point_curved[0][1], point_curved[0][2]])
# Register curved landmarks on straight landmarks based on python implementation
sct.printv('\nComputing rigid transformation (algo='+self.algo_landmark_rigid+') ...', verbose)
import msct_register_landmarks
(rotation_matrix, translation_array, points_moving_reg, points_moving_barycenter) = \
msct_register_landmarks.getRigidTransformFromLandmarks(
points_fixed, points_moving, constraints=self.algo_landmark_rigid, show=False)
# reorganize registered pointsx
image_curved = Image(fname_centerline_orient, verbose=verbose)
for index_curved, ind in enumerate(range(0, len(points_moving_reg), 1)):
coord = Coordinate()
point_curved = image_curved.transfo_phys2continuouspix([[points_moving_reg[ind][0],
points_moving_reg[ind][1],
points_moving_reg[ind][2]]])
coord.x, coord.y, coord.z, coord.value = point_curved[0][0], point_curved[0][1], \
point_curved[0][2], index_curved+1
landmark_curved_rigid.append(coord)
# Create volumes containing curved and straight landmarks
data_curved_landmarks = data * 0
data_curved_rigid_landmarks = data * 0
data_straight_landmarks = data * 0
# Loop across cross index
for index in range(0, len(landmark_curved_rigid)):
x, y, z = int(round(landmark_curved[index].x)), int(round(landmark_curved[index].y)), \
int(round(landmark_curved[index].z))
# attribute landmark_value to the voxel and its neighbours
data_curved_landmarks[x + padding_x - 1:x + padding_x + 2, y + padding_y - 1:y + padding_y + 2,
z + padding_z - 1:z + padding_z + 2] = landmark_curved[index].value
# get x, y and z coordinates of curved landmark (rounded to closest integer)
x, y, z = int(round(landmark_curved_rigid[index].x)), int(round(landmark_curved_rigid[index].y)),\
int(round(landmark_curved_rigid[index].z))
# attribute landmark_value to the voxel and its neighbours
data_curved_rigid_landmarks[x + padding_x - 1:x + padding_x + 2, y + padding_y - 1:y + padding_y + 2,
z + padding - 1:z + padding + 2] = landmark_curved_rigid[index].value
# get x, y and z coordinates of straight landmark (rounded to closest integer)
x, y, z = int(round(landmark_straight[index].x)), int(round(landmark_straight[index].y)), \
int(round(landmark_straight[index].z))
# attribute landmark_value to the voxel and its neighbours
data_straight_landmarks[x + padding_x - 1:x + padding_x + 2, y + padding - 1:y + padding + 2,
z + padding_z - 1:z + padding_z + 2] = landmark_straight[index].value
# Write NIFTI volumes
sct.printv('\nWrite NIFTI volumes...', verbose)
hdr.set_data_dtype('uint32') # set imagetype to uint8 #TODO: maybe use int32
img = Nifti1Image(data_curved_landmarks, None, hdr)
save(img, 'tmp.landmarks_curved.nii.gz')
sct.printv('.. File created: tmp.landmarks_curved.nii.gz', verbose)
hdr.set_data_dtype('uint32') # set imagetype to uint8 #TODO: maybe use int32
img = Nifti1Image(data_curved_rigid_landmarks, None, hdr)
save(img, 'tmp.landmarks_curved_rigid.nii.gz')
sct.printv('.. File created: tmp.landmarks_curved_rigid.nii.gz', verbose)
img = Nifti1Image(data_straight_landmarks, None, hdr)
save(img, 'tmp.landmarks_straight.nii.gz')
sct.printv('.. File created: tmp.landmarks_straight.nii.gz', verbose)
if self.algo_landmark_rigid == 'rigid-decomposed':
# writing rigid transformation file
text_file = open("tmp.curve2straight_rigid1.txt", "w")
text_file.write("#Insight Transform File V1.0\n")
text_file.write("#Transform 0\n")
text_file.write("Transform: AffineTransform_double_3_3\n")
text_file.write("Parameters: 1 0 0 0 1 0 0 0 1 %.9f %.9f %.9f\n" % (
translation_array[0, 0], translation_array[0, 1], translation_array[0, 2]))
text_file.write("FixedParameters: 0 0 0\n")
text_file.close()
text_file = open("tmp.curve2straight_rigid2.txt", "w")
text_file.write("#Insight Transform File V1.0\n")
text_file.write("#Transform 0\n")
text_file.write("Transform: AffineTransform_double_3_3\n")
text_file.write("Parameters: %.9f %.9f %.9f %.9f %.9f %.9f %.9f %.9f %.9f 0.0 0.0 0.0\n" % (
rotation_matrix[0, 0], rotation_matrix[0, 1], rotation_matrix[0, 2], rotation_matrix[1, 0],
rotation_matrix[1, 1], rotation_matrix[1, 2], rotation_matrix[2, 0], rotation_matrix[2, 1],
rotation_matrix[2, 2]))
text_file.write("FixedParameters: %.9f %.9f %.9f\n" % (
points_moving_barycenter[0, 0], points_moving_barycenter[0, 1], points_moving_barycenter[0, 2]))
text_file.close()
else:
# writing rigid transformation file
text_file = open("tmp.curve2straight_rigid.txt", "w")
text_file.write("#Insight Transform File V1.0\n")
text_file.write("#Transform 0\n")
text_file.write("Transform: AffineTransform_double_3_3\n")
text_file.write("Parameters: %.9f %.9f %.9f %.9f %.9f %.9f %.9f %.9f %.9f %.9f %.9f %.9f\n" % (
rotation_matrix[0, 0], rotation_matrix[0, 1], rotation_matrix[0, 2], rotation_matrix[1, 0],
rotation_matrix[1, 1], rotation_matrix[1, 2], rotation_matrix[2, 0], rotation_matrix[2, 1],
rotation_matrix[2, 2], translation_array[0, 0], translation_array[0, 1],
translation_array[0, 2]))
text_file.write("FixedParameters: %.9f %.9f %.9f\n" % (points_moving_barycenter[0],
points_moving_barycenter[1],
points_moving_barycenter[2]))
text_file.close()
elif self.algo_landmark_rigid == 'euler':
# Create volumes containing curved and straight landmarks
data_curved_landmarks = data * 0
data_straight_landmarks = data * 0
# Loop across cross index
for index in range(0, len(landmark_curved)):
x, y, z = int(round(landmark_curved[index].x)), int(round(landmark_curved[index].y)), \
int(round(landmark_curved[index].z))
# attribute landmark_value to the voxel and its neighbours
data_curved_landmarks[x + padding_x - 1:x + padding_x + 2, y + padding_y - 1:y + padding_y + 2,
z + padding_z - 1:z + padding_z + 2] = landmark_curved[index].value
# get x, y and z coordinates of straight landmark (rounded to closest integer)
x, y, z = int(round(landmark_straight[index].x)), int(round(landmark_straight[index].y)), \
int(round(landmark_straight[index].z))
# attribute landmark_value to the voxel and its neighbours
data_straight_landmarks[x + padding_x - 1:x + padding_x + 2, y + padding_y - 1:y + padding_y + 2,
z + padding_z - 1:z + padding_z + 2] = landmark_straight[index].value
# Write NIFTI volumes
sct.printv('\nWrite NIFTI volumes...', verbose)
hdr.set_data_dtype('uint32') # set imagetype to uint8 #TODO: maybe use int32
img = Nifti1Image(data_curved_landmarks, None, hdr)
save(img, 'tmp.landmarks_curved.nii.gz')
sct.printv('.. File created: tmp.landmarks_curved.nii.gz', verbose)
img = Nifti1Image(data_straight_landmarks, None, hdr)
save(img, 'tmp.landmarks_straight.nii.gz')
sct.printv('.. File created: tmp.landmarks_straight.nii.gz', verbose)
else:
# Create volumes containing curved and straight landmarks
data_curved_landmarks = data * 0
data_straight_landmarks = data * 0
# Loop across cross index
for index in range(0, len(landmark_curved)):
x, y, z = int(round(landmark_curved[index].x)), int(round(landmark_curved[index].y)), \
int(round(landmark_curved[index].z))
# attribute landmark_value to the voxel and its neighbours
data_curved_landmarks[x + padding_x - 1:x + padding_x + 2, y + padding_y - 1:y + padding_y + 2,
z + padding_z - 1:z + padding_z + 2] = landmark_curved[index].value
# get x, y and z coordinates of straight landmark (rounded to closest integer)
x, y, z = int(round(landmark_straight[index].x)), int(round(landmark_straight[index].y)), \
int(round(landmark_straight[index].z))
# attribute landmark_value to the voxel and its neighbours
data_straight_landmarks[x + padding_x - 1:x + padding_x + 2, y + padding_y - 1:y + padding_y + 2,
z + padding_z - 1:z + padding_z + 2] = landmark_straight[index].value
# Write NIFTI volumes
sct.printv('\nWrite NIFTI volumes...', verbose)
hdr.set_data_dtype('uint32') # set imagetype to uint8 #TODO: maybe use int32
img = Nifti1Image(data_curved_landmarks, None, hdr)
save(img, 'tmp.landmarks_curved.nii.gz')
sct.printv('.. File created: tmp.landmarks_curved.nii.gz', verbose)
img = Nifti1Image(data_straight_landmarks, None, hdr)
save(img, 'tmp.landmarks_straight.nii.gz')
sct.printv('.. File created: tmp.landmarks_straight.nii.gz', verbose)
# Estimate deformation field by pairing landmarks
# ==========================================================================================
# convert landmarks to INT
sct.printv('\nConvert landmarks to INT...', verbose)
sct.run('isct_c3d tmp.landmarks_straight.nii.gz -type int -o tmp.landmarks_straight.nii.gz', verbose)
sct.run('isct_c3d tmp.landmarks_curved.nii.gz -type int -o tmp.landmarks_curved.nii.gz', verbose)
# This stands to avoid overlapping between landmarks
# TODO: do symmetric removal
sct.printv('\nMake sure all labels between landmark_straight and landmark_curved match 1...', verbose)
label_process_straight = ProcessLabels(fname_label="tmp.landmarks_straight.nii.gz",
fname_output=["tmp.landmarks_straight.nii.gz",
"tmp.landmarks_curved.nii.gz"],
fname_ref="tmp.landmarks_curved.nii.gz",
verbose=verbose)
label_process_straight.process('remove-symm')
# Estimate rigid transformation
sct.printv('\nEstimate rigid transformation between paired landmarks...', verbose)
sct.run('isct_ANTSUseLandmarkImagesToGetAffineTransform tmp.landmarks_straight.nii.gz '
'tmp.landmarks_curved.nii.gz rigid tmp.curve2straight_rigid.txt', verbose)
# Apply rigid transformation
sct.printv('\nApply rigid transformation to curved landmarks...', verbose)
Transform(input_filename="tmp.landmarks_curved.nii.gz", fname_dest="tmp.landmarks_curved_rigid.nii.gz",
output_filename="tmp.landmarks_straight.nii.gz", warp="tmp.curve2straight_rigid.txt",
interp="nn", verbose=verbose).apply()
if verbose == 2:
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
fig = plt.figure()
ax = Axes3D(fig)
# plt_centerline, = ax.plot(x_centerline_fit, y_centerline_fit, z_centerline, zdir='z')
plt_landmarks_curved, = ax.plot([coord.x for coord in landmark_curved],
[coord.y for coord in landmark_curved],
[coord.z for coord in landmark_curved],
'b.', markersize=3)
for coord in landmark_curved:
ax.text(coord.x, coord.y, coord.z, str(coord.value), color='blue')
plt_landmarks_straight, = ax.plot([coord.x for coord in landmark_straight],
[coord.y for coord in landmark_straight],
[coord.z for coord in landmark_straight],
'r.', markersize=3)
for coord in landmark_straight:
ax.text(coord.x, coord.y, coord.z, str(coord.value), color='red')
if self.algo_landmark_rigid is not None and self.algo_landmark_rigid != 'None':
plt_landmarks_curved_reg, = ax.plot([coord.x for coord in landmark_curved_rigid],
[coord.y for coord in landmark_curved_rigid],
[coord.z for coord in landmark_curved_rigid],
'g.', markersize=3)
plt.legend([plt_landmarks_curved, plt_landmarks_straight, plt_landmarks_curved_reg],
['Landmarks curved', 'Landmarks straight', 'Landmarks registered'])
else:
plt.legend([plt_landmarks_curved, plt_landmarks_straight],
['Landmarks curved', 'Landmarks straight'])
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.set_zlabel('z')
ax.set_aspect('equal')
plt.show()
if (self.use_continuous_labels == 1 and self.algo_landmark_rigid is not None and
self.algo_landmark_rigid != "None") or self.use_continuous_labels == '1':
if self.algo_landmark_rigid != 'euler':
landmark_curved_rigid, landmark_straight = ProcessLabels.remove_label_coord(landmark_curved_rigid,
landmark_straight,
symmetry=True)
# Writing landmark curve in text file
landmark_straight_file = open("LandmarksRealStraight.txt", "w+")
for i in landmark_straight:
landmark_straight_file.write(
str(i.x + padding_x) + "," + str(i.y + padding_y) + "," + str(i.z + padding_z) + "\n")
landmark_straight_file.close()
# Writing landmark curve in text file
landmark_curved_file = open("LandmarksRealCurve.txt", "w+")
for i in landmark_curved_rigid:
landmark_curved_file.write(
str(i.x + padding_x) + "," + str(i.y + padding_y) + "," + str(i.z + padding_z) + "\n")
landmark_curved_file.close()
# Estimate b-spline transformation curve --> straight
sct.printv("\nEstimate b-spline transformation: curve --> straight...", verbose)
sct.run("isct_ANTSUseLandmarkImagesWithTextFileToGetBSplineDisplacementField "
"tmp.landmarks_straight.nii.gz tmp.landmarks_curved_rigid.nii.gz "
"tmp.warp_curve2straight.nii.gz " + self.bspline_meshsize + " " +
self.bspline_numberOfLevels + " LandmarksRealCurve.txt LandmarksRealStraight.txt " +
self.bspline_order + " 0", verbose)
else:
landmark_curved, landmark_straight = ProcessLabels.remove_label_coord(landmark_curved,
landmark_straight,
symmetry=True)
# Writing landmark curve in text file
landmark_straight_file = open("LandmarksRealStraight.txt", "w+")
for i in landmark_straight:
landmark_straight_file.write(
str(i.x + padding_x) + "," +
str(i.y + padding_y) + "," +
str(i.z + padding_z) + "\n")
landmark_straight_file.close()
# Writing landmark curve in text file
landmark_curved_file = open("LandmarksRealCurve.txt", "w+")
for i in landmark_curved:
landmark_curved_file.write(
str(i.x + padding_x) + "," +
str(i.y + padding_y) + "," +
str(i.z + padding_z) + "\n")
landmark_curved_file.close()
# Estimate b-spline transformation curve --> straight
sct.printv("\nEstimate b-spline transformation: curve --> straight...", verbose)
status, output = sct.run('isct_ANTSLandmarksBSplineTransform '
'tmp.landmarks_straight.nii.gz '
'tmp.landmarks_curved.nii.gz '
'tmp.curve2straight_rigid.txt '
'tmp.warp_curve2straight.nii.gz ' +
self.bspline_meshsize + ' ' +
self.bspline_numberOfLevels + ' '
'LandmarksRealCurve.txt '
'LandmarksRealStraight.txt ' +
self.bspline_order +
' 0',
verbose=verbose)
else:
# This stands to avoid overlapping between landmarks
sct.printv("\nMake sure all labels between landmark_straight and landmark_curved match 2...", verbose)
label_process = ProcessLabels(fname_label="tmp.landmarks_curved_rigid.nii.gz",
fname_output=["tmp.landmarks_curved_rigid.nii.gz",
"tmp.landmarks_straight.nii.gz"],
fname_ref="tmp.landmarks_straight.nii.gz", verbose=verbose)
label_process.process("remove-symm")
# Estimate b-spline transformation curve --> straight
sct.printv("\nEstimate b-spline transformation: curve --> straight...", verbose)
sct.run("isct_ANTSUseLandmarkImagesToGetBSplineDisplacementField tmp.landmarks_straight.nii.gz "
"tmp.landmarks_curved_rigid.nii.gz tmp.warp_curve2straight.nii.gz " + self.bspline_meshsize
+ " " + self.bspline_numberOfLevels + " " + self.bspline_order + " 0", verbose)
# remove padding for straight labels
if crop == 1:
ImageCropper(input_file="tmp.landmarks_straight.nii.gz",
output_file="tmp.landmarks_straight_crop.nii.gz", dim=[0, 1, 2], bmax=True,
verbose=verbose).crop()
pass
else:
sct.run("cp tmp.landmarks_straight.nii.gz tmp.landmarks_straight_crop.nii.gz", verbose)
# Concatenate rigid and non-linear transformations...
sct.printv("\nConcatenate rigid and non-linear transformations...", verbose)
# TODO: !!! DO NOT USE sct.run HERE BECAUSE isct_ComposeMultiTransform OUTPUTS A NON-NULL STATUS !!!
if self.algo_landmark_rigid == 'rigid-decomposed':
cmd = "sct_concat_transfo -w tmp.curve2straight_rigid1.txt,tmp.curve2straight_rigid2.txt," \
"tmp.warp_curve2straight.nii.gz -d tmp.landmarks_straight_crop.nii.gz " \
"-o tmp.curve2straight.nii.gz"
else:
cmd = "isct_ComposeMultiTransform 3 tmp.curve2straight.nii.gz -R "+str(file_anat+ext_anat)+" " \
"tmp.warp_curve2straight.nii.gz tmp.curve2straight_rigid.txt"
sct.run(cmd, self.verbose)
# Estimate b-spline transformation straight --> curve
# TODO: invert warping field instead of estimating a new one
sct.printv("\nEstimate b-spline transformation: straight --> curve...", verbose)
if (self.use_continuous_labels == 1 and self.algo_landmark_rigid is not None
and self.algo_landmark_rigid != "None") or self.use_continuous_labels == "1":
if self.algo_landmark_rigid != 'euler':
sct.run("isct_ANTSUseLandmarkImagesWithTextFileToGetBSplineDisplacementField "
"tmp.landmarks_curved_rigid.nii.gz tmp.landmarks_straight.nii.gz "
"tmp.warp_straight2curve.nii.gz " + self.bspline_meshsize + " " +
self.bspline_numberOfLevels + " LandmarksRealStraight.txt LandmarksRealCurve.txt " +
self.bspline_order + " 0", verbose)
else:
# Estimate b-spline transformation curve --> straight
status, output = sct.run('isct_ANTSLandmarksBSplineTransform '
'tmp.landmarks_curved.nii.gz '
'tmp.landmarks_straight.nii.gz '
'tmp.straight2curve_rigid.txt '
'tmp.warp_straight2curve.nii.gz ' +
self.bspline_meshsize + ' ' +
self.bspline_numberOfLevels + ' ' +
'LandmarksRealStraight.txt '
'LandmarksRealCurve.txt ' +
self.bspline_order +
' 0',
verbose=verbose)
else:
sct.run("isct_ANTSUseLandmarkImagesToGetBSplineDisplacementField "
"tmp.landmarks_curved_rigid.nii.gz tmp.landmarks_straight.nii.gz "
"tmp.warp_straight2curve.nii.gz " + self.bspline_meshsize + " " + self.bspline_numberOfLevels +
" " + self.bspline_order + " 0", verbose)
# Concatenate rigid and non-linear transformations...
sct.printv("\nConcatenate rigid and non-linear transformations...", verbose)
if self.algo_landmark_rigid == "rigid-decomposed":
cmd = "sct_concat_transfo -w tmp.warp_straight2curve.nii.gz,-tmp.curve2straight_rigid2.txt," \
"-tmp.curve2straight_rigid1.txt -d " + file_anat + ext_anat + " -o tmp.straight2curve.nii.gz"
else:
if self.algo_landmark_rigid == 'euler':
#cmd = "isct_ComposeMultiTransform 3 tmp.straight2curve.nii.gz -R " + file_anat + ext_anat + \
# " tmp.warp_straight2curve.nii.gz tmp.straight2curve_rigid.txt"
cmd = "isct_ComposeMultiTransform 3 tmp.straight2curve.nii.gz -R " + file_anat + ext_anat + \
" tmp.warp_straight2curve.nii.gz tmp.straight2curve_rigid.txt"
else:
cmd = "isct_ComposeMultiTransform 3 tmp.straight2curve.nii.gz -R " + file_anat + ext_anat + \
" -i tmp.curve2straight_rigid.txt tmp.warp_straight2curve.nii.gz"
sct.run(cmd, self.verbose)
# Apply transformation to input image
sct.printv('\nApply transformation to input image...', verbose)
Transform(input_filename=str(file_anat+ext_anat), fname_dest=str(file_anat+ext_anat),
output_filename="tmp.anat_rigid_warp.nii.gz", interp=interpolation_warp,
warp="tmp.curve2straight.nii.gz", verbose=verbose).apply()
# compute the error between the straightened centerline/segmentation and the central vertical line.
# Ideally, the error should be zero.
# Apply deformation to input image
sct.printv('\nApply transformation to centerline image...', verbose)
Transform(input_filename=fname_centerline_orient, fname_dest="tmp.landmarks_straight_crop.nii.gz",
output_filename="tmp.centerline_straight.nii.gz", interp="nn",
warp="tmp.curve2straight.nii.gz", verbose=verbose).apply()
from msct_image import Image
file_centerline_straight = Image('tmp.centerline_straight.nii.gz', verbose=verbose)
coordinates_centerline = file_centerline_straight.getNonZeroCoordinates(sorting='z')
mean_coord = []
from numpy import mean
for z in range(coordinates_centerline[0].z, coordinates_centerline[-1].z):
temp_mean = [coord.value for coord in coordinates_centerline if coord.z == z]
if temp_mean:
mean_value = mean(temp_mean)
mean_coord.append(mean([[coord.x * coord.value / mean_value, coord.y * coord.value / mean_value]
for coord in coordinates_centerline if coord.z == z], axis=0))
# compute error between the input data and the nurbs
from math import sqrt
x0 = file_centerline_straight.data.shape[0]/2.0
y0 = file_centerline_straight.data.shape[1]/2.0
count_mean = 0
for coord_z in mean_coord:
if not isnan(sum(coord_z)):
dist = ((x0-coord_z[0])*px)**2 + ((y0-coord_z[1])*py)**2
self.mse_straightening += dist
dist = sqrt(dist)
if dist > self.max_distance_straightening:
self.max_distance_straightening = dist
count_mean += 1
self.mse_straightening = sqrt(self.mse_straightening/float(count_mean))
except Exception as e:
sct.printv('WARNING: Exception during Straightening:', 1, 'warning')
print 'Error on line {}'.format(sys.exc_info()[-1].tb_lineno)
print e
os.chdir('..')
# Generate output file (in current folder)
# TODO: do not uncompress the warping field, it is too time consuming!
sct.printv("\nGenerate output file (in current folder)...", verbose)
sct.generate_output_file(path_tmp + "/tmp.curve2straight.nii.gz", "warp_curve2straight.nii.gz", verbose)
sct.generate_output_file(path_tmp + "/tmp.straight2curve.nii.gz", "warp_straight2curve.nii.gz", verbose)
if fname_output == '':
fname_straight = sct.generate_output_file(path_tmp + "/tmp.anat_rigid_warp.nii.gz",
file_anat + "_straight" + ext_anat, verbose)
else:
fname_straight = sct.generate_output_file(path_tmp+'/tmp.anat_rigid_warp.nii.gz',
fname_output, verbose) # straightened anatomic
# Remove temporary files
if remove_temp_files:
sct.printv("\nRemove temporary files...", verbose)
sct.run("rm -rf " + path_tmp, verbose)
sct.printv('\nDone!\n', verbose)
sct.printv("Maximum x-y error = " + str(round(self.max_distance_straightening, 2)) + " mm", verbose, "bold")
sct.printv("Accuracy of straightening (MSE) = " + str(round(self.mse_straightening, 2)) +
" mm", verbose, "bold")
# display elapsed time
elapsed_time = time.time() - start_time
sct.printv("\nFinished! Elapsed time: " + str(int(round(elapsed_time))) + "s", verbose)
sct.printv("\nTo view results, type:", verbose)
sct.printv("fslview " + fname_straight + " &\n", verbose, "info") | KeyboardInterrupt | dataset/ETHPy150Open neuropoly/spinalcordtoolbox/dev/straightening/sct_straighten_spinalcord_LargeFOVOutput.py/SpinalCordStraightener.straighten |
5,683 | def check_token(self, user, token):
    """
    Check that a password reset token is correct for a given user.
    """
    # Parse the tokem
    try:
        ts_b36, hash = token.split("-")
    except __HOLE__:
        return False
    try:
        ts = base36_to_int(ts_b36)
    except ValueError:
        return False
    # Check that the timestamp/uid has not been tampered with
    if self._make_token_with_timestamp(user, ts) != token:
        return False
    # Check the timestamp is within limit
    if (self._num_days(self._today()) - ts) > settings.PASSWORD_RESET_TIMEOUT_DAYS:
        return False
    return True | ValueError | dataset/ETHPy150Open dcramer/django-compositepks/django/contrib/auth/tokens.py/PasswordResetTokenGenerator.check_token
5,684 | def read(self):
    try:
        retval = self.queue.popleft()
        if self.cr.balance < 0:
            self.cr.send(retval)
        if isinstance(retval, bomb):
            retval.raise_()
        return retval
    except __HOLE__:
        pass
    return self.cr.receive() | IndexError | dataset/ETHPy150Open benoitc/flower/flower/net/udp.py/UDPConn.read
5,685 | def check_flakes():
    try:
        from pyflakes.scripts.pyflakes import main as main_pyflakes
    except ImportError:
        print("pyflakes not installed. Did you run pip install -r requirements_tests.txt or python develop.py --install-basic-requirements?", file=sys.stderr)
        return -1
    stdout = sys.stdout
    sys.stdout = StringIO.StringIO()
    original_argv = sys.argv
    sys.argv = [original_argv[0], "weblab", "test", "voodoo", "experiments"]
    try:
        number_of_lines = main_pyflakes()
    except __HOLE__:
        pass
    finally:
        results = sys.stdout.getvalue()
        sys.stdout = stdout
        sys.argv = original_argv
    lines = [ line for line in results.split('\n') if line.find('generated') < 0 ]
    for line in lines:
        if len(line.strip()) > 0:
            print(line, file=sys.stderr)
    check_all_unused_exceptions()
    return 0 | SystemExit | dataset/ETHPy150Open weblabdeusto/weblabdeusto/server/src/develop.py/check_flakes
5,686 | def deploy_testdb(options):
    from weblab.admin.deploy import insert_required_initial_data, populate_weblab_tests, generate_create_database, insert_required_initial_coord_data
    import weblab.db.model as Model
    import weblab.core.coordinator.sql.model as CoordinatorModel
    import voodoo.sessions.db_lock_data as DbLockData
    import voodoo.sessions.sqlalchemy_data as SessionSqlalchemyData
    from sqlalchemy import create_engine
    try:
        import MySQLdb
        dbi = MySQLdb
    except ImportError:
        try:
            import pymysql_sa
        except __HOLE__:
            raise Exception("Neither MySQLdb nor pymysql_sa have been installed. First install them by running 'pip install pymysql_sa' or 'pip install python-mysql'")
        pymysql_sa.make_default_mysql_dialect()
    t_initial = time.time()
    db_dir = 'db'
    if not os.path.exists(db_dir):
        os.mkdir(db_dir)
    db_engine = options.testdb_engine
    weblab_db_username = options.testdb_user
    weblab_db_password = options.testdb_passwd
    weblab_admin_db_username = options.testdb_admin_user
    if options.testdb_ask_admin_passwd:
        weblab_admin_db_password = getpass.getpass("Database password:".encode('utf8'))
    else:
        weblab_admin_db_password = options.testdb_admin_passwd
    if db_engine == 'mysql':
        weblab_test_db_str = 'mysql://%s:%s@localhost/WebLabTests%s' % (weblab_db_username, weblab_db_password,'%s')
        weblab_coord_db_str = 'mysql://%s:%s@localhost/WebLabCoordination%s' % (weblab_db_username, weblab_db_password, '%s')
        weblab_sessions_db_str = 'mysql://%s:%s@localhost/WebLabSessions' % (weblab_db_username, weblab_db_password)
    elif db_engine == 'sqlite':
        weblab_test_db_str = 'sqlite:///db/WebLabTests%s.db'
        weblab_coord_db_str = 'sqlite:///db/WebLabCoordination%s.db'
        weblab_sessions_db_str = 'sqlite:///db/WebLabSessions.db'
    else:
        raise Exception("db engine %s not supported" % db_engine)
    if options.testdb_create_db:
        create_database = generate_create_database(db_engine)
        if create_database is None:
            raise Exception("db engine %s not supported for creating database" % db_engine)
        t = time.time()
        error_message = 'Could not create database. This may happen if the admin db credentials are wrong. Try --db-ask-admin-passwd'
        create_database(error_message, weblab_admin_db_username, weblab_admin_db_password, "WebLab", weblab_db_username, weblab_db_password, db_dir = db_dir)
        create_database(error_message, weblab_admin_db_username, weblab_admin_db_password, "WebLabTests", weblab_db_username, weblab_db_password, db_dir = db_dir)
        create_database(error_message, weblab_admin_db_username, weblab_admin_db_password, "WebLabTests2", weblab_db_username, weblab_db_password, db_dir = db_dir)
        create_database(error_message, weblab_admin_db_username, weblab_admin_db_password, "WebLabTests3", weblab_db_username, weblab_db_password, db_dir = db_dir)
        create_database(error_message, weblab_admin_db_username, weblab_admin_db_password, "WebLabIntTests1", weblab_db_username, weblab_db_password, db_dir = db_dir)
        create_database(error_message, weblab_admin_db_username, weblab_admin_db_password, "WebLabCoordination", weblab_db_username, weblab_db_password, db_dir = db_dir)
        create_database(error_message, weblab_admin_db_username, weblab_admin_db_password, "WebLabCoordination2", weblab_db_username, weblab_db_password, db_dir = db_dir)
        create_database(error_message, weblab_admin_db_username, weblab_admin_db_password, "WebLabCoordination3", weblab_db_username, weblab_db_password, db_dir = db_dir)
        create_database(error_message, weblab_admin_db_username, weblab_admin_db_password, "WebLabSessions", weblab_db_username, weblab_db_password, db_dir = db_dir)
        print("Databases created.\t\t\t\t[done] [%1.2fs]" % (time.time() - t))
    #####################################################################
    #
    # Populating main database
    #
    for tests in ('','2','3'):
        print("Populating 'WebLabTests%s' database... \t\t" % tests, end="")
        t = time.time()
        engine = create_engine(weblab_test_db_str % tests, echo = False)
        metadata = Model.Base.metadata
        metadata.drop_all(engine)
        metadata.create_all(engine)
        insert_required_initial_data(engine)
        populate_weblab_tests(engine, tests)
        print("[done] [%1.2fs]" % (time.time() - t))
    #####################################################################
    #
    # Populating Coordination database
    #
    for coord in ('','2','3'):
        print("Populating 'WebLabCoordination%s' database...\t" % coord, end="")
        t = time.time()
        engine = create_engine(weblab_coord_db_str % coord, echo = False)
        CoordinatorModel.load()
        metadata = CoordinatorModel.Base.metadata
        metadata.drop_all(engine)
        metadata.create_all(engine)
        insert_required_initial_coord_data(engine)
        print("[done] [%1.2fs]" % (time.time() - t))
    #####################################################################
    #
    # Populating Sessions database
    #
    print("Populating 'WebLabSessions' database...\t\t", end="")
    t = time.time()
    engine = create_engine(weblab_sessions_db_str, echo = False)
    metadata = DbLockData.SessionLockBase.metadata
    metadata.drop_all(engine)
    metadata.create_all(engine)
    metadata = SessionSqlalchemyData.SessionBase.metadata
    metadata.drop_all(engine)
    metadata.create_all(engine)
    print("[done] [%1.2fs]" % (time.time() - t))
    print("Total database deployment: \t\t\t[done] [%1.2fs]" % (time.time() - t_initial)) | ImportError | dataset/ETHPy150Open weblabdeusto/weblabdeusto/server/src/develop.py/deploy_testdb
5,687 | def run_query(self, query):
    try:
        json_data = None
        error = None
        query = query.strip()
        script = os.path.join(self.configuration["path"], query.split(" ")[0])
        if not os.path.exists(script):
            return None, "Script '%s' not found in script directory" % query
        script = os.path.join(self.configuration["path"], query)
        output = subprocess.check_output(script.split(" "), shell=False)
        if output is not None:
            output = output.strip()
            if output != "":
                return output, None
        error = "Error reading output"
    except subprocess.CalledProcessError as e:
        return None, str(e)
    except __HOLE__:
        error = "Query cancelled by user."
        json_data = None
    except Exception as e:
        raise sys.exc_info()[1], None, sys.exc_info()[2]
    return json_data, error | KeyboardInterrupt | dataset/ETHPy150Open getredash/redash/redash/query_runner/script.py/Script.run_query
5,688 | def hdfs_connect(host='localhost', port=50070, protocol='webhdfs',
                 use_https='default', auth_mechanism='NOSASL',
                 verify=True, **kwds):
    """
    Connect to HDFS
    Parameters
    ----------
    host : string, Host name of the HDFS NameNode
    port : int, NameNode's WebHDFS port (default 50070)
    protocol : {'webhdfs'}
    use_https : boolean, default 'default'
        Connect to WebHDFS with HTTPS, otherwise plain HTTP. For secure
        authentication, the default for this is True, otherwise False
    auth_mechanism : string, Set to NOSASL or PLAIN for non-secure clusters.
        Set to GSSAPI or LDAP for Kerberos-secured clusters.
    verify : boolean, Set to False to turn off verifying SSL certificates.
        (default True)
    Other keywords are forwarded to hdfs library classes
    Returns
    -------
    client : WebHDFS
    """
    import requests
    session = kwds.setdefault('session', requests.Session())
    session.verify = verify
    if auth_mechanism in ['GSSAPI', 'LDAP']:
        if use_https == 'default':
            prefix = 'https'
        else:
            prefix = 'https' if use_https else 'http'
        try:
            import requests_kerberos
        except __HOLE__:
            raise IbisError(
                "Unable to import requests-kerberos, which is required for "
                "Kerberos HDFS support. Install it by executing `pip install "
                "requests-kerberos` or `pip install hdfs[kerberos]`.")
        from hdfs.ext.kerberos import KerberosClient
        # note SSL
        url = '{0}://{1}:{2}'.format(prefix, host, port)
        kwds.setdefault('mutual_auth', 'OPTIONAL')
        hdfs_client = KerberosClient(url, **kwds)
    else:
        if use_https == 'default':
            prefix = 'http'
        else:
            prefix = 'https' if use_https else 'http'
        from hdfs.client import InsecureClient
        url = '{0}://{1}:{2}'.format(prefix, host, port)
        hdfs_client = InsecureClient(url, **kwds)
    return WebHDFS(hdfs_client) | ImportError | dataset/ETHPy150Open cloudera/ibis/ibis/__init__.py/hdfs_connect
5,689 | def _extract(d, k, default=_NoDefault):
    """Get an item from a dictionary, and remove it from the dictionary."""
    try:
        retval = d[k]
    except __HOLE__:
        if default is _NoDefault:
            raise
        return default
    del d[k]
    return retval
# Generic cipher test case | KeyError | dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/pycrypto-2.6.1/lib/Crypto/SelfTest/Cipher/common.py/_extract
5,690 | def decode(self, s):
    try:
        return json.loads(s)
    except __HOLE__ as e:
        self.logger.error(str(e))
        self.logger.error(s.strip())
        raise errors.IgnoreObject(e) | ValueError | dataset/ETHPy150Open zmap/ztag/ztag/decoders/decoders.py/JSONDecoder.decode
5,691 | def combineDicts(master,new):
    """
    instead of a dict of dicts of arbitrary depth, use a dict of tuples to store.
    """
    for (keysequence, valuesequence) in flatten(new):
        try:
            master[keysequence] = map(sum,zip(master[keysequence],valuesequence))
        except __HOLE__:
            master[keysequence] = valuesequence
    return dict1 | KeyError | dataset/ETHPy150Open Bookworm-project/BookwormDB/bookwormDB/MetaWorm.py/combineDicts
5,692 | def setDefaults(self):
    for specialKey in ["database","host"]:
        try:
            if isinstance(self.outside_dictionary[specialKey],basestring):
                #coerce strings to list:
                self.outside_dictionary[specialKey] = [self.outside_dictionary[specialKey]]
        except __HOLE__:
            #It's OK not to define host.
            if specialKey=="host":
                pass
    if 'host' not in self.outside_dictionary:
        #Build a hostlist: usually just localhost a bunch of times.
        self.outside_dictionary['host'] = hostlist(self.outside_dictionary['database'])
    for (target, dest) in [("database","host"),("host","database")]:
        #Expand out so you can search for the same database on multiple databases, or multiple databases on the same host.
        if len(self.outside_dictionary[target])==1 and len(self.outside_dictionary[dest]) != 1:
            self.outside_dictionary[target] = self.outside_dictionary[target] * len(self.outside_dictionary[dest]) | KeyError | dataset/ETHPy150Open Bookworm-project/BookwormDB/bookwormDB/MetaWorm.py/MetaQuery.setDefaults
5,693 | def cmd_options(cmd):
    os.environ["_ARGCOMPLETE_IFS"] = "\n"
    os.environ["_ARGCOMPLETE_WORDBREAKS"] = os.environ.get(
        "COMP_WORDBREAKS", "")
    os.environ["_ARGCOMPLETE"] = "2"
    try:
        mod = importlib.import_module(
            ".{0}".format(cmd), package="mesos.cli.cmds")
    except __HOLE__:
        return
    if not hasattr(mod, 'parser'):
        return
    importlib.import_module("argcomplete").autocomplete(
        mod.parser,
        output_stream=sys.stdout,
        exit_method=EXIT
    ) | ImportError | dataset/ETHPy150Open mesosphere/mesos-cli/mesos/cli/cmds/completion.py/cmd_options
5,694 | def read(self, filename):
    """Read and parse single EditorConfig file"""
    try:
        fp = open(filename, encoding='utf-8')
    except __HOLE__:
        return
    self._read(fp, filename)
    fp.close() | IOError | dataset/ETHPy150Open editorconfig/editorconfig-vim/plugin/editorconfig-core-py/editorconfig/ini.py/EditorConfigParser.read
5,695 | def stop(self):
    """
    Cleans up the process
    """
    if self._log:
        self._log.close()
        self._log = None
    #If its dead dont worry
    if self.process is None:
        return
    #Tell the Server to properly die in case
    try:
        if self.process:
            self.process.send_signal(signal.SIGTERM)
            self.process.wait()
    except __HOLE__:
        # kill may not be available under windows environment
        pass | OSError | dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/selenium/webdriver/phantomjs/service.py/Service.stop
5,696 | def install_request_logger(app, single_threaded, logger, *unlogged_prefixes):
    def make_context(**kwargs):
        location = request.path
        if request.query_string:
            location += "?" + request.query_string
        return dict(
            uuid = g.uuid,
            method = request.method[:4],
            location = location,
            **kwargs
        )
    def should_skip_logging():
        for prefix in unlogged_prefixes:
            if request.path.startswith(prefix):
                return True
        return False
    @app.before_request
    def logging_before():
        g.uuid = uuid4()
        g.timer = Timer()
        g.cpu = CPUTime()
        g.queries = 0
        if should_skip_logging():
            return
        try:
            size = int(request.headers['Content-Length'])
        except (KeyError, ValueError):
            size = -1
        try:
            g.userid = int(session['user_id'])
        except (KeyError, __HOLE__):
            g.userid = -1
        logger.debug('> %(method)-4s %(location)s agent=%(agent)s userid=%(userid)d size=%(size)d', make_context(
            agent=request.user_agent.browser, userid=g.userid, size=size,
        ))
    @app.after_request
    def logging_after(response):
        if should_skip_logging():
            return response
        level = thresholds(response.status_code, (
            (logging.DEBUG, 300),
            (logging.INFO, 400),
            (logging.WARNING, 500),
            (logging.ERROR, 600)
        ), logging.CRITICAL)
        try:
            size = int(response.headers['Content-Length'])
        except (KeyError, ValueError):
            size = -1
        context = make_context(
            status=response.status_code, wall=g.timer.elapsed, response_size=size, secure=request.is_secure,
            agent=request.user_agent.string, ip=request.access_route[0], rqid=g.uuid.hex[:6], userid=g.userid
        )
        fmt = '< %(status)d %(method)-4s %(location)s wall=%(wall).1f size=%(response_size)d'
        if single_threaded:
            context.update(queries=g.queries, cpu=round(g.cpu.elapsed, 3))
            fmt += ' cpu=%(cpu).1f queries=%(queries)d'
        logger.log(level, fmt, context)
        return response
    def query_count_increment(*a):
        g.queries += 1
    return query_count_increment | ValueError | dataset/ETHPy150Open fusic-com/flask-todo/utils/flaskutils/__init__.py/install_request_logger
5,697 | def copytree_exists(src, dst, symlinks=False,
                    skip_files=DEFAULT_SKIP_FILES):
    if not os.path.exists(src):
        return
    names = os.listdir(src)
    if not os.path.exists(dst):
        os.mkdir(dst)
    errors = []
    for name in names:
        srcname = os.path.join(src, name)
        if skip_files.match(srcname):
            logging.debug('Ignoring file \'%s\': File matches ignore regex.',
                          srcname)
            continue
        dstname = os.path.join(dst, name)
        try:
            if symlinks and os.path.islink(srcname):
                linkto = os.readlink(srcname)
                os.symlink(linkto, dstname)
            elif os.path.isdir(srcname):
                copytree_exists(srcname, dstname, symlinks, skip_files=skip_files)
            else:
                shutil.copy2(srcname, dstname)
        except (__HOLE__, os.error), why:
            errors.append((srcname, dstname, why))
    if errors:
        print errors | IOError | dataset/ETHPy150Open anandology/pyjamas/pyjs/src/pyjs/util.py/copytree_exists
5,698 | def copy_exists(srcname, dstname, symlinks=False):
    if not os.path.exists(srcname):
        return
    errors = []
    try:
        if symlinks and os.path.islink(srcname):
            linkto = os.readlink(srcname)
            os.symlink(linkto, dstname)
        else:
            shutil.copyfile(srcname, dstname)
            shutil.copystat(srcname, dstname)
    except (__HOLE__, os.error), why:
        errors.append((srcname, dstname, why))
    if errors:
        print errors | IOError | dataset/ETHPy150Open anandology/pyjamas/pyjs/src/pyjs/util.py/copy_exists
5,699 | @request_user_has_resource_db_permission(permission_type=PermissionType.API_KEY_VIEW)
@jsexpose(arg_types=[str])
def get_one(self, api_key_id_or_key):
    """
    List api keys.
    Handle:
        GET /apikeys/1
    """
    api_key_db = None
    try:
        api_key_db = ApiKey.get_by_key_or_id(api_key_id_or_key)
    except ApiKeyNotFoundError:
        msg = 'ApiKey matching %s for reference and id not found.', api_key_id_or_key
        LOG.exception(msg)
        abort(http_client.NOT_FOUND, msg)
    try:
        return ApiKeyAPI.from_model(api_key_db, mask_secrets=True)
    except (__HOLE__, ValueError) as e:
        LOG.exception('Failed to serialize API key.')
        abort(http_client.INTERNAL_SERVER_ERROR, str(e)) | ValidationError | dataset/ETHPy150Open StackStorm/st2/st2api/st2api/controllers/v1/auth.py/ApiKeyController.get_one