Columns:
    Unnamed: 0 — int64, values 0 to 10k (row index)
    function — string, lengths 79 to 138k: a Python function with one exception type masked as __HOLE__
    label — string, 20 classes: the exception class that fills the __HOLE__ mask
    info — string, lengths 42 to 261: source path (dataset/ETHPy150Open <repo>/<file>/<qualified function name>)
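Each record below pairs a masked function with the exception class that belongs at its __HOLE__ site. A minimal sketch of restoring a record, assuming a local CSV copy of this table with the four columns above; the file name and pandas usage are illustrative, not part of the dataset:

import pandas as pd

# Hypothetical local copy of this table (file name is an assumption).
df = pd.read_csv("ethpy150open_sample.csv")

def fill_hole(row):
    # Substitute the labeled exception class back into the masked handler.
    return row["function"].replace("__HOLE__", row["label"])

restored = df.apply(fill_hole, axis=1)
print(restored.iloc[0])  # e.g. record 4,800 with 'except OSError:' restored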
4,800
def __init__(self, env):
    self.env = env
    self.root_path = os.path.join(self.env.root_path, 'databags')
    self._known_bags = {}
    self._bags = {}
    try:
        for filename in os.listdir(self.root_path):
            if filename.endswith(('.ini', '.json')):
                self._known_bags.setdefault(
                    filename.rsplit('.', -1)[0], []).append(filename)
    except __HOLE__:
        pass
OSError
dataset/ETHPy150Open lektor/lektor-archive/lektor/databags.py/Databags.__init__
4,801
def init_connection_file(self):
    """find the connection file, and load the info if found.

    The current working directory and the current profile's security
    directory will be searched for the file if it is not given by
    absolute path.

    When attempting to connect to an existing kernel and the `--existing`
    argument does not match an existing file, it will be interpreted as a
    fileglob, and the matching file in the current profile's security dir
    with the latest access time will be used.

    After this method is called, self.connection_file contains the *full path*
    to the connection file, never just its name.
    """
    if self.existing:
        try:
            cf = find_connection_file(self.existing, [self.runtime_dir])
        except Exception:
            self.log.critical("Could not find existing kernel connection file %s", self.existing)
            self.exit(1)
        self.log.debug("Connecting to existing kernel: %s" % cf)
        self.connection_file = cf
    else:
        # not existing, check if we are going to write the file
        # and ensure that self.connection_file is a full path, not just the shortname
        try:
            cf = find_connection_file(self.connection_file, [self.runtime_dir])
        except Exception:
            # file might not exist
            if self.connection_file == os.path.basename(self.connection_file):
                # just shortname, put it in security dir
                cf = os.path.join(self.runtime_dir, self.connection_file)
            else:
                cf = self.connection_file
            self.connection_file = cf
    try:
        self.connection_file = filefind(self.connection_file, ['.', self.runtime_dir])
    except __HOLE__:
        self.log.debug("Connection File not found: %s", self.connection_file)
        return

    # should load_connection_file only be used for existing?
    # as it is now, this allows reusing ports if an existing
    # file is requested
    try:
        self.load_connection_file()
    except Exception:
        self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True)
        self.exit(1)
IOError
dataset/ETHPy150Open jupyter/jupyter_client/jupyter_client/consoleapp.py/JupyterConsoleApp.init_connection_file
4,802
def test_supportsThreads(self):
    """
    L{Platform.supportsThreads} returns C{True} if threads can be created in
    this runtime, C{False} otherwise.
    """
    # It's difficult to test both cases of this without faking the threading
    # module. Perhaps an adequate test is to just test the behavior with
    # the current runtime, whatever that happens to be.
    try:
        namedModule('threading')
    except __HOLE__:
        self.assertFalse(Platform().supportsThreads())
    else:
        self.assertTrue(Platform().supportsThreads())
ImportError
dataset/ETHPy150Open twisted/twisted/twisted/python/test/test_runtime.py/PlatformTests.test_supportsThreads
4,803
def request(host, port, url, method="get", *args, **kwargs):
    """
    | General wrapper around the "Request" methods
    | Used by Server object when sending request to the main server, can also
    | be used by any worker/specific requests.

    :param host: hostname to reach
    :param port: port to use
    :param url: end part of the url to reach
    :param method: a string indicating wich method to use [get,put,post,delete]
    :return: a json or text data depending of the webservice response
    :raise RequestError: for any error that occured related to the network
    :raise RequestTimeoutError: when a request timeout occur
    """
    try:
        baseUrl = "http://%s:%d" % (host, port)
        url = '/'.join([baseUrl, url])

        if method == "get":
            r = requests.get(url, *args, **kwargs)
        elif method == "post":
            r = requests.post(url, *args, **kwargs)
        elif method == "put":
            r = requests.put(url, *args, **kwargs)
        elif method == "delete":
            r = requests.delete(url, *args, **kwargs)
        else:
            msg = "Unkown HTTP method called: %s" % method
            logging.error(msg)
            raise RequestError(msg)

        if r.status_code in [requests.codes.ok,
                             requests.codes.created,
                             requests.codes.accepted]:
            #
            # Request returned successfully
            #
            try:
                result = r.json()
            except __HOLE__:
                result = r.text
            return result
        elif r.status_code in [requests.codes.bad,
                               requests.codes.unauthorized,
                               requests.codes.forbidden,
                               requests.codes.not_found,
                               requests.codes.not_allowed,
                               requests.codes.not_acceptable,
                               requests.codes.internal_server_error,
                               requests.codes.not_implemented,
                               requests.codes.unavailable,
                               requests.codes.conflict]:
            try:
                msg = r.text
            except:
                msg = ""
            errMsg = "Error return code: %s, response message: '%s'" % (r.status_code, msg)
            logging.error(errMsg)
            raise RequestError(errMsg)
        else:
            raise RequestError
    except requests.exceptions.Timeout as e:
        errMsg = "Timeout: %s" % e
        logging.error(errMsg)
        raise RequestTimeoutError(errMsg)
    except requests.exceptions.ConnectionError as e:
        errMsg = "Network problem occured: the host you're trying to reach is probably down (%s)" % baseUrl
        logging.error(errMsg)
        raise RequestError(errMsg)
    except requests.exceptions.RequestException as e:
        errMsg = "Unhandled request exception: %s" % e
        logging.error(errMsg)
        raise RequestError(errMsg)
    except RequestError as e:
        raise e
    except Exception as e:
        errMsg = "Unhandled exception: %s" % e
        logging.error(errMsg)
        raise e
ValueError
dataset/ETHPy150Open mikrosimage/OpenRenderManagement/src/puliclient/server/server.py/request
4,804
def __new__(cls, *args, **kwargs):
    """
    Initializes class attributes for subsequent constructor calls.

    :note: *args and **kwargs are not explicitly used in this function,
    but needed for Python 2 compatibility.
    """
    if not cls.__initialized__:
        cls.__initialized__ = True
        try:
            _meta = getattr(cls, 'Meta')
        except __HOLE__:
            raise AttributeError(
                'Missing Meta class in {0}.'.format(
                    cls.__name__))

        for attr in ['series_name', 'fields', 'tags']:
            try:
                setattr(cls, '_' + attr, getattr(_meta, attr))
            except AttributeError:
                raise AttributeError(
                    'Missing {0} in {1} Meta class.'.format(
                        attr, cls.__name__))

        cls._autocommit = getattr(_meta, 'autocommit', False)

        cls._client = getattr(_meta, 'client', None)
        if cls._autocommit and not cls._client:
            raise AttributeError(
                'In {0}, autocommit is set to True, but no client is set.'
                .format(cls.__name__))

        try:
            cls._bulk_size = getattr(_meta, 'bulk_size')
            if cls._bulk_size < 1 and cls._autocommit:
                warn(
                    'Definition of bulk_size in {0} forced to 1, '
                    'was less than 1.'.format(cls.__name__))
                cls._bulk_size = 1
        except AttributeError:
            cls._bulk_size = -1
        else:
            if not cls._autocommit:
                warn(
                    'Definition of bulk_size in {0} has no affect because'
                    ' autocommit is false.'.format(cls.__name__))

        cls._datapoints = defaultdict(list)

        if 'time' in cls._fields:
            cls._fields.remove('time')
        cls._type = namedtuple(cls.__name__,
                               cls._fields + cls._tags + ['time'])

    return super(SeriesHelper, cls).__new__(cls)
AttributeError
dataset/ETHPy150Open influxdata/influxdb-python/influxdb/helper.py/SeriesHelper.__new__
4,805
def is_series(self):
    copy = list(self.items)
    for k, _ in enumerate(copy):
        try:
            if not self.is_successor(copy[k], copy[k + 1]):
                yield False
        except __HOLE__:
            continue
        yield True
IndexError
dataset/ETHPy150Open christabor/MoAL/MOAL/maths/set_theory.py/Set.is_series
4,806
def __init__(self, obj, name=None, pure=False):
    if name is None:
        try:
            name = obj.__name__ + tokenize(obj, pure=pure)
        except __HOLE__:
            name = type(obj).__name__ + tokenize(obj, pure=pure)
    object.__setattr__(self, '_dasks', [{name: obj}])
    object.__setattr__(self, 'pure', pure)
AttributeError
dataset/ETHPy150Open dask/dask/dask/delayed.py/DelayedLeaf.__init__
4,807
def scale_image(img_upload, img_max_size):
    """Crop and scale an image file."""
    try:
        img = Image.open(img_upload)
    except __HOLE__:
        return None

    src_width, src_height = img.size
    src_ratio = float(src_width) / float(src_height)
    dst_width, dst_height = img_max_size
    dst_ratio = float(dst_width) / float(dst_height)

    if dst_ratio < src_ratio:
        crop_height = src_height
        crop_width = crop_height * dst_ratio
        x_offset = int(float(src_width - crop_width) / 2)
        y_offset = 0
    else:
        crop_width = src_width
        crop_height = crop_width / dst_ratio
        x_offset = 0
        y_offset = int(float(src_height - crop_height) / 2)

    img = img.crop((x_offset, y_offset,
                    x_offset + int(crop_width),
                    y_offset + int(crop_height)))
    img = img.resize((dst_width, dst_height), Image.ANTIALIAS)

    # If the mode isn't RGB or RGBA we convert it. If it's not one
    # of those modes, then we don't know what the alpha channel should
    # be so we convert it to "RGB".
    if img.mode not in ("RGB", "RGBA"):
        img = img.convert("RGB")

    new_img = StringIO()
    img.save(new_img, "PNG")
    img_data = new_img.getvalue()

    return ContentFile(img_data)

# Taken from http://stackoverflow.com/a/4019144
IOError
dataset/ETHPy150Open mozilla/django-badger/badger/models.py/scale_image
4,808
def to_python(self, value):
    """Convert our string value to JSON after we load it from the DB"""
    if not value:
        return dict()
    try:
        if (isinstance(value, basestring) or
                type(value) is unicode):
            return json.loads(value)
    except __HOLE__:
        return dict()
    return value
ValueError
dataset/ETHPy150Open mozilla/django-badger/badger/models.py/JSONField.to_python
4,809
def bake_obi_image(self, request=None):
    """Bake the OBI JSON badge award assertion into a copy of the original
    badge's image, if one exists."""

    if request:
        base_url = request.build_absolute_uri('/')
    else:
        base_url = 'http://%s' % (Site.objects.get_current().domain,)

    if self.badge.image:
        # Make a duplicate of the badge image
        self.badge.image.open()
        img_copy_fh = StringIO(self.badge.image.file.read())
    else:
        # Make a copy of the default badge image
        img_copy_fh = StringIO(open(DEFAULT_BADGE_IMAGE, 'rb').read())

    try:
        # Try processing the image copy, bail if the image is bad.
        img = Image.open(img_copy_fh)
    except IOError:
        return False

    # Here's where the baking gets done. JSON representation of the OBI
    # assertion gets written into the "openbadges" metadata field
    # see: http://blog.client9.com/2007/08/python-pil-and-png-metadata-take-2.html
    # see: https://github.com/mozilla/openbadges/blob/development/lib/baker.js
    # see: https://github.com/mozilla/openbadges/blob/development/controllers/baker.js
    try:
        from PIL import PngImagePlugin
    except __HOLE__:
        import PngImagePlugin
    meta = PngImagePlugin.PngInfo()

    # TODO: Will need this, if we stop doing hosted assertions
    # assertion = self.as_obi_assertion(request)
    # meta.add_text('openbadges', json.dumps(assertion))
    hosted_assertion_url = '%s%s' % (
        base_url, reverse('badger.award_detail_json',
                          args=(self.badge.slug, self.id)))
    meta.add_text('openbadges', hosted_assertion_url)

    # And, finally save out the baked image.
    new_img = StringIO()
    img.save(new_img, "PNG", pnginfo=meta)
    img_data = new_img.getvalue()

    name_before = self.image.name
    self.image.save('', ContentFile(img_data), False)
    if (self.image.storage.exists(name_before)):
        self.image.storage.delete(name_before)

    # Update the image field with the new image name
    # NOTE: Can't do a full save(), because this gets called in save()
    Award.objects.filter(pk=self.pk).update(image=self.image)

    return True
ImportError
dataset/ETHPy150Open mozilla/django-badger/badger/models.py/Award.bake_obi_image
4,810
def get_value(key):
    try:
        value = os.environ[key]
    except __HOLE__:
        msg = u"You must define the {} " \
              u"environment variable.".format(key)
        raise Exception(msg)
    return value
KeyError
dataset/ETHPy150Open tsuru/healthcheck-as-a-service/healthcheck/backends/__init__.py/get_value
4,811
@permission_required("core.manage_shop")
def manage_discounts(request):
    """Dispatches to the first discount or to the add discount method
    form if there is no discount yet.
    """
    try:
        discount = Discount.objects.all()[0]
    except __HOLE__:
        url = reverse("lfs_manage_no_discounts")
    else:
        url = reverse("lfs_manage_discount", kwargs={"id": discount.id})

    return HttpResponseRedirect(url)
IndexError
dataset/ETHPy150Open diefenbach/django-lfs/lfs/manage/discounts/views.py/manage_discounts
4,812
@permission_required("core.manage_shop")
def navigation(request, template_name="manage/discounts/navigation.html"):
    """Returns the navigation for the discount view.
    """
    try:
        current_id = int(request.path.split("/")[-1])
    except __HOLE__:
        current_id = ""

    return render_to_string(template_name, RequestContext(request, {
        "current_id": current_id,
        "discounts": Discount.objects.all(),
    }))
ValueError
dataset/ETHPy150Open diefenbach/django-lfs/lfs/manage/discounts/views.py/navigation
4,813
@permission_required("core.manage_shop")
@require_POST
def delete_discount(request, id):
    """Deletes discount with passed id.
    """
    try:
        discount = Discount.objects.get(pk=id)
    except __HOLE__:
        pass
    else:
        discount.delete()

    return lfs.core.utils.set_message_cookie(
        url=reverse("lfs_manage_discounts"),
        msg=_(u"Discount has been deleted."),
    )
ObjectDoesNotExist
dataset/ETHPy150Open diefenbach/django-lfs/lfs/manage/discounts/views.py/delete_discount
4,814
def log_error(request, view, action, errors, exc_info=None):
    # We only log the first error, send the rest as data; it's simpler this way
    error_msg = "Error %s: %s" % (action, format_error(errors[0]))

    log_kwargs = {}
    if not exc_info:
        try:
            exc_info = full_exc_info()
        except:
            exc_info = None
    if exc_info and not isinstance(exc_info, tuple) or not len(exc_info) or not exc_info[0]:
        exc_info = None
    if exc_info:
        log_kwargs["exc_info"] = exc_info

    extra_data = {
        'errors': errors,
        'process_id': os.getpid()
    }
    try:
        import psutil, math, time, thread
    except __HOLE__:
        pass
    else:
        p = psutil.Process(os.getpid())
        proc_timestamp = time.strftime("%Y-%m-%d %H:%M:%S",
                                       time.localtime(p.create_time))
        try:
            create_usec = six.text_type(p.create_time - math.floor(p.create_time))[1:5]
        except:
            create_usec = ''
        proc_timestamp += create_usec
        extra_data['process_create_date'] = proc_timestamp
        extra_data['thread_id'] = thread.get_ident()

    if isinstance(errors[0], CropDusterUrlException):
        urlconf = get_urlconf()
        resolver = get_resolver(urlconf)
        extra_data['resolver_data'] = {
            "regex": resolver.regex,
            "urlconf_name": resolver.urlconf_name,
            "default_kwargs": resolver.default_kwargs,
            "namespace": resolver.namespace,
            "urlconf_module": resolver.urlconf_module
        }

        resolver_reverse_dict = dict(
            [(force_unicode(k), resolver.reverse_dict[k]) for k in resolver.reverse_dict])
        resolver_namespace_dict = dict(
            [(force_unicode(k), resolver.namespace_dict[k]) for k in resolver.namespace_dict])

        extra_data.update({
            'resolver_data': {
                "regex": resolver.regex,
                "urlconf_name": resolver.urlconf_name,
                "default_kwargs": resolver.default_kwargs,
                "namespace": resolver.namespace,
                "urlconf_module": resolver.urlconf_module
            },
            'resolver_reverse_dict': resolver_reverse_dict,
            'resolver_namespace_dict': resolver_namespace_dict,
            'resolver_app_dict': resolver.app_dict,
            'resolver_url_patterns': resolver.url_patterns,
            'urlconf': urlconf,
            'view': 'cropduster.views.%s' % view,
        })

    raven_kwargs = {'request': request, 'extra': extra_data, 'data': {'message': error_msg}}

    if raven_client:
        if exc_info:
            return raven_client.get_ident(
                raven_client.captureException(exc_info=exc_info, **raven_kwargs))
        else:
            return raven_client.get_ident(
                raven_client.captureMessage(error_msg, **raven_kwargs))
    else:
        extra_data.update({
            'request': request,
            'url': request.path_info,
        })
        logger.error(error_msg, extra=extra_data, **log_kwargs)
        return None
ImportError
dataset/ETHPy150Open theatlantic/django-cropduster/cropduster/exceptions.py/log_error
4,815
def is_valid_ip(ip):
    # stackoverflow.com/a/4017219/1707152
    def is_valid_ipv4_address(address):
        try:
            socket.inet_pton(socket.AF_INET, address)
        except __HOLE__:
            try:
                socket.inet_aton(address)
            except socket.error:
                return False
            return address.count('.') == 3
        except socket.error:
            return False
        return True

    def is_valid_ipv6_address(address):
        try:
            socket.inet_pton(socket.AF_INET6, address)
        except socket.error:  # not a valid address
            return False
        return True

    return is_valid_ipv4_address(ip) or is_valid_ipv6_address(ip)
AttributeError
dataset/ETHPy150Open opendns/OpenResolve/resolverapi/util/__init__.py/is_valid_ip
4,816
def testInstallHandler(self):
    default_handler = signal.getsignal(signal.SIGINT)
    unittest.installHandler()
    self.assertNotEqual(signal.getsignal(signal.SIGINT), default_handler)

    try:
        pid = os.getpid()
        os.kill(pid, signal.SIGINT)
    except __HOLE__:
        self.fail("KeyboardInterrupt not handled")

    self.assertTrue(unittest.signals._interrupt_handler.called)
KeyboardInterrupt
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/unittest/test/test_break.py/TestBreak.testInstallHandler
4,817
def testInterruptCaught(self):
    default_handler = signal.getsignal(signal.SIGINT)

    result = unittest.TestResult()
    unittest.installHandler()
    unittest.registerResult(result)

    self.assertNotEqual(signal.getsignal(signal.SIGINT), default_handler)

    def test(result):
        pid = os.getpid()
        os.kill(pid, signal.SIGINT)
        result.breakCaught = True
        self.assertTrue(result.shouldStop)

    try:
        test(result)
    except __HOLE__:
        self.fail("KeyboardInterrupt not handled")
    self.assertTrue(result.breakCaught)
KeyboardInterrupt
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/unittest/test/test_break.py/TestBreak.testInterruptCaught
4,818
def testSecondInterrupt(self):
    result = unittest.TestResult()
    unittest.installHandler()
    unittest.registerResult(result)

    def test(result):
        pid = os.getpid()
        os.kill(pid, signal.SIGINT)
        result.breakCaught = True
        self.assertTrue(result.shouldStop)
        os.kill(pid, signal.SIGINT)
        self.fail("Second KeyboardInterrupt not raised")

    try:
        test(result)
    except __HOLE__:
        pass
    else:
        self.fail("Second KeyboardInterrupt not raised")
    self.assertTrue(result.breakCaught)
KeyboardInterrupt
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/unittest/test/test_break.py/TestBreak.testSecondInterrupt
4,819
def testTwoResults(self):
    unittest.installHandler()

    result = unittest.TestResult()
    unittest.registerResult(result)
    new_handler = signal.getsignal(signal.SIGINT)

    result2 = unittest.TestResult()
    unittest.registerResult(result2)
    self.assertEqual(signal.getsignal(signal.SIGINT), new_handler)

    result3 = unittest.TestResult()

    def test(result):
        pid = os.getpid()
        os.kill(pid, signal.SIGINT)

    try:
        test(result)
    except __HOLE__:
        self.fail("KeyboardInterrupt not handled")

    self.assertTrue(result.shouldStop)
    self.assertTrue(result2.shouldStop)
    self.assertFalse(result3.shouldStop)
KeyboardInterrupt
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/unittest/test/test_break.py/TestBreak.testTwoResults
4,820
def testHandlerReplacedButCalled(self):
    # If our handler has been replaced (is no longer installed) but is
    # called by the *new* handler, then it isn't safe to delay the
    # SIGINT and we should immediately delegate to the default handler
    unittest.installHandler()

    handler = signal.getsignal(signal.SIGINT)
    def new_handler(frame, signum):
        handler(frame, signum)
    signal.signal(signal.SIGINT, new_handler)

    try:
        pid = os.getpid()
        os.kill(pid, signal.SIGINT)
    except __HOLE__:
        pass
    else:
        self.fail("replaced but delegated handler doesn't raise interrupt")
KeyboardInterrupt
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/unittest/test/test_break.py/TestBreak.testHandlerReplacedButCalled
4,821
def testRemoveResult(self):
    result = unittest.TestResult()
    unittest.registerResult(result)

    unittest.installHandler()
    self.assertTrue(unittest.removeResult(result))

    # Should this raise an error instead?
    self.assertFalse(unittest.removeResult(unittest.TestResult()))

    try:
        pid = os.getpid()
        os.kill(pid, signal.SIGINT)
    except __HOLE__:
        pass

    self.assertFalse(result.shouldStop)
KeyboardInterrupt
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/unittest/test/test_break.py/TestBreak.testRemoveResult
4,822
def call_hdevtools_and_wait(arg_list, filename = None, cabal = None):
    """
    Calls hdevtools with the given arguments.
    Shows a sublime error message if hdevtools is not available.
    """
    ghc_opts_args = get_ghc_opts_args(filename, cabal = cabal)
    hdevtools_socket = get_setting_async('hdevtools_socket')
    source_dir = get_source_dir(filename)

    if hdevtools_socket:
        arg_list.append('--socket={0}'.format(hdevtools_socket))

    try:
        exit_code, out, err = call_and_wait(['hdevtools'] + arg_list + ghc_opts_args, cwd = source_dir)

        if exit_code != 0:
            raise Exception("hdevtools exited with status %d and stderr: %s" % (exit_code, err))

        return crlf2lf(out)

    except __HOLE__ as e:
        if e.errno == errno.ENOENT:
            show_hdevtools_error_and_disable()
        return None

    except Exception as e:
        log('calling to hdevtools fails with {0}'.format(e), log_error)
        return None
OSError
dataset/ETHPy150Open SublimeHaskell/SublimeHaskell/hdevtools.py/call_hdevtools_and_wait
4,823
def admin(cmds, wait = False, **popen_kwargs):
    if not hdevtools_enabled():
        return None

    hdevtools_socket = get_setting_async('hdevtools_socket')

    if hdevtools_socket:
        cmds.append('--socket={0}'.format(hdevtools_socket))

    command = ["hdevtools", "admin"] + cmds

    try:
        if wait:
            (exit_code, stdout, stderr) = call_and_wait(command, **popen_kwargs)
            if exit_code == 0:
                return stdout
            return ''
        else:
            call_no_wait(command, **popen_kwargs)
            return ''

    except __HOLE__ as e:
        if e.errno == errno.ENOENT:
            show_hdevtools_error_and_disable()
            set_setting_async('enable_hdevtools', False)
        return None
    except Exception as e:
        log('calling to hdevtools fails with {0}'.format(e))
        return None
OSError
dataset/ETHPy150Open SublimeHaskell/SublimeHaskell/hdevtools.py/admin
4,824
def resource_from_etree(self, etree, resource_class):
    """Construct a Resource from an etree

    Parameters:
     etree - the etree to parse
     resource_class - class of Resource object to create

    The parsing is properly namespace aware but we search just for
    the elements wanted and leave everything else alone. Will raise
    an error if there are multiple <loc> or multiple <lastmod> elements.
    Otherwise, provided there is a <loc> element then will go ahead
    and extract as much as possible.

    All errors raised are SitemapParseError with messages intended
    to help debug problematic sitemap XML.
    """
    loc_elements = etree.findall('{'+SITEMAP_NS+"}loc")
    if (len(loc_elements)>1):
        raise SitemapParseError("Multiple <loc> elements while parsing <url> in sitemap");
    elif (len(loc_elements)==0):
        raise SitemapParseError("Missing <loc> element while parsing <url> in sitemap")
    else:
        loc = loc_elements[0].text
    if (loc is None or loc==''):
        raise SitemapParseError("Bad <loc> element with no content while parsing <url> in sitemap")
    # must at least have a URI, make this object
    resource=resource_class(uri=loc)
    # and hopefully a lastmod datetime (but none is OK)
    lastmod_elements = etree.findall('{'+SITEMAP_NS+"}lastmod")
    if (len(lastmod_elements)>1):
        raise SitemapParseError("Multiple <lastmod> elements while parsing <url> in sitemap");
    elif (len(lastmod_elements)==1):
        resource.lastmod=lastmod_elements[0].text
    # proceed to look for other resource attributes in an rs:md element
    md_elements = etree.findall('{'+RS_NS+"}md")
    if (len(md_elements)>1):
        raise SitemapParseError("Found multiple (%d) <rs:md> elements for %s", (len(md_elements),loc))
    elif (len(md_elements)==1):
        # have on element, look at attributes
        md = self.md_from_etree(md_elements[0],context=loc)
        # simple attributes that map directly to Resource object attributes
        for att in ('capability','change','length','path','mime_type'):
            if (att in md):
                setattr(resource,att,md[att])
        # The ResourceSync beta spec lists md5, sha-1 and sha-256 fixity
        # digest types. Parse and warn of errors ignored.
        if ('hash' in md):
            try:
                resource.hash = md['hash']
            except __HOLE__ as e:
                self.logger.warning("%s in <rs:md> for %s" % (str(e),loc))
    # look for rs:ln elements (optional)
    ln_elements = etree.findall('{'+RS_NS+"}ln")
    if (len(ln_elements)>0):
        resource.ln = []
        for ln_element in ln_elements:
            resource.ln.append(self.ln_from_etree(ln_element,loc))
    return(resource)
ValueError
dataset/ETHPy150Open resync/resync/resync/sitemap.py/Sitemap.resource_from_etree
4,825
def md_from_etree(self, md_element, context=''):
    """Parse rs:md attributes returning a dict of the data

    Parameters:
     md_element - etree element <rs:md>
     context - context for error reporting
    """
    md = {}
    # grab all understood attributes into md dict
    for att in ('capability','change','hash','length','path','mime_type',
                'md_at','md_completed','md_from','md_until'):
        xml_att = self._xml_att_name(att)
        val = md_element.attrib.get(xml_att,None)
        if (val is not None):
            md[att] = val
    # capability. Allow this to be missing but do a very simple syntax
    # check on plausible values if present
    if ('capability' in md):
        if (re.match(r"^[\w\-]+$", md['capability']) is None):
            raise SitemapParseError("Bad capability name '%s' in %s" % (capability,context))
    # change should be one of defined values
    if ('change' in md):
        if (md['change'] not in ['created','updated','deleted'] ):
            self.logger.warning("Bad change attribute in <rs:md> for %s" % (context))
    # length should be an integer
    if ('length' in md):
        try:
            md['length']=int(md['length'])
        except __HOLE__ as e:
            raise SitemapParseError("Invalid length element in <rs:md> for %s" % (context))
    return(md)
ValueError
dataset/ETHPy150Open resync/resync/resync/sitemap.py/Sitemap.md_from_etree
4,826
def ln_from_etree(self,ln_element,context=''):
    """Parse rs:ln element from an etree, returning a dict of the data

    Parameters:
     md_element - etree element <rs:md>
     context - context string for error reporting
    """
    ln = {}
    # grab all understood attributes into ln dict
    for att in ('hash','href','length','modified','path','rel','pri','mime_type'):
        xml_att = self._xml_att_name(att)
        val = ln_element.attrib.get(xml_att,None)
        if (val is not None):
            ln[att] = val
    # now do some checks and conversions...
    # href (MANDATORY)
    if ('href' not in ln):
        raise SitemapParseError("Missing href in <rs:ln> in %s" % (context))
    # rel (MANDATORY)
    if ('rel' not in ln):
        raise SitemapParseError("Missing rel in <rs:ln> in %s" % (context))
    # length in bytes
    if ('length' in ln):
        try:
            ln['length']=int(ln['length'])
        except __HOLE__ as e:
            raise SitemapParseError("Invalid length attribute value in <rs:ln> for %s" % (context))
    # pri - priority, must be a number between 1 and 999999
    if ('pri' in ln):
        try:
            ln['pri']=int(ln['pri'])
        except ValueError as e:
            raise SitemapParseError("Invalid pri attribute in <rs:ln> for %s" % (context))
        if (ln['pri']<1 or ln['pri']>999999):
            raise SitemapParseError("Bad pri attribute value in <rs:ln> for %s" % (context))
    return(ln)

##### Metadata and link elements #####
ValueError
dataset/ETHPy150Open resync/resync/resync/sitemap.py/Sitemap.ln_from_etree
4,827
def main():
    silent = 0
    verbose = 0
    if sys.argv[1:]:
        if sys.argv[1] == '-v':
            verbose = 1
        elif sys.argv[1] == '-s':
            silent = 1
    MAGIC = imp.get_magic()
    if not silent:
        print 'Using MAGIC word', repr(MAGIC)
    for dirname in sys.path:
        try:
            names = os.listdir(dirname)
        except os.error:
            print 'Cannot list directory', repr(dirname)
            continue
        if not silent:
            print 'Checking ', repr(dirname), '...'
        names.sort()
        for name in names:
            if name[-3:] == '.py':
                name = os.path.join(dirname, name)
                try:
                    st = os.stat(name)
                except os.error:
                    print 'Cannot stat', repr(name)
                    continue
                if verbose:
                    print 'Check', repr(name), '...'
                name_c = name + 'c'
                try:
                    f = open(name_c, 'r')
                except __HOLE__:
                    print 'Cannot open', repr(name_c)
                    continue
                magic_str = f.read(4)
                mtime_str = f.read(4)
                f.close()
                if magic_str <> MAGIC:
                    print 'Bad MAGIC word in ".pyc" file',
                    print repr(name_c)
                    continue
                mtime = get_long(mtime_str)
                if mtime == 0 or mtime == -1:
                    print 'Bad ".pyc" file', repr(name_c)
                elif mtime <> st[ST_MTIME]:
                    print 'Out-of-date ".pyc" file',
                    print repr(name_c)
IOError
dataset/ETHPy150Open francelabs/datafari/windows/python/Tools/Scripts/checkpyc.py/main
4,828
def find_module(module_name, path=None):
    """ Returns the filename for the specified module. """
    components = module_name.split('.')
    try:
        # Look up the first component of the module name (of course it could be
        # the *only* component).
        f, filename, description = imp.find_module(components[0], path)
        # If the module is in a package then go down each level in the package
        # hierarchy in turn.
        if len(components) > 0:
            for component in components[1:]:
                f,filename,description = imp.find_module(component, [filename])
    except __HOLE__:
        filename = None

    return filename

#### EOF ######################################################################
ImportError
dataset/ETHPy150Open enthought/envisage/envisage/developer/code_browser/enclbr.py/find_module
4,829
def get_object(self, name, **args):
    try:
        return self.resources[name].dataobject()
    except __HOLE__:
        raise NoSuchObjectError(name)
KeyError
dataset/ETHPy150Open Stiivi/bubbles/bubbles/datapackage.py/DataPackageCollectionStore.get_object
4,830
def get_ip(request):
    """
    Retrieves the remote IP address from the request data. If the user is
    behind a proxy, they may have a comma-separated list of IP addresses, so
    we need to account for that. In such a case, only the first IP in the
    list will be retrieved. Also, some hosts that use a proxy will put the
    REMOTE_ADDR into HTTP_X_FORWARDED_FOR. This will handle pulling back the
    IP from the proper place.
    """

    # if neither header contain a value, just use local loopback
    ip_address = request.META.get('HTTP_X_FORWARDED_FOR',
                                  request.META.get('REMOTE_ADDR', '127.0.0.1'))
    if ip_address:
        # make sure we have one and only one IP
        try:
            ip_address = IP_RE.match(ip_address)
            if ip_address:
                ip_address = ip_address.group(0)
            else:
                # no IP, probably from some dirty proxy or other device
                # throw in some bogus IP
                ip_address = '10.0.0.1'
        except __HOLE__:
            pass

    return ip_address
IndexError
dataset/ETHPy150Open bashu/django-tracking/tracking/utils.py/get_ip
4,831
def u_clean(s):
    """A strange attempt at cleaning up unicode"""

    uni = ''
    try:
        # try this first
        uni = str(s).decode('iso-8859-1')
    except UnicodeDecodeError:
        try:
            # try utf-8 next
            uni = str(s).decode('utf-8')
        except UnicodeDecodeError:
            # last resort method... one character at a time (ugh)
            if s and type(s) in (str, unicode):
                for c in s:
                    try:
                        uni += unicodedata.normalize('NFKC', unicode(c))
                    except __HOLE__:
                        uni += '-'

    return uni.encode('ascii', 'xmlcharrefreplace')
UnicodeDecodeError
dataset/ETHPy150Open bashu/django-tracking/tracking/utils.py/u_clean
4,832
def check_path(filename, reporter=modReporter.Default, settings_path=None, **setting_overrides):
    """Check the given path, printing out any warnings detected."""
    try:
        with open(filename, 'U') as f:
            codestr = f.read() + '\n'
    except UnicodeError:
        reporter.unexpected_error(filename, 'problem decoding source')
        return 1
    except __HOLE__:
        msg = sys.exc_info()[1]
        reporter.unexpected_error(filename, msg.args[1])
        return 1
    return check(codestr, filename, reporter, settings_path, **setting_overrides)
IOError
dataset/ETHPy150Open timothycrosley/frosted/frosted/api.py/check_path
4,833
def getStyleAttribute(elem, attr):
    try:
        if hasattr(elem.style, 'getProperty'):
            return elem.style.getProperty(mash_name_for_glib(attr))
        return elem.style.getAttribute(attr)
    except __HOLE__:
        return getattr(elem.style, attr, None)
AttributeError
dataset/ETHPy150Open anandology/pyjamas/library/gwt/DOM.py/getStyleAttribute
4,834
def get_node_version(self):
    if not self.project_dir:
        return self.default_version

    package_file = os.path.join(self.project_dir, 'package.json')
    try:
        package_json = json.load(open(package_file))
    except (__HOLE__, ValueError):
        logger.debug(
            "cannot find custom node version in package.json, using default"
        )
    else:
        node_version = package_json.get('engines', {}).get('node', '')
        if node_version.startswith('=='):
            return node_version.replace('==', '')
    return self.default_version
IOError
dataset/ETHPy150Open elbaschid/virtual-node/setup.py/node_build.get_node_version
4,835
def run_npm(self, env_dir):
    package_file = os.path.join(self.project_dir, 'package.json')
    try:
        package = json.load(open(package_file))
    except __HOLE__:
        logger.warning("Could not find 'package.json', ignoring NPM "
                       "dependencies.")
        return

    for name, version in package.get('dependencies', {}).items():
        # packages are installed globally to make sure that they are
        # installed in the virtualenv rather than the current directory.
        # it is also necessary for packages containing scripts, e.g. less
        dep_name = '%s@%s' % (name, version)
        self.run_cmd(['npm', 'install', '-g', dep_name], self.project_dir)
IOError
dataset/ETHPy150Open elbaschid/virtual-node/setup.py/node_build.run_npm
4,836
def install_node(self, env_dir, version=None):
    """
    Download source code for node.js, unpack it and install it in
    virtual environment.
    """
    logger.info(
        ' * Install node.js (%s' % self.version,
        extra={'continued': True}
    )

    node_name = 'node-v%s' % (self.version)
    node_url = self.get_node_src_url(self.version)

    src_dir = os.path.join(env_dir, 'src')
    node_src_dir = os.path.join(src_dir, node_name)
    env_dir = os.path.abspath(env_dir)

    if not os.path.exists(node_src_dir):
        os.makedirs(node_src_dir)

    try:
        filename, __ = urllib.urlretrieve(node_url)
    except __HOLE__:
        raise IOError(
            "cannot download node source from '%s'" % (node_url,)
        )
    else:
        logger.info(') ', extra=dict(continued=True))

    tarball = tarfile.open(filename, 'r:gz')
    tarball.extractall(src_dir)
    tarball.close()

    logger.info('.', extra=dict(continued=True))

    conf_cmd = [
        './configure',
        '--prefix=%s' % (env_dir),
    ]
    self.run_cmd(conf_cmd, node_src_dir)
    logger.info('.', extra=dict(continued=True))

    self.run_cmd(['make'], node_src_dir)
    logger.info('.', extra=dict(continued=True))

    self.run_cmd(['make install'], node_src_dir)
    logger.info('.', extra=dict(continued=True))

    self.run_cmd(['rm -rf "%s"' % node_src_dir])
    logger.info(' done.')
IOError
dataset/ETHPy150Open elbaschid/virtual-node/setup.py/node_build.install_node
4,837
def _list_all_covered_modules(logger, module_names, modules_exceptions):
    modules = []
    for module_name in module_names:
        if module_name in modules_exceptions:
            logger.debug("Module '%s' was excluded", module_name)
            continue

        try:
            module = sys.modules[module_name]
        except __HOLE__:
            logger.warn("Module '%s' was not imported by the covered tests", module_name)
            try:
                module = __import__(module_name)
            except SyntaxError as e:
                logger.warn("Coverage for module '%s' cannot be established - syntax error: %s", module_name, e)
                continue

        if module not in modules and hasattr(module, "__file__"):
            modules.append(module)
    return modules
KeyError
dataset/ETHPy150Open pybuilder/pybuilder/src/main/python/pybuilder/plugins/python/coverage_plugin.py/_list_all_covered_modules
4,838
def _delete_module(module_name, module):
    del sys.modules[module_name]
    try:
        delattr(module, module_name)
    except __HOLE__:
        pass
AttributeError
dataset/ETHPy150Open pybuilder/pybuilder/src/main/python/pybuilder/plugins/python/coverage_plugin.py/_delete_module
4,839
def get_socket(self, force=False):
    """Get a socket from the pool.

    Returns a :class:`SocketInfo` object wrapping a connected
    :class:`socket.socket`, and a bool saying whether the socket was from
    the pool or freshly created.

    :Parameters:
      - `force`: optional boolean, forces a connection to be returned
        without blocking, even if `max_size` has been reached.
    """
    # We use the pid here to avoid issues with fork / multiprocessing.
    # See test.test_client:TestClient.test_fork for an example of
    # what could go wrong otherwise
    if self.pid != os.getpid():
        self.reset()

    # Have we opened a socket for this request?
    req_state = self._get_request_state()
    if req_state not in (NO_SOCKET_YET, NO_REQUEST):
        # There's a socket for this request, check it and return it
        checked_sock = self._check(req_state)
        if checked_sock != req_state:
            self._set_request_state(checked_sock)

        checked_sock.last_checkout = time.time()
        return checked_sock

    forced = False
    # We're not in a request, just get any free socket or create one
    if force:
        # If we're doing an internal operation, attempt to play nicely with
        # max_size, but if there is no open "slot" force the connection
        # and mark it as forced so we don't release the semaphore without
        # having acquired it for this socket.
        if not self._socket_semaphore.acquire(False):
            forced = True
    elif not self._socket_semaphore.acquire(True, self.wait_queue_timeout):
        self._raise_wait_queue_timeout()

    # We've now acquired the semaphore and must release it on error.
    try:
        sock_info, from_pool = None, None
        try:
            try:
                # set.pop() isn't atomic in Jython less than 2.7, see
                # http://bugs.jython.org/issue1854
                self.lock.acquire()
                sock_info, from_pool = self.sockets.pop(), True
            finally:
                self.lock.release()
        except __HOLE__:
            sock_info, from_pool = self.connect(), False

        if from_pool:
            sock_info = self._check(sock_info)

        sock_info.forced = forced

        if req_state == NO_SOCKET_YET:
            # start_request has been called but we haven't assigned a
            # socket to the request yet. Let's use this socket for this
            # request until end_request.
            self._set_request_state(sock_info)
    except:
        if not forced:
            self._socket_semaphore.release()
        raise

    sock_info.last_checkout = time.time()
    return sock_info
KeyError
dataset/ETHPy150Open blynch/CloudMemeBackend/pymongo/pool.py/Pool.get_socket
4,840
def make_list_of_list(txt):
    def make_num(x):
        try:
            return int(x)
        except ValueError:
            try:
                return float(x)
            except __HOLE__:
                try:
                    return complex(x)
                except ValueError:
                    return x
        return x
    ut = []
    flag = False
    for rad in [x for x in txt.split("\r\n") if x != ""]:
        raden=[make_num(x) for x in rad.split("\t")]
        if str in list(map(type,raden)):
            flag = True
        ut.append(raden)
    return ut, flag
ValueError
dataset/ETHPy150Open Ali-Razmjoo/OWASP-ZSC/module/readline_windows/pyreadline/clipboard/__init__.py/make_list_of_list
4,841
def __init__(self, backend, algorithm, ctx=None):
    self._algorithm = algorithm
    self._backend = backend

    if ctx is None:
        try:
            methods = self._backend._hash_mapping[self.algorithm.name]
        except __HOLE__:
            raise UnsupportedAlgorithm(
                "{0} is not a supported hash on this backend.".format(
                    algorithm.name),
                _Reasons.UNSUPPORTED_HASH
            )
        ctx = self._backend._ffi.new(methods.ctx)
        res = methods.hash_init(ctx)
        assert res == 1

    self._ctx = ctx
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/cryptography-1.3.1/src/cryptography/hazmat/backends/commoncrypto/hashes.py/_HashContext.__init__
4,842
def main(args):
    try:
        opts, args = getopt.getopt(args, "hbrdag",
                                   ["hash", "btree", "recno", "dbm",
                                    "anydbm", "gdbm"])
    except getopt.error:
        usage()
        return 1

    if len(args) == 0 or len(args) > 2:
        usage()
        return 1
    elif len(args) == 1:
        pfile = sys.stdin
        dbfile = args[0]
    else:
        try:
            pfile = open(args[0], 'rb')
        except IOError:
            sys.stderr.write("Unable to open %s\n" % args[0])
            return 1
        dbfile = args[1]

    dbopen = None
    for opt, arg in opts:
        if opt in ("-h", "--hash"):
            try:
                dbopen = bsddb.hashopen
            except __HOLE__:
                sys.stderr.write("bsddb module unavailable.\n")
                return 1
        elif opt in ("-b", "--btree"):
            try:
                dbopen = bsddb.btopen
            except AttributeError:
                sys.stderr.write("bsddb module unavailable.\n")
                return 1
        elif opt in ("-r", "--recno"):
            try:
                dbopen = bsddb.rnopen
            except AttributeError:
                sys.stderr.write("bsddb module unavailable.\n")
                return 1
        elif opt in ("-a", "--anydbm"):
            try:
                dbopen = anydbm.open
            except AttributeError:
                sys.stderr.write("anydbm module unavailable.\n")
                return 1
        elif opt in ("-g", "--gdbm"):
            try:
                dbopen = gdbm.open
            except AttributeError:
                sys.stderr.write("gdbm module unavailable.\n")
                return 1
        elif opt in ("-d", "--dbm"):
            try:
                dbopen = dbm.open
            except AttributeError:
                sys.stderr.write("dbm module unavailable.\n")
                return 1
    if dbopen is None:
        if bsddb is None:
            sys.stderr.write("bsddb module unavailable - ")
            sys.stderr.write("must specify dbtype.\n")
            return 1
        else:
            dbopen = bsddb.hashopen

    try:
        db = dbopen(dbfile, 'c')
    except bsddb.error:
        sys.stderr.write("Unable to open %s. " % dbfile)
        sys.stderr.write("Check for format or version mismatch.\n")
        return 1
    else:
        for k in db.keys():
            del db[k]

    while 1:
        try:
            (key, val) = pickle.load(pfile)
        except EOFError:
            break
        db[key] = val

    db.close()
    pfile.close()

    return 0
AttributeError
dataset/ETHPy150Open Southpaw-TACTIC/TACTIC/src/context/client/tactic-api-python-4.0.api04/Tools/Scripts/pickle2db.py/main
4,843
def _(module):
    '''
    Get inspectlib module for the lazy loader.

    :param module:
    :return:
    '''
    mod = None
    # pylint: disable=E0598
    try:
        # importlib is in Python 2.7+ and 3+
        import importlib
        mod = importlib.import_module("salt.modules.inspectlib.{0}".format(module))
    except __HOLE__ as err:
        # No importlib around (2.6)
        mod = getattr(__import__("salt.modules.inspectlib", globals(), locals(),
                                 fromlist=[str(module)]), module)
    # pylint: enable=E0598

    mod.__grains__ = __grains__
    mod.__pillar__ = __pillar__
    mod.__salt__ = __salt__

    return mod
ImportError
dataset/ETHPy150Open saltstack/salt/salt/modules/node.py/_
4,844
def _create_hls_streams(self, url):
    try:
        streams = HLSStream.parse_variant_playlist(self.session, url)
        return streams.items()
    except __HOLE__ as err:
        self.logger.warning("Failed to extract HLS streams: {0}", err)
IOError
dataset/ETHPy150Open chrippa/livestreamer/src/livestreamer/plugins/streamlive.py/StreamLive._create_hls_streams
4,845
def _fetch(self, count):
    # Try to fill ``self.items`` with at least ``count`` objects.
    have = len(self.items)
    while self.iterator is not None and have < count:
        try:
            item = next(self.iterator)
        except StopIteration:
            self.iterator = None
            break
        except (__HOLE__, SystemExit):
            raise
        except Exception as exc:
            have += 1
            self._append(exc)
            self.iterator = None
            break
        else:
            have += 1
            self._append(item)
KeyboardInterrupt
dataset/ETHPy150Open ipython/ipython-py3k/IPython/deathrow/igrid.py/IGridTable._fetch
4,846
def sort(self, key, reverse=False):
    """
    Sort the current list of items using the key function ``key``. If
    ``reverse`` is true the sort order is reversed.
    """
    row = self.GetGridCursorRow()
    col = self.GetGridCursorCol()
    curitem = self.table.items[row]  # Remember where the cursor is now
    # Sort items
    def realkey(item):
        try:
            return key(item)
        except (KeyboardInterrupt, __HOLE__):
            raise
        except Exception:
            return None
    try:
        self.table.items = ipipe.deque(sorted(self.table.items, key=realkey, reverse=reverse))
    except TypeError as exc:
        self.error_output("Exception encountered: %s" % exc)
        return
    # Find out where the object under the cursor went
    for (i, item) in enumerate(self.table.items):
        if item is curitem:
            self.SetGridCursor(i,col)
            self.MakeCellVisible(i,col)
            self.Refresh()
SystemExit
dataset/ETHPy150Open ipython/ipython-py3k/IPython/deathrow/igrid.py/IGridGrid.sort
4,847
def sortattrasc(self):
    """
    Sort in ascending order; sorting criteria is the current attribute
    """
    col = self.GetGridCursorCol()
    attr = self.table._displayattrs[col]
    frame = self.GetParent().GetParent().GetParent()
    if attr is ipipe.noitem:
        self.error_output("no column under cursor")
        return
    frame.SetStatusText("sort by %s (ascending)" % attr.name())
    def key(item):
        try:
            return attr.value(item)
        except (__HOLE__, SystemExit):
            raise
        except Exception:
            return None
    self.sort(key)
KeyboardInterrupt
dataset/ETHPy150Open ipython/ipython-py3k/IPython/deathrow/igrid.py/IGridGrid.sortattrasc
4,848
def sortattrdesc(self):
    """
    Sort in descending order; sorting criteria is the current attribute
    """
    col = self.GetGridCursorCol()
    attr = self.table._displayattrs[col]
    frame = self.GetParent().GetParent().GetParent()
    if attr is ipipe.noitem:
        self.error_output("no column under cursor")
        return
    frame.SetStatusText("sort by %s (descending)" % attr.name())
    def key(item):
        try:
            return attr.value(item)
        except (__HOLE__, SystemExit):
            raise
        except Exception:
            return None
    self.sort(key, reverse=True)
KeyboardInterrupt
dataset/ETHPy150Open ipython/ipython-py3k/IPython/deathrow/igrid.py/IGridGrid.sortattrdesc
4,849
def _getvalue(self, row, col):
    """
    Gets the text which is displayed at ``(row, col)``
    """
    try:
        value = self.table._displayattrs[col].value(self.table.items[row])
        (align, width, text) = ipipe.xformat(value, "cell", self.maxchars)
    except __HOLE__:
        raise IndexError
    except Exception as exc:
        (align, width, text) = ipipe.xformat(exc, "cell", self.maxchars)
    return text
IndexError
dataset/ETHPy150Open ipython/ipython-py3k/IPython/deathrow/igrid.py/IGridGrid._getvalue
4,850
def searchexpression(self, searchexp, startrow=None, search_forward=True ):
    """
    Find by expression
    """
    frame = self.GetParent().GetParent().GetParent()
    if searchexp:
        if search_forward:
            if not startrow:
                row = self.GetGridCursorRow()+1
            else:
                row = startrow + 1
            while True:
                try:
                    foo = self.table.GetValue(row, 0)
                    item = self.table.items[row]
                    try:
                        globals = ipipe.getglobals(None)
                        if eval(searchexp, globals, ipipe.AttrNamespace(item)):
                            self.SetGridCursor(row, 0)  # found something
                            self.MakeCellVisible(row, 0)
                            break
                    except (KeyboardInterrupt, SystemExit):
                        raise
                    except Exception as exc:
                        frame.SetStatusText(str(exc))
                        wx.Bell()
                        break  # break on error
                except IndexError:
                    return
                row += 1
        else:
            if not startrow:
                row = self.GetGridCursorRow() - 1
            else:
                row = startrow - 1
            while True:
                try:
                    foo = self.table.GetValue(row, 0)
                    item = self.table.items[row]
                    try:
                        globals = ipipe.getglobals(None)
                        if eval(searchexp, globals, ipipe.AttrNamespace(item)):
                            self.SetGridCursor(row, 0)  # found something
                            self.MakeCellVisible(row, 0)
                            break
                    except (__HOLE__, SystemExit):
                        raise
                    except Exception as exc:
                        frame.SetStatusText(str(exc))
                        wx.Bell()
                        break  # break on error
                except IndexError:
                    return
                row -= 1
KeyboardInterrupt
dataset/ETHPy150Open ipython/ipython-py3k/IPython/deathrow/igrid.py/IGridGrid.searchexpression
4,851
def search(self, searchtext, startrow=None, startcol=None, search_forward=True):
    """
    search for ``searchtext``, starting in ``(startrow, startcol)``;
    if ``search_forward`` is true the direction is "forward"
    """
    searchtext = searchtext.lower()
    if search_forward:
        if startrow is not None and startcol is not None:
            row = startrow
        else:
            startcol = self.GetGridCursorCol() + 1
            row = self.GetGridCursorRow()
            if startcol >= self.GetNumberCols():
                startcol = 0
                row += 1
        while True:
            for col in range(startcol, self.table.GetNumberCols()):
                try:
                    foo = self.table.GetValue(row, col)
                    text = self._getvalue(row, col)
                    if searchtext in text.string().lower():
                        self.SetGridCursor(row, col)
                        self.MakeCellVisible(row, col)
                        return
                except __HOLE__:
                    return
            startcol = 0
            row += 1
    else:
        if startrow is not None and startcol is not None:
            row = startrow
        else:
            startcol = self.GetGridCursorCol() - 1
            row = self.GetGridCursorRow()
            if startcol < 0:
                startcol = self.GetNumberCols() - 1
                row -= 1
        while True:
            for col in range(startcol, -1, -1):
                try:
                    foo = self.table.GetValue(row, col)
                    text = self._getvalue(row, col)
                    if searchtext in text.string().lower():
                        self.SetGridCursor(row, col)
                        self.MakeCellVisible(row, col)
                        return
                except IndexError:
                    return
            startcol = self.table.GetNumberCols()-1
            row -= 1
IndexError
dataset/ETHPy150Open ipython/ipython-py3k/IPython/deathrow/igrid.py/IGridGrid.search
4,852
def key_pressed(self, event):
    """
    Maps pressed keys to functions
    """
    frame = self.GetParent().GetParent().GetParent()
    frame.SetStatusText("")
    sh = event.ShiftDown()
    ctrl = event.ControlDown()

    keycode = event.GetKeyCode()
    if keycode == ord("P"):
        row = self.GetGridCursorRow()
        if sh:
            col = self.GetGridCursorCol()
            self.pickattr(row, col)
        else:
            self.pick(row)
    elif keycode == ord("M"):
        if ctrl:
            col = self.GetGridCursorCol()
            self.pickrowsattr(sorted(self.current_selection), col)
        else:
            self.pickrows(sorted(self.current_selection))
    elif keycode in (wx.WXK_BACK, wx.WXK_DELETE, ord("X")) and not (ctrl or sh):
        self.delete_current_notebook()
    elif keycode in (ord("E"), ord("\r")):
        row = self.GetGridCursorRow()
        if sh:
            col = self.GetGridCursorCol()
            self.enterattr(row, col)
        else:
            self.enter(row)
    elif keycode == ord("E") and ctrl:
        row = self.GetGridCursorRow()
        self.SetGridCursor(row, self.GetNumberCols()-1)
    elif keycode == wx.WXK_HOME or (keycode == ord("A") and ctrl):
        row = self.GetGridCursorRow()
        self.SetGridCursor(row, 0)
    elif keycode == ord("C") and sh:
        col = self.GetGridCursorCol()
        attr = self.table._displayattrs[col]
        result = []
        for i in range(self.GetNumberRows()):
            result.append(self.table._displayattrs[col].value(self.table.items[i]))
        self.quit(result)
    elif keycode in (wx.WXK_ESCAPE, ord("Q")) and not (ctrl or sh):
        self.quit()
    elif keycode == ord("<"):
        row = self.GetGridCursorRow()
        col = self.GetGridCursorCol()
        if not event.ShiftDown():
            newcol = col - 1
            if newcol >= 0:
                self.SetGridCursor(row, col - 1)
        else:
            newcol = col + 1
            if newcol < self.GetNumberCols():
                self.SetGridCursor(row, col + 1)
    elif keycode == ord("D"):
        col = self.GetGridCursorCol()
        row = self.GetGridCursorRow()
        if not sh:
            self.detail(row, col)
        else:
            self.detail_attr(row, col)
    elif keycode == ord("F") and ctrl:
        if sh:
            frame.enter_searchexpression(event)
        else:
            frame.enter_searchtext(event)
    elif keycode == wx.WXK_F3:
        if sh:
            frame.find_previous(event)
        else:
            frame.find_next(event)
    elif keycode == ord("V"):
        if sh:
            self.sortattrdesc()
        else:
            self.sortattrasc()
    elif keycode == wx.WXK_DOWN:
        row = self.GetGridCursorRow()
        try:
            item = self.table.items[row+1]
        except __HOLE__:
            item = self.table.items[row]
        self.set_footer(item)
        event.Skip()
    elif keycode == wx.WXK_UP:
        row = self.GetGridCursorRow()
        if row >= 1:
            item = self.table.items[row-1]
        else:
            item = self.table.items[row]
        self.set_footer(item)
        event.Skip()
    elif keycode == wx.WXK_RIGHT:
        row = self.GetGridCursorRow()
        item = self.table.items[row]
        self.set_footer(item)
        event.Skip()
    elif keycode == wx.WXK_LEFT:
        row = self.GetGridCursorRow()
        item = self.table.items[row]
        self.set_footer(item)
        event.Skip()
    elif keycode == ord("R") or keycode == wx.WXK_F5:
        self.table.refresh_content(event)
    elif keycode == ord("I"):
        row = self.GetGridCursorRow()
        if not sh:
            self.pickinput(row)
        else:
            col = self.GetGridCursorCol()
            self.pickinputattr(row, col)
    else:
        event.Skip()
IndexError
dataset/ETHPy150Open ipython/ipython-py3k/IPython/deathrow/igrid.py/IGridGrid.key_pressed
4,853
def _doenter(self, value, *attrs):
    """
    "enter" a special item resulting in a new notebook tab
    """
    panel = self.GetParent()
    nb = panel.GetParent()
    frame = nb.GetParent()
    current = nb.GetSelection()
    count = nb.GetPageCount()
    try:  # if we want to enter something non-iterable, e.g. a function
        if current + 1 == count and value is not self.input:  # we have an event in the last tab
            frame._add_notebook(value, *attrs)
        elif value != self.input:  # we have to delete all tabs newer than [panel] first
            for i in range(count-1, current, -1):  # some tabs don't close if we don't close in *reverse* order
                nb.DeletePage(i)
            frame._add_notebook(value)
    except __HOLE__ as exc:
        if exc.__class__.__module__ == "exceptions":
            msg = "%s: %s" % (exc.__class__.__name__, exc)
        else:
            msg = "%s.%s: %s" % (exc.__class__.__module__,
                                 exc.__class__.__name__, exc)
        frame.SetStatusText(msg)
TypeError
dataset/ETHPy150Open ipython/ipython-py3k/IPython/deathrow/igrid.py/IGridGrid._doenter
4,854
def pickrowsattr(self, rows, col):
    """"
    pick one column from multiple rows
    """
    values = []
    try:
        attr = self.table._displayattrs[col]
        for row in rows:
            try:
                values.append(attr.value(self.table.items[row]))
            except (SystemExit, __HOLE__):
                raise
            except Exception:
                raise  #pass
    except Exception as exc:
        self.error_output(str(exc))
    else:
        self.quit(values)
KeyboardInterrupt
dataset/ETHPy150Open ipython/ipython-py3k/IPython/deathrow/igrid.py/IGridGrid.pickrowsattr
4,855
def refresh_interval(self, event):
    table = self.notebook.GetPage(self.notebook.GetSelection()).grid.table
    dlg = wx.TextEntryDialog(self, "Enter refresh interval (milliseconds):",
                             "Refresh timer:", defaultValue=str(self.refresh_interval))
    if dlg.ShowModal() == wx.ID_OK:
        try:
            milliseconds = int(dlg.GetValue())
        except __HOLE__ as exc:
            self.SetStatusText(str(exc))
        else:
            table.timer.Start(milliseconds=milliseconds, oneShot=False)
            self.SetStatusText("Refresh timer set to %s ms" % milliseconds)
            self.SetStatusText("Refresh interval: %s ms" % milliseconds, 1)
            self.refresh_interval = milliseconds
    dlg.Destroy()
ValueError
dataset/ETHPy150Open ipython/ipython-py3k/IPython/deathrow/igrid.py/IGridFrame.refresh_interval
4,856
def get_user_names(self, fullname='', first_name='', last_name=''):
    # Avoid None values
    fullname = fullname or ''
    first_name = first_name or ''
    last_name = last_name or ''
    if fullname and not (first_name or last_name):
        try:
            first_name, last_name = fullname.split(' ', 1)
        except __HOLE__:
            first_name = first_name or fullname or ''
            last_name = last_name or ''
    fullname = fullname or ' '.join((first_name, last_name))
    return fullname.strip(), first_name.strip(), last_name.strip()
ValueError
dataset/ETHPy150Open omab/python-social-auth/social/backends/base.py/BaseAuth.get_user_names
4,857
def merge(args):
    """
    %prog merge map1 map2 map3 ...

    Convert csv maps to bed format.

    Each input map is csv formatted, for example:

    ScaffoldID,ScaffoldPosition,LinkageGroup,GeneticPosition
    scaffold_2707,11508,1,0
    scaffold_2707,11525,1,1.2
    scaffold_759,81336,1,9.7
    """
    p = OptionParser(merge.__doc__)
    p.add_option("-w", "--weightsfile", default="weights.txt",
                 help="Write weights to file")
    p.set_outfile("out.bed")
    opts, args = p.parse_args(args)

    if len(args) < 1:
        sys.exit(not p.print_help())

    maps = args
    outfile = opts.outfile
    fp = must_open(maps)
    b = Bed()
    mapnames = set()
    for row in fp:
        mapname = filename_to_mapname(fp.filename())
        mapnames.add(mapname)
        try:
            m = CSVMapLine(row, mapname=mapname)
            if m.cm < 0:
                logging.error("Ignore marker with negative genetic distance")
                print >> sys.stderr, row.strip()
            else:
                b.append(BedLine(m.bedline))
        except (__HOLE__, ValueError):  # header or mal-formed line
            continue

    b.print_to_file(filename=outfile, sorted=True)
    logging.debug("A total of {0} markers written to `{1}`.".\
                  format(len(b), outfile))

    assert len(maps) == len(mapnames), "You have a collision in map names"
    write_weightsfile(mapnames, weightsfile=opts.weightsfile)
IndexError
dataset/ETHPy150Open tanghaibao/jcvi/assembly/allmaps.py/merge
4,858
def mergebed(args):
    """
    %prog mergebed map1.bed map2.bed map3.bed ...

    Combine bed maps to bed format, adding the map name.
    """
    p = OptionParser(mergebed.__doc__)
    p.add_option("-w", "--weightsfile", default="weights.txt",
                 help="Write weights to file")
    p.set_outfile("out.bed")
    opts, args = p.parse_args(args)

    if len(args) < 1:
        sys.exit(not p.print_help())

    maps = args
    outfile = opts.outfile
    fp = must_open(maps)
    b = Bed()
    mapnames = set()
    for row in fp:
        mapname = filename_to_mapname(fp.filename())
        mapnames.add(mapname)
        try:
            m = BedLine(row)
            m.accn = "{0}-{1}".format(mapname, m.accn)
            m.extra = ["{0}:{1}".format(m.seqid, m.start)]
            b.append(m)
        except (IndexError, __HOLE__):  # header or mal-formed line
            continue

    b.print_to_file(filename=outfile, sorted=True)
    logging.debug("A total of {0} markers written to `{1}`.".\
                  format(len(b), outfile))

    assert len(maps) == len(mapnames), "You have a collision in map names"
    write_weightsfile(mapnames, weightsfile=opts.weightsfile)
ValueError
dataset/ETHPy150Open tanghaibao/jcvi/assembly/allmaps.py/mergebed
4,859
@value.setter
def value(self, value):
    if self._invalid:
        raise ValueError(
            'The value of invalid/unparseable cards cannot set. Either '
            'delete this card from the header or replace it.')

    if value is None:
        value = ''
    oldvalue = self._value
    if oldvalue is None:
        oldvalue = ''

    if not isinstance(value,
                      string_types + integer_types +
                      (float, complex, bool, Undefined, np.floating,
                       np.integer, np.complexfloating, np.bool_)):
        raise ValueError('Illegal value: %r.' % value)

    if isinstance(value, float) and (np.isnan(value) or np.isinf(value)):
        raise ValueError(
            "Floating point %r values are not allowed in FITS headers." %
            value)
    elif isinstance(value, text_type):
        m = self._ascii_text_re.match(value)
        if not m:
            raise ValueError(
                'FITS header values must contain standard printable ASCII '
                'characters; %r contains characters not representable in '
                'ASCII or non-printable characters.' % value)
    elif isinstance(value, binary_type):
        # Allow str, but only if they can be decoded to ASCII text; note
        # this is not even allowed on Python 3 since the `bytes` type is
        # not included in `six.string_types`. Presently we simply don't
        # allow bytes to be assigned to headers, as doing so would too
        # easily mask potential user error
        valid = True
        try:
            text_value = value.decode('ascii')
        except UnicodeDecodeError:
            valid = False
        else:
            # Check against the printable characters regexp as well
            m = self._ascii_text_re.match(text_value)
            valid = m is not None

        if not valid:
            raise ValueError(
                'FITS header values must contain standard printable ASCII '
                'characters; %r contains characters/bytes that do not '
                'represent printable characters in ASCII.' % value)
    elif isinstance(value, np.bool_):
        value = bool(value)

    if (pyfits.STRIP_HEADER_WHITESPACE and
            (isinstance(oldvalue, string_types) and
             isinstance(value, string_types))):
        # Ignore extra whitespace when comparing the new value to the old
        different = oldvalue.rstrip() != value.rstrip()
    elif isinstance(oldvalue, bool) or isinstance(value, bool):
        different = oldvalue is not value
    else:
        different = (oldvalue != value or
                     not isinstance(value, type(oldvalue)))

    if different:
        self._value = value
        self._rawvalue = None
        self._modified = True
        self._valuestring = None
        self._valuemodified = True
        if self.field_specifier:
            try:
                self._value = _int_or_float(self._value)
            except __HOLE__:
                raise ValueError('value %s is not a float' % self._value)
ValueError
dataset/ETHPy150Open spacetelescope/PyFITS/pyfits/card.py/Card.value
4,860
def _split(self):
    """
    Split the card image between the keyword and the rest of the card.
    """

    if self._image is not None:
        # If we already have a card image, don't try to rebuild a new card
        # image, which self.image would do
        image = self._image
    else:
        image = self.image

    if self.keyword in self._commentary_keywords.union(['CONTINUE']):
        keyword, valuecomment = image.split(' ', 1)
    else:
        try:
            delim_index = image.index(self._value_indicator)
        except __HOLE__:
            delim_index = None

        # The equal sign may not be any higher than column 10; anything
        # past that must be considered part of the card value
        if delim_index is None:
            keyword = image[:KEYWORD_LENGTH]
            valuecomment = image[KEYWORD_LENGTH:]
        elif delim_index > 10 and image[:9] != 'HIERARCH ':
            keyword = image[:8]
            valuecomment = image[8:]
        else:
            keyword, valuecomment = image.split(self._value_indicator, 1)
    return keyword.strip(), valuecomment.strip()
ValueError
dataset/ETHPy150Open spacetelescope/PyFITS/pyfits/card.py/Card._split
4,861
def _fix_value(self):
    """Fix the card image for fixable non-standard compliance."""

    value = None
    keyword, valuecomment = self._split()
    m = self._value_NFSC_RE.match(valuecomment)

    # for the unparsable case
    if m is None:
        try:
            value, comment = valuecomment.split('/', 1)
            self.value = value.strip()
            self.comment = comment.strip()
        except (__HOLE__, IndexError):
            self.value = valuecomment
        self._valuestring = self._value
        return
    elif m.group('numr') is not None:
        numr = self._number_NFSC_RE.match(m.group('numr'))
        value = translate(numr.group('digt'), FIX_FP_TABLE, ' ')
        if numr.group('sign') is not None:
            value = numr.group('sign') + value

    elif m.group('cplx') is not None:
        real = self._number_NFSC_RE.match(m.group('real'))
        rdigt = translate(real.group('digt'), FIX_FP_TABLE, ' ')
        if real.group('sign') is not None:
            rdigt = real.group('sign') + rdigt

        imag = self._number_NFSC_RE.match(m.group('imag'))
        idigt = translate(imag.group('digt'), FIX_FP_TABLE, ' ')
        if imag.group('sign') is not None:
            idigt = imag.group('sign') + idigt
        value = '(%s, %s)' % (rdigt, idigt)
    self._valuestring = value
    # The value itself has not been modified, but its serialized
    # representation (as stored in self._valuestring) has been changed, so
    # still set this card as having been modified (see ticket #137)
    self._modified = True
ValueError
dataset/ETHPy150Open spacetelescope/PyFITS/pyfits/card.py/Card._fix_value
4,862
def _int_or_float(s):
    """
    Converts an a string to an int if possible, or to a float.

    If the string is neither a string or a float a value error is raised.
    """

    if isinstance(s, float):
        # Already a float so just pass through
        return s

    try:
        return int(s)
    except (ValueError, __HOLE__):
        try:
            return float(s)
        except (ValueError, TypeError) as exc:
            raise ValueError(*exc.args)
TypeError
dataset/ETHPy150Open spacetelescope/PyFITS/pyfits/card.py/_int_or_float
4,863
def get_page(self, suffix):
    """
    A function which will be monkeypatched onto the request to get the current
    integer representing the current page.
    """
    try:
        return int(self.GET['page%s' % suffix])
    except (KeyError, ValueError, __HOLE__):
        return 1
TypeError
dataset/ETHPy150Open amarandon/smeuhsocial/apps/pagination/middleware.py/get_page
4,864
def paginate(request, queryset, results_per_page=20):
    paginator = Paginator(queryset, results_per_page)

    try:
        page = paginator.page(int(request.GET.get('page', 1)))
    except InvalidPage:
        raise Http404("Sorry, that page of results does not exist.")
    except __HOLE__:
        raise PermissionDenied()

    return page, paginator
ValueError
dataset/ETHPy150Open mozilla/source/source/base/utils.py/paginate
4,865
def _extract_metrics(self, results, metrics, tag_by, wmi, tag_queries, constant_tags):
    if len(results) > 1 and tag_by is None:
        raise Exception('WMI query returned multiple rows but no `tag_by` value was given. '
                        'metrics=%s' % metrics)

    for res in results:
        tags = []

        # include any constant tags...
        if constant_tags:
            tags.extend(constant_tags)

        # if tag_queries is specified then get attributes from other classes and use as a tags
        if tag_queries:
            for query in tag_queries:
                link_source_property = int(getattr(res, query[0]))
                target_class = query[1]
                link_target_class_property = query[2]
                target_property = query[3]

                link_results = \
                    wmi.query("SELECT {0} FROM {1} WHERE {2} = {3}"
                              .format(target_property, target_class,
                                      link_target_class_property,
                                      link_source_property))

                if len(link_results) != 1:
                    self.log.warning("Failed to find {0} for {1} {2}. No metrics gathered"
                                     .format(target_class,
                                             link_target_class_property,
                                             link_source_property))
                    continue

                link_value = str(getattr(link_results[0], target_property)).lower()
                tags.append("{0}:{1}".format(target_property.lower(),
                                             "_".join(link_value.split())))

        # Grab the tag from the result if there's a `tag_by` value (e.g.: "name:jenkins")
        # Strip any #instance off the value when `tag_queries` is set (gives us unique tags)
        if tag_by:
            tag_value = str(getattr(res, tag_by)).lower()
            if tag_queries and tag_value.find("#") > 0:
                tag_value = tag_value[:tag_value.find("#")]
            tags.append('%s:%s' % (tag_by.lower(), tag_value))

        if len(tags) == 0:
            tags = None

        for wmi_property, name, mtype in metrics:
            if wmi_property == UP_METRIC:
                # Special-case metric will just submit 1 for every value
                # returned in the result.
                val = 1
            elif getattr(res, wmi_property):
                val = float(getattr(res, wmi_property))
            else:
                self.log.warning("When extracting metrics with wmi, found a null value"
                                 " for property '{0}'. Metric type of property is {1}."
                                 .format(wmi_property, mtype))
                continue

            # Submit the metric to Datadog
            try:
                func = getattr(self, mtype)
            except __HOLE__:
                raise Exception('Invalid metric type: {0}'.format(mtype))

            func(name, val, tags=tags)
AttributeError
dataset/ETHPy150Open serverdensity/sd-agent/checks.d/wmi_check.py/WMICheck._extract_metrics
4,866
def __hash_new(name, string=''):
    """new(name, string='') - Return a new hashing object using the named algorithm;
    optionally initialized with a string.
    """
    try:
        return _hashlib.new(name, string)
    except __HOLE__:
        # If the _hashlib module (OpenSSL) doesn't support the named
        # hash, try using our builtin implementations.
        # This allows for SHA224/256 and SHA384/512 support even though
        # the OpenSSL library prior to 0.9.8 doesn't provide them.
        return __get_builtin_constructor(name)(string)
ValueError
dataset/ETHPy150Open babble/babble/include/jython/Lib/hashlib.py/__hash_new
4,867
def save(description, branchname, bugnumber, gitflow=False):
    try:
        data = json.load(open(os.path.expanduser(config.SAVE_FILE)))
    except __HOLE__:
        data = {}
    repo_name = get_repo_name()
    data['%s:%s' % (repo_name, branchname)] = {
        'description': description,
        'bugnumber': bugnumber,
        'gitflow': gitflow,
        'date': datetime.datetime.now().isoformat()
    }
    json.dump(data, open(os.path.expanduser(config.SAVE_FILE), 'w'), indent=2)
IOError
dataset/ETHPy150Open peterbe/bgg/bgg/lib/start.py/save
4,868
@register.filter
def best_selling_products_list(count):
    """Get a list of best selling products"""
    try:
        ct = int(count)
    except __HOLE__:
        ct = config_value('PRODUCT','NUM_PAGINATED')

    return bestsellers(ct)
ValueError
dataset/ETHPy150Open dokterbob/satchmo/satchmo/apps/product/templatetags/satchmo_product.py/best_selling_products_list
4,869
@register.filter
def recent_products_list(count):
    """Get a list of recent products"""
    try:
        ct = int(count)
    except __HOLE__:
        ct = config_value('PRODUCT','NUM_PAGINATED')

    query = Product.objects.recent_by_site()
    return query[:ct]
ValueError
dataset/ETHPy150Open dokterbob/satchmo/satchmo/apps/product/templatetags/satchmo_product.py/recent_products_list
4,870
def get_tokens_unprocessed(self, text):
    """
    Since ERB doesn't allow "<%" and other tags inside of ruby
    blocks we have to use a split approach here that fails for
    that too.
    """
    tokens = self._block_re.split(text)
    tokens.reverse()
    state = idx = 0
    try:
        while True:
            # text
            if state == 0:
                val = tokens.pop()
                yield idx, Other, val
                idx += len(val)
                state = 1
            # block starts
            elif state == 1:
                tag = tokens.pop()
                # literals
                if tag in ('<%%', '%%>'):
                    yield idx, Other, tag
                    idx += 3
                    state = 0
                # comment
                elif tag == '<%#':
                    yield idx, Comment.Preproc, tag
                    val = tokens.pop()
                    yield idx + 3, Comment, val
                    idx += 3 + len(val)
                    state = 2
                # blocks or output
                elif tag in ('<%', '<%=', '<%-'):
                    yield idx, Comment.Preproc, tag
                    idx += len(tag)
                    data = tokens.pop()
                    r_idx = 0
                    for r_idx, r_token, r_value in \
                            self.ruby_lexer.get_tokens_unprocessed(data):
                        yield r_idx + idx, r_token, r_value
                    idx += len(data)
                    state = 2
                elif tag in ('%>', '-%>'):
                    yield idx, Error, tag
                    idx += len(tag)
                    state = 0
                # % raw ruby statements
                else:
                    yield idx, Comment.Preproc, tag[0]
                    r_idx = 0
                    for r_idx, r_token, r_value in \
                            self.ruby_lexer.get_tokens_unprocessed(tag[1:]):
                        yield idx + 1 + r_idx, r_token, r_value
                    idx += len(tag)
                    state = 0
            # block ends
            elif state == 2:
                tag = tokens.pop()
                if tag not in ('%>', '-%>'):
                    yield idx, Other, tag
                else:
                    yield idx, Comment.Preproc, tag
                idx += len(tag)
                state = 0
    except __HOLE__:
        return
IndexError
dataset/ETHPy150Open adieu/allbuttonspressed/pygments/lexers/templates.py/ErbLexer.get_tokens_unprocessed
4,871
def spider(init, max=-1, ignore_qs=False, post_func=None, excluded_func=None,
           hosts=None):
    """
    Spider a request by following some links.

    init - The initial request(s)
    max - The maximum of request to execute
    post_func - A hook to be executed after each new page fetched
    hosts - A lists of authorised hosts to spider on. By default
            only the hostname of r_init is allowed.
    excluded_func - A predicate that must indicates if a Request
                    should be executed.
    """
    nb = 0
    checked = []
    if isinstance(init, Request):
        q = deque([init, ])
        hs = [ init.hostname, ]
    elif isinstance(init, RequestSet):
        q = deque(init)
        hs = list(set(init.extract("hostname")))
    else:
        raise TypeError("init must be a Request or a RequestSet")
    if hosts:
        hs += hosts
    try:
        while nb != max and q:
            to_add = []
            r = q.popleft()
            print str(len(checked)) + "/" + str(len(q)),
            clear_line()
            if not r.response:
                r()
            if r.response.content_type:
                if re.match(r'text/html', r.response.content_type):
                    to_add += _follow_redirect(r)
                    to_add += _get_links(r)
                else:
                    print "\nIgnoring", r.response.content_type
            checked.append(r)
            if post_func:
                post_func(r)
            for nr in to_add:
                if nr.hostname not in hs:
                    continue
                if excluded_func and excluded_func(nr):
                    continue
                if not ignore_qs and any(nr == rc for rc in checked + list(q)):
                    continue
                if ignore_qs and any(nr.similar(rc) for rc in checked + list(q)):
                    continue
                q.append(nr)
                nb += 1
    except __HOLE__:
        print str(len(checked)) + "/" + str(len(q))
    return RequestSet(checked)
KeyboardInterrupt
dataset/ETHPy150Open tweksteen/burst/burst/spider.py/spider
4,872
def test_package_import__semantics(self):

    # Generate a couple of broken modules to try importing.

    # ...try loading the module when there's a SyntaxError
    self.rewrite_file('for')
    try: __import__(self.module_name)
    except SyntaxError: pass
    else: raise RuntimeError, 'Failed to induce SyntaxError'
    self.assertTrue(self.module_name not in sys.modules)
    self.assertFalse(hasattr(sys.modules[self.package_name], 'foo'))

    # ...make up a variable name that isn't bound in __builtins__
    import __builtin__
    var = 'a'
    while var in dir(__builtin__):
        var += random.choose(string.letters)

    # ...make a module that just contains that
    self.rewrite_file(var)

    try: __import__(self.module_name)
    except __HOLE__: pass
    else: raise RuntimeError, 'Failed to induce NameError.'

    # ...now change the module so that the NameError doesn't
    # happen
    self.rewrite_file('%s = 1' % var)
    module = __import__(self.module_name).foo
    self.assertEqual(getattr(module, var), 1)
NameError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_pkgimport.py/TestImport.test_package_import__semantics
4,873
def execute(cmd, cmd_timeout, sigterm_timeout, sigkill_timeout,
            proc_poll_interval):
    start_time = time.time()
    returncode = -1
    stdout = ''
    stderr = ''
    try:
        proc = subprocess.Popen(u' '.join(cmd),
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                shell=True)
    except Exception:
        print >> sys.stderr, u"Failed to execute %s" % (repr(cmd))
        raise
    try:
        returncode = poll_proc(proc, proc_poll_interval, cmd_timeout)
        stdout, stderr = proc.communicate()
        duration = time.time() - start_time
    except Timeout:
        duration = time.time() - start_time
        try:
            proc.terminate()
            sigterm_start = time.time()
            try:
                print >> sys.stderr, "Command timed out after %.2fs, killing with SIGTERM" % (time.time() - start_time)
                poll_proc(proc, proc_poll_interval, sigterm_timeout)
                returncode = Timeout
            except Timeout:
                print >> sys.stderr, "SIGTERM timeout failed after %.2fs, killing with SIGKILL" % (time.time() - sigterm_start)
                proc.kill()
                poll_proc(proc, proc_poll_interval, sigkill_timeout)
                returncode = Timeout
        except __HOLE__ as e:
            # Ignore OSError 3: no process found.
            if e.errno != 3:
                raise
    return returncode, stdout, stderr, duration
OSError
dataset/ETHPy150Open DataDog/dogapi/src/dogshell/wrap.py/execute
4,874
def render(self, context):
    try:
        obj = template.resolve_variable(self.object_name, context)
        template_name = '%s.%s.html' % (obj._meta.app_label, obj._meta.module_name)
        template_list = [
            '%s/%s' % (self.template_dir, template_name),
            '%s/default.html' % self.template_dir
        ]
        context['object'] = obj
        return render_to_string(template_list, context)
    except __HOLE__:
        if (type(obj) in (int, unicode, str)):
            return obj
        return ''
    except template.VariableDoesNotExist:
        return ''
AttributeError
dataset/ETHPy150Open hzlf/openbroadcast/website/apps/tools/templatetags/objectutils.py/RenderTemplateNode.render
4,875
def process_complex_get(req_dict):
    mime_type = "application/json"
    # Parse out params into single dict-GET data not in body
    param_dict = {}
    try:
        param_dict = req_dict['body']
    except __HOLE__:
        pass  # no params in the body
    param_dict.update(req_dict['params'])
    format = param_dict['format']

    # Set language if one pull from req_dict since language is from a header, not an arg
    language = None
    if 'headers' in req_dict and ('format' in param_dict and param_dict['format'] == "canonical"):
        if 'language' in req_dict['headers']:
            language = req_dict['headers']['language']
        else:
            language = settings.LANGUAGE_CODE

    # If auth is in req dict, add it to param dict
    if 'auth' in req_dict:
        param_dict['auth'] = req_dict['auth']

    # Get limit if one
    limit = None
    if 'params' in req_dict and 'limit' in req_dict['params']:
        limit = int(req_dict['params']['limit'])
    elif 'body' in req_dict and 'limit' in req_dict['body']:
        limit = int(req_dict['body']['limit'])

    # See if attachments should be included
    try:
        attachments = req_dict['params']['attachments']
    except Exception:
        attachments = False

    # Create returned stmt list from the req dict
    stmt_result = complex_get(param_dict, limit, language, format, attachments)

    # Get the length of the response - make sure in string format to count every character
    if isinstance(stmt_result, dict):
        content_length = len(json.dumps(stmt_result))
    else:
        content_length = len(stmt_result)

    # If attachments=True in req_dict then include the attachment payload and return different mime type
    if attachments:
        stmt_result, mime_type, content_length = build_response(stmt_result)
        resp = HttpResponse(stmt_result, content_type=mime_type, status=200)
    # Else attachments are false for the complex get so just dump the stmt_result
    else:
        if isinstance(stmt_result, dict):
            stmt_result = json.dumps(stmt_result)
        resp = HttpResponse(stmt_result, content_type=mime_type, status=200)
    return resp, content_length
KeyError
dataset/ETHPy150Open adlnet/ADL_LRS/lrs/utils/req_process.py/process_complex_get
4,876
def build_response(stmt_result):
    sha2s = []
    mime_type = "application/json"
    if isinstance(stmt_result, dict):
        statements = stmt_result['statements']
    else:
        statements = json.loads(stmt_result)['statements']

    # Iterate through each attachment in each statement
    for stmt in statements:
        if 'attachments' in stmt:
            st_atts = Statement.objects.get(statement_id=stmt['id']).stmt_attachments
            if st_atts:
                for att in st_atts.all():
                    if att.payload:
                        sha2s.append((att.canonical_data['sha2'], att.payload, att.canonical_data['contentType']))

    # If attachments have payloads
    if sha2s:
        # Create multipart message and attach json message to it
        string_list = []
        line_feed = "\r\n"
        boundary = "======ADL_LRS======"

        string_list.append(line_feed + "--" + boundary + line_feed)
        string_list.append("Content-Type:application/json" + line_feed + line_feed)
        if isinstance(stmt_result, dict):
            string_list.append(json.dumps(stmt_result) + line_feed)
        else:
            string_list.append(stmt_result + line_feed)
        for sha2 in sha2s:
            string_list.append("--" + boundary + line_feed)
            string_list.append("Content-Type:%s" % str(sha2[2]) + line_feed)
            string_list.append("Content-Transfer-Encoding:binary" + line_feed)
            string_list.append("X-Experience-API-Hash:" + str(sha2[0]) + line_feed + line_feed)

            chunks = []
            try:
                # Default chunk size is 64kb
                for chunk in sha2[1].chunks():
                    chunks.append(chunk)
            except __HOLE__:
                raise OSError(2, "No such file or directory", sha2[1].name.split("/")[1])

            string_list.append("".join(chunks) + line_feed)
        string_list.append("--" + boundary + "--")
        mime_type = "multipart/mixed; boundary=" + boundary
        attachment_body = "".join([str(s) for s in string_list])
        return attachment_body, mime_type, len(attachment_body)
    # Has attachments but no payloads so just dump the stmt_result
    else:
        if isinstance(stmt_result, dict):
            res = json.dumps(stmt_result)
            return res, mime_type, len(res)
        else:
            return stmt_result, mime_type, len(stmt_result)
OSError
dataset/ETHPy150Open adlnet/ADL_LRS/lrs/utils/req_process.py/build_response
4,877
def activity_profile_get(req_dict):
    # Instantiate ActivityProfile
    ap = ActivityProfileManager()
    # Get profileId and activityId
    profile_id = req_dict['params'].get('profileId', None) if 'params' in req_dict else None
    activity_id = req_dict['params'].get('activityId', None) if 'params' in req_dict else None

    #If the profileId exists, get the profile and return it in the response
    if profile_id:
        resource = ap.get_profile(profile_id, activity_id)
        if resource.profile:
            try:
                response = HttpResponse(resource.profile.read(), content_type=resource.content_type)
            except __HOLE__:
                response = HttpResponseNotFound("Error reading file, could not find: %s" % profile_id)
        else:
            response = HttpResponse(resource.json_profile, content_type=resource.content_type)
        response['ETag'] = '"%s"' % resource.etag
        return response

    #Return IDs of profiles stored since profileId was not submitted
    since = req_dict['params'].get('since', None) if 'params' in req_dict else None
    resource = ap.get_profile_ids(activity_id, since)
    response = JsonResponse([k for k in resource], safe=False)
    response['since'] = since
    # If it's a HEAD request
    if req_dict['method'].lower() != 'get':
        response.body = ''
    return response
IOError
dataset/ETHPy150Open adlnet/ADL_LRS/lrs/utils/req_process.py/activity_profile_get
4,878
def read_images(path, image_size=None):
    """Reads the images in a given folder, resizes images on the fly if size is given.

    Args:
        path: Path to a folder with subfolders representing the subjects (persons).
        sz: A tuple with the size Resizes

    Returns:
        A list [X, y, folder_names]

            X: The images, which is a Python list of numpy arrays.
            y: The corresponding labels (the unique number of the subject, person) in a Python list.
            folder_names: The names of the folder, so you can display it in a prediction.
    """
    c = 0
    X = []
    y = []
    folder_names = []
    for dirname, dirnames, filenames in os.walk(path):
        for subdirname in dirnames:
            folder_names.append(subdirname)
            subject_path = os.path.join(dirname, subdirname)
            for filename in os.listdir(subject_path):
                try:
                    im = cv2.imread(os.path.join(subject_path, filename), cv2.IMREAD_GRAYSCALE)
                    # resize to given size (if given)
                    if (image_size is not None):
                        im = cv2.resize(im, image_size)
                    X.append(np.asarray(im, dtype=np.uint8))
                    y.append(c)
                except __HOLE__, (errno, strerror):
                    print "I/O error({0}): {1}".format(errno, strerror)
                except:
                    print "Unexpected error:", sys.exc_info()[0]
                    raise
            c = c+1
    return [X,y,folder_names]
IOError
dataset/ETHPy150Open bytefish/facerec/py/apps/videofacerec/simple_videofacerec.py/read_images
4,879
@register.tag('experiment')
def experiment(parser, token):
    """
    Split Testing experiment tag has the following syntax :

    {% experiment <experiment_name> <alternative> %}
    experiment content goes here
    {% endexperiment %}

    If the alternative name is neither 'test' nor 'control' an exception is raised
    during rendering.
    """
    try:
        token_contents = token.split_contents()
        experiment_name, alternative, weight, user_variable = _parse_token_contents(token_contents)

        node_list = parser.parse(('endexperiment', ))
        parser.delete_first_token()
    except __HOLE__:
        raise template.TemplateSyntaxError("Syntax should be like :"
                "{% experiment experiment_name alternative [weight=val] [user=val] %}")

    return ExperimentNode(node_list, experiment_name, alternative, weight, user_variable)
ValueError
dataset/ETHPy150Open mixcloud/django-experiments/experiments/templatetags/experiments.py/experiment
4,880
def setdefault(self, key, x=None):
    key = str(key).title()
    try:
        return self[key]
    except __HOLE__:
        self[key] = x
        return x
KeyError
dataset/ETHPy150Open zynga/jasy/jasy/core/Types.py/CaseInsensitiveDict.setdefault
4,881
def post(self, request, *args, **kwargs):
    self.mapper = self.get_mapper(self.model())
    self.data = self.get_request_data()

    try:
        self.object = self.mapper._apply(self.data)
    except __HOLE__ as e:
        return self.post_invalid(e.error_dict)

    return self.post_valid()
ValidationError
dataset/ETHPy150Open funkybob/django-nap/nap/rest/views.py/ListPostMixin.post
4,882
def put(self, request, *args, **kwargs):
    self.object = self.get_object()
    self.mapper = self.get_mapper(self.object)
    self.data = self.get_request_data({})

    try:
        self.mapper._apply(self.data)
    except __HOLE__ as e:
        return self.put_invalid(e.error_dict)

    return self.put_valid()
ValidationError
dataset/ETHPy150Open funkybob/django-nap/nap/rest/views.py/ObjectPutMixin.put
4,883
def patch(self, request, *args, **kwargs):
    self.object = self.get_object()
    self.mapper = self.get_mapper(self.object)
    self.data = self.get_request_data({})

    try:
        self.mapper._patch(self.data)
    except __HOLE__ as e:
        return self.patch_invalid(e.error_dict)

    return self.patch_valid()
ValidationError
dataset/ETHPy150Open funkybob/django-nap/nap/rest/views.py/ObjectPatchMixin.patch
4,884
def showmessage(message, mapping):
    try:
        del (mapping['self'])
    except (__HOLE__, ):
        pass
    items = mapping.items()
    items.sort()
    print '### %s' % (message, )
    for k, v in items:
        print '    %s:%s' % (k, v)
KeyError
dataset/ETHPy150Open CarterBain/Medici/ib/client/sync_wrapper.py/showmessage
4,885
def can_connect(port):
    sock = socket.socket()
    sock.settimeout(0.1)  # Always localhost, should be wayy faster than this.
    try:
        sock.connect(('127.0.0.1', port))
        return True
    except __HOLE__:
        return False
OSError
dataset/ETHPy150Open dcos/dcos/pytest/test_ssh_integration.py/can_connect
4,886
def lineReceived(self, line):
    parts = line.split(',')
    if len(parts) != 2:
        self.invalidQuery()
    else:
        try:
            portOnServer, portOnClient = map(int, parts)
        except __HOLE__:
            self.invalidQuery()
        else:
            if _MIN_PORT <= portOnServer <= _MAX_PORT and _MIN_PORT <= portOnClient <= _MAX_PORT:
                self.validQuery(portOnServer, portOnClient)
            else:
                self._ebLookup(failure.Failure(InvalidPort()),
                               portOnServer, portOnClient)
ValueError
dataset/ETHPy150Open nlloyd/SubliminalCollaborator/libs/twisted/protocols/ident.py/IdentServer.lineReceived
4,887
def _end(self):
    """End the orchestration play by waiting for all the action threads to
    complete."""
    for t in self._threads:
        try:
            while not self._error and t.isAlive():
                t.join(1)
        except __HOLE__:
            self._error = (exceptions.MaestroException,
                           exceptions.MaestroException('Manual abort'),
                           None)
        except Exception:
            self._error = sys.exc_info()
        finally:
            self._cv.acquire()
            self._cv.notifyAll()
            self._cv.release()
    self._om.end()

    # Display and raise any error that occurred
    if self._error:
        if self._auditor:
            self._auditor.error(action=self._play, what=self._containers,
                                message=str(self._error[1]))
        exceptions.raise_with_tb(self._error)
    else:
        if self._auditor:
            self._auditor.success(level=audit.INFO, action=self._play,
                                  what=self._containers)
KeyboardInterrupt
dataset/ETHPy150Open signalfx/maestro-ng/maestro/plays/__init__.py/BaseOrchestrationPlay._end
4,888
def _close(self):
    """
    Disconnect from Riemann.
    """
    try:
        self.client.disconnect()
    except __HOLE__:
        pass
AttributeError
dataset/ETHPy150Open BrightcoveOS/Diamond/src/diamond/handler/riemann.py/RiemannHandler._close
4,889
def _process_run_command_output(raw_output):
    if raw_output is None:
        return raw_output

    try:
        _output = raw_output.decode('utf-8')
    except __HOLE__:
        return raw_output
    else:
        return _output[:-1]
ValueError
dataset/ETHPy150Open sodastsai/taskr/taskr/contrib/system/__init__.py/_process_run_command_output
4,890
def get_model_parser(top_rule, comments_model, **kwargs):
    """
    Creates model parser for the given language.
    """

    class TextXModelParser(Parser):
        """
        Parser created from textual textX language description.
        Semantic actions for this parser will construct object
        graph representing model on the given language.
        """
        def __init__(self, *args, **kwargs):
            super(TextXModelParser, self).__init__(*args, **kwargs)

            # By default first rule is starting rule
            # and must be followed by the EOF
            self.parser_model = Sequence(
                nodes=[top_rule, EOF()], rule_name='ModelFile', root=True)
            self.comments_model = comments_model

            # Stack for metaclass instances
            self._inst_stack = []

            # Dict for cross-ref resolving
            # { id(class): { obj.name: obj}}
            self._instances = {}

        def _parse(self):
            try:
                return self.parser_model.parse(self)
            except NoMatch as e:
                line, col = e.parser.pos_to_linecol(e.position)
                raise TextXSyntaxError(text(e), line, col)

        def get_model_from_file(self, file_name, encoding, debug):
            """
            Creates model from the parse tree from the previous parse call.
            If file_name is given file will be parsed before model
            construction.
            """
            with codecs.open(file_name, 'r', encoding) as f:
                model_str = f.read()

            model = self.get_model_from_str(model_str, file_name=file_name,
                                            debug=debug)

            # Register filename of the model for later use.
            try:
                model._filename = file_name
            except __HOLE__:
                # model is some primitive python type (e.g. str)
                pass

            return model

        def get_model_from_str(self, model_str, file_name=None, debug=None):
            """
            Parses given string and creates model object graph.
            """
            old_debug_state = self.debug

            try:
                if debug is not None:
                    self.debug = debug

                if self.debug:
                    self.dprint("*** PARSING MODEL ***")

                self.parse(model_str, file_name=file_name)
                # Transform parse tree to model. Skip root node which
                # represents the whole file ending in EOF.
                model = parse_tree_to_objgraph(self, self.parse_tree[0])
            finally:
                if debug is not None:
                    self.debug = old_debug_state

            try:
                model._filename = None
            except AttributeError:
                # model is some primitive python type (e.g. str)
                pass

            return model

    return TextXModelParser(**kwargs)
AttributeError
dataset/ETHPy150Open igordejanovic/textX/textx/model.py/get_model_parser
4,891
def parse_tree_to_objgraph(parser, parse_tree):
    """
    Transforms parse_tree to object graph representing model in a
    new language.
    """

    metamodel = parser.metamodel

    def process_match(nt):
        """
        Process subtree for match rules.
        """
        if isinstance(nt, Terminal):
            return convert(nt.value, nt.rule_name)
        else:
            # If RHS of assignment is NonTerminal it is a product of
            # complex match rule. Convert nodes to text and do the join.
            if len(nt) > 1:
                return "".join([text(process_match(n)) for n in nt])
            else:
                return process_match(nt[0])

    def process_node(node):
        if isinstance(node, Terminal):
            return convert(node.value, node.rule_name)

        assert node.rule.root,\
            "Not a root node: {}".format(node.rule.rule_name)
        # If this node is created by some root rule
        # create metaclass instance.
        inst = None
        if not node.rule_name.startswith('__asgn'):
            # If not assignment
            # Get class
            mclass = metamodel[node.rule_name]

            if mclass._tx_type == RULE_ABSTRACT:
                # If this meta-class is product of abstract rule replace it
                # with matched concrete meta-class down the inheritance tree.
                # Abstract meta-class should never be instantiated.
                return process_node(node[0])
            elif mclass._tx_type == RULE_MATCH:
                # If this is a product of match rule handle it as a RHS
                # of assignment and return plain python type.
                return process_match(node)

            if parser.debug:
                parser.dprint("CREATING INSTANCE {}".format(node.rule_name))

            # If user class is given
            # use it instead of generic one
            if node.rule_name in metamodel.user_classes:

                user_class = metamodel.user_classes[node.rule_name]

                # Object initialization will be done afterwards
                # At this point we need object to be allocated
                # So that nested object get correct reference
                inst = user_class.__new__(user_class)

                # Initialize object attributes for user class
                parser.metamodel._init_obj_attrs(inst, user=True)
            else:
                # Generic class will call attributes init
                # from the constructor
                inst = mclass.__new__(mclass)

                # Initialize object attributes
                parser.metamodel._init_obj_attrs(inst)

            # Collect attributes directly on meta-class instance
            obj_attrs = inst

            inst._tx_position = node.position

            # Push real obj. and dummy attr obj on the instance stack
            parser._inst_stack.append((inst, obj_attrs))

            for n in node:
                if parser.debug:
                    parser.dprint("Recursing into {} = '{}'"
                                  .format(type(n).__name__, text(n)))
                process_node(n)

            parser._inst_stack.pop()

            # If this object is nested add 'parent' reference
            if parser._inst_stack:
                if node.rule_name in metamodel.user_classes:
                    obj_attrs._txa_parent = parser._inst_stack[-1][0]
                else:
                    obj_attrs.parent = parser._inst_stack[-1][0]

            # If the class is user supplied we need to done
            # proper initialization at this point.
            if node.rule_name in metamodel.user_classes:
                try:
                    # Get only attributes defined by the grammar as well
                    # as `parent` if exists
                    attrs = {}
                    if hasattr(obj_attrs, '_txa_parent'):
                        attrs['parent'] = obj_attrs._txa_parent
                        del obj_attrs._txa_parent
                    for a in obj_attrs.__class__._tx_attrs:
                        attrs[a] = getattr(obj_attrs, "_txa_%s" % a)
                        delattr(obj_attrs, "_txa_%s" % a)
                    inst.__init__(**attrs)
                except __HOLE__ as e:
                    # Add class name information in case of
                    # wrong constructor parameters
                    # print("Constructor params: {}".format(text(obj_attrs.__dict__)))
                    e.args += ("for class %s" % inst.__class__.__name__,)
                    parser.dprint(traceback.print_exc())
                    raise e

            # Special case for 'name' attrib. It is used for cross-referencing
            if hasattr(inst, 'name') and inst.name:
                # Objects of each class are in its own namespace
                if not id(inst.__class__) in parser._instances:
                    parser._instances[id(inst.__class__)] = {}
                parser._instances[id(inst.__class__)][inst.name] = inst

            if parser.debug:
                parser.dprint("LEAVING INSTANCE {}".format(node.rule_name))

        else:
            # Handle assignments
            attr_name = node.rule._attr_name
            op = node.rule_name.split('_')[-1]
            model_obj, obj_attr = parser._inst_stack[-1]
            cls = metamodel[model_obj.__class__.__name__]
            metaattr = cls._tx_attrs[attr_name]

            # Mangle attribute name to prevent name clashing with property
            # setters on user classes
            if cls.__name__ in metamodel.user_classes:
                txa_attr_name = "_txa_%s" % attr_name
            else:
                txa_attr_name = attr_name

            if parser.debug:
                parser.dprint('Handling assignment: {} {}...'
                              .format(op, txa_attr_name))

            if op == 'optional':
                setattr(obj_attr, txa_attr_name, True)

            elif op == 'plain':
                attr_value = getattr(obj_attr, txa_attr_name)
                if attr_value and type(attr_value) is not list:
                    raise TextXSemanticError(
                        "Multiple assignments to attribute {} at {}"
                        .format(attr_name,
                                parser.pos_to_linecol(node.position)))

                # Convert tree bellow assignment to proper value
                value = process_node(node[0])

                if metaattr.ref and not metaattr.cont:
                    # If this is non-containing reference create ObjCrossRef
                    value = ObjCrossRef(obj_name=value, cls=metaattr.cls,
                                        position=node[0].position)

                if type(attr_value) is list:
                    attr_value.append(value)
                else:
                    setattr(obj_attr, txa_attr_name, value)

            elif op in ['list', 'oneormore', 'zeroormore']:
                for n in node:
                    # If the node is separator skip
                    if n.rule_name != 'sep':
                        # Convert node to proper type
                        # Rule links will be resolved later
                        value = process_node(n)

                        if metaattr.ref and not metaattr.cont:
                            # If this is non-containing reference
                            # create ObjCrossRef
                            value = ObjCrossRef(obj_name=value,
                                                cls=metaattr.cls,
                                                position=node[0].position)

                        if not hasattr(obj_attr, txa_attr_name) or \
                                getattr(obj_attr, txa_attr_name) is None:
                            setattr(obj_attr, txa_attr_name, [])
                        getattr(obj_attr, txa_attr_name).append(value)
            else:
                # This shouldn't happen
                assert False

        return inst

    def resolve_refs(model):
        """
        Resolves obj cross refs.
        """
        # TODO: Scoping and name-space rules.

        resolved_set = set()
        metamodel = parser.metamodel

        def _resolve_ref(obj_ref):
            if obj_ref is None:
                return
            assert type(obj_ref) is ObjCrossRef, type(obj_ref)

            if parser.debug:
                parser.dprint("Resolving obj crossref: {}:{}"
                              .format(obj_ref.cls, obj_ref.obj_name))

            def _resolve_ref_abstract(obj_cls):
                """
                Depth-first resolving of abstract rules.
                """
                for inherited in obj_cls._tx_inh_by:
                    if inherited._tx_type == RULE_ABSTRACT:
                        return _resolve_ref_abstract(inherited)
                    elif inherited._tx_type == RULE_COMMON:
                        if id(inherited) in parser._instances:
                            objs = parser._instances[id(inherited)]
                            if obj_ref.obj_name in objs:
                                return objs[obj_ref.obj_name]

            if obj_ref.cls._tx_type == RULE_COMMON:
                if id(obj_ref.cls) in parser._instances:
                    objs = parser._instances[id(obj_ref.cls)]
                    if obj_ref.obj_name in objs:
                        return objs[obj_ref.obj_name]
            elif obj_ref.cls._tx_type == RULE_ABSTRACT:
                # For abstract rule ref do a depth first search on
                # the inheritance tree to find common rules
                # and return a first instance of that meta-class instance with
                # the referred name.
                obj = _resolve_ref_abstract(obj_ref.cls)
                if obj:
                    return obj
            else:
                pass
                # TODO: Match rules cannot be referred. This is
                #       an error in language description.

            # As a fall-back search builtins if given
            if metamodel.builtins:
                if obj_ref.obj_name in metamodel.builtins:
                    # TODO: Classes must match
                    return metamodel.builtins[obj_ref.obj_name]

            line, col = parser.pos_to_linecol(obj_ref.position)
            raise TextXSemanticError(
                'Unknown object "{}" of class "{}" at {}'
                .format(obj_ref.obj_name, obj_ref.cls.__name__, (line, col)),
                line=line, col=col)

        def _resolve(o):
            if parser.debug:
                parser.dprint("RESOLVING CLASS: {}"
                              .format(o.__class__.__name__))
            if o in resolved_set:
                return
            resolved_set.add(o)

            # If this object has attributes (created using a common rule)
            if hasattr(o.__class__, "_tx_attrs"):
                for attr in o.__class__._tx_attrs.values():
                    if parser.debug:
                        parser.dprint("RESOLVING ATTR: {}".format(attr.name))
                        parser.dprint("mult={}, ref={}, con={}".format(
                            attr.mult, attr.ref, attr.cont))
                    attr_value = getattr(o, attr.name)
                    if attr.mult in [MULT_ONEORMORE, MULT_ZEROORMORE]:
                        for idx, list_attr_value in enumerate(attr_value):
                            if attr.ref:
                                if attr.cont:
                                    _resolve(list_attr_value)
                                else:
                                    attr_value[idx] = \
                                        _resolve_ref(list_attr_value)
                    else:
                        if attr.ref:
                            if attr.cont:
                                _resolve(attr_value)
                            else:
                                setattr(o, attr.name,
                                        _resolve_ref(attr_value))

        _resolve(model)

    def call_obj_processors(model_obj):
        """
        Depth-first model object processing.
        """
        if type(model_obj) in PRIMITIVE_PYTHON_TYPES:
            metaclass = type(model_obj)
        else:
            metaclass = metamodel[model_obj.__class__.__name__]

        for metaattr in metaclass._tx_attrs.values():
            # If attribute is containment reference go down
            if metaattr.ref and metaattr.cont:
                attr = getattr(model_obj, metaattr.name)
                if attr:
                    if metaattr.mult != MULT_ONE:
                        for obj in attr:
                            if obj:
                                call_obj_processors(obj)
                    else:
                        call_obj_processors(attr)

        obj_processor = metamodel.obj_processors.get(metaclass.__name__, None)
        if obj_processor:
            obj_processor(model_obj)

    model = process_node(parse_tree)
    resolve_refs(model)
    assert not parser._inst_stack

    # We have model loaded and all link resolved
    # So we shall do a depth-first call of object
    # processors if any processor is defined.
    if metamodel.obj_processors:
        if parser.debug:
            parser.dprint("CALLING OBJECT PROCESSORS")
        call_obj_processors(model)

    return model
TypeError
dataset/ETHPy150Open igordejanovic/textX/textx/model.py/parse_tree_to_objgraph
4,892
def _RegistryQuery(key, value=None):
  """Use reg.exe to read a particular key through _RegistryQueryBase.

  First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
  that fails, it falls back to System32.  Sysnative is available on Vista and
  up and available on Windows Server 2003 and XP through KB patch 942589. Note
  that Sysnative will always fail if using 64-bit python due to it being a
  virtual directory and System32 will work correctly in the first place.

  KB 942589 - http://support.microsoft.com/kb/942589/en-us.

  Arguments:
    key: The registry key.
    value: The particular registry value to read (optional).
  Return:
    stdout from reg.exe, or None for failure.
  """
  text = None
  try:
    text = _RegistryQueryBase('Sysnative', key, value)
  except __HOLE__, e:
    if e.errno == errno.ENOENT:
      text = _RegistryQueryBase('System32', key, value)
    else:
      raise
  return text
OSError
dataset/ETHPy150Open adobe/brackets-shell/gyp/pylib/gyp/MSVSVersion.py/_RegistryQuery
4,893
def _trans_binary(self, value):
    """ Given value is expected to be a binary - 0/1 """
    try:
        conv = int(value)
    except __HOLE__:
        return 0

    if conv not in [0, 1]:
        return 0

    return conv
ValueError
dataset/ETHPy150Open Yelp/fullerite/src/diamond/collectors/icinga_stats/icinga_stats.py/IcingaStatsCollector._trans_binary
4,894
def _trans_dtime(self, value):
    """ Translate scheduled downtime """
    try:
        conv = int(value)
    except __HOLE__:
        return 0

    if conv < 1:
        return 0

    return conv
ValueError
dataset/ETHPy150Open Yelp/fullerite/src/diamond/collectors/icinga_stats/icinga_stats.py/IcingaStatsCollector._trans_dtime
4,895
def get_field(self, field_name, args, kwargs):
    try:
        return super(PartialFormatter, self).get_field(field_name,
                                                       args, kwargs)
    except (KeyError, __HOLE__):
        return None, field_name
AttributeError
dataset/ETHPy150Open kwikteam/phy/phy/utils/event.py/PartialFormatter.get_field
4,896
def format_field(self, value, spec):
    if value is None:
        return '?'
    try:
        return super(PartialFormatter, self).format_field(value, spec)
    except __HOLE__:
        return '?'
ValueError
dataset/ETHPy150Open kwikteam/phy/phy/utils/event.py/PartialFormatter.format_field
4,897
def convert_linkable_to_choice(linkable):
    key = get_composite_key(linkable)
    try:
        value = u'%s (%s)' % (force_text(linkable),
                              linkable.get_absolute_url())
    except __HOLE__:
        value = force_text(linkable)
    return (key, value)
AttributeError
dataset/ETHPy150Open fusionbox/django-widgy/widgy/models/links.py/convert_linkable_to_choice
4,898
def _unicodeExpand(s):
    try:
        return r_unicodeEscape.sub(
            lambda m: unichr(int(m.group(0)[2:], 16)), s)
    except __HOLE__:
        warnings.warn(
            'Encountered a unicode char > 0xFFFF in a narrow python build. '
            'Trying to degrade gracefully, but this can cause problems '
            'later when working with the string:\n%s' % s)
        return r_unicodeEscape.sub(
            lambda m: codecs.decode(m.group(0), 'unicode_escape'), s)
ValueError
dataset/ETHPy150Open RDFLib/rdflib/rdflib/py3compat.py/_unicodeExpand
4,899
def copyDirectory(src, dest):
    try:
        shutil.copytree(src, dest)
    except shutil.Error as e:
        print('Error: %s' % e)
    except __HOLE__ as e:
        print('Error: %s' % e)
OSError
dataset/ETHPy150Open ActiDoo/gamification-engine/gengine/scripts/quickstart.py/copyDirectory