Dataset schema:
    Unnamed: 0: int64, range 0 to 10k
    function: string, lengths 79 to 138k
    label: string, 20 classes
    info: string, lengths 42 to 261
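Each record below is an (index, function, label, info) quadruple: a Python function with one exception type masked as __HOLE__, the masked exception class as the label, and the source repository path as the info. A minimal loading sketch follows, assuming the dump originated as a CSV with the columns above; the file name is hypothetical:

    # Minimal sketch: load the dump with pandas, assuming it originated as a
    # CSV with the four columns described above. The file name is hypothetical.
    import pandas as pd

    df = pd.read_csv("ethpy150_holes.csv")
    print(df["label"].nunique())         # expected: 20 exception classes
    print(df.loc[0, "label"])            # e.g. StopIteration
    print(df.loc[0, "function"][:80])    # function text containing __HOLE__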
5,800
def _create_stylecmds(self):
    t2c = self.ttype2cmd = {Token: ''}
    c2d = self.cmd2def = {}
    cp = self.commandprefix

    letters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
    first = iter(letters)
    second = iter(letters)
    firstl = first.next()

    def rgbcolor(col):
        if col:
            return ','.join(['%.2f' % (int(col[i] + col[i + 1], 16) / 255.0)
                             for i in (0, 2, 4)])
        else:
            return '1,1,1'

    for ttype, ndef in self.style:
        cmndef = '#1'
        if ndef['bold']:
            cmndef = r'\textbf{' + cmndef + '}'
        if ndef['italic']:
            cmndef = r'\textit{' + cmndef + '}'
        if ndef['underline']:
            cmndef = r'\underline{' + cmndef + '}'
        if ndef['roman']:
            cmndef = r'\textrm{' + cmndef + '}'
        if ndef['sans']:
            cmndef = r'\textsf{' + cmndef + '}'
        if ndef['mono']:
            cmndef = r'\texttt{' + cmndef + '}'
        if ndef['color']:
            cmndef = r'\textcolor[rgb]{%s}{%s}' % (
                rgbcolor(ndef['color']),
                cmndef
            )
        if ndef['border']:
            cmndef = r'\fcolorbox[rgb]{%s}{%s}{%s}' % (
                rgbcolor(ndef['border']),
                rgbcolor(ndef['bgcolor']),
                cmndef
            )
        elif ndef['bgcolor']:
            cmndef = r'\colorbox[rgb]{%s}{%s}' % (
                rgbcolor(ndef['bgcolor']),
                cmndef
            )
        if cmndef == '#1':
            continue
        try:
            alias = cp + firstl + second.next()
        except __HOLE__:
            firstl = first.next()
            second = iter(letters)
            alias = cp + firstl + second.next()
        t2c[ttype] = alias
        c2d[alias] = cmndef
StopIteration
dataset/ETHPy150Open joeyb/joeyb-blog/externals/pygments/formatters/latex.py/LatexFormatter._create_stylecmds
5,801
def load_hashers(password_hashers=None):
    global HASHERS
    global PREFERRED_HASHER
    hashers = []
    if not password_hashers:
        password_hashers = settings.PASSWORD_HASHERS
    for backend in password_hashers:
        try:
            mod_path, cls_name = backend.rsplit('.', 1)
            mod = importlib.import_module(mod_path)
            hasher_cls = getattr(mod, cls_name)
        except (AttributeError, __HOLE__, ValueError):
            raise ImproperlyConfigured("hasher not found: %s" % backend)
        hasher = hasher_cls()
        if not getattr(hasher, 'algorithm'):
            raise ImproperlyConfigured("hasher doesn't specify an "
                                       "algorithm name: %s" % backend)
        hashers.append(hasher)
    HASHERS = dict([(hasher.algorithm, hasher) for hasher in hashers])
    PREFERRED_HASHER = hashers[0]
ImportError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.4/django/contrib/auth/hashers.py/load_hashers
5,802
def _load_library(self):
    if self.library is not None:
        if isinstance(self.library, (tuple, list)):
            name, mod_path = self.library
        else:
            name = mod_path = self.library
        try:
            module = importlib.import_module(mod_path)
        except __HOLE__:
            raise ValueError("Couldn't load %s password algorithm "
                             "library" % name)
        return module
    raise ValueError("Hasher '%s' doesn't specify a library attribute" %
                     self.__class__)
ImportError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.4/django/contrib/auth/hashers.py/BasePasswordHasher._load_library
5,803
def _get_server_version_info(self, connection):
    dbapi_con = connection.connection
    version = []
    r = re.compile('[.\-]')
    for n in r.split(dbapi_con.dbversion):
        try:
            version.append(int(n))
        except __HOLE__:
            version.append(n)
    return tuple(version)
ValueError
dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/sqlalchemy/dialects/mysql/zxjdbc.py/MySQLDialect_zxjdbc._get_server_version_info
5,804
def _parse_headers(self):
    header_arr = utf8_clean(self.header_row).split(delimiter)
    summary_line = utf8_clean(self.form_row).split(delimiter)
    # These are always consistent
    try:
        self.headers['form'] = clean_entry(summary_line[0])
        self.headers['fec_id'] = clean_entry(summary_line[1])
        self.headers['report_num'] = None
    except __HOLE__:
        return False
    # amendment number - not sure what version it starts in.
    if len(summary_line) > 6:
        self.headers['report_num'] = clean_entry(summary_line[6])[:3]
    # Version number is always the third item
    self.version = clean_entry(header_arr[2])
    headers_list = new_headers
    if float(self.version) <= 5:
        headers_list = old_headers
    header_hash = {}
    for i in range(0, len(headers_list)):
        # It's acceptable for header rows to leave off delimiters, so enter
        # missing trailing args as blanks
        this_arg = ""
        try:
            this_arg = clean_entry(header_arr[i])
        except IndexError:
            pass
        self.headers[headers_list[i]] = this_arg
    # figure out if this is an amendment, and if so, what's being amended.
    form_last_char = self.headers['form'][-1].upper()
    if form_last_char == 'A':
        self.is_amendment = True
        #print "Found amendment %s : %s " % (self.filing_number, self.headers['report_id'])
        amendment_match = re.search('^FEC\s*-\s*(\d+)', self.headers['report_id'])
        if amendment_match:
            original = amendment_match.group(1)
            #print "Amends filing: %s" % original
            self.headers['filing_amended'] = original
        else:
            raise Exception("Can't find original filing in amended report %s" % (self.filing_number))
    else:
        self.is_amendment = False
    self.headers['is_amendment'] = self.is_amendment
    return True
IndexError
dataset/ETHPy150Open sunlightlabs/read_FEC/fecreader/parsing/filing.py/filing._parse_headers
5,805
def get_form_type(self):
    """ Get the base form -- remove the A, N or T (amended, new, termination) designations"""
    try:
        raw_form_type = self.headers['form']
        a = re.search('(.*?)[A|N|T]', raw_form_type)
        if (a):
            return a.group(1)
        else:
            return raw_form_type
    except __HOLE__:
        return None
KeyError
dataset/ETHPy150Open sunlightlabs/read_FEC/fecreader/parsing/filing.py/filing.get_form_type
5,806
def get_version(self):
    try:
        return self.version
    except __HOLE__:
        return None
KeyError
dataset/ETHPy150Open sunlightlabs/read_FEC/fecreader/parsing/filing.py/filing.get_version
5,807
def _return(value, old_agent_version):
    ctx.returns(value)
    # Due to bug in celery:
    # https://github.com/celery/celery/issues/897
    if os.name == 'nt' and old_agent_version.startswith('3.2'):
        from celery import current_task
        try:
            from cloudify_agent.app import app
        except __HOLE__:
            from cloudify.celery import celery as app
        app.backend.mark_as_done(current_task.request.id, value)
ImportError
dataset/ETHPy150Open cloudify-cosmo/cloudify-manager/resources/rest-service/cloudify/install_agent.py/_return
5,808
def _flatten(implements, include_None=0):
    try:
        r = implements.flattened()
    except __HOLE__:
        if implements is None:
            r = ()
        else:
            r = Declaration(implements).flattened()
    if not include_None:
        return r
    r = list(r)
    r.append(None)
    return r
AttributeError
dataset/ETHPy150Open kuri65536/python-for-android/python-modules/zope/zope/interface/_flatten.py/_flatten
5,809
def post(self, request, *args, **kwargs):
    offset = request.POST.get('offset', None)
    if not offset:
        return HttpResponse("No 'offset' parameter provided", status=400)
    try:
        offset = int(offset)
    except __HOLE__:
        return HttpResponse("Invalid 'offset' value provided", status=400)
    request.session['detected_tz'] = int(offset)
    return HttpResponse("OK")
ValueError
dataset/ETHPy150Open adamcharnock/django-tz-detect/tz_detect/views.py/SetOffsetView.post
5,810
def buildAttributes(self, node, attrs, already_processed):
    value = find_attr_value_('negate', node)
    if value is not None:
        if value in ('true', '1'):
            self.negate = True
        elif value in ('false', '0'):
            self.negate = False
        else:
            raise_parse_error(node, 'Bad boolean attribute')
    value = find_attr_value_('idref', node)
    if value is not None:
        self.idref = value
    value = find_attr_value_('id', node)
    if value is not None:
        self.id = value
    value = find_attr_value_('sighting_count', node)
    if value is not None:
        try:
            self.sighting_count = int(value)
        except __HOLE__ as exp:
            raise_parse_error(node, 'Bad integer attribute: %s' % exp)
        if self.sighting_count <= 0:
            raise_parse_error(node, 'Invalid PositiveInteger')
ValueError
dataset/ETHPy150Open CybOXProject/python-cybox/cybox/bindings/cybox_core.py/ObservableType.buildAttributes
5,811
def buildAttributes(self, node, attrs, already_processed):
    value = find_attr_value_('units', node)
    if value is not None:
        self.units = value
    value = find_attr_value_('trend', node)
    if value is not None:
        self.trend = value
    value = find_attr_value_('rate', node)
    if value is not None:
        try:
            self.rate = float(value)
        except __HOLE__ as exp:
            raise ValueError('Bad float/double attribute (rate): %s' % exp)
    value = find_attr_value_('scale', node)
    if value is not None:
        self.scale = value
ValueError
dataset/ETHPy150Open CybOXProject/python-cybox/cybox/bindings/cybox_core.py/FrequencyType.buildAttributes
5,812
def buildAttributes(self, node, attrs, already_processed):
    value = find_attr_value_('timestamp', node)
    if value is not None:
        try:
            self.timestamp = self.gds_parse_datetime(value, node, 'timestamp')
        except ValueError as exp:
            raise ValueError('Bad date-time attribute (timestamp): %s' % exp)
    value = find_attr_value_('action_status', node)
    if value is not None:
        self.action_status = value
    value = find_attr_value_('ordinal_position', node)
    if value is not None:
        try:
            self.ordinal_position = int(value)
        except __HOLE__ as exp:
            raise_parse_error(node, 'Bad integer attribute: %s' % exp)
        if self.ordinal_position <= 0:
            raise_parse_error(node, 'Invalid PositiveInteger')
    value = find_attr_value_('context', node)
    if value is not None:
        self.context = value
    value = find_attr_value_('idref', node)
    if value is not None:
        self.idref = value
    value = find_attr_value_('id', node)
    if value is not None:
        self.id = value
ValueError
dataset/ETHPy150Open CybOXProject/python-cybox/cybox/bindings/cybox_core.py/ActionType.buildAttributes
5,813
def _readline_from_keyboard(self):
    c = self.console
    def nop(e):
        pass
    while 1:
        self._update_line()
        lbuf = self.l_buffer
        log_sock("point:%d mark:%d selection_mark:%d" % (lbuf.point, lbuf.mark, lbuf.selection_mark))
        try:
            event = c.getkeypress()
            log_sock(u">>%s" % event)
        except __HOLE__:
            from pyreadline.keysyms.common import KeyPress
            from pyreadline.console.event import Event
            event = Event(0, 0)
            event.char = "c"
            event.keyinfo = KeyPress("c", shift=False, control=True,
                                     meta=False, keyname=None)
            log_sock("KBDIRQ")
            if self.allow_ctrl_c:
                now = time.time()
                if (now - self.ctrl_c_timeout) < self.ctrl_c_tap_time_interval:
                    raise
                else:
                    self.ctrl_c_timeout = now
                pass
            else:
                raise
        if self.next_meta:
            self.next_meta = False
            control, meta, shift, code = event.keyinfo
            event.keyinfo = (control, True, shift, code)

        #Process exit keys. Only exit on empty line
        keyinfo = event.keyinfo.tuple()
        if keyinfo in self.exit_dispatch:
            if lineobj.EndOfLine(self.l_buffer) == 0:
                raise EOFError
        if len(keyinfo[-1]) > 1:
            default = nop
        else:
            default = self.self_insert
        dispatch_func = self.key_dispatch.get(keyinfo, default)
        log("readline from keyboard:%s,%s" % (keyinfo, dispatch_func))
        log_sock((u"%s|%s" % (ensure_unicode(format(keyinfo)),
                              dispatch_func.__name__)), "bound_function")
        r = None
        if dispatch_func:
            r = dispatch_func(event)
            self._keylog(dispatch_func, self.l_buffer)
            self.l_buffer.push_undo()

        self.previous_func = dispatch_func
        if r:
            self._update_line()
            break
KeyboardInterrupt
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/site-packages/pyreadline/modes/emacs.py/EmacsMode._readline_from_keyboard
5,814
def b16_slug_to_arguments(b16_slug):
    """
    Raises B16DecodingFail exception on
    """
    try:
        url = b16decode(b16_slug.decode('utf-8'))
    except BinaryError:
        raise B16DecodingFail
    except __HOLE__:
        raise B16DecodingFail('Non-base16 digit found')
    except AttributeError:
        raise B16DecodingFail("b16_slug must have a 'decode' method.")
    try:
        app, model, pk = url.decode('utf-8').split('/')[0:3]
    except UnicodeDecodeError:
        raise B16DecodingFail("Invalid b16_slug passed")
    return app, model, pk
TypeError
dataset/ETHPy150Open pydanny/dj-spam/spam/utils.py/b16_slug_to_arguments
5,815
def get_app(defs, add_help=True):
    """Small wrapper function to returns an instance of :class:`Application`
    which serves the objects in the defs. Usually this is called with return
    value globals() from the module where the resources are defined. The
    returned WSGI application will serve all subclasses of
    :class:`wsgiservice.Resource` found in the dictionary.

    :param defs: Each :class:`wsgiservice.Resource` object found in the values
                 of this dictionary is used as application resource. The other
                 values are discarded.
    :type defs: dict
    :param add_help: Whether to add the Help resource which will expose the
                     documentation of this service at /_internal/help
    :type add_help: boolean
    :rtype: :class:`Application`
    """
    def is_resource(d):
        try:
            if issubclass(d, wsgiservice.Resource) and hasattr(d, '_path'):
                return True
        except __HOLE__:
            pass  # d wasn't a class
        return False
    resources = [d for d in defs.values() if is_resource(d)]
    if add_help:
        resources.append(wsgiservice.resource.Help)
    return Application(resources)
TypeError
dataset/ETHPy150Open pneff/wsgiservice/wsgiservice/application.py/get_app
5,816
def tree_image(tree, fout=None):
    try:
        import pydot
        import a_reliable_dot_rendering
    except __HOLE__:
        return None
    dot_data = StringIO()
    export_graphviz(tree, out_file=dot_data)
    data = re.sub(r"gini = 0\.[0-9]+\\n", "", dot_data.getvalue())
    data = re.sub(r"samples = [0-9]+\\n", "", data)
    data = re.sub(r"\\nsamples = [0-9]+", "", data)
    graph = pydot.graph_from_dot_data(data)
    if fout is None:
        fout = "tmp.png"
    graph.write_png(fout)
    return imread(fout)
ImportError
dataset/ETHPy150Open amueller/nyu_ml_lectures/plots/plot_interactive_tree.py/tree_image
5,817
def get_parser():
    p = Parser()
    try:
        os.mkdir("/tmp/Test Dir")
    except __HOLE__:
        pass  # dir exists
    open("/tmp/Test Dir/test.txt", "w").close()
    return p
OSError
dataset/ETHPy150Open Calysto/metakernel/metakernel/tests/test_parser.py/get_parser
5,818
@permission_required("core.manage_shop")
def manage_delivery_times(request):
    """Dispatches to the first delivery time or to the form to add a delivery
    time (if there is no delivery time yet).
    """
    try:
        delivery_time = DeliveryTime.objects.all()[0]
        url = reverse("lfs_manage_delivery_time", kwargs={"id": delivery_time.id})
    except __HOLE__:
        url = reverse("lfs_no_delivery_times")
    return HttpResponseRedirect(url)
IndexError
dataset/ETHPy150Open diefenbach/django-lfs/lfs/manage/delivery_times/views.py/manage_delivery_times
5,819
def get_product_delivery_time(request, product, for_cart=False):
    """Returns the delivery time object for the product.

    If the ``for_cart`` parameter is False, the default delivery time for
    product is calculated. This is at the moment the first valid (iow with the
    hightest priority) shipping method.

    If the ``for_cart parameter`` is True, the delivery time for the product
    within the cart is calculated. This can differ because the shop customer
    has the opportunity to select a shipping method within the cart. If this
    shipping method is valid for the given product this one is taken, if not
    the default one - the default one is the first valid shipping method.
    """
    # TODO: Need a reasonable chaching here
    if for_cart:
        cache_key = "%s-shipping-delivery-time-cart-%s" % (settings.CACHE_MIDDLEWARE_KEY_PREFIX, request.user.id)
    else:
        cache_key = "%s-shipping-delivery-time" % settings.CACHE_MIDDLEWARE_KEY_PREFIX

    shippings = None  # cache.get(cache_key)
    if shippings is None:
        shippings = {}

    product_key = "product-%s" % product.id
    shipping = shippings.get(product_key)
    if shipping is not None:
        return shipping

    # if the product is a product with variants we switch to the default
    # variant to calculate the delivery time. Please note that in this case
    # the default variant is also displayed.
    if product.sub_type == PRODUCT_WITH_VARIANTS:
        variant = product.get_default_variant()
        if variant is not None:
            product = variant

    delivery_time = product.get_manual_delivery_time()
    if not delivery_time:
        if for_cart:
            sm = get_selected_shipping_method(request)
            # Within the cart we have to take care of the selected shipping
            # method.
            sms = get_valid_shipping_methods(request, product)
            if sm in sms:
                try:
                    delivery_time = sm.delivery_time
                except __HOLE__:
                    delivery_time = None
            else:
                sm = get_default_shipping_method(request)
                try:
                    delivery_time = sm.delivery_time
                except AttributeError:
                    delivery_time = None
        else:
            # For the product we take the standard shipping method, which is
            # the first valid shipping method at the moment.
            try:
                shipping_method = get_first_valid_shipping_method(request, product)
                delivery_time = shipping_method.delivery_time
            except AttributeError:
                delivery_time = None

    if delivery_time is None:
        delivery_time = lfs.core.utils.get_default_shop(request).delivery_time or \
            DeliveryTime(min=1, max=2, unit=DELIVERY_TIME_UNIT_DAYS)

    # Calculate the total delivery time if the product is not on stock.
    if (product.stock_amount <= 0) and (product.order_time):
        # Calculate how much days are left until the product is going to be
        # delivered.
        if product.ordered_at:
            order_delta = datetime.now().date() - product.ordered_at
            order_delta = order_delta.days
        else:
            order_delta = 0

        # Calculate the rest of the origin order time.
        order_time_left = product.order_time.subtract_days(order_delta)

        # Calculate the total delivery time.
        delivery_time += order_time_left
        delivery_time = delivery_time.as_reasonable_unit()

    delivery_time = delivery_time.round()
    shippings[product_key] = delivery_time
    cache.set(cache_key, shippings)
    return delivery_time
AttributeError
dataset/ETHPy150Open diefenbach/django-lfs/lfs/shipping/utils.py/get_product_delivery_time
5,820
def search_notemodel(self, note_model):
    content_words = note_model.wordset
    try:
        content_tags = note_model.metadata['tags']
    except (__HOLE__, TypeError):
        content_tags = ''
    has_tag_filters = len(self.use_tags + self.ignore_tags) > 0  # are there tags in the search term
    has_word_filters = len(self.use_words + self.ignore_words) > 0  # are there words in the search term
    yay_tags = all([tag in content_tags for tag in self.use_tags]) if has_tag_filters else True
    boo_tags = all([tag not in content_tags for tag in self.ignore_tags]) if has_tag_filters else True
    yay_words = all([word in content_words for word in self.use_words]) if has_word_filters else True
    boo_words = all([word not in content_words for word in self.ignore_words]) if has_word_filters else True
    return all([yay_words, boo_words, yay_tags, boo_tags])
KeyError
dataset/ETHPy150Open akehrer/Motome/Motome/Models/Search.py/SearchModel.search_notemodel
5,821
def remove_settings(self, filename, is_dir=False):
    test_dir = os.path.dirname(os.path.dirname(__file__))
    full_name = os.path.join(test_dir, filename)
    if is_dir:
        shutil.rmtree(full_name)
    else:
        os.remove(full_name)
    # Also try to remove the compiled file; if it exists, it could
    # mess up later tests that depend upon the .py file not existing
    try:
        if sys.platform.startswith('java'):
            # Jython produces module$py.class files
            os.remove(re.sub(r'\.py$', '$py.class', full_name))
        else:
            # CPython produces module.pyc files
            os.remove(full_name + 'c')
    except __HOLE__:
        pass
OSError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/tests/regressiontests/admin_scripts/tests.py/AdminScriptTestCase.remove_settings
5,822
def run_test(self, script, args, settings_file=None, apps=None):
    test_dir = os.path.dirname(os.path.dirname(__file__))
    project_dir = os.path.dirname(test_dir)
    base_dir = os.path.dirname(project_dir)
    ext_backend_base_dirs = self._ext_backend_paths()

    # Remember the old environment
    old_django_settings_module = os.environ.get('DJANGO_SETTINGS_MODULE', None)
    if sys.platform.startswith('java'):
        python_path_var_name = 'JYTHONPATH'
    else:
        python_path_var_name = 'PYTHONPATH'
    old_python_path = os.environ.get(python_path_var_name, None)
    old_cwd = os.getcwd()

    # Set the test environment
    if settings_file:
        os.environ['DJANGO_SETTINGS_MODULE'] = settings_file
    elif 'DJANGO_SETTINGS_MODULE' in os.environ:
        del os.environ['DJANGO_SETTINGS_MODULE']
    python_path = [test_dir, base_dir]
    python_path.extend(ext_backend_base_dirs)
    os.environ[python_path_var_name] = os.pathsep.join(python_path)

    # Build the command line
    executable = sys.executable
    arg_string = ' '.join(['%s' % arg for arg in args])
    if ' ' in executable:
        cmd = '""%s" "%s" %s"' % (executable, script, arg_string)
    else:
        cmd = '%s "%s" %s' % (executable, script, arg_string)

    # Move to the test directory and run
    os.chdir(test_dir)
    try:
        from subprocess import Popen, PIPE
        p = Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE)
        stdin, stdout, stderr = (p.stdin, p.stdout, p.stderr)
        p.wait()
    except __HOLE__:
        stdin, stdout, stderr = os.popen3(cmd)
    out, err = stdout.read(), stderr.read()

    # Restore the old environment
    if old_django_settings_module:
        os.environ['DJANGO_SETTINGS_MODULE'] = old_django_settings_module
    if old_python_path:
        os.environ[python_path_var_name] = old_python_path

    # Move back to the old working directory
    os.chdir(old_cwd)

    return out, err
ImportError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/tests/regressiontests/admin_scripts/tests.py/AdminScriptTestCase.run_test
5,823
def setup_labels(self, size=None, color=None, shadow=None):
    """Sets up coordinates for labels wrt SVG file (2D flatmap)"""
    # Recursive call for multiple layers
    if self.layer == 'multi_layer':
        label_layers = []
        for L in self.layer_names:
            label_layers.append(self.layers[L].setup_labels())
            self.svg.getroot().insert(0, label_layers[-1])
        return label_layers

    if self.layer in config.sections():
        dlayer = self.layer
    else:
        # Unknown display layer; default to values for ROIs
        import warnings
        warnings.warn('No defaults set for display layer %s; Using defaults for ROIs in options.cfg file' % self.layer)
        dlayer = 'rois'
    if size is None:
        size = config.get(dlayer, "labelsize")
    if color is None:
        color = tuple(map(float, config.get(dlayer, "labelcolor").split(",")))
    if shadow is None:
        shadow = self.shadow

    alpha = color[3]
    color = "rgb(%d, %d, %d)" % (color[0] * 255, color[1] * 255, color[2] * 255)

    try:
        layer = _find_layer(self.svg, "%s_labels" % self.layer)
    except __HOLE__:  # Changed in _find_layer below... AssertionError: # Why assertion error?
        layer = _make_layer(self.svg.getroot(), "%s_labels" % self.layer)

    labelpos, candidates = [], []
    for roi in list(self.rois.values()):
        for i, pos in enumerate(roi.get_labelpos()):
            labelpos.append(pos)
            candidates.append((roi, i))

    w, h = self.svgshape
    nolabels = set(candidates)
    txtstyle = "font-family:sans;font-size:%s;font-weight:bold;font-style:italic;fill:%s;fill-opacity:%f;text-anchor:middle;" % (size, color, alpha)
    for text in layer.findall(".//{%s}text" % svgns):
        x = float(text.get('x'))
        y = float(text.get('y'))
        text.attrib['style'] = txtstyle
        text.attrib['data-ptidx'] = str(self.kdt.query((x / w, 1 - (y / h)))[1])
        pts, cand = [], []
        for p, c in zip(labelpos, candidates):
            if c[0].name == text.text:
                pts.append((p[0] * w, (1 - p[1]) * h))
                cand.append(c)
        d, idx = cKDTree(pts).query((x, y))
        nolabels.remove(cand[idx])

    for roi, i in nolabels:
        x, y = roi.get_labelpos()[i]
        text = etree.SubElement(layer, "{%s}text" % svgns)
        text.text = roi.name
        text.attrib["x"] = str(x * w)
        text.attrib["y"] = str((1 - y) * h)
        if self.shadow > 0:
            text.attrib['filter'] = "url(#dropshadow)"
        text.attrib['style'] = txtstyle
        text.attrib['data-ptidx'] = str(self.kdt.query((x, y))[1])

    self.labels = layer
    return layer
ValueError
dataset/ETHPy150Open gallantlab/pycortex/cortex/svgroi.py/ROIpack.setup_labels
5,824
def _parse_svg_pts(self, datastr):
    data = list(_tokenize_path(datastr))
    #data = data.replace(",", " ").split()
    if data.pop(0).lower() != "m":
        raise ValueError("Unknown path format")
    #offset = np.array([float(x) for x in data[1].split(',')])
    offset = np.array(map(float, [data.pop(0), data.pop(0)]))
    mode = "l"
    pts = [[offset[0], offset[1]]]

    def canfloat(n):
        try:
            float(n)
            return True
        except __HOLE__:
            return False

    lastlen = len(data)
    while len(data) > 0:
        #print mode, data
        if not canfloat(data[0]):
            mode = data.pop(0)
            continue
        if mode == "l":
            offset += list(map(float, [data.pop(0), data.pop(0)]))
        elif mode == "L":
            offset = np.array(list(map(float, [data.pop(0), data.pop(0)])))
        elif mode == "c":
            data = data[4:]
            offset += list(map(float, [data.pop(0), data.pop(0)]))
        elif mode == "C":
            data = data[4:]
            offset = np.array(list(map(float, [data.pop(0), data.pop(0)])))
        ## Check to see if nothing has happened, and, if so, fail
        if len(data) == lastlen:
            raise ValueError("Error parsing path.")
        else:
            lastlen = len(data)
        pts.append([offset[0], offset[1]])

    pts = np.array(pts)
    pts /= self.parent.svgshape
    pts[:, 1] = 1 - pts[:, 1]
    return pts
ValueError
dataset/ETHPy150Open gallantlab/pycortex/cortex/svgroi.py/ROI._parse_svg_pts
5,825
def scrub(svgfile):
    """Remove data layers from an svg object prior to rendering

    Returns etree-parsed svg object
    """
    svg = etree.parse(svgfile, parser=parser)
    try:
        rmnode = _find_layer(svg, "data")
        rmnode.getparent().remove(rmnode)
    except __HOLE__:
        pass
    svgtag = svg.getroot()
    svgtag.attrib['id'] = "svgroi"
    inkver = "{%s}version" % inkns
    if inkver in svgtag.attrib:
        del svgtag.attrib[inkver]
    try:
        for tagname in ["{%s}namedview" % sodins, "{%s}metadata" % svgns]:
            for tag in svg.findall(".//%s" % tagname):
                tag.getparent().remove(tag)
    except:
        import traceback
        traceback.print_exc()
    return svg
ValueError
dataset/ETHPy150Open gallantlab/pycortex/cortex/svgroi.py/scrub
5,826
def get_object_type(self, obj):
    try:
        return SupportedServices.get_name_for_model(obj.object_content_type.model_class())
    except __HOLE__:
        return '.'.join(obj.object_content_type.natural_key())
AttributeError
dataset/ETHPy150Open opennode/nodeconductor/nodeconductor/template/serializers.py/TemplateSerializer.get_object_type
5,827
def load_backend(path):
    i = path.rfind('.')
    module, attr = path[:i], path[i + 1:]
    try:
        mod = import_module(module)
    except __HOLE__ as e:
        raise ImproperlyConfigured('Error importing authentication backend %s: "%s"' % (path, e))
    except ValueError:
        raise ImproperlyConfigured('Error importing authentication backends. Is AUTHENTICATION_BACKENDS a correctly defined list or tuple?')
    try:
        cls = getattr(mod, attr)
    except AttributeError:
        raise ImproperlyConfigured('Module "%s" does not define a "%s" authentication backend' % (module, attr))
    return cls()
ImportError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/contrib/auth/__init__.py/load_backend
5,828
def authenticate(**credentials):
    """
    If the given credentials are valid, return a User object.
    """
    for backend in get_backends():
        try:
            user = backend.authenticate(**credentials)
        except __HOLE__:
            # This backend doesn't accept these credentials as arguments. Try the next one.
            continue
        if user is None:
            continue
        # Annotate the user object with the path of the backend.
        user.backend = "%s.%s" % (backend.__module__, backend.__class__.__name__)
        return user

    # The credentials supplied are invalid to all backends, fire signal
    user_login_failed.send(sender=__name__, credentials=_clean_credentials(credentials))
TypeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/contrib/auth/__init__.py/authenticate
5,829
def get_user_model():
    "Return the User model that is active in this project"
    from django.conf import settings
    from django.db.models import get_model

    try:
        app_label, model_name = settings.AUTH_USER_MODEL.split('.')
    except __HOLE__:
        raise ImproperlyConfigured("AUTH_USER_MODEL must be of the form 'app_label.model_name'")
    user_model = get_model(app_label, model_name)
    if user_model is None:
        raise ImproperlyConfigured("AUTH_USER_MODEL refers to model '%s' that has not been installed" % settings.AUTH_USER_MODEL)
    return user_model
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/contrib/auth/__init__.py/get_user_model
5,830
def get_user(request):
    from django.contrib.auth.models import AnonymousUser
    try:
        user_id = request.session[SESSION_KEY]
        backend_path = request.session[BACKEND_SESSION_KEY]
        backend = load_backend(backend_path)
        user = backend.get_user(user_id) or AnonymousUser()
    except __HOLE__:
        user = AnonymousUser()
    return user
KeyError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/contrib/auth/__init__.py/get_user
5,831
def get_matching_features(datasource, where_clause, page_number, include_geom):
    '''
    '''
    layer, offset, count = datasource.GetLayer(0), (page_number - 1) * 25, 25

    try:
        layer.SetAttributeFilter(where_clause)
    except __HOLE__, e:
        raise QueryError('Bad where clause: ' + str(e))

    return layer_features(layer, include_geom, offset, count)
RuntimeError
dataset/ETHPy150Open codeforamerica/US-Census-Area-API/geo.py/get_matching_features
5,832
def cut_tree(Z, n_clusters=None, height=None):
    """
    Given a linkage matrix Z, return the cut tree.

    Parameters
    ----------
    Z : scipy.cluster.linkage array
        The linkage matrix.
    n_clusters : array_like, optional
        Number of clusters in the tree at the cut point.
    height : array_like, optional
        The height at which to cut the tree. Only possible for ultrametric
        trees.

    Returns
    -------
    cutree : array
        An array indicating group membership at each agglomeration step. I.e.,
        for a full cut tree, in the first column each data point is in its own
        cluster. At the next step, two nodes are merged. Finally all singleton
        and non-singleton clusters are in one group. If `n_clusters` or
        `height` is given, the columns correspond to the columns of
        `n_clusters` or `height`.

    Examples
    --------
    >>> from scipy import cluster
    >>> np.random.seed(23)
    >>> X = np.random.randn(50, 4)
    >>> Z = cluster.hierarchy.ward(X)
    >>> cutree = cluster.hierarchy.cut_tree(Z, n_clusters=[5, 10])
    >>> cutree[:10]
    array([[0, 0],
           [1, 1],
           [2, 2],
           [3, 3],
           [3, 4],
           [2, 2],
           [0, 0],
           [1, 5],
           [3, 6],
           [4, 7]])

    """
    nobs = num_obs_linkage(Z)
    nodes = _order_cluster_tree(Z)

    if height is not None and n_clusters is not None:
        raise ValueError("At least one of either height or n_clusters "
                         "must be None")
    elif height is None and n_clusters is None:
        # return the full cut tree
        cols_idx = np.arange(nobs)
    elif height is not None:
        heights = np.array([x.dist for x in nodes])
        cols_idx = np.searchsorted(heights, height)
    else:
        cols_idx = nobs - np.searchsorted(np.arange(nobs), n_clusters)

    try:
        n_cols = len(cols_idx)
    except __HOLE__:
        # scalar
        n_cols = 1
        cols_idx = np.array([cols_idx])

    groups = np.zeros((n_cols, nobs), dtype=int)
    last_group = np.arange(nobs)
    if 0 in cols_idx:
        groups[0] = last_group

    for i, node in enumerate(nodes):
        idx = node.pre_order()
        this_group = last_group.copy()
        this_group[idx] = last_group[idx].min()
        this_group[this_group > last_group[idx].max()] -= 1
        if i + 1 in cols_idx:
            groups[np.where(i + 1 == cols_idx)[0]] = this_group
        last_group = this_group

    return groups.T
TypeError
dataset/ETHPy150Open scipy/scipy/scipy/cluster/hierarchy.py/cut_tree
5,833
def _plot_dendrogram(icoords, dcoords, ivl, p, n, mh, orientation,
                     no_labels, color_list, leaf_font_size=None,
                     leaf_rotation=None, contraction_marks=None,
                     ax=None, above_threshold_color='b'):
    # Import matplotlib here so that it's not imported unless dendrograms
    # are plotted. Raise an informative error if importing fails.
    try:
        # if an axis is provided, don't use pylab at all
        if ax is None:
            import matplotlib.pylab
        import matplotlib.patches
        import matplotlib.collections
    except __HOLE__:
        raise ImportError("You must install the matplotlib library to plot "
                          "the dendrogram. Use no_plot=True to calculate the "
                          "dendrogram without plotting.")

    if ax is None:
        ax = matplotlib.pylab.gca()
        # if we're using pylab, we want to trigger a draw at the end
        trigger_redraw = True
    else:
        trigger_redraw = False

    # Independent variable plot width
    ivw = len(ivl) * 10
    # Dependent variable plot height
    dvw = mh + mh * 0.05

    iv_ticks = np.arange(5, len(ivl) * 10 + 5, 10)
    if orientation in ('top', 'bottom'):
        if orientation == 'top':
            ax.set_ylim([0, dvw])
            ax.set_xlim([0, ivw])
        else:
            ax.set_ylim([dvw, 0])
            ax.set_xlim([0, ivw])

        xlines = icoords
        ylines = dcoords
        if no_labels:
            ax.set_xticks([])
            ax.set_xticklabels([])
        else:
            ax.set_xticks(iv_ticks)

            if orientation == 'top':
                ax.xaxis.set_ticks_position('bottom')
            else:
                ax.xaxis.set_ticks_position('top')

            # Make the tick marks invisible because they cover up the links
            for line in ax.get_xticklines():
                line.set_visible(False)

            leaf_rot = float(_get_tick_rotation(len(ivl))) if (
                leaf_rotation is None) else leaf_rotation
            leaf_font = float(_get_tick_text_size(len(ivl))) if (
                leaf_font_size is None) else leaf_font_size
            ax.set_xticklabels(ivl, rotation=leaf_rot, size=leaf_font)

    elif orientation in ('left', 'right'):
        if orientation == 'left':
            ax.set_xlim([dvw, 0])
            ax.set_ylim([0, ivw])
        else:
            ax.set_xlim([0, dvw])
            ax.set_ylim([0, ivw])

        xlines = dcoords
        ylines = icoords
        if no_labels:
            ax.set_yticks([])
            ax.set_yticklabels([])
        else:
            ax.set_yticks(iv_ticks)

            if orientation == 'left':
                ax.yaxis.set_ticks_position('right')
            else:
                ax.yaxis.set_ticks_position('left')

            # Make the tick marks invisible because they cover up the links
            for line in ax.get_yticklines():
                line.set_visible(False)

            leaf_font = float(_get_tick_text_size(len(ivl))) if (
                leaf_font_size is None) else leaf_font_size

            if leaf_rotation is not None:
                ax.set_yticklabels(ivl, rotation=leaf_rotation, size=leaf_font)
            else:
                ax.set_yticklabels(ivl, size=leaf_font)

    # Let's use collections instead. This way there is a separate legend item
    # for each tree grouping, rather than stupidly one for each line segment.
    colors_used = _remove_dups(color_list)
    color_to_lines = {}
    for color in colors_used:
        color_to_lines[color] = []
    for (xline, yline, color) in zip(xlines, ylines, color_list):
        color_to_lines[color].append(list(zip(xline, yline)))

    colors_to_collections = {}
    # Construct the collections.
    for color in colors_used:
        coll = matplotlib.collections.LineCollection(color_to_lines[color],
                                                     colors=(color,))
        colors_to_collections[color] = coll

    # Add all the groupings below the color threshold.
    for color in colors_used:
        if color != above_threshold_color:
            ax.add_collection(colors_to_collections[color])
    # If there's a grouping of links above the color threshold, it goes last.
    if above_threshold_color in colors_to_collections:
        ax.add_collection(colors_to_collections[above_threshold_color])

    if contraction_marks is not None:
        Ellipse = matplotlib.patches.Ellipse
        for (x, y) in contraction_marks:
            if orientation in ('left', 'right'):
                e = Ellipse((y, x), width=dvw / 100, height=1.0)
            else:
                e = Ellipse((x, y), width=1.0, height=dvw / 100)
            ax.add_artist(e)
            e.set_clip_box(ax.bbox)
            e.set_alpha(0.5)
            e.set_facecolor('k')

    if trigger_redraw:
        matplotlib.pylab.draw_if_interactive()
ImportError
dataset/ETHPy150Open scipy/scipy/scipy/cluster/hierarchy.py/_plot_dendrogram
5,834
def save_as(self, version):
    """"the save action for fusion environment

    uses Fusions own python binding
    """
    # set the extension to '.comp'
    from stalker import Version
    assert isinstance(version, Version)
    # its a new version please update the paths
    version.update_paths()
    version.extension = '.comp'
    version.created_with = self.name

    # set project_directory
    self.project_directory = os.path.dirname(version.absolute_path)

    # create the main write node
    self.create_main_saver_node(version)

    # replace read and write node paths
    #self.replace_external_paths()

    # create the path before saving
    try:
        os.makedirs(version.absolute_path)
    except __HOLE__:
        # path already exists OSError
        pass

    version_full_path = os.path.normpath(version.absolute_full_path)

    self.comp.Lock()
    self.comp.Save(version_full_path.encode())
    self.comp.Unlock()

    rfm = RecentFileManager()
    rfm.add(self.name, version.absolute_full_path)

    return True
OSError
dataset/ETHPy150Open eoyilmaz/anima/anima/env/fusion/__init__.py/Fusion.save_as
5,835
def get_version_from_recent_files(self):
    """It will try to create a
    :class:`~oyProjectManager.models.version.Version` instance by looking at
    the recent files list.

    It will return None if it can not find one.

    :return: :class:`~oyProjectManager.models.version.Version`
    """
    # full_path = self.fusion_prefs["LastCompFile"]
    # return self.get_version_from_full_path(full_path)

    version = None
    rfm = RecentFileManager()

    try:
        recent_files = rfm[self.name]
    except __HOLE__:
        logger.debug('no recent files')
        recent_files = None

    if recent_files is not None:
        for i in range(len(recent_files)):
            version = self.get_version_from_full_path(recent_files[i])
            if version is not None:
                break

    logger.debug("version from recent files is: %s" % version)

    return version
KeyError
dataset/ETHPy150Open eoyilmaz/anima/anima/env/fusion/__init__.py/Fusion.get_version_from_recent_files
5,836
def create_main_saver_node(self, version):
    """Creates the default saver node if there is no created before.

    Creates the default saver nodes if there isn't any existing outputs, and
    updates the ones that is already created
    """
    def output_path_generator(file_format):
        """helper function to generate the output path

        :param file_format:
        :return:
        """
        # generate the data needed
        # the output path
        file_name_buffer = []
        template_kwargs = {}

        # if this is a shot related task set it to shots resolution
        version_sig_name = self.get_significant_name(version)

        file_name_buffer.append(
            '%(version_sig_name)s.001.%(format)s'
        )
        template_kwargs.update({
            'version_sig_name': version_sig_name,
            'format': file_format
        })

        output_file_name = ''.join(file_name_buffer) % template_kwargs

        # check if it is a stereo comp
        # if it is enable separate view rendering
        output_file_full_path = os.path.join(
            version.absolute_path,
            'Outputs',
            version.take_name,
            'v%03d' % version.version_number,
            file_format,
            output_file_name
        ).replace('\\', '/')

        # set the output path
        return '%s' % os.path.normpath(
            output_file_full_path
        ).encode()

    def output_node_name_generator(file_format):
        return '%s_%s' % (self._main_output_node_name, file_format)

    random_ref_id = uuid.uuid4().hex

    output_format_data = [
        {
            'name': 'png',
            'node_tree': {
                'type': 'Saver',
                'attr': {
                    'TOOLS_Name': output_node_name_generator('png'),
                },
                'input_list': {
                    'Clip': output_path_generator('png'),
                    'ProcessRed': 1,
                    'ProcessGreen': 1,
                    'ProcessBlue': 1,
                    'ProcessAlpha': 0,
                    'OutputFormat': 'PNGFormat',
                    'PNGFormat.SaveAlpha': 0,
                    'PNGFormat.Depth': 1,
                    'PNGFormat.CompressionLevel': 9,
                    'PNGFormat.GammaMode': 0,
                },
                'connected_to': {
                    'Input': {
                        'type': 'ColorCurves',
                        'ref_id': random_ref_id,
                        'input_list': {
                            'EditAlpha': 0.0,
                        },
                        'connected_to': {
                            'Input': {
                                'type': 'CineonLog',
                                'input_list': {
                                    'Mode': 1,
                                    'RedBlackLevel': 0.0,
                                    'RedWhiteLevel': 1023.0,
                                    'RedFilmStockGamma': 1.0
                                },
                                'connected_to': {
                                    'Input': {
                                        'type': 'TimeSpeed',
                                        'attr': {
                                            'TOOLB_PassThrough': True,
                                        },
                                        'input_list': {
                                            'Speed': 12.0 / 25.0,
                                            'InterpolateBetweenFrames': 0
                                        },
                                    }
                                }
                            }
                        }
                    }
                }
            }
        },
        {
            'name': 'exr',
            'node_tree': {
                'type': 'Saver',
                'attr': {
                    'TOOLS_Name': output_node_name_generator('exr'),
                },
                'input_list': {
                    'Clip': output_path_generator('exr'),
                    'ProcessRed': 1,
                    'ProcessGreen': 1,
                    'ProcessBlue': 1,
                    'ProcessAlpha': 0,
                    'OutputFormat': 'OpenEXRFormat',
                    'OpenEXRFormat.Depth': 1,  # 16-bit float
                    'OpenEXRFormat.RedEnable': 1,
                    'OpenEXRFormat.GreenEnable': 1,
                    'OpenEXRFormat.BlueEnable': 1,
                    'OpenEXRFormat.AlphaEnable': 0,
                    'OpenEXRFormat.ZEnable': 0,
                    'OpenEXRFormat.CovEnable': 0,
                    'OpenEXRFormat.ObjIDEnable': 0,
                    'OpenEXRFormat.MatIDEnable': 0,
                    'OpenEXRFormat.UEnable': 0,
                    'OpenEXRFormat.VEnable': 0,
                    'OpenEXRFormat.XNormEnable': 0,
                    'OpenEXRFormat.YNormEnable': 0,
                    'OpenEXRFormat.ZNormEnable': 0,
                    'OpenEXRFormat.XVelEnable': 0,
                    'OpenEXRFormat.YVelEnable': 0,
                    'OpenEXRFormat.XRevVelEnable': 0,
                    'OpenEXRFormat.YRevVelEnable': 0,
                    'OpenEXRFormat.XPosEnable': 0,
                    'OpenEXRFormat.YPosEnable': 0,
                    'OpenEXRFormat.ZPosEnable': 0,
                    'OpenEXRFormat.XDispEnable': 0,
                    'OpenEXRFormat.YDispEnable': 0,
                },
                'connected_to': {
                    'ref_id': random_ref_id
                }
            }
        }
    ]

    # selectively generate output format
    saver_nodes = self.get_main_saver_node()

    for data in output_format_data:
        format_name = data['name']
        node_tree = data['node_tree']

        # now check if a node with the same name exists
        format_node = None
        format_node_name = output_node_name_generator(format_name)
        for node in saver_nodes:
            node_name = node.GetAttrs('TOOLS_Name')
            if node_name.startswith(format_node_name):
                format_node = node
                break

        # create the saver node for this format if missing
        if not format_node:
            self.create_node_tree(node_tree)
        else:
            # just update the input_lists
            if 'input_list' in node_tree:
                input_list = node_tree['input_list']
                for key in input_list:
                    node_input_list = format_node.GetInputList()
                    for input_entry_key in node_input_list.keys():
                        input_entry = node_input_list[input_entry_key]
                        input_id = input_entry.GetAttrs()['INPS_ID']
                        if input_id == key:
                            value = input_list[key]
                            input_entry[0] = value
                            break

        try:
            os.makedirs(
                os.path.dirname(
                    output_path_generator(format_name)
                )
            )
        except __HOLE__:
            # path already exists
            pass
OSError
dataset/ETHPy150Open eoyilmaz/anima/anima/env/fusion/__init__.py/Fusion.create_main_saver_node
5,837
def import_from_string(val, setting_name):
    """
    Attempt to import a class from a string representation.
    """
    try:
        parts = val.split('.')
        module_path, class_name = '.'.join(parts[:-1]), parts[-1]
        module = importlib.import_module(module_path)
        return getattr(module, class_name)
    except (__HOLE__, AttributeError) as e:
        raise ImportError('Could not import {} for API setting {}. {}: {}.'
                          .format(val, setting_name, e.__class__.__name__, e))
ImportError
dataset/ETHPy150Open ankitpopli1891/django-autotranslate/autotranslate/utils.py/import_from_string
5,838
def __init__(self, app, conf, logger=None):
    self.app = app
    self.conf = conf
    self.logger = logger or get_logger(conf, log_route='container_sync')
    self.realms_conf = ContainerSyncRealms(
        os.path.join(
            conf.get('swift_dir', '/etc/swift'),
            'container-sync-realms.conf'),
        self.logger)
    self.allow_full_urls = config_true_value(
        conf.get('allow_full_urls', 'true'))
    # configure current realm/cluster for /info
    self.realm = self.cluster = None
    current = conf.get('current', None)
    if current:
        try:
            self.realm, self.cluster = (p.upper() for p in
                                        current.strip('/').split('/'))
        except __HOLE__:
            self.logger.error('Invalid current //REALM/CLUSTER (%s)',
                              current)
    self.register_info()
ValueError
dataset/ETHPy150Open openstack/swift/swift/common/middleware/container_sync.py/ContainerSync.__init__
5,839
def register_info(self):
    dct = {}
    for realm in self.realms_conf.realms():
        clusters = self.realms_conf.clusters(realm)
        if clusters:
            dct[realm] = {'clusters': dict((c, {}) for c in clusters)}
    if self.realm and self.cluster:
        try:
            dct[self.realm]['clusters'][self.cluster]['current'] = True
        except __HOLE__:
            self.logger.error('Unknown current //REALM/CLUSTER (%s)',
                              '//%s/%s' % (self.realm, self.cluster))
    register_swift_info('container_sync', realms=dct)
KeyError
dataset/ETHPy150Open openstack/swift/swift/common/middleware/container_sync.py/ContainerSync.register_info
5,840
def _load_output(output_dir, func_name, timestamp=None, metadata=None,
                 mmap_mode=None, verbose=0):
    """Load output of a computation."""
    if verbose > 1:
        signature = ""
        try:
            if metadata is not None:
                args = ", ".join(['%s=%s' % (name, value)
                                  for name, value
                                  in metadata['input_args'].items()])
                signature = "%s(%s)" % (os.path.basename(func_name), args)
            else:
                signature = os.path.basename(func_name)
        except __HOLE__:
            pass

        if timestamp is not None:
            t = "% 16s" % format_time(time.time() - timestamp)
        else:
            t = ""

        if verbose < 10:
            print('[Memory]%s: Loading %s...' % (t, str(signature)))
        else:
            print('[Memory]%s: Loading %s from %s' % (
                t, str(signature), output_dir))

    filename = os.path.join(output_dir, 'output.pkl')
    if not os.path.isfile(filename):
        raise KeyError(
            "Non-existing cache value (may have been cleared).\n"
            "File %s does not exist" % filename)
    return numpy_pickle.load(filename, mmap_mode=mmap_mode)


# An in-memory store to avoid looking at the disk-based function
# source code to check if a function definition has changed
KeyError
dataset/ETHPy150Open scikit-learn/scikit-learn/sklearn/externals/joblib/memory.py/_load_output
5,841
def _write_func_code(self, filename, func_code, first_line):
    """ Write the function code and the filename to a file.
    """
    # We store the first line because the filename and the function
    # name is not always enough to identify a function: people
    # sometimes have several functions named the same way in a
    # file. This is bad practice, but joblib should be robust to bad
    # practice.
    func_code = u'%s %i\n%s' % (FIRST_LINE_TEXT, first_line, func_code)
    with io.open(filename, 'w', encoding="UTF-8") as out:
        out.write(func_code)
    # Also store in the in-memory store of function hashes
    is_named_callable = False
    if PY3_OR_LATER:
        is_named_callable = (hasattr(self.func, '__name__') and
                             self.func.__name__ != '<lambda>')
    else:
        is_named_callable = (hasattr(self.func, 'func_name') and
                             self.func.func_name != '<lambda>')
    if is_named_callable:
        # Don't do this for lambda functions or strange callable
        # objects, as it ends up being too fragile
        func_hash = self._hash_func()
        try:
            _FUNCTION_HASHES[self.func] = func_hash
        except __HOLE__:
            # Some callable are not hashable
            pass
TypeError
dataset/ETHPy150Open scikit-learn/scikit-learn/sklearn/externals/joblib/memory.py/MemorizedFunc._write_func_code
5,842
def _check_previous_func_code(self, stacklevel=2):
    """
        stacklevel is the depth a which this function is called, to
        issue useful warnings to the user.
    """
    # First check if our function is in the in-memory store.
    # Using the in-memory store not only makes things faster, but it
    # also renders us robust to variations of the files when the
    # in-memory version of the code does not vary
    try:
        if self.func in _FUNCTION_HASHES:
            # We use as an identifier the id of the function and its
            # hash. This is more likely to falsely change than have hash
            # collisions, thus we are on the safe side.
            func_hash = self._hash_func()
            if func_hash == _FUNCTION_HASHES[self.func]:
                return True
    except __HOLE__:
        # Some callables are not hashable
        pass

    # Here, we go through some effort to be robust to dynamically
    # changing code and collision. We cannot inspect.getsource
    # because it is not reliable when using IPython's magic "%run".
    func_code, source_file, first_line = get_func_code(self.func)
    func_dir = self._get_func_dir()
    func_code_file = os.path.join(func_dir, 'func_code.py')

    try:
        with io.open(func_code_file, encoding="UTF-8") as infile:
            old_func_code, old_first_line = \
                extract_first_line(infile.read())
    except IOError:
        self._write_func_code(func_code_file, func_code, first_line)
        return False
    if old_func_code == func_code:
        return True

    # We have differing code, is this because we are referring to
    # different functions, or because the function we are referring to has
    # changed?
    _, func_name = get_func_name(self.func, resolv_alias=False,
                                 win_characters=False)
    if old_first_line == first_line == -1 or func_name == '<lambda>':
        if not first_line == -1:
            func_description = '%s (%s:%i)' % (func_name,
                                               source_file, first_line)
        else:
            func_description = func_name
        warnings.warn(JobLibCollisionWarning(
            "Cannot detect name collisions for function '%s'"
            % func_description), stacklevel=stacklevel)

    # Fetch the code at the old location and compare it. If it is the
    # same than the code store, we have a collision: the code in the
    # file has not changed, but the name we have is pointing to a new
    # code block.
    if not old_first_line == first_line and source_file is not None:
        possible_collision = False
        if os.path.exists(source_file):
            _, func_name = get_func_name(self.func, resolv_alias=False)
            num_lines = len(func_code.split('\n'))
            with open_py_source(source_file) as f:
                on_disk_func_code = f.readlines()[
                    old_first_line - 1:old_first_line - 1 + num_lines - 1]
            on_disk_func_code = ''.join(on_disk_func_code)
            possible_collision = (on_disk_func_code.rstrip() ==
                                  old_func_code.rstrip())
        else:
            possible_collision = source_file.startswith('<doctest ')
        if possible_collision:
            warnings.warn(JobLibCollisionWarning(
                'Possible name collisions between functions '
                "'%s' (%s:%i) and '%s' (%s:%i)" %
                (func_name, source_file, old_first_line,
                 func_name, source_file, first_line)),
                stacklevel=stacklevel)

    # The function has changed, wipe the cache directory.
    # XXX: Should be using warnings, and giving stacklevel
    if self._verbose > 10:
        _, func_name = get_func_name(self.func, resolv_alias=False)
        self.warn("Function %s (stored in %s) has changed." %
                  (func_name, func_dir))
    self.clear(warn=True)
    return False
TypeError
dataset/ETHPy150Open scikit-learn/scikit-learn/sklearn/externals/joblib/memory.py/MemorizedFunc._check_previous_func_code
5,843
def _persist_output(self, output, dir):
    """ Persist the given output tuple in the directory.
    """
    try:
        mkdirp(dir)
        filename = os.path.join(dir, 'output.pkl')
        numpy_pickle.dump(output, filename, compress=self.compress)
        if self._verbose > 10:
            print('Persisting in %s' % dir)
    except __HOLE__:
        " Race condition in the creation of the directory "
OSError
dataset/ETHPy150Open scikit-learn/scikit-learn/sklearn/externals/joblib/memory.py/MemorizedFunc._persist_output
5,844
def updateMessages(self, parameters):
    """Modify the messages created by internal validation for each tool
    parameter. This method is called after internal validation."""
    input_parameter = parameters[0]
    variable_parameter = parameters[1]
    dimension_parameter = parameters[2]
    output_parameter = parameters[3]
    output_var_parameter = parameters[4]
    type_parameter = parameters[5]

    dataset = None
    # Open dataset and populate variable names
    if input_parameter.value is not None:
        try:
            dataset = mds.netcdf.Dataset(input_parameter.valueAsText, '')
        except __HOLE__, exception:
            if "No such file or directory" in str(exception) or \
                    "Invalid argument" in str(exception):
                input_parameter.setErrorMessage(
                    mds.messages.INPUT_DATASET_DOES_NOT_RESOLVE_TO_FILENAME.format(
                        input_parameter.valueAsText))
            elif "Malformed or inaccessible DAP DDS" in str(exception):
                input_parameter.setErrorMessage(
                    mds.messages.INPUT_DATASET_URL_MALFORMED.format(
                        input_parameter.valueAsText))
            else:
                input_parameter.setErrorMessage(
                    mds.messages.INPUT_DATASET_GENERIC_ERROR.format(
                        input_parameter.valueAsText, str(exception)))
        except Exception, exception:
            input_parameter.setErrorMessage(
                mds.messages.INPUT_DATASET_GENERIC_ERROR.format(
                    input_parameter.valueAsText, str(exception)))

    if dataset is not None:
        # Fill variable list
        variable_parameter.filter.type = "ValueList"
        variable_parameter.filter.list = list(dataset.variable_names())
    else:
        # Clear variable list if no input specified
        variable_parameter.filter.type = "ValueList"
        variable_parameter.filter.list = []
        variable_parameter.value = ""
        # Clear dimension list if no input specified
        dimension_parameter.filter.type = "ValueList"
        dimension_parameter.filter.list = []
        dimension_parameter.value = ""

    # Update dimension list
    if (variable_parameter.value is not None) and (dataset is not None):
        # Fill dimensions list
        dimension_parameter.filter.type = "ValueList"
        dimension_parameter.filter.list = list(
            dataset.variable_dimension_names(variable_parameter.valueAsText))
    else:
        # Clear dimension list if no input specified
        dimension_parameter.filter.type = "ValueList"
        dimension_parameter.filter.list = []
        dimension_parameter.value = ""

    # Ensure an output variable name is entered
    if (output_var_parameter.altered) and (output_var_parameter.value is None):
        output_var_parameter.setErrorMessage(
            '%s: Must input a variable name.' % output_var_parameter.name)

    # Ensure output variable name is not the same as an existing variable's
    if (output_var_parameter.value is not None) and \
            (dataset is not None) and \
            (output_var_parameter.value in dataset.variable_names()):
        output_var_parameter.setErrorMessage(
            '%s: Name cannot be the same as that of an existing variable.' \
            % output_var_parameter.name)

    # Populate a default output variable name and update it with changes
    # to other parameters as long as the user hasn't modified it themself
    if (variable_parameter.value is not None) and \
            (dimension_parameter.value is not None) and \
            (not output_var_parameter.altered):
        if type_parameter.value is None:
            output_var_parameter.value = variable_parameter.value + \
                "_MEAN" + dimension_parameter.value
        else:
            output_var_parameter.value = variable_parameter.value + \
                "_" + type_parameter.value + dimension_parameter.value

    # Ensure output file has a .nc extension
    if output_parameter.value is not None:
        output_filename = output_parameter.valueAsText
        if os.path.splitext(output_filename)[1] != ".nc":
            output_parameter.setErrorMessage(
                mds.messages.OUTPUT_FILE_EXTENSION_MUST_BE_NC)
    return

# ---------------------------------------------------------
# Statistics
RuntimeError
dataset/ETHPy150Open Esri/solutions-geoprocessing-toolbox/suitability/toolboxes/scripts/MultidimensionSupplementalTools/MultidimensionSupplementalTools/Scripts/mds/tools/get_variable_statistics_over_dimension.py/GetVariableStatisticsOverDimension.updateMessages
5,845
def execute(self, parameters, messages):
    """The source code of the tool."""
    input_parameter = parameters[0]
    variable_parameter = parameters[1]
    dimension_parameter = parameters[2]
    output_parameter = parameters[3]
    output_var_parameter = parameters[4]
    type_parameter = parameters[5]

    dataset_name = input_parameter.valueAsText

    # Open dataset
    try:
        dataset = mds.netcdf.Dataset(dataset_name, '')
    except __HOLE__, exception:
        # Handle errors not detected by updateMessages.
        messages.addErrorMessage(str(exception))
        raise arcpy.ExecuteError

    # Variable of interest
    var1 = dataset.variable(variable_parameter.valueAsText)
    # Dimension of interest
    dim1 = var1.dimensions.index(dimension_parameter.valueAsText)

    # Perform statistic
    result1 = self.calculate_statistic(var1[:], dim1, \
        type_parameter.valueAsText)

    # Collect output dataset information
    output_dims = list(dataset.variable_dimension_names(
        variable_parameter.valueAsText))
    output_dims.remove(dimension_parameter.valueAsText)
    output_dims = tuple(output_dims)
    output_filename = output_parameter.valueAsText
    output_name = output_var_parameter.valueAsText

    # Create new dataset
    dataset.xcopy(dataset.data_variable_names(), output_filename)

    # Create new variable in dataset
    with netCDF4.Dataset(output_filename, mode="a") as newdataset:
        newvar = newdataset.createVariable(output_name, var1.dtype, \
            output_dims)
        for attribute_name in var1.ncattrs():
            newvar.setncattr(attribute_name, var1.getncattr(attribute_name))
        newvar[:] = result1

    # Output new variable name
    arcpy.SetParameter(5, output_name)

    return
RuntimeError
dataset/ETHPy150Open Esri/solutions-geoprocessing-toolbox/suitability/toolboxes/scripts/MultidimensionSupplementalTools/MultidimensionSupplementalTools/Scripts/mds/tools/get_variable_statistics_over_dimension.py/GetVariableStatisticsOverDimension.execute
5,846
def _parse_range_header(range_header):
    """Parse HTTP Range header.

    Args:
      range_header: A str representing the value of a range header as retrived
        from Range or X-AppEngine-BlobRange.

    Returns:
      Tuple (start, end):
        start: Start index of blob to retrieve. May be negative index.
        end: None or end index. End index is exclusive.
      (None, None) if there is a parse error.
    """
    if not range_header:
        return None, None
    try:
        # ValueError if <1 split.
        range_type, ranges = range_header.split('=', 1)
        if range_type != 'bytes':
            return None, None
        ranges = ranges.lstrip()
        if ',' in ranges:
            return None, None
        end = None
        if ranges.startswith('-'):
            start = int(ranges)
            if start == 0:
                return None, None
        else:
            split_range = ranges.split('-', 1)
            start = int(split_range[0])
            if len(split_range) == 2 and split_range[1].strip():
                end = int(split_range[1]) + 1
                if start > end:
                    return None, None
        return start, end
    except __HOLE__:
        return None, None
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/devappserver2/blob_download.py/_parse_range_header
5,847
def authenticate(self, **credentials):
    User = get_user_model()
    try:
        lookup_kwargs = get_user_lookup_kwargs({
            "{username}__iexact": credentials["username"]
        })
        user = User.objects.get(**lookup_kwargs)
    except (User.DoesNotExist, __HOLE__):
        return None
    else:
        try:
            if user.check_password(credentials["password"]):
                return user
        except KeyError:
            return None
KeyError
dataset/ETHPy150Open pinax/django-user-accounts/account/auth_backends.py/UsernameAuthenticationBackend.authenticate
5,848
def authenticate(self, **credentials):
    qs = EmailAddress.objects.filter(Q(primary=True) | Q(verified=True))
    try:
        email_address = qs.get(email__iexact=credentials["username"])
    except (EmailAddress.DoesNotExist, KeyError):
        return None
    else:
        user = email_address.user
        try:
            if user.check_password(credentials["password"]):
                return user
        except __HOLE__:
            return None
KeyError
dataset/ETHPy150Open pinax/django-user-accounts/account/auth_backends.py/EmailAuthenticationBackend.authenticate
5,849
def testProbabilisticEncryptDecryptUnicodeString(self):
    logging.debug('Running testProbabilisticEncryptDecryptUtf8String method.')
    # test success with different plaintexts
    for plaintext in (u'22', u'this is test string one', u'-1.3', u'5545',
                      u"""this is a longer test string that should go on for
                      more than two AES blocks or perhaps many more of them
                      also."""):
        ciphertext = self.cipher.Encrypt(plaintext)
        self.assertEqual(plaintext, self.cipher.Decrypt(ciphertext))
    # non string type should raise an error.
    try:
        self.cipher.Encrypt(22)
        self.fail()
    except __HOLE__:
        pass  # success
ValueError
dataset/ETHPy150Open google/encrypted-bigquery-client/src/ebq_crypto_test.py/ProbabilisticCiphertTest.testProbabilisticEncryptDecryptUnicodeString
5,850
def testPseudonymEncryptDecryptUnicodeString(self):
    logging.debug('Running testPseudonymEncryptDecryptUtf8String method.')
    # test success with different plaintexts
    for plaintext in (u'22', u'this is test string one', u'-1.3', u'5545',
                      u"""this is a longer test string that should go on for
                      more than two AES blocks or perhaps many more of them
                      also."""):
        ciphertext = self.cipher.Encrypt(plaintext)
        self.assertEqual(plaintext, self.cipher.Decrypt(ciphertext))
    # non string type should raise an error.
    try:
        self.cipher.Encrypt(22)
        self.fail()
    except __HOLE__:
        pass  # success
ValueError
dataset/ETHPy150Open google/encrypted-bigquery-client/src/ebq_crypto_test.py/PseudonymCiphertTest.testPseudonymEncryptDecryptUnicodeString
5,851
def testHomomorphicEncryptIntDecryptInt(self): logging.debug('Running testHomomorphicEncryptIntDecryptInt method.') # test success with different plaintexts for plaintext in (2, 5, 55, 333333333, 44444444444): ciphertext = self.cipher.Encrypt(plaintext) self.assertEqual(plaintext, self.cipher.Decrypt(ciphertext)) # non int/long type should raise an error. try: self.cipher.Encrypt('22') self.fail() except __HOLE__: pass # success try: self.cipher.Encrypt(22222222222222222222222) self.fail() except ValueError: pass # success
ValueError
dataset/ETHPy150Open google/encrypted-bigquery-client/src/ebq_crypto_test.py/HomomorphicIntCiphertTest.testHomomorphicEncryptIntDecryptInt
5,852
def testHomomorphicEncryptFloatDecryptFloat(self): logging.debug('Running testHomomorphicEncryptFloatDecryptFloat method.') # test success with different plaintexts for plaintext in (1.22, 0.4565, 55.45, 33.3333333, 444444444.44): ciphertext = self.cipher.Encrypt(plaintext) self.assertEqual(plaintext, self.cipher.Decrypt(ciphertext)) # encrypting a too large float should raise an error. try: self.cipher.Encrypt(1.0*2**400) self.fail() except __HOLE__: pass # success # non int/long type should raise an error. try: self.cipher.Encrypt('22') self.fail() except ValueError: pass # success
ValueError
dataset/ETHPy150Open google/encrypted-bigquery-client/src/ebq_crypto_test.py/HomomorphicFloatCipherTest.testHomomorphicEncryptFloatDecryptFloat
5,853
def build_image(): openstack_client = init() flavor_name = cfg.CONF.flavor_name image_name = cfg.CONF.image_name if nova.does_flavor_exist(openstack_client.nova, flavor_name): LOG.info('Using existing flavor: %s', flavor_name) else: try: nova.create_flavor(openstack_client.nova, name=flavor_name, ram=512, vcpus=1, disk=3) LOG.info('Created flavor %s', flavor_name) except nova.ForbiddenException: LOG.error('User does not have permissions to create the flavor. ' 'Specify user with admin privileges or specify existing ' 'flavor via --flavor-name parameter.') exit(1) if glance.get_image(openstack_client.glance, image_name): LOG.info('Using existing image: %s', image_name) else: template = None template_filename = cfg.CONF.image_builder_template try: am = lambda f: config.IMAGE_BUILDER_TEMPLATES + '%s.yaml' % f template = utils.read_file(template_filename, alias_mapper=am) except __HOLE__: LOG.error('Error reading template file: %s. ' 'Please verify correctness of --image-builder-template ' 'parameter', template_filename) exit(1) external_net = (cfg.CONF.external_net or neutron.choose_external_net(openstack_client.neutron)) stack_name = 'shaker_%s' % uuid.uuid4() stack_parameters = {'external_net': external_net, 'flavor': flavor_name} stack_id = None try: stack_id = heat.create_stack(openstack_client.heat, stack_name, template, stack_parameters) outputs = heat.get_stack_outputs(openstack_client.heat, stack_id) LOG.debug('Stack outputs: %s', outputs) LOG.debug('Waiting for server to shutdown') server_id = outputs['server_info'].get('id') nova.wait_server_shutdown(openstack_client.nova, server_id) LOG.debug('Making snapshot') openstack_client.nova.servers.create_image( server_id, image_name) LOG.debug('Waiting for server to snapshot') nova.wait_server_snapshot(openstack_client.nova, server_id) LOG.info('Created image: %s', image_name) except BaseException as e: if isinstance(e, KeyboardInterrupt): LOG.info('Caught SIGINT. Terminating') else: error_msg = 'Error while building the image: %s' % e LOG.error(error_msg) LOG.exception(e) finally: if stack_id and cfg.CONF.cleanup_on_error: LOG.debug('Cleaning up the stack: %s', stack_id) openstack_client.heat.stacks.delete(stack_id)
IOError
dataset/ETHPy150Open openstack/shaker/shaker/engine/image_builder.py/build_image
5,854
def xss_strip_all_tags(s): """ Strips out all HTML. """ return s def fixup(m): text = m.group(0) if text[:1] == "<": return "" # ignore tags if text[:2] == "&#": try: if text[:3] == "&#x": return unichr(int(text[3:-1], 16)) else: return unichr(int(text[2:-1])) except ValueError: pass elif text[:1] == "&": import htmlentitydefs entity = htmlentitydefs.entitydefs.get(text[1:-1]) if entity: if entity[:2] == "&#": try: return unichr(int(entity[2:-1])) except __HOLE__: pass else: return unicode(entity, "iso-8859-1") return text # leave as is return re.sub("(?s)<[^>]*>|&#?\w+;", fixup, s)
ValueError
dataset/ETHPy150Open benadida/helios-server/helios/utils.py/xss_strip_all_tags
5,855
@staticmethod def is_reg(name): try: return name.upper() in x86_regs + x86_16bits_regs except __HOLE__: # Not a string return False
AttributeError
dataset/ETHPy150Open hakril/PythonForWindows/windows/native_exec/simple_x86.py/X86.is_reg
5,856
def mem(data): """Parse a memory access string of format ``[EXPR]`` or ``seg:[EXPR]`` ``EXPR`` may describe: ``BASE | INDEX * SCALE | DISPLACEMENT`` or any combination (in this order) """ if not isinstance(data, str): raise TypeError("mem need a string to parse") data = data.strip() prefix = None if not (data.startswith("[") and data.endswith("]")): if data[2] != ":": raise ValueError("mem acces expect <[EXPR]> or <seg:[EXPR]") prefix_name = data[:2].upper() if prefix_name not in x86_segment_selectors: raise ValueError("Unknow segment selector {0}".format(prefix_name)) prefix = prefix_name data = data[3:] if not (data.startswith("[") and data.endswith("]")): raise ValueError("mem acces expect <[EXPR]> or <seg:[EXPR]") # Quick and dirty.. I don't like parsing stuff data = data[1:-1] items = data.split("+") parsed_items = {'prefix': prefix} for item in items: item = item.strip() # Index * scale if "*" in item: if 'index' in parsed_items: raise ValueError("Multiple index / index*scale in mem expression <{0}>".format(data)) sub_items = item.split("*") if len(sub_items) != 2: raise ValueError("Invalid item <{0}> in mem access".format(item)) index, scale = sub_items index, scale = index.strip(), scale.strip() if not X86.is_reg(index): raise ValueError("Invalid index <{0}> in mem access".format(index)) if X86.reg_size(index) == 16: raise NotImplementedError("16bits modrm") try: scale = int(scale, 0) except ValueError: raise ValueError("Invalid scale <{0}> in mem access".format(scale)) parsed_items['scale'] = scale parsed_items['index'] = index else: # displacement / base / index alone if X86.is_reg(item): if X86.reg_size(item) == 16: raise NotImplementedError("16bits modrm") if 'base' not in parsed_items: parsed_items['base'] = item continue # Already have base + index -> cannot have another register in expression if 'index' in parsed_items: raise ValueError("Multiple index / index*scale in mem expression <{0}>".format(data)) parsed_items['index'] = item continue try: disp = int(item, 0) except __HOLE__: raise ValueError("Invalid base/index or displacement <{0}> in mem access".format(item)) if 'disp' in parsed_items: raise ValueError("Multiple displacement in mem expression <{0}>".format(data)) parsed_items['disp'] = disp return create_displacement(**parsed_items) # Helper to get the BitArray associated to a register
ValueError
dataset/ETHPy150Open hakril/PythonForWindows/windows/native_exec/simple_x86.py/mem
5,857
def accept_arg(self, args, instr_state): x = args[0] try: return (1, self.reg_opcode[x.upper()]) except (KeyError, __HOLE__): return (None, None)
AttributeError
dataset/ETHPy150Open hakril/PythonForWindows/windows/native_exec/simple_x86.py/X86RegisterSelector.accept_arg
5,858
def accept_arg(self, args, instr_state): try: x = int(args[0]) except (__HOLE__, TypeError): return (None, None) try: imm8 = accept_as_8immediat(x) except ImmediatOverflow: return None, None return (1, BitArray.from_string(imm8))
ValueError
dataset/ETHPy150Open hakril/PythonForWindows/windows/native_exec/simple_x86.py/Imm8.accept_arg
5,859
def accept_arg(self, args, instr_state): try: x = int(args[0]) except (__HOLE__, TypeError): return (None, None) try: imm16 = accept_as_16immediat(x) except ImmediatOverflow: return None, None return (1, BitArray.from_string(imm16))
ValueError
dataset/ETHPy150Open hakril/PythonForWindows/windows/native_exec/simple_x86.py/Imm16.accept_arg
5,860
def accept_arg(self, args, instr_state): try: x = int(args[0]) except (__HOLE__, TypeError): return (None, None) try: imm32 = accept_as_32immediat(x) except ImmediatOverflow: return None, None return (1, BitArray.from_string(imm32))
ValueError
dataset/ETHPy150Open hakril/PythonForWindows/windows/native_exec/simple_x86.py/Imm32.accept_arg
5,861
def accept_arg(self, args, instr_state): writecr = self.writecr if len(args) < 2: return None, None reg = args[writecr] cr = args[not writecr] if not isinstance(cr, str): return None, None if not cr.lower().startswith("cr"): return None, None try: cr_number = int(cr[2:], 10) except __HOLE__ as e: raise ValueError("Invalid ControlRegister {0}".format(cr)) if cr_number > 7: raise ValueError("Invalid ControlRegister {0}".format(cr)) modrm_params = [reg, x86_regs[cr_number]] + args[2:] return ModRM([ModRM_REG__REG], has_direction_bit=False).accept_arg(modrm_params, instr_state)
ValueError
dataset/ETHPy150Open hakril/PythonForWindows/windows/native_exec/simple_x86.py/ControlRegisterModRM.accept_arg
5,862
def accept_arg(self, args, instr_state): try: jump_size = int(args[0]) except (__HOLE__, TypeError): return (None, None) jump_size -= self.sub try: jmp_imm = self.accept_as_Ximmediat(jump_size) except ImmediatOverflow: return (None, None) return (1, BitArray.from_string(jmp_imm))
ValueError
dataset/ETHPy150Open hakril/PythonForWindows/windows/native_exec/simple_x86.py/JmpImm.accept_arg
5,863
def update_metaconfig(metaconfig_name, *args, **kwargs): zk_value, version = _kazoo_client(ZK_HOSTS).get( METACONFIG_ZK_PATH_FORMAT.format(metaconfig_name)) s3_key = METACONFIG_S3_KEY_FORMAT.format(metaconfig_name) s3_path = zk_util.construct_s3_path(s3_key, zk_value) try: metaconfig_data = s3config.S3Config(AWS_KEY_FILE, S3_BUCKET, s3_endpoint=S3_ENDPOINT).get_config_string(s3_path) except __HOLE__ as ve: log.error("Abort downloading from s3 key %s due to ValueError: %s" % (s3_path, ve)) return except Exception as e: log.error("Abort downloading from s3 key %s due to unexpected s3 exception: %s" % (s3_path, e)) return metaconfig_list = json.loads(metaconfig_data) for metaconfig in metaconfig_list: _place_watch_from_metaconfig(metaconfig)
ValueError
dataset/ETHPy150Open pinterest/kingpin/kingpin/zk_update_monitor/zk_update_monitor.py/update_metaconfig
5,864
def _safe(self, fn): try: fn() except (__HOLE__, KeyboardInterrupt): raise except Exception as e: warnings.warn( "testing_reaper couldn't " "rollback/close connection: %s" % e)
SystemExit
dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/sqlalchemy/testing/engines.py/ConnectionKiller._safe
5,865
def _safe(self, fn): try: fn() except (__HOLE__, KeyboardInterrupt): raise except Exception as e: warnings.warn( "ReconnectFixture couldn't " "close connection: %s" % e)
SystemExit
dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/sqlalchemy/testing/engines.py/ReconnectFixture._safe
5,866
def __getattribute__(self, key): try: return object.__getattribute__(self, key) except __HOLE__: pass subject, buffer = [object.__getattribute__(self, x) for x in ('_subject', '_buffer')] try: result = type(subject).__getattribute__(subject, key) except AttributeError: buffer.append(ReplayableSession.NoAttribute) raise else: if type(result) not in ReplayableSession.Natives: buffer.append(ReplayableSession.Callable) return type(self)(buffer, result) else: buffer.append(result) return result
AttributeError
dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/sqlalchemy/testing/engines.py/ReplayableSession.Recorder.__getattribute__
5,867
def __getattribute__(self, key): try: return object.__getattribute__(self, key) except __HOLE__: pass buffer = object.__getattribute__(self, '_buffer') result = buffer.popleft() if result is ReplayableSession.Callable: return self elif result is ReplayableSession.NoAttribute: raise AttributeError(key) else: return result
AttributeError
dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/sqlalchemy/testing/engines.py/ReplayableSession.Player.__getattribute__
5,868
def decode(path): if isinstance(path, bytes_cls): try: path = path.decode(fs_encoding, 'strict') except __HOLE__: if not platform.is_linux(): raise path = path.decode(fs_fallback_encoding, 'strict') return path
UnicodeDecodeError
dataset/ETHPy150Open ppalex7/SourcePawnCompletions/watchdog/utils/unicode_paths.py/decode
5,869
def DeleteById(self, sid): """Delete session data for a session id. Args: sid: str, session id """ try: del self._sessions[sid] except __HOLE__: pass
KeyError
dataset/ETHPy150Open google/simian/src/simian/auth/base.py/AuthSessionDict.DeleteById
5,870
def _LoadKey(self, keystr): """Load a key and return a key object. Args: keystr: str, key in PEM format Returns: tlslite.utils.RSAKey instance Raises: ValueError: keystr is improperly formed """ try: key = tlslite_bridge.parsePEMKey(keystr) except (SyntaxError, __HOLE__), e: raise ValueError('invalid PEM key format: %s' % str(e)) return key
AttributeError
dataset/ETHPy150Open google/simian/src/simian/auth/base.py/Auth1._LoadKey
5,871
def VerifyCertSignedByCA(self, cert): """Verify that a client cert was signed by the required CA cert. Args: cert: certificate object, client cert to verify Returns: True or False """ ca_cert = self.LoadOtherCert(self._ca_pem) try: return cert.IsSignedBy(ca_cert) except (x509.Error, __HOLE__), e: logging.exception(str(e)) raise CryptoError( 'VerifyCertSignedByCA: IsSignedBy: %s' % str(e))
AssertionError
dataset/ETHPy150Open google/simian/src/simian/auth/base.py/Auth1.VerifyCertSignedByCA
5,872
def VerifyDataSignedWithCert(self, data, signature, cert=None): """Verify that this cert signed this data. Args: data: str, data to verify signing signature: str, signature data cert: certificate object, or None for this instance's cert Returns: True or False Raises: CryptoError: if the underlying crypto APIs raise an assertion due to malformed data """ if cert is None: cert = self._cert signature_b = array.array('B') signature_b.fromstring(str(signature)) data_b = array.array('B') data_b.fromstring(str(data)) try: pk = cert.GetPublicKey() return pk.hashAndVerify(signature_b, data_b) except __HOLE__, e: logging.exception(str(e)) raise CryptoError( 'VerifyDataSignedWithCert: hashAndVerify: %s' % str(e))
AssertionError
dataset/ETHPy150Open google/simian/src/simian/auth/base.py/Auth1.VerifyDataSignedWithCert
5,873
def Input(self, n=None, m=None, s=None): # pylint: disable=arguments-differ """Input parameters to the auth function. Callers should provide n, OR m and s. Args: n: str, nonce from client, an integer in str form e.g. '12345' m: str, message from client s: str, b64 signature from client Raises: ValueError: if invalid combination of arguments supplied """ super(Auth1, self).Input() self.ResetState() # paranoia clear auth_state, tests run OK without if n is not None and m is None and s is None: #logging.debug('Auth step 1') try: cn = int(n) except ValueError: logging.critical('Non-integer Cn was supplied: %s', str(n)) self.AuthFail() return if cn < MIN_VALUE_CN: logging.critical('Cn value is too small: %d', cn) self.AuthFail() return sn = self.Nonce() m = self._AssembleMessage(str(cn), str(sn)) sig = self.Sign(m) sig = base64.urlsafe_b64encode(sig) m = self._AssembleMessage(m, sig) self._AddOutput(m) self.SessionSetCnSn(cn, sn) #logging.debug('Server supplied Sn %s for Cn %s', sn, cn) elif m is not None and s is not None and n is None: #logging.debug('Auth step 2') class _Error(Exception): """Temporary exception used here.""" log_prefix = '' cn = None try: # open up the message to get the client cert 'c'. try: (c, cn, sn) = self._SplitMessage(m, 3) except MessageError, e: raise _Error('SplitMessage MessageError (%s)', str(e)) log_prefix = '' # signature 's' and client cert 'c' are urlsafe_base64 try: s = base64.urlsafe_b64decode(str(s)) c = base64.urlsafe_b64decode(str(c)) except __HOLE__, e: raise _Error('Invalid c or s parameter b64 format(%s)', str(e)) # load X509 client cert 'c' into object try: client_cert = self.LoadOtherCert(c) except ValueError, e: raise _Error('Invalid cert supplied %s' % str(e)) # sanity check if not client_cert.GetPublicKey(): raise _Error('Malformed X509 cert with no public key') client_cert.SetRequiredIssuer(self._required_issuer) try: client_cert.CheckAll() except x509.Error, e: raise _Error('X509 certificate error: %s' % str(e)) # obtain uuid from cert uuid = client_cert.GetSubject() log_prefix = uuid # client_cert is loaded #logging.debug('%s Client cert loaded', log_prefix) #logging.debug('%s Message = %s', log_prefix, m) # verify that the client cert is legitimate if not self.VerifyCertSignedByCA(client_cert): raise _Error('Client cert is not signed by the required CA') # verify that the message was signed by the client cert if not self.VerifyDataSignedWithCert(m, s, client_cert): raise _Error('Signed message does not verify') # verify that the Sn was the one offered to this Cn previously if not self.SessionVerifyKnownCnSn(cn, sn): raise _Error('Client offered unknown Sn %s', sn) # success! #logging.debug('%s Client auth successful', log_prefix) # careful here, switching the state setting and AddOutput # lines causes a hard to test bug (because Input() test mocks out # AuthToken()) self._auth_state = AuthState.OK token = self.SessionCreateAuthToken(uuid) self._AddOutput(token) except (_Error, CryptoError), e: logging.warning('%s Auth error: %s', log_prefix, e.args) logging.debug('%s Auth message: %s', log_prefix, m) logging.debug( '%s Auth sig: %s', log_prefix, base64.urlsafe_b64encode(s)) self.AuthFail() # no matter what, delete the current cn:sn pair if an attempt # was made to auth against it, success or not. if cn is not None: self.SessionDelCn(cn) else: #logging.debug('Auth step unknown') raise ValueError('invalid input')
TypeError
dataset/ETHPy150Open google/simian/src/simian/auth/base.py/Auth1.Input
5,874
def Input(self, m=None, t=None): # pylint: disable=arguments-differ """Accept input to auth methods. Callers should provide either m OR t, or neither, but not both. Args: m: str, message from server (cn, sn, signature) t: str, token reply from server Raises: ValueError: if invalid combination of arguments supplied """ self.ResetState() # no input - step 0, producing step 1 input if m is None and t is None: cn = str(self.Nonce()) self._AddOutput(cn) self._session.Set('cn', cn) # message input - step 1 output, produce step 2 input elif m is not None and t is None: class _Error(Exception): """Temporary exception used here.""" cn = None try: # open up the message to get the client nonce (cn), # server nonce (sn) and signature (s) try: (cn, sn, s) = self._SplitMessage(m, 3) except MessageError, e: raise _Error('SplitMessage MessageError (%s)' % str(e)) try: s = base64.urlsafe_b64decode(str(s)) except TypeError, e: raise _Error('Invalid s parameter b64 format (%s)' % str(e)) # verify cert is a server cert try: server_cert = self.LoadOtherCert(self._server_cert_pem) except __HOLE__, e: raise _Error('Server cert load error: %s' % str(e)) if not self.VerifyCertSignedByCA(server_cert): raise _Error('Server cert is not signed by known CA') # load the Cn value that this client used previously orig_cn = self._session.Get('cn') if cn != orig_cn: raise _Error('Server supplied Cn does not match our Cn') # verify signature on message "Cn Sn" tmp_m = self._AssembleMessage(cn, sn) if not self.VerifyDataSignedWithCert(tmp_m, s, server_cert): raise _Error('Sn is not signed by server cert') # create return message: base64_client_cert cn sn c = base64.urlsafe_b64encode(self._cert_str) out_m = self._AssembleMessage(c, cn, sn) sig = self.Sign(out_m) sig = base64.urlsafe_b64encode(str(sig)) #logging.debug('M= %s', out_m) #logging.debug('S= %s', sig) self._AddOutput({'m': out_m, 's': sig}) except _Error, e: self._session.DeleteById('cn') self._AddError(str(e)) self.AuthFail() # token input - step 3 input elif t is not None and m is None: if t == Auth1.TOKEN: self._session.DeleteById('cn') self.ResetState() self._auth_state = AuthState.OK else: self.AuthFail() # unknown input else: raise ValueError('Invalid input')
ValueError
dataset/ETHPy150Open google/simian/src/simian/auth/base.py/Auth1Client.Input
5,875
def parser(chunks): """ Parse a data chunk into a dictionary; catch failures and return suitable defaults """ dictionaries = [] for chunk in chunks: try: dictionaries.append(json.loads(chunk)) except __HOLE__: dictionaries.append({ 'unparsed': chunk }) return dictionaries
ValueError
dataset/ETHPy150Open jomido/jogger/jogger/jogger.py/parser
5,876
def _where(self, *comparators, **comparator): def by_schema(self, schema): log = self[:] to_remove = set() for line in log: if not isinstance(line, collections.Mapping): try: d = line.__dict__ except __HOLE__: continue else: d = line for k, t in schema.items(): if k.startswith('~'): notted = True k = k[1:] else: notted = False if k not in d: to_remove.add(line) else: v = d[k] if (isinstance(v, collections.Iterable) and not isinstance(v, (str, unicode))): method = self._vector_match else: method = self._scalar_match if notted: if method(v, t): to_remove.add(line) else: if not method(v, t): to_remove.add(line) for line in to_remove: log.remove(line) return log log = self.__class__(self) if comparator: comparators += (comparator,) for comparator in comparators: if isinstance(comparator, collections.Mapping): log = by_schema(log, comparator) continue if callable(comparator): log = self.__class__( [line for line in log if comparator(line)]) continue raise TypeError("Invalid comparator") return log
AttributeError
dataset/ETHPy150Open jomido/jogger/jogger/jogger.py/APIMixin._where
5,877
def run(self): self.log.info('ListenerThread started on {0}:{1}(udp)'.format( self.host, self.port)) rdr = collectd_network.Reader(self.host, self.port) try: while ALIVE: try: items = rdr.interpret(poll_interval=self.poll_interval) self.send_to_collector(items) except __HOLE__, e: self.log.warn('Dropping bad packet: {0}'.format(e)) except Exception, e: self.log.error('caught exception: type={0}, exc={1}'.format(type(e), e)) self.log.info('ListenerThread - stop')
ValueError
dataset/ETHPy150Open BrightcoveOS/Diamond/src/collectors/jcollectd/jcollectd.py/ListenerThread.run
5,878
def handle(self, *args, **options): try: from celery.task.control import broadcast except __HOLE__: raise CommandError("Celery is not currently installed.") # Shut them all down. broadcast("shutdown")
ImportError
dataset/ETHPy150Open duointeractive/django-fabtastic/fabtastic/management/commands/ft_celeryd_restart.py/Command.handle
5,879
def show_current(name): ''' Display the current highest-priority alternative for a given alternatives link CLI Example: .. code-block:: bash salt '*' alternatives.show_current editor ''' alt_link_path = '/etc/alternatives/{0}'.format(name) try: return os.readlink(alt_link_path) except __HOLE__: log.error( 'alternatives: path {0} does not exist'.format(alt_link_path) ) return False
OSError
dataset/ETHPy150Open saltstack/salt/salt/modules/alternatives.py/show_current
5,880
def run(self): """Include a file as part of the content of this reST file.""" if not self.state.document.settings.file_insertion_enabled: raise self.warning('"%s" directive disabled.' % self.name) source = self.state_machine.input_lines.source( self.lineno - self.state_machine.input_offset - 1) source_dir = os.path.dirname(os.path.abspath(source)) path = directives.path(self.arguments[0]) if path.startswith('<') and path.endswith('>'): path = os.path.join(self.standard_include_path, path[1:-1]) path = os.path.normpath(os.path.join(source_dir, path)) path = utils.relative_path(None, path) path = nodes.reprunicode(path) encoding = self.options.get( 'encoding', self.state.document.settings.input_encoding) e_handler=self.state.document.settings.input_encoding_error_handler tab_width = self.options.get( 'tab-width', self.state.document.settings.tab_width) try: self.state.document.settings.record_dependencies.add(path) include_file = io.FileInput(source_path=path, encoding=encoding, error_handler=e_handler) except UnicodeEncodeError, error: raise self.severe(u'Problems with "%s" directive path:\n' 'Cannot encode input file path "%s" ' '(wrong locale?).' % (self.name, SafeString(path))) except __HOLE__, error: raise self.severe(u'Problems with "%s" directive path:\n%s.' % (self.name, ErrorString(error))) startline = self.options.get('start-line', None) endline = self.options.get('end-line', None) try: if startline or (endline is not None): lines = include_file.readlines() rawtext = ''.join(lines[startline:endline]) else: rawtext = include_file.read() except UnicodeError, error: raise self.severe(u'Problem with "%s" directive:\n%s' % (self.name, ErrorString(error))) # start-after/end-before: no restrictions on newlines in match-text, # and no restrictions on matching inside lines vs. line boundaries after_text = self.options.get('start-after', None) if after_text: # skip content in rawtext before *and incl.* a matching text after_index = rawtext.find(after_text) if after_index < 0: raise self.severe('Problem with "start-after" option of "%s" ' 'directive:\nText not found.' % self.name) rawtext = rawtext[after_index + len(after_text):] before_text = self.options.get('end-before', None) if before_text: # skip content in rawtext after *and incl.* a matching text before_index = rawtext.find(before_text) if before_index < 0: raise self.severe('Problem with "end-before" option of "%s" ' 'directive:\nText not found.' % self.name) rawtext = rawtext[:before_index] include_lines = statemachine.string2lines(rawtext, tab_width, convert_whitespace=True) if 'literal' in self.options: # Convert tabs to spaces, if `tab_width` is positive. if tab_width >= 0: text = rawtext.expandtabs(tab_width) else: text = rawtext literal_block = nodes.literal_block(rawtext, source=path, classes=self.options.get('class', [])) literal_block.line = 1 self.add_name(literal_block) if 'number-lines' in self.options: try: startline = int(self.options['number-lines'] or 1) except ValueError: raise self.error(':number-lines: with non-integer ' 'start value') endline = startline + len(include_lines) if text.endswith('\n'): text = text[:-1] tokens = NumberLines([([], text)], startline, endline) for classes, value in tokens: if classes: literal_block += nodes.inline(value, value, classes=classes) else: literal_block += nodes.Text(value, value) else: literal_block += nodes.Text(text, text) return [literal_block] if 'code' in self.options: self.options['source'] = path codeblock = CodeBlock(self.name, [self.options.pop('code')], # arguments self.options, include_lines, # content self.lineno, self.content_offset, self.block_text, self.state, self.state_machine) return codeblock.run() self.state_machine.insert_input(include_lines, path) return []
IOError
dataset/ETHPy150Open timonwong/OmniMarkupPreviewer/OmniMarkupLib/Renderers/libs/python2/docutils/parsers/rst/directives/misc.py/Include.run
5,881
def run(self): if (not self.state.document.settings.raw_enabled or (not self.state.document.settings.file_insertion_enabled and ('file' in self.options or 'url' in self.options))): raise self.warning('"%s" directive disabled.' % self.name) attributes = {'format': ' '.join(self.arguments[0].lower().split())} encoding = self.options.get( 'encoding', self.state.document.settings.input_encoding) e_handler=self.state.document.settings.input_encoding_error_handler if self.content: if 'file' in self.options or 'url' in self.options: raise self.error( '"%s" directive may not both specify an external file ' 'and have content.' % self.name) text = '\n'.join(self.content) elif 'file' in self.options: if 'url' in self.options: raise self.error( 'The "file" and "url" options may not be simultaneously ' 'specified for the "%s" directive.' % self.name) source_dir = os.path.dirname( os.path.abspath(self.state.document.current_source)) path = os.path.normpath(os.path.join(source_dir, self.options['file'])) path = utils.relative_path(None, path) try: raw_file = io.FileInput(source_path=path, encoding=encoding, error_handler=e_handler) # TODO: currently, raw input files are recorded as # dependencies even if not used for the chosen output format. self.state.document.settings.record_dependencies.add(path) except IOError, error: raise self.severe(u'Problems with "%s" directive path:\n%s.' % (self.name, ErrorString(error))) try: text = raw_file.read() except UnicodeError, error: raise self.severe(u'Problem with "%s" directive:\n%s' % (self.name, ErrorString(error))) attributes['source'] = path elif 'url' in self.options: source = self.options['url'] # Do not import urllib2 at the top of the module because # it may fail due to broken SSL dependencies, and it takes # about 0.15 seconds to load. import urllib2 try: raw_text = urllib2.urlopen(source).read() except (urllib2.URLError, __HOLE__, OSError), error: raise self.severe(u'Problems with "%s" directive URL "%s":\n%s.' % (self.name, self.options['url'], ErrorString(error))) raw_file = io.StringInput(source=raw_text, source_path=source, encoding=encoding, error_handler=e_handler) try: text = raw_file.read() except UnicodeError, error: raise self.severe(u'Problem with "%s" directive:\n%s' % (self.name, ErrorString(error))) attributes['source'] = source else: # This will always fail because there is no content. self.assert_has_content() raw_node = nodes.raw('', text, **attributes) (raw_node.source, raw_node.line) = self.state_machine.get_source_and_line(self.lineno) return [raw_node]
IOError
dataset/ETHPy150Open timonwong/OmniMarkupPreviewer/OmniMarkupLib/Renderers/libs/python2/docutils/parsers/rst/directives/misc.py/Raw.run
5,882
def run(self): if not isinstance(self.state, states.SubstitutionDef): raise self.error( 'Invalid context: the "%s" directive can only be used within ' 'a substitution definition.' % self.name) substitution_definition = self.state_machine.node if 'trim' in self.options: substitution_definition.attributes['ltrim'] = 1 substitution_definition.attributes['rtrim'] = 1 if 'ltrim' in self.options: substitution_definition.attributes['ltrim'] = 1 if 'rtrim' in self.options: substitution_definition.attributes['rtrim'] = 1 codes = self.comment_pattern.split(self.arguments[0])[0].split() element = nodes.Element() for code in codes: try: decoded = directives.unicode_code(code) except __HOLE__, error: raise self.error(u'Invalid character code: %s\n%s' % (code, ErrorString(error))) element += nodes.Text(decoded) return element.children
ValueError
dataset/ETHPy150Open timonwong/OmniMarkupPreviewer/OmniMarkupLib/Renderers/libs/python2/docutils/parsers/rst/directives/misc.py/Unicode.run
5,883
def run(self): try: class_value = directives.class_option(self.arguments[0]) except __HOLE__: raise self.error( 'Invalid class attribute value for "%s" directive: "%s".' % (self.name, self.arguments[0])) node_list = [] if self.content: container = nodes.Element() self.state.nested_parse(self.content, self.content_offset, container) for node in container: node['classes'].extend(class_value) node_list.extend(container.children) else: pending = nodes.pending( misc.ClassAttribute, {'class': class_value, 'directive': self.name}, self.block_text) self.state_machine.document.note_pending(pending) node_list.append(pending) return node_list
ValueError
dataset/ETHPy150Open timonwong/OmniMarkupPreviewer/OmniMarkupLib/Renderers/libs/python2/docutils/parsers/rst/directives/misc.py/Class.run
5,884
def run(self): """Dynamically create and register a custom interpreted text role.""" if self.content_offset > self.lineno or not self.content: raise self.error('"%s" directive requires arguments on the first ' 'line.' % self.name) args = self.content[0] match = self.argument_pattern.match(args) if not match: raise self.error('"%s" directive arguments not valid role names: ' '"%s".' % (self.name, args)) new_role_name = match.group(1) base_role_name = match.group(3) messages = [] if base_role_name: base_role, messages = roles.role( base_role_name, self.state_machine.language, self.lineno, self.state.reporter) if base_role is None: error = self.state.reporter.error( 'Unknown interpreted text role "%s".' % base_role_name, nodes.literal_block(self.block_text, self.block_text), line=self.lineno) return messages + [error] else: base_role = roles.generic_custom_role assert not hasattr(base_role, 'arguments'), ( 'Supplemental directive arguments for "%s" directive not ' 'supported (specified by "%r" role).' % (self.name, base_role)) try: converted_role = convert_directive_function(base_role) (arguments, options, content, content_offset) = ( self.state.parse_directive_block( self.content[1:], self.content_offset, converted_role, option_presets={})) except states.MarkupError, detail: error = self.state_machine.reporter.error( 'Error in "%s" directive:\n%s.' % (self.name, detail), nodes.literal_block(self.block_text, self.block_text), line=self.lineno) return messages + [error] if 'class' not in options: try: options['class'] = directives.class_option(new_role_name) except __HOLE__, detail: error = self.state_machine.reporter.error( u'Invalid argument for "%s" directive:\n%s.' % (self.name, SafeString(detail)), nodes.literal_block( self.block_text, self.block_text), line=self.lineno) return messages + [error] role = roles.CustomRole(new_role_name, base_role, options, content) roles.register_local_role(new_role_name, role) return messages
ValueError
dataset/ETHPy150Open timonwong/OmniMarkupPreviewer/OmniMarkupLib/Renderers/libs/python2/docutils/parsers/rst/directives/misc.py/Role.run
5,885
def run(self): if not isinstance(self.state, states.SubstitutionDef): raise self.error( 'Invalid context: the "%s" directive can only be used within ' 'a substitution definition.' % self.name) format_str = '\n'.join(self.content) or '%Y-%m-%d' if sys.version_info< (3, 0): try: format_str = format_str.encode(locale_encoding or 'utf-8') except UnicodeEncodeError: raise self.warning(u'Cannot encode date format string ' u'with locale encoding "%s".' % locale_encoding) text = time.strftime(format_str) if sys.version_info< (3, 0): # `text` is a byte string that may contain non-ASCII characters: try: text = text.decode(locale_encoding or 'utf-8') except __HOLE__: text = text.decode(locale_encoding or 'utf-8', 'replace') raise self.warning(u'Error decoding "%s"' u'with locale encoding "%s".' % (text, locale_encoding)) return [nodes.Text(text)]
UnicodeDecodeError
dataset/ETHPy150Open timonwong/OmniMarkupPreviewer/OmniMarkupLib/Renderers/libs/python2/docutils/parsers/rst/directives/misc.py/Date.run
5,886
def get_node(self, k): try: db = self.nodeDB except AttributeError: return k else: try: return db[k] except __HOLE__: db[k] = Node(k) return db[k]
KeyError
dataset/ETHPy150Open cjlee112/pygr/tests/graph_test.py/Query_Test.get_node
5,887
def node_graph(self, g): try: db = self.nodeDB except __HOLE__: return g out = {} for k, e in g.items(): k = self.get_node(k) d = out.setdefault(k, {}) for dest, edge in e.items(): d[self.get_node(dest)] = edge return out
AttributeError
dataset/ETHPy150Open cjlee112/pygr/tests/graph_test.py/Query_Test.node_graph
5,888
def node_list(self, l): try: db = self.nodeDB except __HOLE__: return l out = [] for k in l: out.append(self.get_node(k)) return out
AttributeError
dataset/ETHPy150Open cjlee112/pygr/tests/graph_test.py/Query_Test.node_list
5,889
def node_result(self, r): try: db = self.nodeDB except __HOLE__: return r l = [] for d in r: d2 = {} for k, v in d.items(): d2[k] = self.get_node(v) l.append(d2) return l
AttributeError
dataset/ETHPy150Open cjlee112/pygr/tests/graph_test.py/Query_Test.node_result
5,890
def update_graph(self, datagraph): try: g = self.datagraph except __HOLE__: return datagraph else: g.update(datagraph) return g
AttributeError
dataset/ETHPy150Open cjlee112/pygr/tests/graph_test.py/Query_Test.update_graph
5,891
def test_delraise(self): "Delete raise" datagraph = self.datagraph datagraph += self.get_node(1) datagraph += self.get_node(2) datagraph[self.get_node(2)] += self.get_node(3) try: for i in range(0, 2): datagraph -= self.get_node(3) raise ValueError('failed to catch bad node deletion attempt') except __HOLE__: pass # THIS IS THE CORRECT RESULT
KeyError
dataset/ETHPy150Open cjlee112/pygr/tests/graph_test.py/Mapping_Test.test_delraise
5,892
def test_setitemraise(self): "Setitemraise" datagraph = self.datagraph datagraph += self.get_node(1) try: datagraph[self.get_node(1)] = self.get_node(2) raise KeyError('failed to catch bad setitem attempt') except __HOLE__: pass # THIS IS THE CORRECT RESULT
ValueError
dataset/ETHPy150Open cjlee112/pygr/tests/graph_test.py/Mapping_Test.test_setitemraise
5,893
def clean(self, value): from ca_provinces import PROVINCES_NORMALIZED super(CAProvinceField, self).clean(value) if value in EMPTY_VALUES: return u'' try: value = value.strip().lower() except __HOLE__: pass else: try: return PROVINCES_NORMALIZED[value.strip().lower()].decode('ascii') except KeyError: pass raise ValidationError(self.error_messages['invalid'])
AttributeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/django/contrib/localflavor/ca/forms.py/CAProvinceField.clean
5,894
def main(args=None): try: if args is None: args = sys.argv[1:] SenlinShell().main(args) except __HOLE__: print(_("... terminating senlin client"), file=sys.stderr) sys.exit(130) except Exception as e: if '--debug' in args or '-d' in args: raise else: print(encodeutils.safe_encode(six.text_type(e)), file=sys.stderr) sys.exit(1)
KeyboardInterrupt
dataset/ETHPy150Open openstack/python-senlinclient/senlinclient/shell.py/main
5,895
def _calculate(self, data): x = data.pop('x') fun = self.params['fun'] n = self.params['n'] args = self.params['args'] if not hasattr(fun, '__call__'): raise GgplotError("stat_function requires parameter 'fun' to be " + "a function or any other callable object") old_fun = fun if isinstance(args,list): fun = lambda x: old_fun(x, *args) elif isinstance(args,dict): fun = lambda x: old_fun(x, **args) elif args is not None: fun = lambda x: old_fun(x, args) else: fun = lambda x: old_fun(x) x = np.linspace(x.min(), x.max(),n) y = list(map(fun, x)) new_data = pd.DataFrame({'x': x, 'y': y}) # Copy the other aesthetics into the new dataframe # Don't copy the any previous 'y' assignments try: del data['y'] except __HOLE__: pass n = len(x) for ae in data: new_data[ae] = make_iterable_ntimes(data[ae].iloc[0], n) return new_data
KeyError
dataset/ETHPy150Open yhat/ggplot/ggplot/stats/stat_function.py/stat_function._calculate
5,896
def configure(self): """ Configure the driver to use the stored configuration options Any store that needs special configuration should implement this method. If the store was not able to successfully configure itself, it should raise `exception.BadDriverConfiguration` """ # Here we set up the various file-based image cache paths # that we need in order to find the files in different states # of cache management. self.set_paths() # We do a quick attempt to write a user xattr to a temporary file # to check that the filesystem is even enabled to support xattrs image_cache_dir = self.base_dir fake_image_filepath = os.path.join(image_cache_dir, 'checkme') with open(fake_image_filepath, 'wb') as fake_file: fake_file.write("XXX") fake_file.flush() try: set_xattr(fake_image_filepath, 'hits', '1') except __HOLE__, e: if e.errno == errno.EOPNOTSUPP: msg = _("The device housing the image cache directory " "%(image_cache_dir)s does not support xattr. It is " "likely you need to edit your fstab and add the " "user_xattr option to the appropriate line for the " "device housing the cache directory.") % locals() logger.error(msg) raise exception.BadDriverConfiguration(driver="xattr", reason=msg) else: # Cleanup after ourselves... if os.path.exists(fake_image_filepath): os.unlink(fake_image_filepath)
IOError
dataset/ETHPy150Open rcbops/glance-buildpackage/glance/image_cache/drivers/xattr.py/Driver.configure
5,897
def get_xattr(path, key, **kwargs): """Return the value for a particular xattr If the key doesn't not exist, or xattrs aren't supported by the file system then a KeyError will be raised, that is, unless you specify a default using kwargs. """ namespaced_key = _make_namespaced_xattr_key(key) entry_xattr = xattr.xattr(path) try: return entry_xattr[namespaced_key] except __HOLE__: if 'default' in kwargs: return kwargs['default'] else: raise
KeyError
dataset/ETHPy150Open rcbops/glance-buildpackage/glance/image_cache/drivers/xattr.py/get_xattr
5,898
def test_abs__file__(self): # Make sure all imported modules have their __file__ attribute # as an absolute path. # Handled by abs__file__() site.abs__file__() for module in (sys, os, __builtin__): try: self.assertTrue(os.path.isabs(module.__file__), repr(module)) except __HOLE__: continue # We could try everything in sys.modules; however, when regrtest.py # runs something like test_frozen before test_site, then we will # be testing things loaded *after* test_site did path normalization
AttributeError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_site.py/ImportSideEffectTests.test_abs__file__
5,899
def test_sitecustomize_executed(self): # If sitecustomize is available, it should have been imported. if "sitecustomize" not in sys.modules: try: import sitecustomize except __HOLE__: pass else: self.fail("sitecustomize not imported automatically")
ImportError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_site.py/ImportSideEffectTests.test_sitecustomize_executed