Dataset schema:

    column       type            range / values
    Unnamed: 0   int64           0 - 10k
    function     stringlengths   79 - 138k
    label        stringclasses   20 values
    info         stringlengths   42 - 261
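Each row below pairs an integer index, a Python function whose exception name has been masked with __HOLE__, the exception class that fills the mask (label), and the path of the source file (info). A minimal sketch of loading such a table with pandas; the file name is a hypothetical placeholder, since the artifact backing this dump is not named:

    import pandas as pd

    # Hypothetical file name for this dump; substitute the real artifact.
    df = pd.read_parquet("exception_dataset.parquet")

    print(df.columns.tolist())  # ['Unnamed: 0', 'function', 'label', 'info']
    print(df.loc[0, 'label'])   # one of the 20 exception classes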
5,000
def _process_road_rxmsg(self, msg, sender):
    '''
    Send to the right queue
    msg is the message body dict
    sender is the unique name of the remote estate that sent the message
    '''
    try:
        s_estate, s_yard, s_share = msg['route']['src']
        d_estate, d_yard, d_share = msg['route']['dst']
    except (ValueError, __HOLE__):
        log.error('Received invalid message: {0}'.format(msg))
        return

    if s_estate is None:  # drop
        return

    log.debug("**** Road Router rxMsg **** id={0} estate={1} yard={2}\n"
              "   msg= {3}\n".format(
                  self.opts.value['id'],
                  self.road_stack.value.local.name,
                  self.lane_stack.value.local.name,
                  msg))

    if d_estate is not None and d_estate != self.road_stack.value.local.name:
        log.error(
            'Road Router Received message for wrong estate: {0}'.format(d_estate))
        return

    if d_yard is not None:
        # Meant for another yard, send it off!
        if d_yard in self.lane_stack.value.nameRemotes:
            self.lane_stack.value.transmit(
                msg, self.lane_stack.value.nameRemotes[d_yard].uid)
            return
        return

    if d_share is None:
        # No queue destination!
        log.error('Received message without share: {0}'.format(msg))
        return
    elif d_share == 'fun':
        if self.road_stack.value.kind == kinds.applKinds.minion:
            self.fun.value.append(msg)
    elif d_share == 'stats_req':
        self.stats_req.value.append(msg)
        #log.debug("\n**** Stats Request \n {0}\n".format(msg))
IndexError
dataset/ETHPy150Open saltstack/salt/salt/daemons/flo/core.py/SaltRaetRouterMinion._process_road_rxmsg
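To recover a runnable sample from a row, substitute the label back into the __HOLE__ placeholder. A minimal sketch, assuming the row is a plain dict with the fields above:

    def fill_hole(row):
        """Replace the masked exception name with the row's label."""
        return row['function'].replace('__HOLE__', row['label'])

    # For the row above, '__HOLE__' in the except clause becomes 'IndexError'.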
5,001
def _process_lane_rxmsg(self, msg, sender):
    '''
    Send uxd messages to the right queue or
    forward them to the correct yard etc.

    msg is message body dict
    sender is unique name of remote that sent the message
    '''
    try:
        s_estate, s_yard, s_share = msg['route']['src']
        d_estate, d_yard, d_share = msg['route']['dst']
    except (__HOLE__, IndexError):
        log.error('Lane Router Received invalid message: {0}'.format(msg))
        return

    if s_yard is None:
        return  # drop message

    if s_estate is None:  # substitute local estate
        s_estate = self.road_stack.value.local.name
        msg['route']['src'] = (s_estate, s_yard, s_share)

    log.debug("**** Lane Router rxMsg **** id={0} estate={1} yard={2}\n"
              "   msg={3}\n".format(
                  self.opts.value['id'],
                  self.road_stack.value.local.name,
                  self.lane_stack.value.local.name,
                  msg))

    if d_estate is None:
        pass
    elif d_estate != self.road_stack.value.local.name:
        # Forward to the correct estate
        if d_estate in self.road_stack.value.nameRemotes:
            self.road_stack.value.message(
                msg, self.road_stack.value.nameRemotes[d_estate].uid)
        return

    if d_yard is None:
        pass
    elif d_yard != self.lane_stack.value.local.name:
        # Meant for another yard, send it off!
        if d_yard in self.lane_stack.value.nameRemotes:
            self.lane_stack.value.transmit(
                msg, self.lane_stack.value.nameRemotes[d_yard].uid)
            return
        return

    if d_share is None:
        # No queue destination!
        log.error('Lane Router Received message without share: {0}'.format(msg))
        return
    elif d_share == 'event_req':
        self.event_req.value.append(msg)
        #log.debug("\n**** Event Subscribe \n {0}\n".format(msg))
    elif d_share == 'event_fire':
        self.event.value.append(msg)
        #log.debug("\n**** Event Fire \n {0}\n".format(msg))
    elif d_share == 'remote_cmd':  # assume minion to master or salt-call
        if not self.road_stack.value.remotes:
            log.error("**** Lane Router: Missing joined master. Unable to route "
                      "remote_cmd. Requeuing".format())
            self.laters.value.append((msg, sender))
            return
        d_estate = self._get_master_estate_name(
            clustered=self.opts.get('cluster_mode', False))
        if not d_estate:
            log.error("**** Lane Router: No available destination estate for 'remote_cmd'."
                      "Unable to route. Requeuing".format())
            self.laters.value.append((msg, sender))
            return
        msg['route']['dst'] = (d_estate, d_yard, d_share)
        log.debug("**** Lane Router: Missing destination estate for 'remote_cmd'. "
                  "Using default route={0}.".format(msg['route']['dst']))
        self.road_stack.value.message(
            msg, self.road_stack.value.nameRemotes[d_estate].uid)
ValueError
dataset/ETHPy150Open saltstack/salt/salt/daemons/flo/core.py/SaltRaetRouterMinion._process_lane_rxmsg
5,002
def _send_presence(self, msg):
    '''
    Forward a presence message to all subscribed yards
    Presence message has a route
    '''
    y_name = msg['route']['src'][1]
    if y_name not in self.lane_stack.value.nameRemotes:  # subscriber not a remote
        pass  # drop msg don't answer
    else:
        if 'data' in msg and 'state' in msg['data']:
            state = msg['data']['state']
        else:
            state = None

        # create answer message
        if state in [None, 'available', 'present']:
            present = odict()
            for name in self.availables.value:
                minion = self.aliveds.value.get(name, None)
                present[name] = minion.ha[0] if minion else None
            data = {'present': present}
        else:
            # TODO: update to really return joineds
            states = {'joined': self.alloweds,
                      'allowed': self.alloweds,
                      'alived': self.aliveds,
                      'reaped': self.reapeds}
            try:
                minions = states[state].value
            except __HOLE__:
                # error: wrong/unknown state requested
                log.error('Lane Router Received invalid message: {0}'.format(msg))
                return

            result = odict()
            for name in minions:
                result[name] = minions[name].ha[0]
            data = {state: result}

        tag = tagify('present', 'presence')
        route = {'dst': (None, None, 'event_fire'),
                 'src': (None, self.lane_stack.value.local.name, None)}
        msg = {'route': route, 'tag': tag, 'data': data}
        self.lane_stack.value.transmit(
            msg, self.lane_stack.value.fetchUidByName(y_name))
        self.lane_stack.value.serviceAll()
KeyError
dataset/ETHPy150Open saltstack/salt/salt/daemons/flo/core.py/SaltRaetPresenter._send_presence
5,003
def _remove_ssh_client(p):
    try:
        _ssh_clients.remove(p)
    except __HOLE__:
        pass
ValueError
dataset/ETHPy150Open esrlabs/git-repo/git_command.py/_remove_ssh_client
5,004
def terminate_ssh_clients():
    global _ssh_clients
    for p in _ssh_clients:
        try:
            os.kill(p.pid, SIGTERM)
            p.wait()
        except __HOLE__:
            pass
    _ssh_clients = []
OSError
dataset/ETHPy150Open esrlabs/git-repo/git_command.py/terminate_ssh_clients
5,005
def sort_feed_items(items, order):
    """Return feed items, sorted according to sortFeedItems."""
    if order == 'asInFeed':
        return items
    (key, reverse) = _sort_arguments(order)
    try:
        sitems = sorted(items, key=key, reverse=reverse)
    except __HOLE__:
        # feedparser normalizes required timestamp fields in ATOM and RSS
        # to the "published"/"updated" fields. Feeds missing it are
        # unsortable by date.
        return items
    return sitems
KeyError
dataset/ETHPy150Open ProgVal/Limnoria/plugins/RSS/plugin.py/sort_feed_items
5,006
def getCommandMethod(self, command):
    try:
        return self.__parent.getCommandMethod(command)
    except __HOLE__:
        return self.get_feed(command[0]).get_command(self)
AttributeError
dataset/ETHPy150Open ProgVal/Limnoria/plugins/RSS/plugin.py/RSS.getCommandMethod
5,007
def app_loop(args, log):
    pulsar_app = _app(args, log)
    sleep = True
    while sleep:
        try:
            time.sleep(5)
        except __HOLE__:
            sleep = False
        except SystemExit:
            sleep = False
        except Exception:
            pass
    try:
        pulsar_app.shutdown()
    except Exception:
        log.exception("Failed to shutdown Pulsar application")
        raise
KeyboardInterrupt
dataset/ETHPy150Open galaxyproject/pulsar/pulsar/main.py/app_loop
5,008
def __init__(self):
    self.config_home = os.path.join(click.get_app_dir('OpenConnect Helper'))
    self.config_file = os.path.join(self.config_home, 'profiles.toml')
    fn = self.config_file
    try:
        with open(fn) as f:
            self.config = toml.load(f)
    except __HOLE__ as e:
        if e.errno != errno.ENOENT:
            raise click.UsageError('Could not open config file: %s' % e)
        self.config = {}
IOError
dataset/ETHPy150Open mitsuhiko/osx-openconnect-helper/openconnect_helper.py/ProfileManager.__init__
5,009
def save(self):
    fn = self.config_file
    try:
        os.makedirs(self.config_home)
    except __HOLE__:
        pass
    with open(fn, 'w') as f:
        return toml.dump(self.config, f)
OSError
dataset/ETHPy150Open mitsuhiko/osx-openconnect-helper/openconnect_helper.py/ProfileManager.save
5,010
def connect(self, name, cert_check=True):
    profile = self.get_profile(name)
    if profile is None:
        raise click.UsageError('The profile "%s" does not exist.' % name)

    kwargs = {}
    stdin = None
    password = self.get_keychain_password(name)
    rsa_token = self.get_rsa_token(name)

    args = ['sudo', 'openconnect']
    if not cert_check:
        args.append('--no-cert-check')
    user = profile.get('user')
    if user is not None:
        args.append('--user=%s' % user)
    group = profile.get('group')
    if group is not None:
        args.append('--authgroup=%s' % group)
    if password is not None:
        args.append('--passwd-on-stdin')
        stdin = password
        kwargs['stdin'] = PIPE
    elif rsa_token is not None:
        args.append('--token-mode=rsa')
        args.append('--token-secret=%s' % rsa_token)
    fingerprint = profile.get('fingerprint')
    if fingerprint is not None:
        args.append('--servercert=%s' % fingerprint)
    args.append(profile['url'])

    c = Popen(args, **kwargs)
    try:
        if stdin is not None:
            c.stdin.write(stdin)
            c.stdin.flush()
            c.stdin.close()
        c.wait()
    except __HOLE__:
        try:
            c.terminate()
        except Exception:
            pass
KeyboardInterrupt
dataset/ETHPy150Open mitsuhiko/osx-openconnect-helper/openconnect_helper.py/ProfileManager.connect
5,011
def validate_fingerprint(ctx, param, value):
    if value is not None:
        fingerprint = value.replace(':', '').strip().upper()
        try:
            if len(fingerprint.decode('hex')) != 20:
                raise ValueError()
        except (TypeError, __HOLE__):
            raise click.BadParameter('Invalid SHA1 fingerprint provided.')
        return fingerprint
ValueError
dataset/ETHPy150Open mitsuhiko/osx-openconnect-helper/openconnect_helper.py/validate_fingerprint
5,012
def _incrdecr_async(self, key, is_negative, delta, namespace=None,
                    initial_value=None, rpc=None):
    """Async version of _incrdecr().

    Returns:
      A UserRPC instance whose get_result() method returns the same
      kind of value as _incrdecr() returns.
    """
    if not isinstance(delta, (int, long)):
        raise TypeError('Delta must be an integer or long, received %r' % delta)
    if delta < 0:
        raise ValueError('Delta must not be negative.')

    if not isinstance(key, basestring):
        try:
            it = iter(key)
        except __HOLE__:
            pass
        else:
            if is_negative:
                delta = -delta
            return self.offset_multi_async(dict((k, delta) for k in it),
                                           namespace=namespace,
                                           initial_value=initial_value,
                                           rpc=rpc)

    request = MemcacheIncrementRequest()
    self._add_app_id(request)
    _add_name_space(request, namespace)
    response = MemcacheIncrementResponse()
    request.set_key(_key_string(key))
    request.set_delta(delta)
    if is_negative:
        request.set_direction(MemcacheIncrementRequest.DECREMENT)
    else:
        request.set_direction(MemcacheIncrementRequest.INCREMENT)
    if initial_value is not None:
        request.set_initial_value(long(initial_value))
        initial_flags = None
        if isinstance(initial_value, int):
            initial_flags = TYPE_INT
        elif isinstance(initial_value, long):
            initial_flags = TYPE_LONG
        if initial_flags is not None:
            request.set_initial_flags(initial_flags)

    return self._make_async_call(rpc, 'Increment', request, response,
                                 self.__incrdecr_hook, None)
TypeError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/api/memcache/__init__.py/Client._incrdecr_async
5,013
def dates(self, xpath):
    result = []
    for value in self.strings(xpath):
        if 'T' in value:
            if value.endswith('Z'):
                value = value[:-1] + ' UTC'
                fmt = '%Y-%m-%dT%H:%M:%S %Z'
            else:
                fmt = '%Y-%m-%dT%H:%M:%S'
        elif value.count('-') == 2:
            fmt = '%Y-%m-%d'
        elif value.count('/') == 2:
            fmt = '%Y/%m/%d'
        else:
            fmt = '%Y%m%d'
        try:
            result.append(datetime.datetime.strptime(value, fmt))
        except __HOLE__:
            raise ValueError('Unknown date format: %s' % value)
    return result
ValueError
dataset/ETHPy150Open infrae/moai/moai/utils.py/XPath.dates
5,014
def Collect(self):
    """Collects the stats."""
    user, system = self.proc.cpu_times()
    percent = self.proc.cpu_percent()
    self.cpu_samples.append((rdfvalue.RDFDatetime().Now(), user, system,
                             percent))
    # Keep stats for one hour.
    self.cpu_samples = self.cpu_samples[-3600 / self.sleep_time:]

    # Not supported on MacOS.
    try:
        _, _, read_bytes, write_bytes = self.proc.io_counters()
        self.io_samples.append((rdfvalue.RDFDatetime().Now(), read_bytes,
                                write_bytes))
        self.io_samples = self.io_samples[-3600 / self.sleep_time:]
    except (AttributeError, __HOLE__, psutil.Error):
        pass
NotImplementedError
dataset/ETHPy150Open google/grr/grr/client/client_stats.py/ClientStatsCollector.Collect
5,015
def PrintIOSample(self):
    try:
        return str(self.proc.io_counters())
    except (NotImplementedError, __HOLE__):
        return "Not available on this platform."
AttributeError
dataset/ETHPy150Open google/grr/grr/client/client_stats.py/ClientStatsCollector.PrintIOSample
5,016
def get_field_info(self, field):
    """
    Given an instance of a serializer field, return a dictionary
    of metadata about it.
    """
    field_info = OrderedDict()
    serializer = field.parent

    if isinstance(field, serializers.ManyRelatedField):
        field_info['type'] = self.type_lookup[field.child_relation]
    else:
        field_info['type'] = self.type_lookup[field]

    try:
        serializer_model = getattr(serializer.Meta, 'model')
        field_info['relationship_type'] = self.relation_type_lookup[
            getattr(serializer_model, field.field_name)]
    except __HOLE__:
        pass
    except AttributeError:
        pass
    else:
        field_info['relationship_resource'] = get_related_resource_type(field)

    field_info['required'] = getattr(field, 'required', False)

    attrs = [
        'read_only', 'write_only', 'label', 'help_text',
        'min_length', 'max_length',
        'min_value', 'max_value', 'initial'
    ]

    for attr in attrs:
        value = getattr(field, attr, None)
        if value is not None and value != '':
            field_info[attr] = force_text(value, strings_only=True)

    if getattr(field, 'child', None):
        field_info['child'] = self.get_field_info(field.child)
    elif getattr(field, 'fields', None):
        field_info['children'] = self.get_serializer_info(field)

    if (not field_info.get('read_only') and
            hasattr(field, 'choices') and
            not field_info.get('relationship_resource')):
        field_info['choices'] = [
            {
                'value': choice_value,
                'display_name': force_text(choice_name, strings_only=True)
            }
            for choice_value, choice_name in field.choices.items()
        ]

    if hasattr(serializer, 'included_serializers') and 'relationship_resource' in field_info:
        field_info['allows_include'] = field.field_name in serializer.included_serializers

    return field_info
KeyError
dataset/ETHPy150Open django-json-api/django-rest-framework-json-api/rest_framework_json_api/metadata.py/JSONAPIMetadata.get_field_info
5,017
def bind_key_hold(self, method, key, modifiers=0):
    """Bind a method to a key at runtime to be invoked when the key is
    held down, this replaces any existing key hold binding for this key.
    To unbind the key entirely, pass ``None`` for method.
    """
    if method is not None:
        self._key_hold_map[key, modifiers & self.MODIFIER_MASK] = method
    else:
        try:
            del self._key_hold_map[key, modifiers & self.MODIFIER_MASK]
        except __HOLE__:
            pass
KeyError
dataset/ETHPy150Open caseman/grease/grease/controls.py/KeyControls.bind_key_hold
5,018
def bind_key_press(self, method, key, modifiers=0):
    """Bind a method to a key at runtime to be invoked when the key is
    initially pressed, this replaces any existing key hold binding for
    this key. To unbind the key entirely, pass ``None`` for method.
    """
    if method is not None:
        self._key_press_map[key, modifiers & self.MODIFIER_MASK] = method
    else:
        try:
            del self._key_press_map[key, modifiers & self.MODIFIER_MASK]
        except __HOLE__:
            pass
KeyError
dataset/ETHPy150Open caseman/grease/grease/controls.py/KeyControls.bind_key_press
5,019
def bind_key_release(self, method, key, modifiers=0):
    """Bind a method to a key at runtime to be invoked when the key is
    released, this replaces any existing key hold binding for this key.
    To unbind the key entirely, pass ``None`` for method.
    """
    if method is not None:
        self._key_release_map[key, modifiers & self.MODIFIER_MASK] = method
    else:
        try:
            del self._key_release_map[key, modifiers & self.MODIFIER_MASK]
        except __HOLE__:
            pass
KeyError
dataset/ETHPy150Open caseman/grease/grease/controls.py/KeyControls.bind_key_release
5,020
@base.post_async('/cmd/exec')
def node_exec_command(request):
    t = Talker(request.form['host'], int(request.form['port']))
    try:
        r = t.talk(*json.loads(request.form['cmd']))
    except __HOLE__ as e:
        r = None if e.message == 'No reply' else ('-ERROR: ' + e.message)
    except ReplyError as e:
        r = '-' + e.message
    finally:
        t.close()
    return base.json_result(r)
ValueError
dataset/ETHPy150Open HunanTV/redis-ctl/handlers/commands.py/node_exec_command
5,021
def symbol(ident, bp=0):
    '''
    Gets (and creates if it does not exist) a named symbol.

    Optionally, you can specify a binding power (bp) value, which will be
    used to control operator precedence; the higher the value, the tighter
    a token binds to the tokens that follow.
    '''
    try:
        s = SYMBOLS[ident]
    except __HOLE__:
        class s(SymbolBase):
            pass
        s.__name__ = 'symbol-%s' % (ident,)
        s.ident = ident
        s.lbp = bp
        SYMBOLS[ident] = s
    else:
        s.lbp = max(bp, s.lbp)
    return s


# Helper functions
KeyError
dataset/ETHPy150Open tehmaze/nagios-cli/nagios_cli/filters/parser.py/symbol
5,022
@classmethod
def reader(cls, program, **scope):
    scope.update({
        'None': None, 'null': None,
        'True': True, 'true': True,
        'False': False, 'false': False,
        'empty': '',
    })
    for kind, value in tokenize(program):
        #print (kind, value),
        if kind == 'name':
            s = SYMBOLS['(literal)']()
            try:
                s.value = scope[value]
            except KeyError:
                raise NameError('Name %r is not defined' % (value,))
            yield s
        elif kind == 'variable':
            s = SYMBOLS['(literal)']()
            try:
                s.value = cls.variables[int(value[1:])]
            except __HOLE__:
                s.value = None
            yield s
        elif kind == 'string':
            s = SYMBOLS['(literal)']()
            s.value = value[1:-1]
            yield s
        elif kind == 'number':
            s = SYMBOLS['(literal)']()
            s.value = long(value)
            yield s
        elif kind == 'float':
            s = SYMBOLS['(literal)']()
            s.value = float(value)
            yield s
        elif kind == 'symbol':
            yield SYMBOLS[value]()
        else:
            raise SyntaxError('Unknown operator %s' % (kind,))
        #print '->',
    yield SYMBOLS['(end)']()


# Definition and order of expressions
IndexError
dataset/ETHPy150Open tehmaze/nagios-cli/nagios_cli/filters/parser.py/Parser.reader
5,023
def has_win32com():
    """
    Run this to determine if the local machine has win32com, and if it
    does, include additional tests.
    """
    if not sys.platform.startswith('win32'):
        return False
    try:
        mod = __import__('win32com')
    except __HOLE__:
        return False
    return True
ImportError
dataset/ETHPy150Open balanced/status.balancedpayments.com/venv/lib/python2.7/site-packages/distribute-0.6.34-py2.7.egg/setuptools/tests/test_sandbox.py/has_win32com
5,024
def create_commit_message(repo):
    message = messages.CommitMessage()
    try:
        commit = repo.head.commit
    except ValueError:
        raise NoGitHeadError('On initial commit, no HEAD yet.')
    try:
        repo.git.diff('--quiet')
        has_unstaged_changes = False
    except git.exc.GitCommandError:
        has_unstaged_changes = True
    message.has_unstaged_changes = has_unstaged_changes
    message.sha = commit.hexsha
    message.message = commit.message
    try:
        message.branch = repo.head.ref.name
    except __HOLE__:
        # Allow operating in an environment with a detached HEAD.
        pass
    message.author = messages.AuthorMessage(
        name=commit.author.name, email=commit.author.email)
    return message
TypeError
dataset/ETHPy150Open grow/grow/grow/deployments/utils.py/create_commit_message
5,025
def default(self, obj):
    """Convert Home Assistant objects.

    Hand other objects to the original method.
    """
    if isinstance(obj, datetime):
        return obj.isoformat()
    elif hasattr(obj, 'as_dict'):
        return obj.as_dict()

    try:
        return json.JSONEncoder.default(self, obj)
    except __HOLE__:
        # If the JSON serializer couldn't serialize it
        # it might be a generator, convert it to a list
        try:
            return [self.default(child_obj) for child_obj in obj]
        except TypeError:
            # Ok, we're lost, cause the original error
            return json.JSONEncoder.default(self, obj)
TypeError
dataset/ETHPy150Open home-assistant/home-assistant/homeassistant/remote.py/JSONEncoder.default
5,026
def get_event_listeners(api):
    """List of events that is being listened for."""
    try:
        req = api(METHOD_GET, URL_API_EVENTS)

        return req.json() if req.status_code == 200 else {}

    except (HomeAssistantError, __HOLE__):
        # ValueError if req.json() can't parse the json
        _LOGGER.exception("Unexpected result retrieving event listeners")

        return {}
ValueError
dataset/ETHPy150Open home-assistant/home-assistant/homeassistant/remote.py/get_event_listeners
5,027
def get_state(api, entity_id):
    """Query given API for state of entity_id."""
    try:
        req = api(METHOD_GET, URL_API_STATES_ENTITY.format(entity_id))

        # req.status_code == 422 if entity does not exist
        return ha.State.from_dict(req.json()) \
            if req.status_code == 200 else None

    except (HomeAssistantError, __HOLE__):
        # ValueError if req.json() can't parse the json
        _LOGGER.exception("Error fetching state")

        return None
ValueError
dataset/ETHPy150Open home-assistant/home-assistant/homeassistant/remote.py/get_state
5,028
def get_states(api):
    """Query given API for all states."""
    try:
        req = api(METHOD_GET, URL_API_STATES)

        return [ha.State.from_dict(item) for item in req.json()]

    except (HomeAssistantError, __HOLE__, AttributeError):
        # ValueError if req.json() can't parse the json
        _LOGGER.exception("Error fetching states")

        return []
ValueError
dataset/ETHPy150Open home-assistant/home-assistant/homeassistant/remote.py/get_states
5,029
def get_services(api):
    """Return a list of dicts.

    Each dict has a string "domain" and a list of strings "services".
    """
    try:
        req = api(METHOD_GET, URL_API_SERVICES)

        return req.json() if req.status_code == 200 else {}

    except (HomeAssistantError, __HOLE__):
        # ValueError if req.json() can't parse the json
        _LOGGER.exception("Got unexpected services result")

        return {}
ValueError
dataset/ETHPy150Open home-assistant/home-assistant/homeassistant/remote.py/get_services
5,030
def handle(self, *app_labels, **options):
    format = options['format']
    indent = options['indent']
    using = options['database']
    excludes = options['exclude']
    output = options['output']
    show_traceback = options['traceback']
    use_natural_foreign_keys = options['use_natural_foreign_keys']
    use_natural_primary_keys = options['use_natural_primary_keys']
    use_base_manager = options['use_base_manager']
    pks = options['primary_keys']

    if pks:
        primary_keys = pks.split(',')
    else:
        primary_keys = []

    excluded_apps = set()
    excluded_models = set()
    for exclude in excludes:
        if '.' in exclude:
            try:
                model = apps.get_model(exclude)
            except LookupError:
                raise CommandError('Unknown model in excludes: %s' % exclude)
            excluded_models.add(model)
        else:
            try:
                app_config = apps.get_app_config(exclude)
            except LookupError as e:
                raise CommandError(str(e))
            excluded_apps.add(app_config)

    if len(app_labels) == 0:
        if primary_keys:
            raise CommandError("You can only use --pks option with one model")
        app_list = OrderedDict(
            (app_config, None) for app_config in apps.get_app_configs()
            if app_config.models_module is not None and app_config not in excluded_apps
        )
    else:
        if len(app_labels) > 1 and primary_keys:
            raise CommandError("You can only use --pks option with one model")
        app_list = OrderedDict()
        for label in app_labels:
            try:
                app_label, model_label = label.split('.')
                try:
                    app_config = apps.get_app_config(app_label)
                except LookupError as e:
                    raise CommandError(str(e))
                if app_config.models_module is None or app_config in excluded_apps:
                    continue
                try:
                    model = app_config.get_model(model_label)
                except LookupError:
                    raise CommandError("Unknown model: %s.%s" % (app_label, model_label))

                app_list_value = app_list.setdefault(app_config, [])

                # We may have previously seen a "all-models" request for
                # this app (no model qualifier was given). In this case
                # there is no need adding specific models to the list.
                if app_list_value is not None:
                    if model not in app_list_value:
                        app_list_value.append(model)
            except __HOLE__:
                if primary_keys:
                    raise CommandError("You can only use --pks option with one model")
                # This is just an app - no model qualifier
                app_label = label
                try:
                    app_config = apps.get_app_config(app_label)
                except LookupError as e:
                    raise CommandError(str(e))
                if app_config.models_module is None or app_config in excluded_apps:
                    continue
                app_list[app_config] = None

    # Check that the serialization format exists; this is a shortcut to
    # avoid collating all the objects and _then_ failing.
    if format not in serializers.get_public_serializer_formats():
        try:
            serializers.get_serializer(format)
        except serializers.SerializerDoesNotExist:
            pass
        raise CommandError("Unknown serialization format: %s" % format)

    def get_objects(count_only=False):
        """
        Collate the objects to be serialized. If count_only is True, just
        count the number of objects to be serialized.
        """
        models = serializers.sort_dependencies(app_list.items())
        for model in models:
            if model in excluded_models:
                continue
            if model._meta.proxy and model._meta.proxy_for_model not in models:
                warnings.warn(
                    "%s is a proxy model and won't be serialized." % model._meta.label,
                    category=ProxyModelWarning,
                )
            if not model._meta.proxy and router.allow_migrate_model(using, model):
                if use_base_manager:
                    objects = model._base_manager
                else:
                    objects = model._default_manager

                queryset = objects.using(using).order_by(model._meta.pk.name)
                if primary_keys:
                    queryset = queryset.filter(pk__in=primary_keys)
                if count_only:
                    yield queryset.order_by().count()
                else:
                    for obj in queryset.iterator():
                        yield obj

    try:
        self.stdout.ending = None
        progress_output = None
        object_count = 0
        # If dumpdata is outputting to stdout, there is no way to display progress
        if (output and self.stdout.isatty() and options['verbosity'] > 0):
            progress_output = self.stdout
            object_count = sum(get_objects(count_only=True))
        stream = open(output, 'w') if output else None
        try:
            serializers.serialize(
                format, get_objects(), indent=indent,
                use_natural_foreign_keys=use_natural_foreign_keys,
                use_natural_primary_keys=use_natural_primary_keys,
                stream=stream or self.stdout, progress_output=progress_output,
                object_count=object_count,
            )
        finally:
            if stream:
                stream.close()
    except Exception as e:
        if show_traceback:
            raise
        raise CommandError("Unable to serialize database: %s" % e)
ValueError
dataset/ETHPy150Open django/django/django/core/management/commands/dumpdata.py/Command.handle
5,031
def test_find_and_modify_with_sort(self):
    c = self.db.test
    c.drop()
    for j in range(5):
        c.insert({'j': j, 'i': 0})

    sort = {'j': DESCENDING}
    self.assertEqual(4, c.find_and_modify({}, {'$inc': {'i': 1}}, sort=sort)['j'])
    sort = {'j': ASCENDING}
    self.assertEqual(0, c.find_and_modify({}, {'$inc': {'i': 1}}, sort=sort)['j'])
    sort = [('j', DESCENDING)]
    self.assertEqual(4, c.find_and_modify({}, {'$inc': {'i': 1}}, sort=sort)['j'])
    sort = [('j', ASCENDING)]
    self.assertEqual(0, c.find_and_modify({}, {'$inc': {'i': 1}}, sort=sort)['j'])
    sort = SON([('j', DESCENDING)])
    self.assertEqual(4, c.find_and_modify({}, {'$inc': {'i': 1}}, sort=sort)['j'])
    sort = SON([('j', ASCENDING)])
    self.assertEqual(0, c.find_and_modify({}, {'$inc': {'i': 1}}, sort=sort)['j'])
    try:
        from collections import OrderedDict
        sort = OrderedDict([('j', DESCENDING)])
        self.assertEqual(4, c.find_and_modify({}, {'$inc': {'i': 1}}, sort=sort)['j'])
        sort = OrderedDict([('j', ASCENDING)])
        self.assertEqual(0, c.find_and_modify({}, {'$inc': {'i': 1}}, sort=sort)['j'])
    except __HOLE__:
        pass
    # Test that a standard dict with two keys is rejected.
    sort = {'j': DESCENDING, 'foo': DESCENDING}
    self.assertRaises(TypeError, c.find_and_modify,
                      {}, {'$inc': {'i': 1}}, sort=sort)
ImportError
dataset/ETHPy150Open mongodb/mongo-python-driver/test/test_legacy_api.py/TestLegacy.test_find_and_modify_with_sort
5,032
def __new__(cls):
    try:
        return cls._instance
    except __HOLE__:
        cls._instance = tzinfo.__new__(UTCTimeZone)
        return cls._instance
AttributeError
dataset/ETHPy150Open rsms/smisk/lib/smisk/util/DateTime.py/UTCTimeZone.__new__
5,033
def Delete(self, queue, tasks, mutation_pool=None):
    """Removes the tasks from the queue.

    Note that tasks can already have been removed. It is not an error
    to re-delete an already deleted task.

    Args:
      queue: A queue to clear.
      tasks: A list of tasks to remove. Tasks may be Task() instances
          or integers representing the task_id.
      mutation_pool: An optional MutationPool object to schedule deletions
                     on. If not given, self.data_store is used directly.
    """
    if queue:
        predicates = []
        for task in tasks:
            try:
                task_id = task.task_id
            except __HOLE__:
                task_id = int(task)
            predicates.append(self._TaskIdToColumn(task_id))

        if mutation_pool:
            mutation_pool.DeleteAttributes(queue, predicates)
        else:
            self.data_store.DeleteAttributes(
                queue, predicates, token=self.token, sync=False)
AttributeError
dataset/ETHPy150Open google/grr/grr/lib/queue_manager.py/QueueManager.Delete
5,034
def build_Add(self, o):
    real, imag = map(self.build_Const, o.getChildren())
    try:
        real = float(real)
    except __HOLE__:
        raise UnknownType('Add')
    if not isinstance(imag, complex) or imag.real != 0.0:
        raise UnknownType('Add')
    return real + imag
TypeError
dataset/ETHPy150Open boakley/robotframework-workbench/rwb/lib/configobj.py/Builder.build_Add
5,035
def _interpolate(self, key, value):
    try:
        # do we already have an interpolation engine?
        engine = self._interpolation_engine
    except __HOLE__:
        # not yet: first time running _interpolate(), so pick the engine
        name = self.main.interpolation
        if name == True:  # note that "if name:" would be incorrect here
            # backwards-compatibility: interpolation=True means use default
            name = DEFAULT_INTERPOLATION
        name = name.lower()  # so that "Template", "template", etc. all work
        class_ = interpolation_engines.get(name, None)
        if class_ is None:
            # invalid value for self.main.interpolation
            self.main.interpolation = False
            return value
        else:
            # save reference to engine so we don't have to do this again
            engine = self._interpolation_engine = class_(self)
    # let the engine do the actual work
    return engine.interpolate(key, value)
AttributeError
dataset/ETHPy150Open boakley/robotframework-workbench/rwb/lib/configobj.py/Section._interpolate
5,036
def get(self, key, default=None):
    """A version of ``get`` that doesn't bypass string interpolation."""
    try:
        return self[key]
    except __HOLE__:
        return default
KeyError
dataset/ETHPy150Open boakley/robotframework-workbench/rwb/lib/configobj.py/Section.get
5,037
def pop(self, key, default=MISSING):
    """
    'D.pop(k[,d]) -> v, remove specified key and return the corresponding
    value. If key is not found, d is returned if given, otherwise KeyError
    is raised'
    """
    try:
        val = self[key]
    except __HOLE__:
        if default is MISSING:
            raise
        val = default
    else:
        del self[key]
    return val
KeyError
dataset/ETHPy150Open boakley/robotframework-workbench/rwb/lib/configobj.py/Section.pop
5,038
def setdefault(self, key, default=None):
    """A version of setdefault that sets sequence if appropriate."""
    try:
        return self[key]
    except __HOLE__:
        self[key] = default
        return self[key]
KeyError
dataset/ETHPy150Open boakley/robotframework-workbench/rwb/lib/configobj.py/Section.setdefault
5,039
def as_bool(self, key):
    """
    Accepts a key as input. The corresponding value must be a string or
    the objects (``True`` or 1) or (``False`` or 0). We allow 0 and 1 to
    retain compatibility with Python 2.2.

    If the string is one of ``True``, ``On``, ``Yes``, or ``1`` it returns
    ``True``.

    If the string is one of ``False``, ``Off``, ``No``, or ``0`` it returns
    ``False``.

    ``as_bool`` is not case sensitive.

    Any other input will raise a ``ValueError``.

    >>> a = ConfigObj()
    >>> a['a'] = 'fish'
    >>> a.as_bool('a')
    Traceback (most recent call last):
    ValueError: Value "fish" is neither True nor False
    >>> a['b'] = 'True'
    >>> a.as_bool('b')
    1
    >>> a['b'] = 'off'
    >>> a.as_bool('b')
    0
    """
    val = self[key]
    if val == True:
        return True
    elif val == False:
        return False
    else:
        try:
            if not isinstance(val, basestring):
                # TODO: Why do we raise a KeyError here?
                raise KeyError()
            else:
                return self.main._bools[val.lower()]
        except __HOLE__:
            raise ValueError('Value "%s" is neither True nor False' % val)
KeyError
dataset/ETHPy150Open boakley/robotframework-workbench/rwb/lib/configobj.py/Section.as_bool
5,040
def _handle_configspec(self, configspec):
    """Parse the configspec."""
    # FIXME: Should we check that the configspec was created with the
    #        correct settings ? (i.e. ``list_values=False``)
    if not isinstance(configspec, ConfigObj):
        try:
            configspec = ConfigObj(configspec,
                                   raise_errors=True,
                                   file_error=True,
                                   _inspec=True)
        except ConfigObjError, e:
            # FIXME: Should these errors have a reference
            #        to the already parsed ConfigObj ?
            raise ConfigspecError('Parsing configspec failed: %s' % e)
        except __HOLE__, e:
            raise IOError('Reading configspec failed: %s' % e)

    self.configspec = configspec
IOError
dataset/ETHPy150Open boakley/robotframework-workbench/rwb/lib/configobj.py/ConfigObj._handle_configspec
5,041
def validate(self, validator, preserve_errors=False, copy=False,
             section=None):
    """
    Test the ConfigObj against a configspec.

    It uses the ``validator`` object from *validate.py*.

    To run ``validate`` on the current ConfigObj, call: ::

        test = config.validate(validator)

    (Normally having previously passed in the configspec when the ConfigObj
    was created - you can dynamically assign a dictionary of checks to the
    ``configspec`` attribute of a section though).

    It returns ``True`` if everything passes, or a dictionary of
    pass/fails (True/False). If every member of a subsection passes, it
    will just have the value ``True``. (It also returns ``False`` if all
    members fail).

    In addition, it converts the values from strings to their native
    types if their checks pass (and ``stringify`` is set).

    If ``preserve_errors`` is ``True`` (``False`` is default) then instead
    of a marking a fail with a ``False``, it will preserve the actual
    exception object. This can contain info about the reason for failure.
    For example the ``VdtValueTooSmallError`` indicates that the value
    supplied was too small. If a value (or section) is missing it will
    still be marked as ``False``.

    You must have the validate module to use ``preserve_errors=True``.

    You can then use the ``flatten_errors`` function to turn your nested
    results dictionary into a flattened list of failures - useful for
    displaying meaningful error messages.
    """
    if section is None:
        if self.configspec is None:
            raise ValueError('No configspec supplied.')
        if preserve_errors:
            # We do this once to remove a top level dependency on the validate module
            # Which makes importing configobj faster
            from validate import VdtMissingValue
            self._vdtMissingValue = VdtMissingValue

        section = self

        if copy:
            section.initial_comment = section.configspec.initial_comment
            section.final_comment = section.configspec.final_comment
            section.encoding = section.configspec.encoding
            section.BOM = section.configspec.BOM
            section.newlines = section.configspec.newlines
            section.indent_type = section.configspec.indent_type

    #
    # section.default_values.clear() #??
    configspec = section.configspec
    self._set_configspec(section, copy)

    def validate_entry(entry, spec, val, missing, ret_true, ret_false):
        section.default_values.pop(entry, None)
        try:
            section.default_values[entry] = validator.get_default_value(configspec[entry])
        except (KeyError, __HOLE__, validator.baseErrorClass):
            # No default, bad default or validator has no 'get_default_value'
            # (e.g. SimpleVal)
            pass

        try:
            check = validator.check(spec,
                                    val,
                                    missing=missing
                                    )
        except validator.baseErrorClass, e:
            if not preserve_errors or isinstance(e, self._vdtMissingValue):
                out[entry] = False
            else:
                # preserve the error
                out[entry] = e
                ret_false = False
            ret_true = False
        else:
            ret_false = False
            out[entry] = True
            if self.stringify or missing:
                # if we are doing type conversion
                # or the value is a supplied default
                if not self.stringify:
                    if isinstance(check, (list, tuple)):
                        # preserve lists
                        check = [self._str(item) for item in check]
                    elif missing and check is None:
                        # convert the None from a default to a ''
                        check = ''
                    else:
                        check = self._str(check)
                if (check != val) or missing:
                    section[entry] = check
            if not copy and missing and entry not in section.defaults:
                section.defaults.append(entry)
        return ret_true, ret_false

    #
    out = {}
    ret_true = True
    ret_false = True

    unvalidated = [k for k in section.scalars if k not in configspec]
    incorrect_sections = [k for k in configspec.sections if k in section.scalars]
    incorrect_scalars = [k for k in configspec.scalars if k in section.sections]

    for entry in configspec.scalars:
        if entry in ('__many__', '___many___'):
            # reserved names
            continue
        if (not entry in section.scalars) or (entry in section.defaults):
            # missing entries
            # or entries from defaults
            missing = True
            val = None
            if copy and entry not in section.scalars:
                # copy comments
                section.comments[entry] = (
                    configspec.comments.get(entry, []))
                section.inline_comments[entry] = (
                    configspec.inline_comments.get(entry, ''))
            #
        else:
            missing = False
            val = section[entry]

        ret_true, ret_false = validate_entry(entry, configspec[entry], val,
                                             missing, ret_true, ret_false)

    many = None
    if '__many__' in configspec.scalars:
        many = configspec['__many__']
    elif '___many___' in configspec.scalars:
        many = configspec['___many___']

    if many is not None:
        for entry in unvalidated:
            val = section[entry]
            ret_true, ret_false = validate_entry(entry, many, val, False,
                                                 ret_true, ret_false)
        unvalidated = []

    for entry in incorrect_scalars:
        ret_true = False
        if not preserve_errors:
            out[entry] = False
        else:
            ret_false = False
            msg = 'Value %r was provided as a section' % entry
            out[entry] = validator.baseErrorClass(msg)
    for entry in incorrect_sections:
        ret_true = False
        if not preserve_errors:
            out[entry] = False
        else:
            ret_false = False
            msg = 'Section %r was provided as a single value' % entry
            out[entry] = validator.baseErrorClass(msg)

    # Missing sections will have been created as empty ones when the
    # configspec was read.
    for entry in section.sections:
        # FIXME: this means DEFAULT is not copied in copy mode
        if section is self and entry == 'DEFAULT':
            continue
        if section[entry].configspec is None:
            unvalidated.append(entry)
            continue
        if copy:
            section.comments[entry] = configspec.comments.get(entry, [])
            section.inline_comments[entry] = configspec.inline_comments.get(entry, '')
        check = self.validate(validator, preserve_errors=preserve_errors,
                              copy=copy, section=section[entry])
        out[entry] = check
        if check == False:
            ret_true = False
        elif check == True:
            ret_false = False
        else:
            ret_true = False

    section.extra_values = unvalidated
    if preserve_errors and not section._created:
        # If the section wasn't created (i.e. it wasn't missing)
        # then we can't return False, we need to preserve errors
        ret_false = False
    #
    if ret_false and preserve_errors and out:
        # If we are preserving errors, but all
        # the failures are from missing sections / values
        # then we can return False. Otherwise there is a
        # real failure that we need to preserve.
        ret_false = not any(out.values())
    if ret_true:
        return True
    elif ret_false:
        return False
    return out
AttributeError
dataset/ETHPy150Open boakley/robotframework-workbench/rwb/lib/configobj.py/ConfigObj.validate
5,042
def interact(self, ps1="shparse> ", ps2="more> "):
    try:
        while 1:
            line = raw_input(ps1)
            if not line:
                continue
            line += "\n"  # add newline to force callback
            while self.feed(line):
                line = raw_input(ps2)
    except (EOFError, __HOLE__):
        print
KeyboardInterrupt
dataset/ETHPy150Open kdart/pycopia/core/pycopia/shparser.py/ShellParser.interact
5,043
def is_numeric(val):
    try:
        float(val)
        return True
    except __HOLE__:
        return False
ValueError
dataset/ETHPy150Open adambullmer/sublime_docblockr_python/parsers/parser.py/is_numeric
5,044
def tearDown(self):
    # We have to remove all the files from the real FS. Doing the same for the
    # fake FS is optional, but doing it is an extra sanity check.
    os.chdir(tempfile.gettempdir())
    try:
        rev_files = self._created_files[:]
        rev_files.reverse()
        for info in rev_files:
            real_path, fake_path = self._Paths(info[1])
            if info[0] == 'd':
                try:
                    os.rmdir(real_path)
                except __HOLE__ as e:
                    if 'Directory not empty' in e:
                        self.fail('Real path %s not empty: %s : %s' % (
                            real_path, e, os.listdir(real_path)))
                    else:
                        raise
                self.fake_os.rmdir(fake_path)
            if info[0] == 'f' or info[0] == 'l':
                os.remove(real_path)
                self.fake_os.remove(fake_path)
    finally:
        shutil.rmtree(self.real_base)
OSError
dataset/ETHPy150Open jmcgeheeiv/pyfakefs/fake_filesystem_vs_real_test.py/FakeFilesystemVsRealTest.tearDown
5,045
def _GetErrno(self, raised_error):
    try:
        return (raised_error and raised_error.errno) or None
    except __HOLE__:
        return None
AttributeError
dataset/ETHPy150Open jmcgeheeiv/pyfakefs/fake_filesystem_vs_real_test.py/FakeFilesystemVsRealTest._GetErrno
5,046
def train(self, train_set, valid_set=None, test_set=None, train_size=None):
    '''We train over mini-batches and evaluate periodically.'''
    iteration = 0
    while True:
        if not iteration % self.config.test_frequency and test_set:
            try:
                self.test(iteration, test_set)
            except __HOLE__:
                logging.info('interrupted!')
                break

        if not iteration % self.validation_frequency and valid_set:
            try:
                if not self.evaluate(iteration, valid_set):
                    logging.info('patience elapsed, bailing out')
                    break
            except KeyboardInterrupt:
                logging.info('interrupted!')
                break

        train_message = ""
        try:
            train_message = self.train_func(train_set)
        except KeyboardInterrupt:
            logging.info('interrupted!')
            break

        if not iteration % self.config.monitor_frequency:
            logging.info('monitor (iter=%i) %s', iteration + 1, train_message)
        iteration += 1
        if hasattr(self.network, "iteration_callback"):
            self.network.iteration_callback()

        yield train_message

    if valid_set:
        self.set_params(self.best_params)
    if test_set:
        self.test(0, test_set)
KeyboardInterrupt
dataset/ETHPy150Open zomux/deepy/deepy/trainers/customize_trainer.py/CustomizeTrainer.train
5,047
def scan_modules(self):
    """Loads the python modules specified in the JIP configuration.

    This will register any functions and classes decorated with
    one of the JIP decorators.
    """
    if self.__scanned:
        return
    path = getenv("JIP_MODULES", "")
    log.debug("Scanning modules")
    for module in path.split(":") + self.jip_modules + ['jip.scripts']:
        try:
            if module:
                log.debug("Importing module: %s", module)
                __import__(module)
        except __HOLE__, e:
            log.debug("Error while importing module: %s. "
                      "Trying file import", str(e))
            if exists(module):
                self._load_from_file(module)
    self.__scanned = True
ImportError
dataset/ETHPy150Open thasso/pyjip/jip/tools.py/Scanner.scan_modules
5,048
def run(self, tool, stdin=None, stdout=None):
    """Execute this block
    """
    import subprocess
    import jip

    # write template to named temp file and run with interpreter
    script_file = jip.create_temp_file()
    try:
        script_file.write(self.render(tool))
        script_file.close()
        cmd = [self.interpreter if self.interpreter else "bash"]
        if self.interpreter_args:
            cmd += self.interpreter_args
        self.process = subprocess.Popen(
            cmd + [script_file.name],
            stdin=stdin,
            stdout=stdout
        )
        return self.process
    except __HOLE__, err:
        # catch the errno 2 No such file or directory, which indicates the
        # interpreter is not available
        if err.errno == 2:
            raise Exception("Interpreter %s not found!" % self.interpreter)
        raise err
OSError
dataset/ETHPy150Open thasso/pyjip/jip/tools.py/Block.run
5,049
def check_file(self, option_name):
    """Delegates to the options check name function

    :param option_name: the name of the option
    """
    try:
        self.options[option_name].check_file()
    except __HOLE__ as e:
        self.validation_error(str(e))
ValueError
dataset/ETHPy150Open thasso/pyjip/jip/tools.py/Tool.check_file
5,050
def main(args=None, get_subs_fn=None):
    get_subs_fn = get_subs_fn or get_subs
    _place_template_files = True
    _process_template_files = True
    package_path = os.getcwd()
    if args is not None:
        package_path = args.package_path or os.getcwd()
        _place_template_files = args.place_template_files
        _process_template_files = args.process_template_files

    pkgs_dict = find_packages(package_path)
    if len(pkgs_dict) == 0:
        sys.exit("No packages found in path: '{0}'".format(package_path))
    if len(pkgs_dict) > 1:
        sys.exit("Multiple packages found, "
                 "this tool only supports one package at a time.")

    os_data = create_default_installer_context().get_os_name_and_version()
    os_name, os_version = os_data
    ros_distro = os.environ.get('ROS_DISTRO', 'indigo')

    # Allow args overrides
    os_name = args.os_name or os_name
    os_version = args.os_version or os_version
    ros_distro = args.ros_distro or ros_distro

    # Summarize
    info(fmt("@!@{gf}==> @|") +
         fmt("Generating RPMs for @{cf}%s:%s@| for package(s) %s" %
             (os_name, os_version, [p.name for p in pkgs_dict.values()])))

    for path, pkg in pkgs_dict.items():
        template_files = None
        try:
            subs = get_subs_fn(pkg, os_name, os_version, ros_distro)
            if _place_template_files:
                # Place template files
                place_template_files(path)
            if _process_template_files:
                # Just process existing template files
                template_files = process_template_files(path, subs)
            if not _place_template_files and not _process_template_files:
                # If neither, do both
                place_template_files(path)
                template_files = process_template_files(path, subs)
            if template_files is not None:
                for template_file in template_files:
                    os.remove(os.path.normpath(template_file))
        except Exception as exc:
            debug(traceback.format_exc())
            error(type(exc).__name__ + ": " + str(exc), exit=True)
        except (__HOLE__, EOFError):
            sys.exit(1)


# This describes this command to the loader
KeyboardInterrupt
dataset/ETHPy150Open ros-infrastructure/bloom/bloom/generators/rpm/generate_cmd.py/main
5,051
def __getdict(self, key):
    ret = self.get(key)
    try:
        ret = json.loads(ret)
    except __HOLE__:
        # The value was not JSON encoded :-)
        raise Exception('"%s" was not JSON encoded as expected (%s).' % (key, str(ret)))
    return ret
ValueError
dataset/ETHPy150Open klipstein/dojango/dojango/decorators.py/__getdict
5,052
def __prepare_json_ret(request, ret, callback_param_name=None, use_iframe=False):
    if ret == False:
        ret = {'success': False}
    elif ret == None:  # Sometimes there is no return.
        ret = {}
    # Add 'success'=True, since it was obviously not set yet and we got valid data, no exception.
    func_name = None
    if callback_param_name:
        func_name = request.GET.get(callback_param_name, "callbackParamName")
    try:
        if not ret.has_key('success'):
            ret['success'] = True
    except __HOLE__, e:
        raise Exception("The returned data of your function must be a dictionary!")
    json_ret = ""
    try:
        # Sometimes the serialization fails, i.e. when there are too deeply
        # nested objects or even classes inside
        json_ret = to_json_response(ret, func_name, use_iframe)
    except Exception, e:
        print '\n\n===============Exception=============\n\n' + str(e) + '\n\n'
        print ret
        print '\n\n'
        return HttpResponseServerError(content=str(e))
    return json_ret
AttributeError
dataset/ETHPy150Open klipstein/dojango/dojango/decorators.py/__prepare_json_ret
5,053
def _on_model_change(self, form, model, is_created):
    """
        Compatibility helper.
    """
    try:
        self.on_model_change(form, model, is_created)
    except __HOLE__:
        msg = ('%s.on_model_change() now accepts third ' +
               'parameter is_created. Please update your code') % self.model
        warnings.warn(msg)

        self.on_model_change(form, model)
TypeError
dataset/ETHPy150Open flask-admin/flask-admin/flask_admin/model/base.py/BaseModelView._on_model_change
5,054
def _export_tablib(self, export_type, return_url):
    """
        Exports a variety of formats using the tablib library.
    """
    if tablib is None:
        flash(gettext('Tablib dependency not installed.'))
        return redirect(return_url)

    filename = self.get_export_name(export_type)

    disposition = 'attachment;filename=%s' % (secure_filename(filename),)

    mimetype, encoding = mimetypes.guess_type(filename)
    if not mimetype:
        mimetype = 'application/octet-stream'
    if encoding:
        mimetype = '%s; charset=%s' % (mimetype, encoding)

    ds = tablib.Dataset(headers=[c[1] for c in self._export_columns])

    count, data = self._export_data()

    for row in data:
        vals = [self.get_export_value(row, c[0]) for c in self._export_columns]
        ds.append(vals)

    try:
        try:
            response_data = ds.export(format=export_type)
        except __HOLE__:
            response_data = getattr(ds, export_type)
    except (AttributeError, tablib.UnsupportedFormat):
        flash(gettext('Export type "%(type)s not supported.',
                      type=export_type))
        return redirect(return_url)

    return Response(
        response_data,
        headers={'Content-Disposition': disposition},
        mimetype=mimetype,
    )
AttributeError
dataset/ETHPy150Open flask-admin/flask-admin/flask_admin/model/base.py/BaseModelView._export_tablib
5,055
def test_defer_connect(self):
    import socket
    for db in self.databases:
        d = db.copy()
        try:
            sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
            sock.connect(d['unix_socket'])
        except __HOLE__:
            sock = socket.create_connection(
                (d.get('host', 'localhost'), d.get('port', 3306)))
        for k in ['unix_socket', 'host', 'port']:
            try:
                del d[k]
            except KeyError:
                pass

        c = pymysql.connect(defer_connect=True, **d)
        self.assertFalse(c.open)
        c.connect(sock)
        c.close()
        sock.close()
KeyError
dataset/ETHPy150Open PyMySQL/PyMySQL/pymysql/tests/test_connection.py/TestConnection.test_defer_connect
5,056
@task(rate_limit='600/m', ignore_result=True)
def redo_ranks(run_id):
    from brabeion import badges
    logger = redo_ranks.get_logger()
    try:
        user_run = Run.objects.get(pk=run_id)
    except Run.DoesNotExist:
        logger.error("[R- /U- /M- ] Run not found (pk={0}).".format(run_id))
        return False
    map_obj = user_run.map
    if user_run.user == None:
        logger.info("[R-{0}/U- /M-{1}] Anonymous run, not"
                    " processing the rank.".format(run_id, map_obj.id))
        return
    user_best = BestRun.objects.get(map=map_obj, user=user_run.user)
    if not user_best.run_id == user_run.id:
        logger.info("[R-{0}/U-{1}/M-{2}] Not best run,"
                    " not processing the rank.".format(run_id,
                    user_run.user_id, map_obj.id))
        return
    runs = BestRun.objects.filter(map=map_obj)
    # ranked = player that receives points for his place
    ranked_count = len(BestRun.SCORING)
    # exclude banned users from scoring
    ranked = runs.exclude(user__is_active=False)[:ranked_count]
    try:
        if user_run.time >= ranked[ranked_count - 1].run.time:
            logger.info("[R-{0}/U-{1}/M-{2}] Run won't affect scoring,"
                        " not processing the rank.".format(run_id,
                        user_run.user_id, map_obj.id))
            return
    except __HOLE__:
        pass
    old_rank = user_run.user.profile.map_position(map_obj.id)
    new_rank = None
    i = 0
    for run in ranked:
        run.points = BestRun.SCORING[i]
        run.save()
        # FIXME it's 3 AM, sorry for that
        run.user.profile.points = BestRun.objects.filter(user=run.user).aggregate(
            Sum('points')
        )['points__sum'] or 0
        run.user.profile.save()
        i += 1
        if user_run.user_id == run.user_id:
            new_rank = i
    runs.exclude(id__in=ranked.values_list('id', flat=True)).update(
        points=0
    )
    badges.possibly_award_badge("rank_processed", user=user_run.user)
    logger.info("[R-{0}/U-{1}/M-{2}] {3}'s new map rank is {4} (was: {5})."
                .format(run_id, user_run.user_id, map_obj.id, user_run.user,
                        new_rank, old_rank))
IndexError
dataset/ETHPy150Open chaosk/teerace/teerace/race/tasks.py/redo_ranks
5,057
def __getattr__(self, attr):
    try:
        return self.INDEXES.get(attr)
    except __HOLE__:
        raise AttributeError(attr)
TypeError
dataset/ETHPy150Open chaosk/teerace/teerace/race/tasks.py/index_factory.__getattr__
5,058
@task(ignore_result=True)
def retrieve_map_details(map_id):
    """
    WARNING! This task is CPU/(and highly) RAM expensive.

    Checks for presence of specified tiles, counts some of them
    and at the end takes a beautiful photo of the map.

    Thanks for your help, erdbeere!
    """
    logger = retrieve_map_details.get_logger()
    try:
        map_obj = Map.objects.get(pk=map_id)
    except Map.DoesNotExist:
        logger.error("[M- ] Map not found (pk={0}).".format(map_id))
        return False
    try:
        # I actually can use that! Thanks, erd!
        teemap = Teemap(map_obj.map_file.path)
        logger.info("[M-{0}] Loaded \"{1}\" map.".format(map_id, map_obj.name))
    except __HOLE__:
        logger.error("[M-{0}] Couldn't load \"{1}\" map"
                     .format(map_id, map_obj.name))
        has_unhookables = has_deathtiles = None
        shield_count = heart_count = grenade_count = None
    else:
        has_unhookables = has_deathtiles = is_fastcap = has_teleporters = \
            has_speedups = False
        shield_count = heart_count = grenade_count = 0
        logger.info("Counting map items...")
        for tile in teemap.gamelayer.tiles:
            if tile.index == indexes.DEATHTILE:
                has_deathtiles = True
            elif tile.index == indexes.UNHOOKABLE:
                has_unhookables = True
            elif tile.index == indexes.SHIELD:
                shield_count += 1
            elif tile.index == indexes.HEART:
                heart_count += 1
            elif tile.index == indexes.GRENADE:
                grenade_count += 1
        if teemap.telelayer:
            has_teleporters = True
        if teemap.speeduplayer:
            has_speedups = True
        # DISABLED due to huge (counted in GiBs) memory usage
        # logger.info("Rendering map screenshot.")
        # map_image = teemap.render(gamelayer_on_top=True)
        # map_image.save('{0}images/maps/full/{1}.png'.format(settings.MEDIA_ROOT,
        #     map_obj.name))
        # logger.info("Finished rendering map screenshot.")
        # map_obj.has_image = True
    map_obj.has_unhookables = has_unhookables
    map_obj.has_deathtiles = has_deathtiles
    map_obj.has_teleporters = has_teleporters
    map_obj.has_speedups = has_speedups
    map_map_types = map_obj.map_types.filter(slug='fastcap-no-weapons')
    if not map_map_types:
        map_obj.shield_count = shield_count
        map_obj.heart_count = heart_count
        map_obj.grenade_count = grenade_count
    logger.info("Finished counting map items.")
    logger.info("Generating map CRC...")
    map_obj.map_file.open()
    map_obj.crc = '{0:x}'.format(crc32(map_obj.map_file.read()) & 0xffffffff)
    map_obj.map_file.close()
    map_obj.save()
    map_map_types = map_obj.map_types.filter(slug='fastcap')
    if map_map_types:
        logger.info("Creating a non-weapon twin...")
        new_map, created = Map.objects.get_or_create(
            name="{0}-no-weapons".format(map_obj.name),
            defaults={
                'author': map_obj.author,
                'added_by': map_obj.added_by,
                'map_file': map_obj.map_file,
                'crc': map_obj.crc,
                'has_unhookables': has_unhookables,
                'has_deathtiles': has_deathtiles,
            })
        if not created:
            logger.info("Oh, it already exists! Updating...")
            new_map.author = map_obj.author
            new_map.added_by = map_obj.added_by
            new_map.map_file = map_obj.map_file
            new_map.crc = map_obj.crc
            new_map.has_unhookables = has_unhookables
            new_map.has_deathtiles = has_deathtiles
            new_map.save()
        else:
            new_map.map_types.add(MapType.objects.get(slug='fastcap-no-weapons'))
            new_map.map_types.add(*map_obj.map_types.exclude(slug='fastcap'))
            new_map.save()
    logger.info("[M-{0}] Finished processing \"{1}\" map."
                .format(map_id, map_obj.name))
IndexError
dataset/ETHPy150Open chaosk/teerace/teerace/race/tasks.py/retrieve_map_details
5,059
@pytest.fixture(scope='module')
def ctx(sqlctx, people):
    try:
        df = sqlctx.createDataFrame(people)
    except __HOLE__:
        schema = sqlctx.inferSchema(people)
        schema.registerTempTable('t')
        schema.registerTempTable('t2')
    else:
        df2 = sqlctx.createDataFrame(people)
        sqlctx.registerDataFrameAsTable(df, 't')
        sqlctx.registerDataFrameAsTable(df2, 't2')
    return sqlctx
AttributeError
dataset/ETHPy150Open blaze/odo/odo/backends/tests/test_sparksql.py/ctx
5,060
def wait_until_start():
    while True:
        try:
            results = urlopen(EXAMPLE_APP)
            if results.code == 404:
                raise Exception('%s returned unexpected 404' % EXAMPLE_APP)
            break
        except __HOLE__:
            pass
IOError
dataset/ETHPy150Open cobrateam/splinter/run_tests.py/wait_until_start
5,061
def wait_until_stop():
    while True:
        try:
            results = urlopen(EXAMPLE_APP)
            if results.code == 404:
                break
        except __HOLE__:
            break
IOError
dataset/ETHPy150Open cobrateam/splinter/run_tests.py/wait_until_stop
5,062
def lmode(inlist):
    """
    Returns a list of the modal (most common) score(s) in the passed
    list. If there is more than one such score, all are returned. The
    bin-count for the mode(s) is also returned.

    Usage:   lmode(inlist)
    Returns: bin-count for mode(s), a list of modal value(s)
    """
    scores = pstats.unique(inlist)
    scores.sort()
    freq = []
    for item in scores:
        freq.append(inlist.count(item))
    maxfreq = max(freq)
    mode = []
    stillmore = 1
    while stillmore:
        try:
            indx = freq.index(maxfreq)
            mode.append(scores[indx])
            del freq[indx]
            del scores[indx]
        except __HOLE__:
            stillmore = 0
    return maxfreq, mode


####################################
############  MOMENTS  #############
####################################
ValueError
dataset/ETHPy150Open radlab/sparrow/src/main/python/third_party/stats.py/lmode
5,063
def get_contents(self, origin):
    """
    Returns the contents of template at origin.
    """
    try:
        with io.open(origin.name, encoding=self.engine.file_charset) as fp:
            return fp.read()
    except __HOLE__ as e:
        if e.errno == errno.ENOENT:
            raise TemplateDoesNotExist(origin)
        raise
IOError
dataset/ETHPy150Open tethysplatform/tethys/tethys_apps/template_loaders.py/TethysAppsTemplateLoader.get_contents
5,064
def hits_numpy(G, normalized=True):
    """Return HITS hubs and authorities values for nodes.

    The HITS algorithm computes two numbers for a node.
    Authorities estimates the node value based on the incoming links.
    Hubs estimates the node value based on outgoing links.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    normalized : bool (default=True)
       Normalize results by the sum of all of the values.

    Returns
    -------
    (hubs,authorities) : two-tuple of dictionaries
       Two dictionaries keyed by node containing the hub and authority
       values.

    Examples
    --------
    >>> G=nx.path_graph(4)
    >>> h,a=nx.hits(G)

    Notes
    -----
    The eigenvector calculation uses NumPy's interface to LAPACK.

    The HITS algorithm was designed for directed graphs but this
    algorithm does not check if the input graph is directed and will
    execute on undirected graphs.

    References
    ----------
    .. [1] A. Langville and C. Meyer,
       "A survey of eigenvector methods of web information retrieval."
       http://citeseer.ist.psu.edu/713792.html
    .. [2] Jon Kleinberg,
       Authoritative sources in a hyperlinked environment
       Journal of the ACM 46 (5): 604-32, 1999.
       doi:10.1145/324133.324140.
       http://www.cs.cornell.edu/home/kleinber/auth.pdf.
    """
    try:
        import numpy as np
    except __HOLE__:
        raise ImportError(
            "hits_numpy() requires NumPy: http://scipy.org/")
    if len(G) == 0:
        return {}, {}
    H = nx.hub_matrix(G, list(G))
    e, ev = np.linalg.eig(H)
    m = e.argsort()[-1]  # index of maximum eigenvalue
    h = np.array(ev[:, m]).flatten()
    A = nx.authority_matrix(G, list(G))
    e, ev = np.linalg.eig(A)
    m = e.argsort()[-1]  # index of maximum eigenvalue
    a = np.array(ev[:, m]).flatten()
    if normalized:
        h = h / h.sum()
        a = a / a.sum()
    else:
        h = h / h.max()
        a = a / a.max()
    hubs = dict(zip(G, map(float, h)))
    authorities = dict(zip(G, map(float, a)))
    return hubs, authorities
ImportError
dataset/ETHPy150Open networkx/networkx/networkx/algorithms/link_analysis/hits_alg.py/hits_numpy
5,065
def hits_scipy(G, max_iter=100, tol=1.0e-6, normalized=True):
    """Return HITS hubs and authorities values for nodes.

    The HITS algorithm computes two numbers for a node.
    Authorities estimates the node value based on the incoming links.
    Hubs estimates the node value based on outgoing links.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    max_iter : interger, optional
      Maximum number of iterations in power method.

    tol : float, optional
      Error tolerance used to check convergence in power method iteration.

    nstart : dictionary, optional
      Starting value of each node for power method iteration.

    normalized : bool (default=True)
       Normalize results by the sum of all of the values.

    Returns
    -------
    (hubs,authorities) : two-tuple of dictionaries
       Two dictionaries keyed by node containing the hub and authority
       values.

    Examples
    --------
    >>> G=nx.path_graph(4)
    >>> h,a=nx.hits(G)

    Notes
    -----
    This implementation uses SciPy sparse matrices.

    The eigenvector calculation is done by the power iteration method
    and has no guarantee of convergence.  The iteration will stop
    after max_iter iterations or an error tolerance of
    number_of_nodes(G)*tol has been reached.

    The HITS algorithm was designed for directed graphs but this
    algorithm does not check if the input graph is directed and will
    execute on undirected graphs.

    References
    ----------
    .. [1] A. Langville and C. Meyer,
       "A survey of eigenvector methods of web information retrieval."
       http://citeseer.ist.psu.edu/713792.html
    .. [2] Jon Kleinberg,
       Authoritative sources in a hyperlinked environment
       Journal of the ACM 46 (5): 604-632, 1999.
       doi:10.1145/324133.324140.
       http://www.cs.cornell.edu/home/kleinber/auth.pdf.
    """
    try:
        import scipy.sparse
        import numpy as np
    except __HOLE__:
        raise ImportError(\
            "hits_scipy() requires SciPy: http://scipy.org/")
    if len(G) == 0:
        return {}, {}
    M = nx.to_scipy_sparse_matrix(G, nodelist=list(G))
    (n, m) = M.shape  # should be square
    A = M.T * M  # authority matrix
    x = scipy.ones((n, 1)) / n  # initial guess
    # power iteration on authority matrix
    i = 0
    while True:
        xlast = x
        x = A * x
        x = x / x.max()
        # check convergence, l1 norm
        err = scipy.absolute(x - xlast).sum()
        if err < tol:
            break
        if i > max_iter:
            raise NetworkXError(\
                "HITS: power iteration failed to converge in %d iterations." % (i + 1))
        i += 1
    a = np.asarray(x).flatten()
    # h=M*a
    h = np.asarray(M * a).flatten()
    if normalized:
        h = h / h.sum()
        a = a / a.sum()
    hubs = dict(zip(G, map(float, h)))
    authorities = dict(zip(G, map(float, a)))
    return hubs, authorities

# fixture for nose tests
ImportError
dataset/ETHPy150Open networkx/networkx/networkx/algorithms/link_analysis/hits_alg.py/hits_scipy
5,066
def _generate_path(self, path, attr, wildcard_key, raiseerr=True):
    if raiseerr and not path.has_entity:
        if isinstance(path, TokenRegistry):
            raise sa_exc.ArgumentError(
                "Wildcard token cannot be followed by another entity")
        else:
            raise sa_exc.ArgumentError(
                "Attribute '%s' of entity '%s' does not "
                "refer to a mapped entity" %
                (path.prop.key, path.parent.entity)
            )

    if isinstance(attr, util.string_types):
        default_token = attr.endswith(_DEFAULT_TOKEN)
        if attr.endswith(_WILDCARD_TOKEN) or default_token:
            if default_token:
                self.propagate_to_loaders = False
            if wildcard_key:
                attr = "%s:%s" % (wildcard_key, attr)
            return path.token(attr)

        try:
            # use getattr on the class to work around
            # synonyms, hybrids, etc.
            attr = getattr(path.entity.class_, attr)
        except __HOLE__:
            if raiseerr:
                raise sa_exc.ArgumentError(
                    "Can't find property named '%s' on the "
                    "mapped entity %s in this Query. " % (
                        attr, path.entity)
                )
            else:
                return None
        else:
            attr = attr.property

        path = path[attr]
    else:
        prop = attr.property

        if not prop.parent.common_parent(path.mapper):
            if raiseerr:
                raise sa_exc.ArgumentError(
                    "Attribute '%s' does not "
                    "link from element '%s'" % (attr, path.entity))
            else:
                return None

        if getattr(attr, '_of_type', None):
            ac = attr._of_type
            ext_info = inspect(ac)

            path_element = ext_info.mapper
            if not ext_info.is_aliased_class:
                ac = orm_util.with_polymorphic(
                    ext_info.mapper.base_mapper,
                    ext_info.mapper, aliased=True,
                    _use_mapper_path=True)
            path.entity_path[prop].set(
                self.context, "path_with_polymorphic", inspect(ac))
            path = path[prop][path_element]
        else:
            path = path[prop]

    if path.has_entity:
        path = path.entity_path
    return path
AttributeError
dataset/ETHPy150Open goFrendiAsgard/kokoropy/kokoropy/packages/sqlalchemy/orm/strategy_options.py/Load._generate_path
5,067
def get_context_data(self, **kwargs):
    context = super(SettingsTextView, self).get_context_data(**kwargs)
    try:
        context['nodes'] = serializers.serialize('python', NodeSettings.objects.all())
        context['settings'] = serializers.serialize('python', OpenstackSettings.objects.all())
    except __HOLE__:
        pass
    return context
IndexError
dataset/ETHPy150Open Havate/havate-openstack/proto-build/gui/horizon/Horizon_GUI/config/views.py/SettingsTextView.get_context_data
5,068
def get_context_data(self, **kwargs):
    context = super(SettingsView, self).get_context_data(**kwargs)
    context['cluster_form'] = ClusterSettingsForm()
    context['ucsm_form'] = UCSMSettingsForm()
    context['os_form'] = OSSettingsForm()
    context['network_form'] = NetworkSettingsForm
    context['host_form'] = HostSettingsForm()
    context['node_form'] = NodeSettingsForm()
    context['settings_form'] = OpenstackSettingsForm()
    context['nodes'] = NodeSettings.objects.all()
    context['settings'] = {}
    try:
        context['settings'] = OpenstackSettings.objects.all()[0]
        context['settings_form'] = OpenstackSettingsForm(instance=context['settings'])
    except __HOLE__:
        pass
    scenario_list = []
    print settings.PROJECT_PATH
    for filename in os.listdir(os.path.join(settings.PROJECT_PATH, 'static-raw', 'scenarios')):
        if filename.endswith(".yaml"):
            scenario_list.append(filename.split('.')[0])
    context['scenario_list'] = scenario_list
    return context
IndexError
dataset/ETHPy150Open Havate/havate-openstack/proto-build/gui/horizon/Horizon_GUI/config/views.py/SettingsView.get_context_data
5,069
def visit_output(self, node):
    mode, rest = self.get_arg_rest(node)
    modes = [x.strip() for x in mode.split(',')]
    for mode in modes:
        try:
            handler_name = self.mod.output_handlers[mode.lower()]
        except __HOLE__:
            self.error('Unknown output mode: %r. Expected one of %r.'%(
                mode, self.mod.output_handlers.keys()),
                node, exception=None)
        else:
            handler = getattr(self.mod, handler_name)
            self.output_directives.append((handler, rest))
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/guppy-0.1.10/guppy/gsl/Document.py/Document.visit_output
5,070
def gen_descriptions(self, ats, use_attr_header = 1):
    if not ats:
        return
    tab = self.sortup_aspects(ats)
    for typ, li in tab:
        try:
            try:
                gen_desc = getattr(self, 'gen_%s_descriptions'%typ)
            except __HOLE__:
                hd = typ
                if (len(li) > 1):
                    hd = hd + 's'
                hd = hd.capitalize().replace('_', ' ')
                self.gen_outer_dt(hd)
                for a in li:
                    self.gen_def(a)
            else:
                gen_desc(li)
        except self.mod.ReportedError:
            pass
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/guppy-0.1.10/guppy/gsl/Document.py/SubDoc.gen_descriptions
5,071
def gen_synopsis(self, m):
    ats = m.find_aspects('*')
    ats = self.combine_attrs_of_same_kind(ats)
    tab = self.sortup_aspects(ats, synopsis=1)
    if tab:
        self.gen_outer_dt('Synopsis')
        self.open('dd')
        self.open('dl')
        self.level += 1
        for typ, li in tab:
            try:
                gen_syn = getattr(self, 'gen_%s_syn'%typ)
            except __HOLE__:
                name = typ.capitalize().replace('_', ' ')
                if len(li) != 1:
                    name = name+'s'
                self.gen_outer_dt(name)
                for a in li:
                    self.open('dd')
                    self.gen_ref(a)
                    self.close()
            else:
                gen_syn(li)
        self.level -= 1
        self.close()
        self.close()
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/guppy-0.1.10/guppy/gsl/Document.py/SubDoc.gen_synopsis
5,072
def x86flags(archTag, baseArch, extraFlags, ofInterest):
    try:
        lines = open("/proc/cpuinfo").read().split("\n")
    except __HOLE__:
        lines=[]

    rc = [ (x, deps.FLAG_SENSE_PREFERRED) for x in extraFlags ]

    for line in lines:
        if not line.startswith("flags"):
            continue
        fields = line.split()
        if fields[0] != "flags":
            continue

        for flag in fields[2:]:
            if ofInterest.has_key(flag):
                rc.append((flag, deps.FLAG_SENSE_PREFERRED))
        return deps.Dependency(archTag, rc)

    return deps.Dependency(archTag)
IOError
dataset/ETHPy150Open sassoftware/conary/conary/deps/arch.py/x86flags
5,073
def _url_for_fetch(self, mapping):
    try:
        return mapping['pre_processed_url']
    except __HOLE__:
        return mapping['raw_url']
KeyError
dataset/ETHPy150Open openelections/openelections-core/openelex/us/ia/datasource.py/Datasource._url_for_fetch
5,074
def _checkRewrite(self):
    try:
        if os.stat(self.name)[6] < self.where:
            self.reset()
    except __HOLE__:
        self.close()
OSError
dataset/ETHPy150Open openstack/bareon/contrib/fuel_bootstrap/files/trusty/usr/bin/send2syslog.py/WatchedFile._checkRewrite
5,075
def readLines(self):
    """Return list of last append lines from file if exist."""
    self._checkRewrite()
    if not self.fo:
        try:
            self.fo = open(self.name, 'r')
        except __HOLE__:
            return ()
    lines = self.fo.readlines()
    self.where = self.fo.tell()
    return lines
IOError
dataset/ETHPy150Open openstack/bareon/contrib/fuel_bootstrap/files/trusty/usr/bin/send2syslog.py/WatchedFile.readLines
5,076
def send(self):
    """Send append data from files to servers."""
    for watchedfile in self.watchedfiles:
        for line in watchedfile.readLines():
            line = line.strip()
            level = self._get_msg_level(line, self.log_type)
            # Get rid of duplicated information in anaconda logs
            line = re.sub(
                msg_levels[self.log_type]['regex'] + "\s*:?\s?",
                "",
                line
            )
            # Ignore meaningless errors
            try:
                for r in relevel_errors[self.log_type]:
                    if level == r['levelfrom'] and \
                            re.match(r['regex'], line):
                        level = r['levelto']
            except __HOLE__:
                pass
            self.logger.log(level, line)
            main_logger and main_logger.log(
                level,
                'From file "%s" send: %s' % (watchedfile.name, line)
            )
KeyError
dataset/ETHPy150Open openstack/bareon/contrib/fuel_bootstrap/files/trusty/usr/bin/send2syslog.py/WatchedGroup.send
5,077
@classmethod
def getConfig(cls):
    """Generate config from command line arguments and config file."""
    # example_config = {
    #     "daemon": True,
    #     "run_once": False,
    #     "debug": False,
    #     "watchlist": [
    #         {"servers": [ {"host": "localhost", "port": 514} ],
    #          "watchfiles": [
    #             {"tag": "anaconda",
    #              "log_type": "anaconda",
    #              "files": ["/tmp/anaconda.log",
    #                        "/mnt/sysimage/root/install.log"]
    #              }
    #             ]
    #          }
    #         ]
    #     }
    default_config = {"daemon": True,
                      "run_once": False,
                      "debug": False,
                      "hostname": cls._getHostname(),
                      "watchlist": []
                      }
    # First use default config as running config.
    config = dict(default_config)
    # Get command line options and validate it.
    cmdline = cls.cmdlineParse()[0]
    # Check config file source and read it.
    if cmdline.config_file or cmdline.stdin_config:
        try:
            if cmdline.stdin_config is True:
                fo = sys.stdin
            else:
                fo = open(cmdline.config_file, 'r')
            parsed_config = json.load(fo)
            if cmdline.debug:
                print(parsed_config)
        except IOError:  # Raised if IO operations failed.
            main_logger.error("Can not read config file %s\n" %
                              cmdline.config_file)
            exit(1)
        except __HOLE__ as e:  # Raised if json parsing failed.
            main_logger.error("Can not parse config file. %s\n" %
                              e.message)
            exit(1)
        # Validate config from config file.
        cls.configValidate(parsed_config)
        # Copy gathered config from config file to running config
        # structure.
        for key, value in parsed_config.items():
            config[key] = value
    else:
        # If no config file specified use watchlist setting from
        # command line.
        watchlist = {"servers": [{"host": cmdline.host,
                                  "port": cmdline.port}],
                     "watchfiles": [{"tag": cmdline.tag,
                                     "log_type": cmdline.log_type,
                                     "files": cmdline.watchfiles}]}
        config['watchlist'].append(watchlist)

    # Apply behavioural command line options to running config.
    if cmdline.no_daemon:
        config["daemon"] = False
    if cmdline.run_once:
        config["run_once"] = True
    if cmdline.debug:
        config["debug"] = True
    return config
ValueError
dataset/ETHPy150Open openstack/bareon/contrib/fuel_bootstrap/files/trusty/usr/bin/send2syslog.py/Config.getConfig
5,078
def load_user_config():
    """
    Returns the gitver's configuration: tries to read the stored
    configuration file and merges it with the default one, ensuring a valid
    configuration is always returned.
    """
    try:
        with open(CFGFILE, 'r') as f:
            data = ''
            for line in f:
                l = line.strip()
                if not l.startswith('#'):
                    data += l
            user = json.loads(data)
    except IOError:
        user = dict()
    except (ValueError, __HOLE__) as v:
        term.err("An error occured parsing the configuration file \"" +
                 CFGFILE + "\": " + v.message +
                 "\nPlease check its syntax or rename it and generate the "
                 "default one with the " + bold("gitver init") + " command.")
        sys.exit(1)

    # merge user with defaults
    return dict(default_config, **user)
KeyError
dataset/ETHPy150Open manuelbua/gitver/gitver/config.py/load_user_config
5,079
def __init__(self, parent=None):
    QtWidgets.QTableWidget.__init__(self, parent)
    self.setColumnCount(6)
    self.setHorizontalHeaderLabels(
        ["Type", "File name", "Line", "Description", 'Details'])
    try:
        # pyqt4
        self.horizontalHeader().setResizeMode(
            QtWidgets.QHeaderView.ResizeToContents)
        self.horizontalHeader().setResizeMode(
            COL_MSG, QtWidgets.QHeaderView.Stretch)
    except __HOLE__:
        # pyqt5
        self.horizontalHeader().setSectionResizeMode(
            QtWidgets.QHeaderView.ResizeToContents)
        self.horizontalHeader().setSectionResizeMode(
            COL_MSG, QtWidgets.QHeaderView.Stretch)
    self.setMinimumSize(900, 200)
    self.itemActivated.connect(self._on_item_activated)
    self.setSelectionMode(self.SingleSelection)
    self.setSelectionBehavior(self.SelectRows)
    self.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
    self.customContextMenuRequested.connect(self._show_context_menu)
    self.context_mnu = QtWidgets.QMenu()
    self.action_details = QtWidgets.QAction(_('View details'), self)
    self.action_details.triggered.connect(self.showDetails)
    self.action_copy = QtWidgets.QAction(_('Copy error'), self)
    self.action_copy.triggered.connect(self._copy_cell_text)
    self.context_mnu.addAction(self.action_details)
    self.context_mnu.addAction(self.action_copy)
    self.clear()
AttributeError
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/pyqode/core/widgets/errors_table.py/ErrorsTable.__init__
5,080
def fromTable(self, data):
    try:
        (nmatches, nmismatches, nrepmatches, nns,
         query_ngaps_counts, query_ngaps_bases,
         sbjct_ngaps_counts, sbjct_ngaps_bases,
         strand,
         query_id, query_length, query_from, query_to,
         sbjct_id, sbjct_length, sbjct_from, sbjct_to,
         nblocks, block_sizes,
         query_block_starts, sbjct_block_starts) = data[:21]
    except __HOLE__:
        raise ParsingError("parsing error: %i fields" % len(data),
                           "\t".join(data))

    nmatches, nmismatches, nrepmatches, nns = map(
        int, (nmatches, nmismatches, nrepmatches, nns))

    self.mNMatches = nmatches
    self.mNMismatches = nmismatches
    self.mNRepMatches = nrepmatches
    self.mNns = nns
    self.mQueryNGapsCounts = int(query_ngaps_counts)
    self.mQueryNGapsBases = int(query_ngaps_bases)
    self.mSbjctNGapsCounts = int(sbjct_ngaps_counts)
    self.mSbjctNGapsBases = int(sbjct_ngaps_bases)
    self.strand = strand
    self.mQueryId = query_id
    self.mQueryLength = int(query_length)
    self.mQueryFrom = int(query_from)
    self.mQueryTo = int(query_to)
    self.mSbjctId = sbjct_id
    self.mSbjctLength = int(sbjct_length)
    self.mSbjctFrom = int(sbjct_from)
    self.mSbjctTo = int(sbjct_to)
    self.mNBlocks = int(nblocks)
    self.mBlockSizes = map(int, block_sizes[:-1].split(","))
    self.mQueryBlockStarts = map(int, query_block_starts[:-1].split(","))
    self.mSbjctBlockStarts = map(int, sbjct_block_starts[:-1].split(","))

    # this makes sure that the block positions are rescaled
    if self.mQueryLength != 0:
        self.mQueryCoverage = 100.0 * \
            (self.mNMismatches + self.mNMatches) / self.mQueryLength
    else:
        self.mQueryCoverage = 0

    if self.mSbjctLength != 0:
        self.mSbjctCoverage = 100.0 * \
            (self.mNMismatches + self.mNMatches) / self.mSbjctLength
    else:
        self.mSbjctCoverage = 0

    if nmatches + nmismatches > 0:
        self.mPid = 100.0 * float(nmatches) / (nmatches + nmismatches)
    else:
        self.mPid = 100.0
ValueError
dataset/ETHPy150Open CGATOxford/cgat/obsolete/BlatTest.py/Match.fromTable
5,081
def fromTable(self, data):
    Match.fromTable(self, data)
    try:
        query_sequence, sbjct_sequence = data[21:23]
    except __HOLE__:
        raise ParsingError("parsing error", "\t".join(data))

    self.mQuerySequence = query_sequence[:-1].split(",")
    self.mSbjctSequence = sbjct_sequence[:-1].split(",")
ValueError
dataset/ETHPy150Open CGATOxford/cgat/obsolete/BlatTest.py/MatchPSLX.fromTable
5,082
def next(self):
    try:
        return self.mIterator.next()
    except __HOLE__:
        return None
StopIteration
dataset/ETHPy150Open CGATOxford/cgat/obsolete/BlatTest.py/BlatIterator.next
5,083
def iterator_test(infile, report_step=100000):
    '''only output parseable lines from infile.'''

    ninput, noutput, nerrors = 0, 0, 0

    while 1:
        try:
            x = infile.next()
        except ParsingError, msg:
            nerrors += 1
            ninput += 1
            E.warn(str(msg))
            continue
        except __HOLE__:
            break

        if not x:
            break

        ninput += 1

        if ninput % report_step == 0:
            E.info("progress: ninput=%i, noutput=%i" % (ninput, noutput))

        yield x
        noutput += 1

    E.info("iterator_test: ninput=%i, noutput=%i, nerrors=%i" %
           (ninput, noutput, nerrors))
StopIteration
dataset/ETHPy150Open CGATOxford/cgat/obsolete/BlatTest.py/iterator_test
5,084
def _read_eeglab_events(eeg, event_id=None, event_id_func='strip_to_integer'):
    """Create events array from EEGLAB structure

    An event array is constructed by looking up events in the
    event_id, trying to reduce them to their integer part otherwise, and
    entirely dropping them (with a warning) if this is impossible.
    Returns a 1x3 array of zeros if no events are found."""
    if event_id_func is 'strip_to_integer':
        event_id_func = _strip_to_integer
    if event_id is None:
        event_id = dict()

    types = [event.type for event in eeg.event]
    latencies = [event.latency for event in eeg.event]
    if "boundary" in types and "boundary" not in event_id:
        warn("The data contains 'boundary' events, indicating data "
             "discontinuities. Be cautious of filtering and epoching around "
             "these events.")

    not_in_event_id = set(x for x in types if x not in event_id)
    not_purely_numeric = set(x for x in not_in_event_id if not x.isdigit())
    no_numbers = set([x for x in not_purely_numeric
                      if not any([d.isdigit() for d in x])])
    have_integers = set([x for x in not_purely_numeric
                         if x not in no_numbers])
    if len(not_purely_numeric) > 0:
        basewarn = "Events like the following will be dropped"
        n_no_numbers, n_have_integers = len(no_numbers), len(have_integers)
        if n_no_numbers > 0:
            no_num_warm = " entirely: {0}, {1} in total"
            warn(basewarn + no_num_warm.format(list(no_numbers)[:5],
                                               n_no_numbers))
        if n_have_integers > 0 and event_id_func is None:
            intwarn = (", but could be reduced to their integer part "
                       "instead with the default `event_id_func`: "
                       "{0}, {1} in total")
            warn(basewarn + intwarn.format(list(have_integers)[:5],
                                           n_have_integers))

    events = list()
    for tt, latency in zip(types, latencies):
        try:  # look up the event in event_id and if not, try event_id_func
            event_code = event_id[tt] if tt in event_id else event_id_func(tt)
            events.append([int(latency), 1, event_code])
        except (ValueError, __HOLE__):  # if event_id_func fails
            pass  # We're already raising warnings above, so we just drop

    if len(events) < len(types):
        warn("Some event codes could not be mapped to integers. Use the "
             "`event_id` parameter to map such events to integers manually.")

    if len(events) < 1:
        warn("No events found, consider adding an `event_id`. As is, the "
             "trigger channel will consist entirely of zeros.")
        return np.zeros((0, 3))
    else:
        return np.asarray(events)
TypeError
dataset/ETHPy150Open mne-tools/mne-python/mne/io/eeglab/eeglab.py/_read_eeglab_events
5,085
def execute(self):
    res = 0
    try:
        # Be careful that generator terminates or this will iterate forever
        while True:
            self.logger.debug("About to call step()")
            res = self.step()
            self.logger.debug("Result is %d" % (res))
    except __HOLE__:
        # Raised when tasklogic() "runs off the end" (terminates)
        pass

    # Return final result
    return res
StopIteration
dataset/ETHPy150Open ejeschke/ginga/ginga/misc/tests/test_Task.py/stepTask.execute
5,086
def serve():
    server = helloworld_pb2.beta_create_Greeter_server(Greeter())
    server.add_insecure_port('[::]:50051')
    server.start()
    try:
        while True:
            time.sleep(_ONE_DAY_IN_SECONDS)
    except __HOLE__:
        server.stop()
KeyboardInterrupt
dataset/ETHPy150Open Akagi201/learning-python/grpc/helloworld/greeter_server.py/serve
5,087
def open(self, mode):
    fn = self._fn
    mock_target = self

    class Buffer(BytesIO):
        # Just to be able to do writing + reading from the same buffer

        _write_line = True

        def set_wrapper(self, wrapper):
            self.wrapper = wrapper

        def write(self, data):
            if mock_target._mirror_on_stderr:
                if self._write_line:
                    sys.stderr.write(fn + ": ")
                if six.binary_type:
                    sys.stderr.write(data.decode('utf8'))
                else:
                    sys.stderr.write(data)
                if (data[-1]) == '\n':
                    self._write_line = True
                else:
                    self._write_line = False
            super(Buffer, self).write(data)

        def close(self):
            if mode == 'w':
                try:
                    mock_target.wrapper.flush()
                except __HOLE__:
                    pass
                mock_target.fs.get_all_data()[fn] = self.getvalue()
            super(Buffer, self).close()

        def __exit__(self, exc_type, exc_val, exc_tb):
            if not exc_type:
                self.close()

        def __enter__(self):
            return self

        def readable(self):
            return mode == 'r'

        def writeable(self):
            return mode == 'w'

        def seekable(self):
            return False

    if mode == 'w':
        wrapper = self.format.pipe_writer(Buffer())
        wrapper.set_wrapper(wrapper)
        return wrapper
    else:
        return self.format.pipe_reader(Buffer(self.fs.get_all_data()[fn]))
AttributeError
dataset/ETHPy150Open spotify/luigi/luigi/mock.py/MockTarget.open
5,088
def separate_range_build_list(dir_of_logs, unbzipped_logs, start_datetime, to_datetime, server_vers):
    # todo - this is a bit big and nasty, but good enough for now
    """Opens current debug.log and un-bz'd logs and builds new list of
    loglines that fall within our reporting period.

    Returns two lists of strings: one for bandwidth, other for eventual
    parsing of filetypes served, device os/model, and ips devices
    accessed from."""
    our_range_logline_str_list, bandwidth_lines_list, filetype_lines_list, service_restart_timestamps = [], [], [], []
    try:
        with open(os.path.join(dir_of_logs, 'Debug.log'), 'rU') as current, open(unbzipped_logs, 'rU') as unzipped:
            for f in current, unzipped:
                for line in f:
                    if line[:23] > start_datetime:
                        if line[:23] < to_datetime:
                            our_range_logline_str_list.append(line)
    except __HOLE__ as e:
        print 'Operation failed: %s' % e.strerror
        sys.exit(4)
    # currently just resetting start_time if service was restarted to most current occurance
    # and informing with datetime in report (with the proper format to add --through option)
    more_recent_svc_hup = False
    for logline_str in our_range_logline_str_list:
        if 'Registration succeeded. Resuming server.' in logline_str:
            service_restart_timestamps.append(logline_str[:23])
            more_recent_svc_hup = True
            new_start_datetime = max(service_restart_timestamps)
    # todo - should probably be less repetition below
    excludes = ['egist', 'public', 'peers', 'Opened', 'EC', 'Bad']
    filetypes = ['ipa', 'epub', 'pkg', 'zip']
    if more_recent_svc_hup:
        for logline_str in our_range_logline_str_list:
            if logline_str[:23] > new_start_datetime:
                if server_vers == 'Five':
                    if 'Served all' in logline_str:
                        bandwidth_lines_list.append(logline_str.split())
                    elif not any(x in logline_str for x in excludes):
                        if any(x in logline_str for x in filetypes):
                            filetype_lines_list.append(logline_str.split())
                else:
                    if 'start:' in logline_str:
                        bandwidth_lines_list.append(logline_str.split())
                    elif not any(x in logline_str for x in excludes):
                        if any(x in logline_str for x in filetypes):
                            filetype_lines_list.append(logline_str.split())
    else:
        new_start_datetime = start_datetime
        for logline_str in our_range_logline_str_list:
            if logline_str[:23] > start_datetime:
                if server_vers == 'Five':
                    if 'Served all' in logline_str:
                        bandwidth_lines_list.append(logline_str.split())
                    elif not any(x in logline_str for x in excludes):
                        if any(x in logline_str for x in filetypes):
                            filetype_lines_list.append(logline_str.split())
                else:
                    if 'start:' in logline_str:
                        bandwidth_lines_list.append(logline_str.split())
                    elif not any(x in logline_str for x in excludes):
                        if any(x in logline_str for x in filetypes):
                            filetype_lines_list.append(logline_str.split())
    return bandwidth_lines_list, filetype_lines_list, more_recent_svc_hup, new_start_datetime
IOError
dataset/ETHPy150Open macadmins/sashay/sashay.py/separate_range_build_list
5,089
def _get_current_migration_number(self, database):
    try:
        result = Migration.objects.using(
            database
        ).order_by('-migration_label')[0]
    except __HOLE__:
        return 0

    match = MIGRATION_NAME_RE.match(result.migration_label)
    return int(match.group(1))
IndexError
dataset/ETHPy150Open paltman-archive/nashvegas/nashvegas/management/commands/upgradedb.py/Command._get_current_migration_number
5,090
def init_nashvegas(self):
    # Copied from line 35 of django.core.management.commands.syncdb
    # Import the 'management' module within each installed app, to
    # register dispatcher events.
    for app_name in settings.INSTALLED_APPS:
        try:
            import_module(".management", app_name)
        except __HOLE__, exc:
            # This is slightly hackish. We want to ignore ImportErrors
            # if the "management" module itself is missing -- but we don't
            # want to ignore the exception if the management module exists
            # but raises an ImportError for some reason. The only way we
            # can do this is to check the text of the exception. Note that
            # we're a bit broad in how we check the text, because
            # different Python implementations may not use the same text.
            # CPython uses the text "No module named management"
            # PyPy uses "No module named myproject.myapp.management"
            msg = exc.args[0]
            if not msg.startswith("No module named") or "management" not in msg:
                raise

    # @@@ make cleaner / check explicitly for model instead of looping
    # over and doing string comparisons
    databases = self.databases or get_capable_databases()

    for database in databases:
        connection = connections[database]
        cursor = connection.cursor()
        all_new = get_sql_for_new_models(['nashvegas'], using=database)
        for lines in all_new:
            to_execute = "\n".join([
                l for l in lines.split("\n")
                if not l.startswith("### New Model: ")
            ])
            if not to_execute:
                continue
            cursor.execute(to_execute)
            transaction.commit_unless_managed(using=database)
ImportError
dataset/ETHPy150Open paltman-archive/nashvegas/nashvegas/management/commands/upgradedb.py/Command.init_nashvegas
5,091
def seed_migrations(self, stop_at=None):
    # @@@ the command-line interface needs to be re-thinked
    # TODO: this needs to be able to handle multi-db when you're
    # specifying stop_at
    if stop_at is None and self.args:
        stop_at = self.args[0]
    if stop_at:
        try:
            stop_at = int(stop_at)
        except ValueError:
            raise CommandError("Invalid --seed migration")
        except __HOLE__:
            raise CommandError(
                "Usage: ./manage.py upgradedb --seed [stop_at]"
            )

    all_migrations = get_pending_migrations(
        self.path, self.databases, stop_at=stop_at
    )
    for db, migrations in all_migrations.iteritems():
        for migration in migrations:
            migration_path = self._get_migration_path(db, migration)
            m, created = Migration.objects.using(db).get_or_create(
                migration_label=os.path.split(migration)[-1],
                content=open(migration_path, "rb").read()
            )
            if created:
                # this might have been executed prior to committing
                m.scm_version = self._get_rev(migration)
                m.save()
                print "%s:%s has been seeded" % (db, m.migration_label)
            else:
                print "%s:%s was already applied" % (
                    db, m.migration_label
                )
IndexError
dataset/ETHPy150Open paltman-archive/nashvegas/nashvegas/management/commands/upgradedb.py/Command.seed_migrations
5,092
def _get_default_migration_path(self):
    try:
        path = os.path.dirname(os.path.normpath(
            os.sys.modules[settings.SETTINGS_MODULE].__file__)
        )
    except __HOLE__:
        path = os.getcwd()
    return os.path.join(path, "migrations")
KeyError
dataset/ETHPy150Open paltman-archive/nashvegas/nashvegas/management/commands/upgradedb.py/Command._get_default_migration_path
5,093
def _resolve_name(name, package, level):
    """Return the absolute name of the module to be imported."""
    if not hasattr(package, 'rindex'):
        raise ValueError("'package' not set to a string")
    dot = len(package)
    for x in range(level, 1, -1):
        try:
            dot = package.rindex('.', 0, dot)
        except __HOLE__:
            raise ValueError("attempted relative import beyond top-level "
                             "package")
    return "%s.%s" % (package[:dot], name)
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/utils/importlib.py/_resolve_name
5,094
def rischDE(fa, fd, ga, gd, DE):
    """
    Solve a Risch Differential Equation: Dy + f*y == g.

    See the outline in the docstring of rde.py for more information
    about the procedure used.  Either raise NonElementaryIntegralException,
    in which case there is no solution y in the given differential field,
    or return y in k(t) satisfying Dy + f*y == g, or raise
    NotImplementedError, in which case, the algorithms necessary to
    solve the given Risch Differential Equation have not yet been
    implemented.
    """
    _, (fa, fd) = weak_normalizer(fa, fd, DE)
    a, (ba, bd), (ca, cd), hn = normal_denom(fa, fd, ga, gd, DE)
    A, B, C, hs = special_denom(a, ba, bd, ca, cd, DE)
    try:
        # Until this is fully implemented, use oo. Note that this will almost
        # certainly cause non-termination in spde() (unless A == 1), and
        # *might* lead to non-termination in the next step for a nonelementary
        # integral (I don't know for certain yet). Fortunately, spde() is
        # currently written recursively, so this will just give
        # RuntimeError: maximum recursion depth exceeded.
        n = bound_degree(A, B, C, DE)
    except __HOLE__:
        # Useful for debugging:
        # import warnings
        # warnings.warn("rischDE: Proceeding with n = oo; may cause "
        #     "non-termination.")
        n = oo

    B, C, m, alpha, beta = spde(A, B, C, n, DE)
    if C.is_zero:
        y = C
    else:
        y = solve_poly_rde(B, C, m, DE)

    return (alpha*y + beta, hn*hs)
NotImplementedError
dataset/ETHPy150Open sympy/sympy/sympy/integrals/rde.py/rischDE
5,095
def parse_markdown_readme():
    """
    Convert README.md to RST via pandoc, and load into memory
    (fallback to LONG_DESCRIPTION on failure)
    """
    try:
        subprocess.call(
            ['pandoc', '-t', 'rst', '-o', 'README.rst', 'README.md']
        )
    except __HOLE__:
        return LONG_DESCRIPTION

    # Attempt to load output
    try:
        readme = open('README.rst')
    except IOError:
        return LONG_DESCRIPTION
    else:
        return readme.read()
OSError
dataset/ETHPy150Open wq/django-rest-pandas/setup.py/parse_markdown_readme
5,096
def setMax(self, maximum):
    """Set the maximum value of the Slider. If the current value of
    the Slider is out of new bounds, the value is set to new minimum.

    @param maximum:
               new maximum value of the Slider
    """
    self._max = maximum
    try:
        if float(str( self.getValue() )) > maximum:
            super(Slider, self).setValue( float(maximum) )
    except __HOLE__:  # ClassCastException
        # FIXME: Handle exception
        # Where does ClassCastException come from? Can't see any casts
        # above
        super(Slider, self).setValue( float(maximum) )
    self.requestRepaint()
ValueError
dataset/ETHPy150Open rwl/muntjac/muntjac/ui/slider.py/Slider.setMax
5,097
def setMin(self, minimum):
    """Set the minimum value of the Slider. If the current value of
    the Slider is out of new bounds, the value is set to new minimum.

    @param minimum:
               New minimum value of the Slider.
    """
    self._min = minimum
    try:
        if float( str(self.getValue()) ) < minimum:
            super(Slider, self).setValue(float(minimum))
    except __HOLE__:  # ClassCastException
        # FIXME: Handle exception
        # Where does ClassCastException come from? Can't see any casts
        # above
        super(Slider, self).setValue(float(minimum))
    self.requestRepaint()
ValueError
dataset/ETHPy150Open rwl/muntjac/muntjac/ui/slider.py/Slider.setMin
5,098
def setup(self, shell):
    """Install auto-completions for the appropriate shell.

    Args:
        shell (str):
            String specifying name of shell for which auto-completions
            will be installed for.
    """
    system = platform.system()
    script = resource_string('rbtools', self.SHELLS[shell][system]['src'])
    dest = os.path.join(self.SHELLS[shell][system]['dest'],
                        self.SHELLS[shell][system]['filename'])

    try:
        with open(dest, 'w') as f:
            f.write(script)
    except __HOLE__ as e:
        logging.error('I/O Error (%s): %s' % (e.errno, e.strerror))
        sys.exit()

    print('Successfully installed %s auto-completions.' % shell)
    print('Restart the terminal for completions to work.')
IOError
dataset/ETHPy150Open reviewboard/rbtools/rbtools/commands/setup_completion.py/SetupCompletion.setup
5,099
def view_docs(browser=None):
    """A script (``openmdao docs``) points to this. It just pops up a browser
    to view the openmdao Sphinx docs. If the docs are not already built, it
    builds them before viewing; but if the docs already exist, it's not smart
    enough to rebuild them if they've changed since the last build.

    If this is run from a non-developer install (i.e., there is no local copy
    of the docs), it just looks for the docs on the ``openmdao.org`` website.
    """
    if not browser:
        for arg in sys.argv:
            if arg.startswith('--browser='):
                browser = arg.split('=')[-1].strip()

    try:
        import openmdao.devtools.build_docs as build_docs
    except __HOLE__:
        idxpath = "file://"+os.path.join(os.path.dirname(openmdao.main.__file__),
                                         "docs", "index.html")
        wb = webbrowser.get(browser)
        wb.open(idxpath)
    else:
        build_docs.view_docs(browser)
ImportError
dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/openmdao.util/src/openmdao/util/view_docs.py/view_docs