Column schema:

    Unnamed: 0   int64           0 – 10k
    function     stringlengths   79 – 138k
    label        stringclasses   20 values
    info         stringlengths   42 – 261
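The records that follow pair a Python function in which one exception type has been masked as `__HOLE__` (`function`) with the exception class that was removed (`label`) and a provenance path into the ETHPy150Open corpus (`info`). As a minimal sketch of how such a table could be loaded and inspected — the filename `holes.csv` and a CSV export with these column names are assumptions for illustration, not part of the dataset:

    import pandas as pd

    # Hypothetical export of the table below; the real artifact layout is
    # not specified in this dump.
    df = pd.read_csv("holes.csv")

    sample = df.iloc[0]
    assert "__HOLE__" in sample["function"]  # one masked exception type per row
    print(sample["label"])                   # e.g. "ValueError"
    print(sample["info"])                    # provenance path in the corpus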
5,700
@jsexpose(body_cls=ApiKeyAPI, status_code=http_client.CREATED)
@request_user_has_resource_api_permission(permission_type=PermissionType.API_KEY_CREATE)
def post(self, api_key_api):
    """
    Create a new entry or update an existing one.
    """
    api_key_db = None
    try:
        api_key_api.user = self._get_user()
        api_key, api_key_hash = auth_util.generate_api_key_and_hash()
        # store key_hash in DB
        api_key_api.key_hash = api_key_hash
        api_key_db = ApiKey.add_or_update(ApiKeyAPI.to_model(api_key_api))
    except (ValidationError, __HOLE__) as e:
        LOG.exception('Validation failed for api_key data=%s.', api_key_api)
        abort(http_client.BAD_REQUEST, str(e))

    extra = {'api_key_db': api_key_db}
    LOG.audit('ApiKey created. ApiKey.id=%s' % (api_key_db.id), extra=extra)

    api_key_create_response_api = ApiKeyCreateResponseAPI.from_model(api_key_db)
    # Return real api_key back to user. A one-way hash of the api_key is stored in the DB
    # only the real value only returned at create time. Also, no masking of key here since
    # the user needs to see this value atleast once.
    api_key_create_response_api.key = api_key

    return api_key_create_response_api
ValueError
dataset/ETHPy150Open StackStorm/st2/st2api/st2api/controllers/v1/auth.py/ApiKeyController.post
5,701
def __init__(self, args, operating_system):
    """Create an instance of the controller passing in the debug flag,
    the options and arguments from the cli parser.

    :param argparse.Namespace args: Command line arguments
    :param str operating_system: Operating system name from helper.platform

    """
    self.set_state(self.STATE_INITIALIZING)
    self.args = args
    self.debug = args.foreground
    try:
        self.config = config.Config(args.config)
    except __HOLE__:
        sys.exit(1)
    self.logging_config = config.LoggingConfig(self.config.logging,
                                               self.debug)
    self.operating_system = operating_system
ValueError
dataset/ETHPy150Open gmr/helper/helper/controller.py/Controller.__init__
5,702
def __del__(self):
    import threading

    key = object.__getattribute__(self, '_local__key')

    try:
        threads = list(threading.enumerate())
    except:
        return

    for thread in threads:
        try:
            __dict__ = thread.__dict__
        except AttributeError:
            continue

        if key in __dict__:
            try:
                del __dict__[key]
            except __HOLE__:
                pass
KeyError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/dist/_threading_local.py/local.__del__
5,703
def __init__(self, *args, **kwargs):
    """
    Set up some defaults and check for a ``default_related_model``
    attribute for the ``to`` argument.
    """
    kwargs.setdefault("object_id_field", "object_pk")
    to = getattr(self, "default_related_model", None)
    # Avoid having both a positional arg and a keyword arg for
    # the parameter ``to``
    if to and not args:
        kwargs.setdefault("to", to)
    try:
        # Check if ``related_model`` has been modified by a subclass
        self.related_model
    except (AppRegistryNotReady, __HOLE__):
        # if not, all is good
        super(BaseGenericRelation, self).__init__(*args, **kwargs)
    else:
        # otherwise, warn the user to stick to the new (as of 4.0)
        # ``default_related_model`` attribute
        raise ImproperlyConfigured("BaseGenericRelation changed the "
            "way it handled a default ``related_model`` in mezzanine "
            "4.0. Please override ``default_related_model`` instead "
            "and do not tamper with django's ``related_model`` "
            "property anymore.")
AttributeError
dataset/ETHPy150Open stephenmcd/mezzanine/mezzanine/generic/fields.py/BaseGenericRelation.__init__
5,704
def related_items_changed(self, instance, related_manager):
    """
    Stores the number of comments. A custom ``count_filter``
    queryset gets checked for, allowing managers to implement
    custom count logic.
    """
    try:
        count = related_manager.count_queryset()
    except __HOLE__:
        count = related_manager.count()
    count_field_name = list(self.fields.keys())[0] % \
        self.related_field_name
    setattr(instance, count_field_name, count)
    instance.save()
AttributeError
dataset/ETHPy150Open stephenmcd/mezzanine/mezzanine/generic/fields.py/CommentsField.related_items_changed
5,705
def contribute_to_class(self, cls, name):
    """
    Swap out any reference to ``KeywordsField`` with the
    ``KEYWORDS_FIELD_string`` field in ``search_fields``.
    """
    super(KeywordsField, self).contribute_to_class(cls, name)
    string_field_name = list(self.fields.keys())[0] % \
        self.related_field_name
    if hasattr(cls, "search_fields") and name in cls.search_fields:
        try:
            weight = cls.search_fields[name]
        except __HOLE__:
            # search_fields is a sequence.
            index = cls.search_fields.index(name)
            search_fields_type = type(cls.search_fields)
            cls.search_fields = list(cls.search_fields)
            cls.search_fields[index] = string_field_name
            cls.search_fields = search_fields_type(cls.search_fields)
        else:
            del cls.search_fields[name]
            cls.search_fields[string_field_name] = weight
TypeError
dataset/ETHPy150Open stephenmcd/mezzanine/mezzanine/generic/fields.py/KeywordsField.contribute_to_class
5,706
def get_redis_backend():
    """Connect to redis from a string like CACHE_BACKEND."""
    # From django-redis-cache.
    server, params = parse_backend_uri(settings.REDIS_BACKEND)
    db = params.pop('db', 0)
    try:
        db = int(db)
    except (ValueError, __HOLE__):
        db = 0
    try:
        socket_timeout = float(params.pop('socket_timeout'))
    except (KeyError, ValueError):
        socket_timeout = None
    password = params.pop('password', None)
    if ':' in server:
        host, port = server.split(':')
        try:
            port = int(port)
        except (ValueError, TypeError):
            port = 6379
    else:
        host = 'localhost'
        port = 6379
    return redislib.Redis(host=host, port=port, db=db,
                          password=password, socket_timeout=socket_timeout)
TypeError
dataset/ETHPy150Open django-cache-machine/django-cache-machine/caching/invalidation.py/get_redis_backend
5,707
def add_lazy_relation(cls, field, relation, operation):
    """
    Adds a lookup on ``cls`` when a related field is defined using a string,
    i.e.::

        class MyModel(Model):
            fk = ForeignKey("AnotherModel")

    This string can be:

        * RECURSIVE_RELATIONSHIP_CONSTANT (i.e. "self") to indicate a
          recursive relation.

        * The name of a model (i.e "AnotherModel") to indicate another model
          in the same app.

        * An app-label and model name (i.e. "someapp.AnotherModel") to
          indicate another model in a different app.

    If the other model hasn't yet been loaded -- almost a given if you're
    using lazy relationships -- then the relation won't be set up until the
    class_prepared signal fires at the end of model initialization.

    operation is the work that must be performed once the relation can be
    resolved.
    """
    # Check for recursive relations
    if relation == RECURSIVE_RELATIONSHIP_CONSTANT:
        app_label = cls._meta.app_label
        model_name = cls.__name__
    else:
        # Look for an "app.Model" relation
        if isinstance(relation, six.string_types):
            try:
                app_label, model_name = relation.split(".")
            except __HOLE__:
                # If we can't split, assume a model in current app
                app_label = cls._meta.app_label
                model_name = relation
        else:
            # it's actually a model class
            app_label = relation._meta.app_label
            model_name = relation._meta.object_name

    # Try to look up the related model, and if it's already loaded resolve the
    # string right away. If get_model returns None, it means that the related
    # model isn't loaded yet, so we need to pend the relation until the class
    # is prepared.
    model = get_model(app_label, model_name,
                      seed_cache=False, only_installed=False)
    if model:
        operation(field, model, cls)
    else:
        key = (app_label, model_name)
        value = (cls, field, operation)
        pending_lookups.setdefault(key, []).append(value)
ValueError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/db/models/fields/related.py/add_lazy_relation
5,708
def __get__(self, instance, instance_type=None):
    if instance is None:
        return self
    try:
        rel_obj = getattr(instance, self.cache_name)
    except __HOLE__:
        related_pk = instance._get_pk_val()
        if related_pk is None:
            rel_obj = None
        else:
            params = {}
            for lh_field, rh_field in self.related.field.related_fields:
                params['%s__%s' % (self.related.field.name, rh_field.name)] = getattr(instance, rh_field.attname)
            try:
                rel_obj = self.get_queryset(instance=instance).get(**params)
            except self.related.model.DoesNotExist:
                rel_obj = None
            else:
                setattr(rel_obj, self.related.field.get_cache_name(), instance)
        setattr(instance, self.cache_name, rel_obj)
    if rel_obj is None:
        raise self.related.model.DoesNotExist("%s has no %s." % (
            instance.__class__.__name__,
            self.related.get_accessor_name()))
    else:
        return rel_obj
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/db/models/fields/related.py/SingleRelatedObjectDescriptor.__get__
5,709
def __get__(self, instance, instance_type=None):
    if instance is None:
        return self
    try:
        rel_obj = getattr(instance, self.cache_name)
    except __HOLE__:
        val = self.field.get_local_related_value(instance)
        if None in val:
            rel_obj = None
        else:
            params = dict(
                (rh_field.attname, getattr(instance, lh_field.attname))
                for lh_field, rh_field in self.field.related_fields)
            qs = self.get_queryset(instance=instance)
            extra_filter = self.field.get_extra_descriptor_filter(instance)
            if isinstance(extra_filter, dict):
                params.update(extra_filter)
                qs = qs.filter(**params)
            else:
                qs = qs.filter(extra_filter, **params)
            # Assuming the database enforces foreign keys, this won't fail.
            rel_obj = qs.get()
            if not self.field.rel.multiple:
                setattr(rel_obj, self.field.related.get_cache_name(), instance)
        setattr(instance, self.cache_name, rel_obj)
    if rel_obj is None and not self.field.null:
        raise self.field.rel.to.DoesNotExist(
            "%s has no %s." % (self.field.model.__name__, self.field.name))
    else:
        return rel_obj
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/db/models/fields/related.py/ReverseSingleRelatedObjectDescriptor.__get__
5,710
def __set__(self, instance, value):
    # If null=True, we can assign null here, but otherwise the value needs
    # to be an instance of the related class.
    if value is None and self.field.null == False:
        raise ValueError('Cannot assign None: "%s.%s" does not allow null values.' %
                         (instance._meta.object_name, self.field.name))
    elif value is not None and not isinstance(value, self.field.rel.to):
        raise ValueError('Cannot assign "%r": "%s.%s" must be a "%s" instance.' %
                         (value, instance._meta.object_name,
                          self.field.name, self.field.rel.to._meta.object_name))
    elif value is not None:
        if instance._state.db is None:
            instance._state.db = router.db_for_write(instance.__class__, instance=value)
        elif value._state.db is None:
            value._state.db = router.db_for_write(value.__class__, instance=instance)
        elif value._state.db is not None and instance._state.db is not None:
            if not router.allow_relation(value, instance):
                raise ValueError('Cannot assign "%r": the current database router prevents this relation.' % value)

    # If we're setting the value of a OneToOneField to None, we need to clear
    # out the cache on any old related object. Otherwise, deleting the
    # previously-related object will also cause this object to be deleted,
    # which is wrong.
    if value is None:
        # Look up the previously-related object, which may still be available
        # since we've not yet cleared out the related field.
        # Use the cache directly, instead of the accessor; if we haven't
        # populated the cache, then we don't care - we're only accessing
        # the object to invalidate the accessor cache, so there's no
        # need to populate the cache just to expire it again.
        related = getattr(instance, self.cache_name, None)

        # If we've got an old related object, we need to clear out its
        # cache. This cache also might not exist if the related object
        # hasn't been accessed yet.
        if related is not None:
            setattr(related, self.field.related.get_cache_name(), None)

    # Set the value of the related field
    for lh_field, rh_field in self.field.related_fields:
        try:
            setattr(instance, lh_field.attname, getattr(value, rh_field.attname))
        except __HOLE__:
            setattr(instance, lh_field.attname, None)

    # Since we already know what the related object is, seed the related
    # object caches now, too. This avoids another db hit if you get the
    # object you just set.
    setattr(instance, self.cache_name, value)
    if value is not None and not self.field.rel.multiple:
        setattr(value, self.field.related.get_cache_name(), instance)
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/db/models/fields/related.py/ReverseSingleRelatedObjectDescriptor.__set__
5,711
@cached_property
def related_manager_cls(self):
    # Dynamically create a class that subclasses the related model's default
    # manager.
    superclass = self.related.model._default_manager.__class__
    rel_field = self.related.field
    rel_model = self.related.model

    class RelatedManager(superclass):
        def __init__(self, instance):
            super(RelatedManager, self).__init__()
            self.instance = instance
            self.core_filters = {'%s__exact' % rel_field.name: instance}
            self.model = rel_model

        def get_queryset(self):
            try:
                return self.instance._prefetched_objects_cache[rel_field.related_query_name()]
            except (AttributeError, __HOLE__):
                db = self._db or router.db_for_read(self.model, instance=self.instance)
                qs = super(RelatedManager, self).get_queryset().using(db).filter(**self.core_filters)
                empty_strings_as_null = connections[db].features.interprets_empty_strings_as_nulls
                for field in rel_field.foreign_related_fields:
                    val = getattr(self.instance, field.attname)
                    if val is None or (val == '' and empty_strings_as_null):
                        return qs.none()
                qs._known_related_objects = {rel_field: {self.instance.pk: self.instance}}
                return qs

        def get_prefetch_queryset(self, instances):
            rel_obj_attr = rel_field.get_local_related_value
            instance_attr = rel_field.get_foreign_related_value
            instances_dict = dict((instance_attr(inst), inst) for inst in instances)
            db = self._db or router.db_for_read(self.model, instance=instances[0])
            query = {'%s__in' % rel_field.name: instances}
            qs = super(RelatedManager, self).get_queryset().using(db).filter(**query)
            # Since we just bypassed this class' get_queryset(), we must manage
            # the reverse relation manually.
            for rel_obj in qs:
                instance = instances_dict[rel_obj_attr(rel_obj)]
                setattr(rel_obj, rel_field.name, instance)
            cache_name = rel_field.related_query_name()
            return qs, rel_obj_attr, instance_attr, False, cache_name

        def add(self, *objs):
            for obj in objs:
                if not isinstance(obj, self.model):
                    raise TypeError("'%s' instance expected, got %r" %
                                    (self.model._meta.object_name, obj))
                setattr(obj, rel_field.name, self.instance)
                obj.save()
        add.alters_data = True

        def create(self, **kwargs):
            kwargs[rel_field.name] = self.instance
            db = router.db_for_write(self.model, instance=self.instance)
            return super(RelatedManager, self.db_manager(db)).create(**kwargs)
        create.alters_data = True

        def get_or_create(self, **kwargs):
            # Update kwargs with the related object that this
            # ForeignRelatedObjectsDescriptor knows about.
            kwargs[rel_field.name] = self.instance
            db = router.db_for_write(self.model, instance=self.instance)
            return super(RelatedManager, self.db_manager(db)).get_or_create(**kwargs)
        get_or_create.alters_data = True

        # remove() and clear() are only provided if the ForeignKey can have a value of null.
        if rel_field.null:
            def remove(self, *objs):
                val = rel_field.get_foreign_related_value(self.instance)
                for obj in objs:
                    # Is obj actually part of this descriptor set?
                    if rel_field.get_local_related_value(obj) == val:
                        setattr(obj, rel_field.name, None)
                        obj.save()
                    else:
                        raise rel_field.rel.to.DoesNotExist("%r is not related to %r." % (obj, self.instance))
            remove.alters_data = True

            def clear(self):
                self.update(**{rel_field.name: None})
            clear.alters_data = True

    return RelatedManager
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/db/models/fields/related.py/ForeignRelatedObjectsDescriptor.related_manager_cls
5,712
def create_many_related_manager(superclass, rel):
    """Creates a manager that subclasses 'superclass' (which is a Manager)
    and adds behavior for many-to-many related objects."""
    class ManyRelatedManager(superclass):
        def __init__(self, model=None, query_field_name=None, instance=None, symmetrical=None,
                     source_field_name=None, target_field_name=None, reverse=False,
                     through=None, prefetch_cache_name=None):
            super(ManyRelatedManager, self).__init__()
            self.model = model
            self.query_field_name = query_field_name

            source_field = through._meta.get_field(source_field_name)
            source_related_fields = source_field.related_fields

            self.core_filters = {}
            for lh_field, rh_field in source_related_fields:
                self.core_filters['%s__%s' % (query_field_name, rh_field.name)] = getattr(instance, rh_field.attname)

            self.instance = instance
            self.symmetrical = symmetrical
            self.source_field = source_field
            self.source_field_name = source_field_name
            self.target_field_name = target_field_name
            self.reverse = reverse
            self.through = through
            self.prefetch_cache_name = prefetch_cache_name
            self.related_val = source_field.get_foreign_related_value(instance)
            # Used for single column related auto created models
            self._fk_val = self.related_val[0]
            if None in self.related_val:
                raise ValueError('"%r" needs to have a value for field "%s" before '
                                 'this many-to-many relationship can be used.' %
                                 (instance, source_field_name))
            # Even if this relation is not to pk, we require still pk value.
            # The wish is that the instance has been already saved to DB,
            # although having a pk value isn't a guarantee of that.
            if instance.pk is None:
                raise ValueError("%r instance needs to have a primary key value before "
                                 "a many-to-many relationship can be used." %
                                 instance.__class__.__name__)

        def _get_fk_val(self, obj, field_name):
            """
            Returns the correct value for this relationship's foreign key. This
            might be something else than pk value when to_field is used.
            """
            fk = self.through._meta.get_field(field_name)
            if fk.rel.field_name and fk.rel.field_name != fk.rel.to._meta.pk.attname:
                attname = fk.rel.get_related_field().get_attname()
                return fk.get_prep_lookup('exact', getattr(obj, attname))
            else:
                return obj.pk

        def get_queryset(self):
            try:
                return self.instance._prefetched_objects_cache[self.prefetch_cache_name]
            except (__HOLE__, KeyError):
                db = self._db or router.db_for_read(self.instance.__class__, instance=self.instance)
                return super(ManyRelatedManager, self).get_queryset().using(db)._next_is_sticky().filter(**self.core_filters)

        def get_prefetch_queryset(self, instances):
            instance = instances[0]
            db = self._db or router.db_for_read(instance.__class__, instance=instance)
            query = {'%s__in' % self.query_field_name: instances}
            qs = super(ManyRelatedManager, self).get_queryset().using(db)._next_is_sticky().filter(**query)

            # M2M: need to annotate the query in order to get the primary model
            # that the secondary model was actually related to. We know that
            # there will already be a join on the join table, so we can just add
            # the select.

            # For non-autocreated 'through' models, can't assume we are
            # dealing with PK values.
            fk = self.through._meta.get_field(self.source_field_name)
            join_table = self.through._meta.db_table
            connection = connections[db]
            qn = connection.ops.quote_name
            qs = qs.extra(select=dict(
                ('_prefetch_related_val_%s' % f.attname,
                 '%s.%s' % (qn(join_table), qn(f.column))) for f in fk.local_related_fields))
            return (qs,
                    lambda result: tuple([getattr(result, '_prefetch_related_val_%s' % f.attname) for f in fk.local_related_fields]),
                    lambda inst: tuple([getattr(inst, f.attname) for f in fk.foreign_related_fields]),
                    False,
                    self.prefetch_cache_name)

        # If the ManyToMany relation has an intermediary model,
        # the add and remove methods do not exist.
        if rel.through._meta.auto_created:
            def add(self, *objs):
                self._add_items(self.source_field_name, self.target_field_name, *objs)

                # If this is a symmetrical m2m relation to self, add the mirror entry in the m2m table
                if self.symmetrical:
                    self._add_items(self.target_field_name, self.source_field_name, *objs)
            add.alters_data = True

            def remove(self, *objs):
                self._remove_items(self.source_field_name, self.target_field_name, *objs)

                # If this is a symmetrical m2m relation to self, remove the mirror entry in the m2m table
                if self.symmetrical:
                    self._remove_items(self.target_field_name, self.source_field_name, *objs)
            remove.alters_data = True

        def clear(self):
            self._clear_items(self.source_field_name)

            # If this is a symmetrical m2m relation to self, clear the mirror entry in the m2m table
            if self.symmetrical:
                self._clear_items(self.target_field_name)
        clear.alters_data = True

        def create(self, **kwargs):
            # This check needs to be done here, since we can't later remove this
            # from the method lookup table, as we do with add and remove.
            if not self.through._meta.auto_created:
                opts = self.through._meta
                raise AttributeError("Cannot use create() on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name))
            db = router.db_for_write(self.instance.__class__, instance=self.instance)
            new_obj = super(ManyRelatedManager, self.db_manager(db)).create(**kwargs)
            self.add(new_obj)
            return new_obj
        create.alters_data = True

        def get_or_create(self, **kwargs):
            db = router.db_for_write(self.instance.__class__, instance=self.instance)
            obj, created = \
                super(ManyRelatedManager, self.db_manager(db)).get_or_create(**kwargs)
            # We only need to add() if created because if we got an object back
            # from get() then the relationship already exists.
            if created:
                self.add(obj)
            return obj, created
        get_or_create.alters_data = True

        def _add_items(self, source_field_name, target_field_name, *objs):
            # source_field_name: the PK fieldname in join table for the source object
            # target_field_name: the PK fieldname in join table for the target object
            # *objs - objects to add. Either object instances, or primary keys of object instances.

            # If there aren't any objects, there is nothing to do.
            from django.db.models import Model
            if objs:
                new_ids = set()
                for obj in objs:
                    if isinstance(obj, self.model):
                        if not router.allow_relation(obj, self.instance):
                            raise ValueError('Cannot add "%r": instance is on database "%s", value is on database "%s"' %
                                             (obj, self.instance._state.db, obj._state.db))
                        fk_val = self._get_fk_val(obj, target_field_name)
                        if fk_val is None:
                            raise ValueError('Cannot add "%r": the value for field "%s" is None' %
                                             (obj, target_field_name))
                        new_ids.add(self._get_fk_val(obj, target_field_name))
                    elif isinstance(obj, Model):
                        raise TypeError("'%s' instance expected, got %r" % (self.model._meta.object_name, obj))
                    else:
                        new_ids.add(obj)
                db = router.db_for_write(self.through, instance=self.instance)
                vals = self.through._default_manager.using(db).values_list(target_field_name, flat=True)
                vals = vals.filter(**{
                    source_field_name: self._fk_val,
                    '%s__in' % target_field_name: new_ids,
                })
                new_ids = new_ids - set(vals)

                if self.reverse or source_field_name == self.source_field_name:
                    # Don't send the signal when we are inserting the
                    # duplicate data row for symmetrical reverse entries.
                    signals.m2m_changed.send(sender=self.through, action='pre_add',
                        instance=self.instance, reverse=self.reverse,
                        model=self.model, pk_set=new_ids, using=db)
                # Add the ones that aren't there already
                self.through._default_manager.using(db).bulk_create([
                    self.through(**{
                        '%s_id' % source_field_name: self._fk_val,
                        '%s_id' % target_field_name: obj_id,
                    })
                    for obj_id in new_ids
                ])

                if self.reverse or source_field_name == self.source_field_name:
                    # Don't send the signal when we are inserting the
                    # duplicate data row for symmetrical reverse entries.
                    signals.m2m_changed.send(sender=self.through, action='post_add',
                        instance=self.instance, reverse=self.reverse,
                        model=self.model, pk_set=new_ids, using=db)

        def _remove_items(self, source_field_name, target_field_name, *objs):
            # source_field_name: the PK colname in join table for the source object
            # target_field_name: the PK colname in join table for the target object
            # *objs - objects to remove

            # If there aren't any objects, there is nothing to do.
            if objs:
                # Check that all the objects are of the right type
                old_ids = set()
                for obj in objs:
                    if isinstance(obj, self.model):
                        old_ids.add(self._get_fk_val(obj, target_field_name))
                    else:
                        old_ids.add(obj)
                # Work out what DB we're operating on
                db = router.db_for_write(self.through, instance=self.instance)
                # Send a signal to the other end if need be.
                if self.reverse or source_field_name == self.source_field_name:
                    # Don't send the signal when we are deleting the
                    # duplicate data row for symmetrical reverse entries.
                    signals.m2m_changed.send(sender=self.through, action="pre_remove",
                        instance=self.instance, reverse=self.reverse,
                        model=self.model, pk_set=old_ids, using=db)
                # Remove the specified objects from the join table
                self.through._default_manager.using(db).filter(**{
                    source_field_name: self._fk_val,
                    '%s__in' % target_field_name: old_ids
                }).delete()
                if self.reverse or source_field_name == self.source_field_name:
                    # Don't send the signal when we are deleting the
                    # duplicate data row for symmetrical reverse entries.
                    signals.m2m_changed.send(sender=self.through, action="post_remove",
                        instance=self.instance, reverse=self.reverse,
                        model=self.model, pk_set=old_ids, using=db)

        def _clear_items(self, source_field_name):
            db = router.db_for_write(self.through, instance=self.instance)
            # source_field_name: the PK colname in join table for the source object
            if self.reverse or source_field_name == self.source_field_name:
                # Don't send the signal when we are clearing the
                # duplicate data rows for symmetrical reverse entries.
                signals.m2m_changed.send(sender=self.through, action="pre_clear",
                    instance=self.instance, reverse=self.reverse,
                    model=self.model, pk_set=None, using=db)
            self.through._default_manager.using(db).filter(**{
                source_field_name: self.related_val
            }).delete()
            if self.reverse or source_field_name == self.source_field_name:
                # Don't send the signal when we are clearing the
                # duplicate data rows for symmetrical reverse entries.
                signals.m2m_changed.send(sender=self.through, action="post_clear",
                    instance=self.instance, reverse=self.reverse,
                    model=self.model, pk_set=None, using=db)

    return ManyRelatedManager
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/db/models/fields/related.py/create_many_related_manager
5,713
def __init__(self, field, to, related_name=None, limit_choices_to=None,
             parent_link=False, on_delete=None, related_query_name=None):
    try:
        to._meta
    except __HOLE__:
        # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
        assert isinstance(to, six.string_types), "'to' must be either a model, a model name or the string %r" % RECURSIVE_RELATIONSHIP_CONSTANT

    self.field = field
    self.to = to
    self.related_name = related_name
    self.related_query_name = related_query_name
    self.limit_choices_to = {} if limit_choices_to is None else limit_choices_to
    self.multiple = True
    self.parent_link = parent_link
    self.on_delete = on_delete
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/db/models/fields/related.py/ForeignObjectRel.__init__
5,714
def __init__(self, to, to_field=None, rel_class=ManyToOneRel,
             db_constraint=True, **kwargs):
    try:
        to_name = to._meta.object_name.lower()
    except __HOLE__:
        # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
        assert isinstance(to, six.string_types), "%s(%r) is invalid. First parameter to ForeignKey must be either a model, a model name, or the string %r" % (self.__class__.__name__, to, RECURSIVE_RELATIONSHIP_CONSTANT)
    else:
        assert not to._meta.abstract, "%s cannot define a relation with abstract class %s" % (self.__class__.__name__, to._meta.object_name)
        # For backwards compatibility purposes, we need to *try* and set
        # the to_field during FK construction. It won't be guaranteed to
        # be correct until contribute_to_class is called. Refs #12190.
        to_field = to_field or (to._meta.pk and to._meta.pk.name)

    if 'db_index' not in kwargs:
        kwargs['db_index'] = True

    self.db_constraint = db_constraint

    kwargs['rel'] = rel_class(
        self, to, to_field,
        related_name=kwargs.pop('related_name', None),
        related_query_name=kwargs.pop('related_query_name', None),
        limit_choices_to=kwargs.pop('limit_choices_to', None),
        parent_link=kwargs.pop('parent_link', False),
        on_delete=kwargs.pop('on_delete', CASCADE),
    )
    super(ForeignKey, self).__init__(to, ['self'], [to_field], **kwargs)
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/db/models/fields/related.py/ForeignKey.__init__
5,715
def __init__(self, to, db_constraint=True, **kwargs):
    try:
        assert not to._meta.abstract, "%s cannot define a relation with abstract class %s" % (self.__class__.__name__, to._meta.object_name)
    except __HOLE__:
        # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
        assert isinstance(to, six.string_types), "%s(%r) is invalid. First parameter to ManyToManyField must be either a model, a model name, or the string %r" % (self.__class__.__name__, to, RECURSIVE_RELATIONSHIP_CONSTANT)
        # Python 2.6 and earlier require dictionary keys to be of str type,
        # not unicode and class names must be ASCII (in Python 2.x), so we
        # forcibly coerce it here (breaks early if there's a problem).
        to = str(to)

    kwargs['verbose_name'] = kwargs.get('verbose_name', None)
    kwargs['rel'] = ManyToManyRel(to,
        related_name=kwargs.pop('related_name', None),
        related_query_name=kwargs.pop('related_query_name', None),
        limit_choices_to=kwargs.pop('limit_choices_to', None),
        symmetrical=kwargs.pop('symmetrical', to == RECURSIVE_RELATIONSHIP_CONSTANT),
        through=kwargs.pop('through', None),
        db_constraint=db_constraint,
    )

    self.db_table = kwargs.pop('db_table', None)
    if kwargs['rel'].through is not None:
        assert self.db_table is None, "Cannot specify a db_table if an intermediary model is used."

    super(ManyToManyField, self).__init__(**kwargs)
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/db/models/fields/related.py/ManyToManyField.__init__
5,716
@defer.inlineCallbacks
def _on_event(self, event_name):
    baton = dict(event=event_name, client=self)

    try:
        processor = yield self.dependencies.wait_for_resource(event_name)
        yield processor(baton)
    except __HOLE__ as ae:
        # we have no processor for this event
        pass
KeyError
dataset/ETHPy150Open foundit/Piped/contrib/zookeeper/piped_zookeeper/providers.py/PipedZookeeperClient._on_event
5,717
def __init__(self, *args, **kwargs):
    self.document = kwargs.pop('document', None)
    super(DocumentContentForm, self).__init__(*args, **kwargs)
    content = []
    self.fields['contents'].initial = ''
    try:
        document_pages = self.document.pages.all()
    except __HOLE__:
        document_pages = []

    for page in document_pages:
        try:
            page_content = page.ocr_content.content
        except DocumentPageContent.DoesNotExist:
            pass
        else:
            content.append(conditional_escape(force_unicode(page_content)))
            content.append(
                '\n\n\n<hr/><div class="document-page-content-divider">- %s -</div><hr/>\n\n\n' % (
                    ugettext(
                        'Page %(page_number)d'
                    ) % {'page_number': page.page_number}
                )
            )

    self.fields['contents'].initial = mark_safe(''.join(content))
AttributeError
dataset/ETHPy150Open mayan-edms/mayan-edms/mayan/apps/ocr/forms.py/DocumentContentForm.__init__
5,718
def dispatch(self, parameterName, value):
    """
    When called in dispatch, do the coerce for C{value} and save the
    returned value.
    """
    if value is None:
        raise UsageError("Parameter '%s' requires an argument."
                         % (parameterName,))
    try:
        value = self.coerce(value)
    except __HOLE__, e:
        raise UsageError("Parameter type enforcement failed: %s" % (e,))

    self.options.opts[parameterName] = value
ValueError
dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/python/usage.py/CoerceParameter.dispatch
5,719
def parseOptions(self, options=None):
    """
    The guts of the command-line parser.
    """
    if options is None:
        options = sys.argv[1:]

    try:
        opts, args = getopt.getopt(options, self.shortOpt, self.longOpt)
    except getopt.error, e:
        raise UsageError(str(e))

    for opt, arg in opts:
        if opt[1] == '-':
            opt = opt[2:]
        else:
            opt = opt[1:]

        optMangled = opt
        if optMangled not in self.synonyms:
            optMangled = opt.replace("-", "_")
            if optMangled not in self.synonyms:
                raise UsageError("No such option '%s'" % (opt,))

        optMangled = self.synonyms[optMangled]
        if isinstance(self._dispatch[optMangled], CoerceParameter):
            self._dispatch[optMangled].dispatch(optMangled, arg)
        else:
            self._dispatch[optMangled](optMangled, arg)

    if (getattr(self, 'subCommands', None)
            and (args or self.defaultSubCommand is not None)):
        if not args:
            args = [self.defaultSubCommand]
        sub, rest = args[0], args[1:]
        for (cmd, short, parser, doc) in self.subCommands:
            if sub == cmd or sub == short:
                self.subCommand = cmd
                self.subOptions = parser()
                self.subOptions.parent = self
                self.subOptions.parseOptions(rest)
                break
        else:
            raise UsageError("Unknown command: %s" % sub)
    else:
        try:
            self.parseArgs(*args)
        except __HOLE__:
            raise UsageError("Wrong number of arguments.")

    self.postOptions()
TypeError
dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/python/usage.py/Options.parseOptions
5,720
def testStringKeys(self):
    d = {'a':1, 'b':2, '3':3, 3:4}

    self.assertEqual(d['a'], 1)
    self.assertEqual(d['b'], 2)
    # XXX: the length here is 3 because we have the same keys for "3"
    # and 3
    #self.assertEqual(len(d), 4)
    # XXX: we have to have constant handling in the translator in
    # order to distinguish ints and strings, so the lines below do
    # not work
    #self.assertEqual(d['3'], 3)
    #self.assertEqual(d[3], 4)

    try:
        x = d['notthere']
        self.fail('__getitem__ must raise KeyError')
    except __HOLE__, e:
        self.assertEqual(e.__class__.__name__, 'KeyError')
        self.assertEqual(str(e), "'notthere'")

    d = {}
    self.assertEqual(1, d.setdefault('foo', 1))
    self.assertEqual(1, d.setdefault('foo', 2))
    self.assertEqual(1, d.get('foo', 2))
    self.assertEqual(2, d.get('bar', 2))
    self.assertEqual(3, d.setdefault('bar', 3))

    d = {}
    d.update({1:1})
    d.update({2:2}, a='a')
    self.assertEqual(d, {1:1, 2:2, 'a':'a'})
    self.assertRaises(TypeError, getattr(d, 'update'), {}, {})
KeyError
dataset/ETHPy150Open anandology/pyjamas/examples/libtest/DictTest.py/DictTest.testStringKeys
5,721
def testPop(self):
    d = {'a': 1, 'b': 2, 'c': 3}
    item = d.pop('d', 4)
    self.assertEqual(item, 4)
    try:
        item = d.pop('d')
        self.fail("Failed to raise KeyError on d.pop('d')")
    except __HOLE__, e:
        self.assertEqual(e[0], "d")
    item = d.pop('b')
    self.assertEqual(item, 2)
    item = d.popitem()
    self.assertTrue(item == ('a',1) or item == ('c',3),
                    "popped invalid item %s" % str(item))
    item = d.popitem()
    try:
        item = d.popitem()
    except KeyError, e:
        self.assertEqual(e[0], "popitem(): dictionary is empty")
KeyError
dataset/ETHPy150Open anandology/pyjamas/examples/libtest/DictTest.py/DictTest.testPop
5,722
def testEq(self):
    self.failUnlessEqual({}, {})
    self.failUnlessEqual({'1':1}, {'1':1})
    self.failIfEqual({}, {'1':1})
    self.failIfEqual({'1':1}, {'1':2})
    # test for bug 362
    try:
        self.failIfEqual({'1':1}, [1,2], "Test for Bug 362")
    except __HOLE__:
        self.fail("Bug 362 - comparison between dict and non-dict")

    class DICT(dict):
        pass
    self.failUnlessEqual(DICT(), {})
    self.failUnlessEqual({}, DICT())
    self.failUnlessEqual(DICT(a=1), dict(a=1))
TypeError
dataset/ETHPy150Open anandology/pyjamas/examples/libtest/DictTest.py/DictTest.testEq
5,723
def testUpdate(self):
    d1 = {1:2,3:4}
    d1.update({3:5,7:9})
    self.assertEqual(d1[3],5)
    try:
        d1.update(((3,6),(9,12)))
        self.assertEqual(d1[3],6)
    except __HOLE__:
        self.fail("Couldn't dict.update(...) with a tuple of pairs.")
TypeError
dataset/ETHPy150Open anandology/pyjamas/examples/libtest/DictTest.py/DictTest.testUpdate
5,724
def startService(self):
    log.msg('Loading index')
    try:
        f = file(self._makeFilename('index'), 'rb')
        d = pickle.load(f)
        self._index = d['index']
        self._nextOid = d['nextOid']
    except __HOLE__:
        self._index = []
        self._nextOid = 1
IOError
dataset/ETHPy150Open twisted/nevow/examples/pastebin/pastebin/service.py/FSPasteBinService.startService
5,725
def root_semrep(syntree, semkey='SEM'):
    """
    Find the semantic representation at the root of a tree.

    :param syntree: a parse ``Tree``
    :param semkey: the feature label to use for the root semantics in the tree
    :return: the semantic representation at the root of a ``Tree``
    :rtype: sem.Expression
    """
    from nltk.grammar import FeatStructNonterminal
    node = syntree.label()
    assert isinstance(node, FeatStructNonterminal)
    try:
        return node[semkey]
    except __HOLE__:
        print(node, end=' ')
        print("has no specification for the feature %s" % semkey)
        raise
KeyError
dataset/ETHPy150Open nltk/nltk/nltk/sem/util.py/root_semrep
5,726
def get(self, field, default=None):
    try:
        return self.__get_field_value(field, [], original=field)
    except __HOLE__:
        return default
KeyError
dataset/ETHPy150Open spinnaker/spinnaker/pylib/spinnaker/yaml_util.py/YamlBindings.get
5,727
def __resolve_value(self, value, saw, original):
    expression_re = re.compile('\${([\._a-zA-Z0-9]+)(:.+?)?}')
    exact_match = expression_re.match(value)
    if exact_match and exact_match.group(0) == value:
        try:
            got = self.__get_field_value(exact_match.group(1), saw, original)
            return got
        except __HOLE__:
            if exact_match.group(2):
                return self.__typed_value(exact_match.group(2)[1:])
            else:
                return value

    result = []
    offset = 0

    # Look for fragments of ${key} or ${key:default} then resolve them.
    text = value
    for match in expression_re.finditer(text):
        result.append(text[offset:match.start()])
        try:
            got = self.__get_field_value(str(match.group(1)), saw, original)
            result.append(str(got))
        except KeyError:
            if match.group(2):
                result.append(str(match.group(2)[1:]))
            else:
                result.append(match.group(0))
        offset = match.end()  # skip trailing '}'

    result.append(text[offset:])
    return ''.join(result)
KeyError
dataset/ETHPy150Open spinnaker/spinnaker/pylib/spinnaker/yaml_util.py/YamlBindings.__resolve_value
5,728
def transform_yaml_source(self, source, key):
    """Transform the given yaml source so its value of key matches the binding.

    Has no effect if key is not among the bindings.
    But raises a KeyError if it is in the bindings but not in the source.

    Args:
      source [string]: A YAML document
      key [string]: A key into the bindings.

    Returns:
      Transformed source with value of key replaced to match the bindings.
    """
    try:
        value = self[key]
    except __HOLE__:
        return source

    parts = key.split('.')
    offset = 0
    s = source
    for attr in parts:
        match = re.search('^ *{attr}:(.*)'.format(attr=attr), s, re.MULTILINE)
        if not match:
            raise ValueError(
                'Could not find {key}. Failed on {attr} at {offset}'
                .format(key=key, attr=attr, offset=offset))
        offset += match.start(0)
        s = source[offset:]

    offset -= match.start(0)
    value_start = match.start(1) + offset
    value_end = match.end(0) + offset
    return ''.join([
        source[0:value_start],
        ' {value}'.format(value=value),
        source[value_end:]
    ])
KeyError
dataset/ETHPy150Open spinnaker/spinnaker/pylib/spinnaker/yaml_util.py/YamlBindings.transform_yaml_source
5,729
def _get_ri_id_of_network(self, oc_client, network_id):
    try:
        network = oc_client.show('Virtual Network', network_id)
        ri_fq_name = network['fq_name'] + [network['fq_name'][-1]]
        for ri_ref in network.get('routing_instances', []):
            if ri_ref['to'] == ri_fq_name:
                return ri_ref['uuid']
    except (oc_exc.OpenContrailAPINotFound, __HOLE__):
        raise n_exc.NetworkNotFound(net_id=network_id)

    raise n_exc.NetworkNotFound(net_id=network_id)
IndexError
dataset/ETHPy150Open openstack/networking-bgpvpn/networking_bgpvpn/neutron/services/service_drivers/opencontrail/opencontrail.py/OpenContrailBGPVPNDriver._get_ri_id_of_network
5,730
def get_bgpvpns(self, context, filters=None, fields=None):
    LOG.debug("get_bgpvpns called, fields = %s, filters = %s"
              % (fields, filters))
    oc_client = self._get_opencontrail_api_client(context)
    bgpvpns = []
    for kv_dict in oc_client.kv_store('RETRIEVE'):
        try:
            value = json.loads(kv_dict['value'])
        except __HOLE__:
            continue
        if (isinstance(value, dict) and
                'bgpvpn' in value and
                utils.filter_resource(value['bgpvpn'], filters)):
            bgpvpn = value['bgpvpn']
            if not fields or 'networks' in fields:
                bgpvpn['networks'] = \
                    [net_assoc['network_id'] for net_assoc in
                     self.get_net_assocs(context, bgpvpn['id'])]
            bgpvpns.append(utils.make_bgpvpn_dict(bgpvpn, fields))
    return bgpvpns
ValueError
dataset/ETHPy150Open openstack/networking-bgpvpn/networking_bgpvpn/neutron/services/service_drivers/opencontrail/opencontrail.py/OpenContrailBGPVPNDriver.get_bgpvpns
5,731
def _clean_bgpvpn_assoc(self, oc_client, bgpvpn_id):
    for kv_dict in oc_client.kv_store('RETRIEVE'):
        try:
            value = json.loads(kv_dict['value'])
        except __HOLE__:
            continue
        if (isinstance(value, dict) and
                'bgpvpn_net_assoc' in value and
                value['bgpvpn_net_assoc']['bgpvpn_id'] == bgpvpn_id):
            assoc_id = value['bgpvpn_net_assoc']['id']
            oc_client.kv_store('DELETE', key=assoc_id)
ValueError
dataset/ETHPy150Open openstack/networking-bgpvpn/networking_bgpvpn/neutron/services/service_drivers/opencontrail/opencontrail.py/OpenContrailBGPVPNDriver._clean_bgpvpn_assoc
5,732
def get_bgpvpn(self, context, id, fields=None):
    LOG.debug("get_bgpvpn called for id %s with fields = %s"
              % (id, fields))
    oc_client = self._get_opencontrail_api_client(context)
    try:
        bgpvpn = json.loads(oc_client.kv_store('RETRIEVE', key=id))
    except (oc_exc.OpenContrailAPINotFound, __HOLE__):
        raise bgpvpn.BGPVPNNotFound(id=id)

    if (not isinstance(bgpvpn, dict) or
            'bgpvpn' not in bgpvpn):
        raise bgpvpn.BGPVPNNotFound(id=id)

    bgpvpn = bgpvpn['bgpvpn']
    if not fields or 'networks' in fields:
        bgpvpn['networks'] = [net_assoc['network_id'] for net_assoc in
                              self.get_net_assocs(context, id)]
    return utils.make_bgpvpn_dict(bgpvpn, fields)
ValueError
dataset/ETHPy150Open openstack/networking-bgpvpn/networking_bgpvpn/neutron/services/service_drivers/opencontrail/opencontrail.py/OpenContrailBGPVPNDriver.get_bgpvpn
5,733
def get_net_assoc(self, context, assoc_id, bgpvpn_id, fields=None):
    LOG.debug("get_net_assoc called for %s for BGPVPN %s, with fields = %s"
              % (assoc_id, bgpvpn_id, fields))
    oc_client = self._get_opencontrail_api_client(context)

    try:
        net_assoc = json.loads(
            oc_client.kv_store('RETRIEVE', key=assoc_id))
    except (oc_exc.OpenContrailAPINotFound, __HOLE__):
        raise bgpvpn_ext.BGPVPNNetAssocNotFound(id=assoc_id,
                                                bgpvpn_id=bgpvpn_id)
    if (not isinstance(net_assoc, dict) or
            'bgpvpn_net_assoc' not in net_assoc):
        raise bgpvpn_ext.BGPVPNNetAssocNotFound(id=assoc_id,
                                                bgpvpn_id=bgpvpn_id)
    net_assoc = net_assoc['bgpvpn_net_assoc']
    if net_assoc['bgpvpn_id'] != bgpvpn_id:
        raise bgpvpn_ext.BGPVPNNetAssocNotFound(id=assoc_id,
                                                bgpvpn_id=bgpvpn_id)

    # It the bgpvpn was deleted, the 'get_bgpvpn' will clean all related
    # associations and replaces BGPVPNNotFound by a BGPVPNNetAssocNotFound
    try:
        get_fields = ['tenant_id', 'route_targets', 'import_targets',
                      'export_targets']
        bgpvpn = self.get_bgpvpn(context, net_assoc['bgpvpn_id'],
                                 fields=get_fields)
    except bgpvpn.BGPVPNNotFound:
        raise bgpvpn_ext.BGPVPNNetAssocNotFound(id=assoc_id,
                                                bgpvpn_id=bgpvpn_id)

    # If the network was delete all bgpvpn related association should be
    # deleted also
    try:
        oc_client.id_to_fqname(net_assoc['network_id'])
    except oc_exc.OpenContrailAPINotFound:
        self._set_bgpvpn_association(oc_client, 'DELETE', bgpvpn,
                                     [net_assoc['network_id']])
        oc_client.kv_store('DELETE', key=assoc_id)
        raise bgpvpn_ext.BGPVPNNetAssocNotFound(id=assoc_id,
                                                bgpvpn_id=bgpvpn_id)

    net_assoc = utils.make_net_assoc_dict(net_assoc['id'],
                                          net_assoc['tenant_id'],
                                          net_assoc['bgpvpn_id'],
                                          net_assoc['network_id'],
                                          fields)
    return net_assoc
ValueError
dataset/ETHPy150Open openstack/networking-bgpvpn/networking_bgpvpn/neutron/services/service_drivers/opencontrail/opencontrail.py/OpenContrailBGPVPNDriver.get_net_assoc
5,734
def get_net_assocs(self, context, bgpvpn_id, filters=None, fields=None):
    LOG.debug("get_net_assocs called for bgpvpn %s, fields = %s, "
              "filters = %s" % (bgpvpn_id, fields, filters))
    oc_client = self._get_opencontrail_api_client(context)

    get_fields = ['tenant_id', 'route_targets', 'import_targets',
                  'export_targets']
    bgpvpn = self.get_bgpvpn(context, bgpvpn_id, fields=get_fields)

    bgpvpn_net_assocs = []
    for kv_dict in oc_client.kv_store('RETRIEVE'):
        try:
            value = json.loads(kv_dict['value'])
        except __HOLE__:
            continue
        if (isinstance(value, dict) and
                'bgpvpn_net_assoc' in value and
                utils.filter_resource(value['bgpvpn_net_assoc'],
                                      filters) and
                value['bgpvpn_net_assoc']['bgpvpn_id'] == bgpvpn_id):
            net_assoc = value['bgpvpn_net_assoc']
            # If the network was delete all bgpvpn related association
            # should be deleted also
            try:
                oc_client.id_to_fqname(net_assoc['network_id'])
            except oc_exc.OpenContrailAPINotFound:
                self._set_bgpvpn_association(oc_client, 'DELETE', bgpvpn,
                                             [net_assoc['network_id']])
                oc_client.kv_store('DELETE', key=net_assoc['id'])
                continue
            net_assoc = utils.make_net_assoc_dict(net_assoc['id'],
                                                  net_assoc['tenant_id'],
                                                  net_assoc['bgpvpn_id'],
                                                  net_assoc['network_id'],
                                                  fields)
            bgpvpn_net_assocs.append(net_assoc)
    return bgpvpn_net_assocs
ValueError
dataset/ETHPy150Open openstack/networking-bgpvpn/networking_bgpvpn/neutron/services/service_drivers/opencontrail/opencontrail.py/OpenContrailBGPVPNDriver.get_net_assocs
5,735
def update_row(self, row_data):
    """Update Row (By ID).

    Only the fields supplied will be updated.

    :param row_data: A dictionary containing row data. The row will be
        updated according to the value in the ID_FIELD.
    :return: The updated row.
    """
    try:
        id = row_data[ID_FIELD]
    except __HOLE__:
        raise WorksheetException("Row does not contain '{0}' field. "
                                 "Please update by index.".format(ID_FIELD))
    entry = self._get_row_entry_by_id(id)
    new_row = self._row_to_dict(entry)
    new_row.update(row_data)
    entry = self.gd_client.UpdateRow(entry, new_row)
    if not isinstance(entry, gdata.spreadsheet.SpreadsheetsList):
        raise WorksheetException("Row update failed: '{0}'".format(entry))
    for i, e in enumerate(self.entries):
        if e.id.text == entry.id.text:
            self.entries[i] = entry
    return self._row_to_dict(entry)
KeyError
dataset/ETHPy150Open yoavaviram/python-google-spreadsheet/google_spreadsheet/api.py/Worksheet.update_row
5,736
def delete_row(self, row):
    """Delete Row (By ID).

    Requires that the given row dictionary contains an ID_FIELD.

    :param row: A row dictionary to delete.
    """
    try:
        id = row[ID_FIELD]
    except __HOLE__:
        raise WorksheetException("Row does not contain '{0}' field. "
                                 "Please delete by index.".format(ID_FIELD))
    entry = self._get_row_entry_by_id(id)
    self.gd_client.DeleteRow(entry)
    for i, e in enumerate(self.entries):
        if e.id.text == entry.id.text:
            del self.entries[i]
KeyError
dataset/ETHPy150Open yoavaviram/python-google-spreadsheet/google_spreadsheet/api.py/Worksheet.delete_row
5,737
def render(self, name, value, attrs=None):
    # Update the template parameters with any attributes passed in.
    if attrs:
        self.params.update(attrs)

    # Defaulting the WKT value to a blank string -- this
    # will be tested in the JavaScript and the appropriate
    # interfaace will be constructed.
    self.params['wkt'] = ''

    # If a string reaches here (via a validation error on another
    # field) then just reconstruct the Geometry.
    if isinstance(value, basestring):
        try:
            value = GEOSGeometry(value)
        except (GEOSException, __HOLE__):
            value = None

    if value and value.geom_type.upper() != self.geom_type:
        value = None

    # Constructing the dictionary of the map options.
    self.params['map_options'] = self.map_options()

    # Constructing the JavaScript module name using the ID of
    # the GeometryField (passed in via the `attrs` keyword).
    self.params['module'] = 'geodjango_%s' % self.params['field_name']

    if value:
        # Transforming the geometry to the projection used on the
        # OpenLayers map.
        srid = self.params['srid']
        if value.srid != srid:
            try:
                value.transform(srid)
                wkt = value.wkt
            except OGRException:
                wkt = ''
        else:
            wkt = value.wkt

        # Setting the parameter WKT with that of the transformed
        # geometry.
        self.params['wkt'] = wkt

    return render_to_string(self.template, self.params)
ValueError
dataset/ETHPy150Open dcramer/django-compositepks/django/contrib/gis/admin/widgets.py/OpenLayersWidget.render
5,738
def form_valid(self, form):
    """
    Store new flatpage from form data. Checks wether a site
    is specified for the flatpage or sets the current site by
    default. Additionally, if URL is left blank, a slugified
    version of the title will be used as URL after checking
    if it is valid.
    """
    # if no URL is specified, generate from title
    page = form.save(commit=False)

    if not page.url:
        page.url = '/%s/' % slugify(page.title)

    try:
        URLDoesNotExistValidator()(page.url)
    except __HOLE__:
        pass
    else:
        # use current site as default for new page
        page.save()
        page.sites.add(Site.objects.get_current())
        return HttpResponseRedirect(self.get_success_url(page))

    ctx = self.get_context_data()
    ctx['form'] = form
    return self.render_to_response(ctx)
ValidationError
dataset/ETHPy150Open django-oscar/django-oscar/src/oscar/apps/dashboard/pages/views.py/PageCreateView.form_valid
5,739
def post(self, request, *args, **kwargs):
    content = request.POST.get('content', '')
    markup = request.POST.get('markup')
    reader = get_reader(markup=markup)
    content_body, metadata = reader(content).read()
    image_id = metadata.get('image', '')

    try:
        image_url = EntryImage.objects.get(id=int(image_id)).image.url
    except (EntryImage.DoesNotExist, __HOLE__):
        image_url = ''

    context = {
        'content': content_body,
        'title': metadata.get('title'),
        'tags': list(metadata.get('tags', [])),
        'image_url': image_url
    }
    return self.render_to_response(context)
ValueError
dataset/ETHPy150Open gkmngrgn/radpress/radpress/views.py/PreviewView.post
5,740
def list_get(list_obj, index, default=None):
    """Like ``.get`` for list object.

    Args:
        list_obj (list): list to look up an index in
        index (int): index position to look up
        default (Optional[object]): default return value. Defaults to None.

    Returns:
        object: any object found at the index or ``default``
    """
    try:
        return list_obj[index]
    except __HOLE__:
        return default
IndexError
dataset/ETHPy150Open robinandeer/chanjo/chanjo/utils.py/list_get
5,741
def merge_collections_for_profile():
    from totalimpact import item, tiredis

    view_name = "queues/by_type_and_id"
    view_rows = db.view(view_name, include_docs=True)
    row_count = 0
    sql_statement_count = 0
    page_size = 500
    start_key = ["user", "00000000000"]
    end_key = ["user", "zzzzzzzzz"]

    from couch_paginator import CouchPaginator
    page = CouchPaginator(db, view_name, page_size, include_docs=True, start_key=start_key, end_key=end_key)

    email_data_strings = []

    while page:
        for row in page:
            row_count += 1
            user_doc = row.doc
            rowdata = {}
            rowdata["email"] = user_doc["_id"]
            if not user_doc["profile_collection"]:
                #print "not migrating this doc because it has no collections"
                continue
            rowdata["collection_id"] = user_doc["profile_collection"]
            try:
                rowdata["created"] = user_doc["created"]
            except __HOLE__:
                rowdata["created"] = datetime.datetime(2013, 1, 1).isoformat()
            rowdata["password_hash"] = default_password_hash
            rowdata["url_slug"] = "user" + str(50000 + row_count)
            rowdata["given_name"] = "Firstname"
            rowdata["surname"] = "Lastname"
            insert_unless_error(insert_string('"user"', rowdata.keys()), [rowdata])
            sql_statement_count += 1

            # pull information together to send out surveymonkey email
            profile_id = user_doc["profile_collection"]
            email = user_doc["_id"]
            profile_doc = db.get(profile_id)
            my_collections = user_doc["colls"]
            title = profile_doc["title"]
            if (len(my_collections) > 1):
                title = ""
                for cid in my_collections:
                    coll_doc = db.get(cid)
                    collection_title = coll_doc["title"]
                    if collection_title != "My Collection":
                        title += "*" + collection_title
            try:
                collections_string = str(";".join(my_collections.keys()))
            except UnicodeEncodeError:
                print "UnicodeEncodeError on ", email, "so setting collections to blank"
                collections_string = ""
            email_data_strings += [u"{url_slug}|{profile_id}|{len_profile}|{email}|{created}|{title}|{collections_string}".format(
                url_slug=rowdata["url_slug"],
                profile_id=profile_id,
                email=email,
                len_profile=len(profile_doc["alias_tiids"]),
                created=rowdata["created"],
                title=title,
                collections_string=collections_string)]

        logger.info("%i. getting new page, last id was %s" % (row_count, row.id))
        if page.has_next:
            page = CouchPaginator(db, view_name, page_size, start_key=page.next, end_key=end_key, include_docs=True)
        else:
            page = None

    print "Number of rows: ", row_count
    print "Number of sql statements: ", sql_statement_count
    print "\n\n\n"
    for line in email_data_strings:
        print line
KeyError
dataset/ETHPy150Open Impactstory/total-impact-core/extras/db_housekeeping/migrate_user_docs.py/merge_collections_for_profile
5,742
def get_GenericForeignKey():
    try:
        from django.contrib.contenttypes.fields import GenericForeignKey
    except __HOLE__:
        # For Django 1.6 and earlier
        from django.contrib.contenttypes.generic import GenericForeignKey
    return GenericForeignKey
ImportError
dataset/ETHPy150Open gregmuellegger/django-autofixture/autofixture/compat.py/get_GenericForeignKey
5,743
def get_GenericRelation():
    try:
        from django.contrib.contenttypes.fields import GenericRelation
    except __HOLE__:
        # For Django 1.6 and earlier
        from django.contrib.contenttypes.generic import GenericRelation
    return GenericRelation
ImportError
dataset/ETHPy150Open gregmuellegger/django-autofixture/autofixture/compat.py/get_GenericRelation
5,744
def gunzip(data):
    """Gunzip the given data and return as much data as possible.

    This is resilient to CRC checksum errors.
    """
    f = GzipFile(fileobj=BytesIO(data))
    output = b''
    chunk = b'.'
    while chunk:
        try:
            chunk = read1(f, 8196)
            output += chunk
        except (__HOLE__, EOFError, struct.error):
            # complete only if there is some data, otherwise re-raise
            # see issue 87 about catching struct.error
            # some pages are quite small so output is '' and f.extrabuf
            # contains the whole page content
            if output or getattr(f, 'extrabuf', None):
                try:
                    output += f.extrabuf
                finally:
                    break
            else:
                raise
    return output
IOError
dataset/ETHPy150Open scrapy/scrapy/scrapy/utils/gz.py/gunzip
5,745
def to_python(self, value):
    """Converts the DB-stored value into a Python value."""
    if isinstance(value, base.StateWrapper):
        res = value
    else:
        if isinstance(value, base.State):
            state = value
        elif value is None:
            state = self.workflow.initial_state
        else:
            try:
                state = self.workflow.states[value]
            except __HOLE__:
                raise exceptions.ValidationError(self.error_messages['invalid'])
        res = base.StateWrapper(state, self.workflow)

    if res.state not in self.workflow.states:
        raise exceptions.ValidationError(self.error_messages['invalid'])

    return res
KeyError
dataset/ETHPy150Open rbarrois/django_xworkflows/django_xworkflows/models.py/StateField.to_python
5,746
def testPasswordProtectedSite(self):
    support.requires('network')
    with support.transient_internet('mueblesmoraleda.com'):
        url = 'http://mueblesmoraleda.com'
        robots_url = url + "/robots.txt"
        # First check the URL is usable for our purposes, since the
        # test site is a bit flaky.
        try:
            urlopen(robots_url)
        except __HOLE__ as e:
            if e.code not in {401, 403}:
                self.skipTest(
                    "%r should return a 401 or 403 HTTP error, not %r"
                    % (robots_url, e.code))
        else:
            self.skipTest(
                "%r should return a 401 or 403 HTTP error, not succeed"
                % (robots_url))
        parser = urllib.robotparser.RobotFileParser()
        parser.set_url(url)
        try:
            parser.read()
        except URLError:
            self.skipTest('%s is unavailable' % url)
        self.assertEqual(parser.can_fetch("*", robots_url), False)
HTTPError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_robotparser.py/NetworkTestCase.testPasswordProtectedSite
5,747
@register.filter(name="lookup") def lookup_filter(o, key): try: return o[key] except __HOLE__: return None
KeyError
dataset/ETHPy150Open timvideos/streaming-system/website/tracker/templatetags/lookup.py/lookup_filter
5,748
@main.command()
@argument('source', required=False)
@argument('destination', required=False)
@option('-f', '--force', is_flag=True,
        help="Force the reprocessing of existing images")
@option('-v', '--verbose', is_flag=True, help="Show all messages")
@option('-d', '--debug', is_flag=True,
        help="Show all message, including debug messages")
@option('-c', '--config', default=_DEFAULT_CONFIG_FILE, show_default=True,
        help="Configuration file")
@option('-t', '--theme', help="Specify a theme directory, or a theme name for "
                              "the themes included with Sigal")
@option('--title', help="Title of the gallery (overrides the title setting.")
@option('-n', '--ncpu', help="Number of cpu to use (default: all)")
def build(source, destination, debug, verbose, force, config, theme, title,
          ncpu):
    """Run sigal to process a directory.

    If provided, 'source', 'destination' and 'theme' will override the
    corresponding values from the settings file.
    """
    level = ((debug and logging.DEBUG) or (verbose and logging.INFO) or
             logging.WARNING)
    init_logging(__name__, level=level)
    logger = logging.getLogger(__name__)

    if not os.path.isfile(config):
        logger.error("Settings file not found: %s", config)
        sys.exit(1)

    start_time = time.time()
    settings = read_settings(config)

    for key in ('source', 'destination', 'theme'):
        arg = locals()[key]
        if arg is not None:
            settings[key] = os.path.abspath(arg)
        logger.info("%12s : %s", key.capitalize(), settings[key])

    if not settings['source'] or not os.path.isdir(settings['source']):
        logger.error("Input directory not found: %s", settings['source'])
        sys.exit(1)

    # on windows os.path.relpath raises a ValueError if the two paths are on
    # different drives, in that case we just ignore the exception as the two
    # paths are anyway not relative
    relative_check = True
    try:
        relative_check = os.path.relpath(settings['destination'],
                                         settings['source']).startswith('..')
    except __HOLE__:
        pass

    if not relative_check:
        logger.error("Output directory should be outside of the input "
                     "directory.")
        sys.exit(1)

    if title:
        settings['title'] = title

    locale.setlocale(locale.LC_ALL, settings['locale'])
    init_plugins(settings)

    gal = Gallery(settings, ncpu=ncpu)
    gal.build(force=force)

    # copy extra files
    for src, dst in settings['files_to_copy']:
        src = os.path.join(settings['source'], src)
        dst = os.path.join(settings['destination'], dst)
        logger.debug('Copy %s to %s', src, dst)
        copy(src, dst, symlink=settings['orig_link'])

    stats = gal.stats

    def format_stats(_type):
        opt = ["{} {}".format(stats[_type + '_' + subtype], subtype)
               for subtype in ('skipped', 'failed')
               if stats[_type + '_' + subtype] > 0]
        opt = ' ({})'.format(', '.join(opt)) if opt else ''
        return '{} {}s{}'.format(stats[_type], _type, opt)

    print('Done.\nProcessed {} and {} in {:.2f} seconds.'
          .format(format_stats('image'), format_stats('video'),
                  time.time() - start_time))
ValueError
dataset/ETHPy150Open saimn/sigal/sigal/__init__.py/build
5,749
@main.command() @argument('destination', default='_build') @option('-p', '--port', help="Port to use", default=8000) @option('-c', '--config', default=_DEFAULT_CONFIG_FILE, show_default=True, help='Configuration file') def serve(destination, port, config): """Run a simple web server.""" if os.path.exists(destination): pass elif os.path.exists(config): settings = read_settings(config) destination = settings.get('destination') if not os.path.exists(destination): sys.stderr.write("The '{}' directory doesn't exist, " "maybe try building first?" "\n".format(destination)) sys.exit(1) else: sys.stderr.write("The {destination} directory doesn't exist " "and the config file ({config}) could not be " "read." "\n".format(destination=destination, config=config)) sys.exit(2) print('DESTINATION : {}'.format(destination)) os.chdir(destination) Handler = server.SimpleHTTPRequestHandler httpd = socketserver.TCPServer(("", port), Handler, False) print(" * Running on http://127.0.0.1:{}/".format(port)) try: httpd.allow_reuse_address = True httpd.server_bind() httpd.server_activate() httpd.serve_forever() except __HOLE__: print('\nAll done!')
KeyboardInterrupt
dataset/ETHPy150Open saimn/sigal/sigal/__init__.py/serve
5,750
def discover_methods(iface): sniffers = [] for port in listening_ports(): sniff = discover_on_port(port, iface, MsgHandler(port)) sniffers.append(sniff) # done when all sniffers are done (or the user gets tired) try: while True: if all(not sniff.isAlive() for sniff in sniffers): break except __HOLE__: pass
KeyboardInterrupt
dataset/ETHPy150Open pinterest/thrift-tools/examples/methods_per_port.py/discover_methods
5,751
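The two KeyboardInterrupt records above share the same run-until-Ctrl-C idiom; a minimal generic sketch (hypothetical run_until_interrupted helper):

import time

def run_until_interrupted(step):
    """Poll step() forever; Ctrl-C raises KeyboardInterrupt in the main thread."""
    try:
        while True:
            step()
            time.sleep(0.1)
    except KeyboardInterrupt:
        print('\nAll done!')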
def lookup_cik(ticker, name=None): # Given a ticker symbol, retrieves the CIK. good_read = False ticker = ticker.strip().upper() url = 'http://www.sec.gov/cgi-bin/browse-edgar?action=getcompany&CIK={cik}&count=10&output=xml'.format(cik=ticker) try: xmlFile = urlopen( url ) try: xmlData = xmlFile.read() good_read = True finally: xmlFile.close() except __HOLE__ as e: print( "HTTP Error:", e.code ) except URLError as e: print( "URL Error:", e.reason ) except TimeoutError as e: print( "Timeout Error:", e.reason ) except socket.timeout: print( "Socket Timeout Error" ) if not good_read: print( "Unable to lookup CIK for ticker:", ticker ) return try: root = ET.fromstring(xmlData) except ET.ParseError as perr: print( "XML Parser Error:", perr ) try: cikElement = list(root.iter( "CIK" ))[0] return int(cikElement.text) except StopIteration: pass
HTTPError
dataset/ETHPy150Open altova/sec-xbrl/valSECfilings.py/lookup_cik
5,752
def release_resources(self, article): path = os.path.join(self.config.local_storage_path, '%s_*' % article.link_hash) for fname in glob.glob(path): try: os.remove(fname) except __HOLE__: # TODO better log handeling pass
OSError
dataset/ETHPy150Open xiaoxu193/PyTeaser/goose/crawler.py/Crawler.release_resources
5,753
def calculate(self, cart, contact): """ Based on the chosen UPS method, we will do our call to UPS and see how much it will cost. We will also need to store the results for further parsing and return via the methods above """ from satchmo_store.shop.models import Config settings = config_get_group('shipping.modules.ups') self.delivery_days = _("3 - 4") #Default setting for ground delivery shop_details = Config.objects.get_current() # Get the code and description for the packaging container = settings.SHIPPING_CONTAINER.value container_description = settings.SHIPPING_CONTAINER.choices[int(container)][1] configuration = { 'xml_key': settings.XML_KEY.value, 'account': settings.ACCOUNT.value, 'userid': settings.USER_ID.value, 'password': settings.USER_PASSWORD.value, 'container': container, 'container_description': container_description, 'pickup': settings.PICKUP_TYPE.value, 'ship_type': self.service_type_code, 'shop_details':shop_details, } shippingdata = { 'single_box': False, 'config': configuration, 'contact': contact, 'cart': cart, 'shipping_address' : shop_details, 'shipping_phone' : shop_details.phone, 'shipping_country_code' : shop_details.country.iso2_code } if settings.SINGLE_BOX.value: log.debug("Using single-box method for ups calculations.") box_weight = Decimal("0.00") for product in cart.get_shipment_list(): if product.smart_attr('weight') is None: log.warn("No weight on product (skipping for ship calculations): %s", product) else: box_weight += product.smart_attr('weight') if product.smart_attr('weight_units') and product.smart_attr('weight_units') != "": box_weight_units = product.smart_attr('weight_units') else: log.warn("No weight units for product") if box_weight < Decimal("0.1"): log.debug("Total box weight too small, defaulting to 0.1") box_weight = Decimal("0.1") shippingdata['single_box'] = True shippingdata['box_weight'] = '%.1f' % box_weight shippingdata['box_weight_units'] = box_weight_units.upper() signals.shipping_data_query.send(Shipper, shipper=self, cart=cart, shippingdata=shippingdata) c = Context(shippingdata) t = loader.get_template('shipping/ups/request.xml') request = t.render(c) self.is_valid = False if settings.LIVE.value: connection = settings.CONNECTION.value else: connection = settings.CONNECTION_TEST.value cache_key_response = "ups-cart-%s-response" % int(cart.id) cache_key_request = "ups-cart-%s-request" % int(cart.id) last_request = cache.get(cache_key_request) tree = cache.get(cache_key_response) if (last_request != request) or tree is None: self.verbose_log("Requesting from UPS [%s]\n%s", cache_key_request, request) cache.set(cache_key_request, request, 60) tree = self._process_request(connection, request) self.verbose_log("Got from UPS [%s]:\n%s", cache_key_response, self.raw) needs_cache = True else: needs_cache = False try: status_code = tree.getiterator('ResponseStatusCode') status_val = status_code[0].text self.verbose_log("UPS Status Code for cart #%s = %s", int(cart.id), status_val) except __HOLE__: status_val = "-1" if status_val == '1': self.is_valid = False self._calculated = False all_rates = tree.getiterator('RatedShipment') for response in all_rates: if self.service_type_code == response.find('.//Service/Code/').text: self.charges = response.find('.//TotalCharges/MonetaryValue').text if response.find('.//GuaranteedDaysToDelivery').text: self.delivery_days = response.find('.//GuaranteedDaysToDelivery').text self.is_valid = True self._calculated = True if needs_cache: cache.set(cache_key_response, tree, 60) if not self.is_valid: 
self.verbose_log("UPS Cannot find rate for code: %s [%s]", self.service_type_code, self.service_type_text) else: self.is_valid = False self._calculated = False try: errors = tree.find('.//Error') log.info("UPS %s Error: Code %s - %s" % (errors[0].text, errors[1].text, errors[2].text)) except AttributeError: log.info("UPS error - cannot parse response:\n %s", self.raw)
AttributeError
dataset/ETHPy150Open dokterbob/satchmo/satchmo/apps/shipping/modules/ups/shipper.py/Shipper.calculate
5,754
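A sketch of the ElementTree failure mode the UPS record guards against: find() returns None when the path does not match, so chaining .text raises AttributeError (the XML here is a made-up stand-in, not UPS's real schema):

import xml.etree.ElementTree as ET

tree = ET.fromstring("<Response><Status/></Response>")
try:
    # find() returns None for an absent path; None.text then raises.
    value = tree.find(".//MissingNode").text
except AttributeError:
    value = "-1"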
def cmd(self, _func=_NO_FUNC, name=None, *args, **kwargs): """Decorator to create a command line subcommand for a function. By default, the name of the decorated function is used as the name of the subcommand, but this can be overridden by specifying the `name` argument. Additional arguments are passed to the subcommand's :py:class:`ArgumentParser`. """ if _func is not _NO_FUNC: # Support for using this decorator without calling it, e.g. # @app.cmd <---- note: no parentheses here! # def foo(): pass return self.cmd()(_func) parser_args = args parser_kwargs = kwargs def wrapper(func): subcommand = name if name is not None else func.__name__ parser_kwargs.setdefault('help', "") # improves --help output subparser = self._subparsers.add_parser( subcommand, *parser_args, **parser_kwargs) # Add global arguments to subcommand as well so that they # can be given after the subcommand on the CLI. for global_args, global_kwargs in self._global_args: subparser.add_argument(*global_args, **global_kwargs) # Add any pending arguments for args, kwargs in self._pending_args: subparser.add_argument(*args, **kwargs) self._pending_args = [] # Add any pending default values try: pending_defaults = self._defaults.pop(None) except __HOLE__: pass # no pending defaults else: self._defaults[func] = pending_defaults # Store callback function and return the decorated function # unmodified subparser.set_defaults(_func=func) return func return wrapper
KeyError
dataset/ETHPy150Open splunk/splunk-webframework/contrib/aaargh/aaargh.py/App.cmd
5,755
def validate_number(self, number): "Validates the given 1-based page number." try: number = int(number) except (__HOLE__, ValueError): raise PageNotAnInteger('That page number is not an integer') if number < 1: raise EmptyPage('That page number is less than 1') if number > self.num_pages: if number == 1 and self.allow_empty_first_page: pass else: raise EmptyPage('That page contains no results') return number
TypeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.4/django/core/paginator.py/Paginator.validate_number
5,756
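A worked illustration of the coercion guard in validate_number: int() raises TypeError for the wrong type entirely and ValueError for an unparseable string, which is why the two are caught together.

for raw in (None, [], "abc", "3"):
    try:
        print(int(raw))
    except TypeError:    # None, [] -- not a number-like type at all
        print("not an integer type")
    except ValueError:   # "abc" -- right type, unparseable value
        print("not an integer string")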
def _get_count(self): "Returns the total number of objects, across all pages." if self._count is None: try: self._count = self.object_list.count() except (__HOLE__, TypeError): # AttributeError if object_list has no count() method. # TypeError if object_list.count() requires arguments # (i.e. is of type list). self._count = len(self.object_list) return self._count
AttributeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.4/django/core/paginator.py/Paginator._get_count
5,757
def __iter__(self): i = 0 try: while True: v = self[i] yield v i += 1 except __HOLE__: return
IndexError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.4/django/core/paginator.py/Page.__iter__
5,758
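Page.__iter__ above leans on the classic sequence protocol, in which IndexError terminates iteration; the same convention drives the implicit iterator for any object that defines only __getitem__ (hypothetical FirstN class):

class FirstN:
    """iter() calls __getitem__ with 0, 1, 2, ... until IndexError."""
    def __init__(self, items):
        self.items = items
    def __getitem__(self, i):
        return self.items[i]     # raises IndexError past the end

print(list(FirstN("abc")))       # ['a', 'b', 'c']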
@classmethod def relpath(cls, path, start): """Wrapper for os.path.relpath for Python 2.4. Python 2.4 doesn't have the os.path.relpath function, so this approximates it well enough for our needs. ntpath.relpath() overflows and throws TypeError for paths containing atleast 520 characters (not that hard to encounter in UCM repository). """ try: return cpath.relpath(path, start) except (__HOLE__, TypeError): if start[-1] != os.sep: start += os.sep return path[len(start):]
AttributeError
dataset/ETHPy150Open reviewboard/reviewboard/reviewboard/scmtools/clearcase.py/ClearCaseTool.relpath
5,759
def client_relpath(self, filename): """Normalize any path sent from client view and return relative path against vobtag """ try: path, revision = filename.split("@@", 1) except __HOLE__: path = filename revision = None relpath = "" logging.debug("vobstag: %s, path: %s", self.vobstag, path) while True: # An error should be raised if vobstag cannot be reached. if path == "/": logging.debug("vobstag not found in path, use client filename") return filename # Vobstag reach, relpath can be returned. if path.endswith(self.vobstag): break path, basename = os.path.split(path) # Init relpath with basename. if len(relpath) == 0: relpath = basename else: relpath = os.path.join(basename, relpath) logging.debug("relpath: %s", relpath) if revision: relpath = relpath + "@@" + revision return relpath
ValueError
dataset/ETHPy150Open reviewboard/reviewboard/reviewboard/scmtools/clearcase.py/ClearCaseDiffParser.client_relpath
5,760
def test_log(self): b = wspbus.Bus() self.log(b) self.assertLog([]) # Try a normal message. expected = [] for msg in ["O mah darlin'"] * 3 + ["Clementiiiiiiiine"]: b.log(msg) expected.append(msg) self.assertLog(expected) # Try an error message try: foo except __HOLE__: b.log("You are lost and gone forever", traceback=True) lastmsg = self._log_entries[-1] if "Traceback" not in lastmsg or "NameError" not in lastmsg: self.fail("Last log message %r did not contain " "the expected traceback." % lastmsg) else: self.fail("NameError was not raised as expected.")
NameError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/cherrypy/cherrypy/test/test_bus.py/BusMethodTests.test_log
5,761
def to_pydot(N, strict=True): """Return a pydot graph from a NetworkX graph N. Parameters ---------- N : NetworkX graph A graph created with NetworkX Examples -------- >>> import networkx as nx >>> K5 = nx.complete_graph(5) >>> P = nx.to_pydot(K5) Notes ----- """ # set Graphviz graph type if N.is_directed(): graph_type = 'digraph' else: graph_type = 'graph' strict = N.number_of_selfloops() == 0 and not N.is_multigraph() name = N.graph.get('name') graph_defaults = N.graph.get('graph', {}) if name is None: P = pydot.Dot(graph_type=graph_type, strict=strict, **graph_defaults) else: P = pydot.Dot('"%s"' % name, graph_type=graph_type, strict=strict, **graph_defaults) try: P.set_node_defaults(**N.graph['node']) except __HOLE__: pass try: P.set_edge_defaults(**N.graph['edge']) except KeyError: pass for n, nodedata in N.nodes_iter(data=True): str_nodedata = dict((k, make_str(v)) for k, v in nodedata.items()) p = pydot.Node(make_str(n), **str_nodedata) P.add_node(p) if N.is_multigraph(): for u, v, key, edgedata in N.edges_iter(data=True, keys=True): str_edgedata = dict((k, make_str(v)) for k, v in edgedata.items()) edge = pydot.Edge(make_str(u), make_str(v), key=make_str(key), **str_edgedata) P.add_edge(edge) else: for u, v, edgedata in N.edges_iter(data=True): str_edgedata = dict((k, make_str(v)) for k, v in edgedata.items()) edge = pydot.Edge(make_str(u), make_str(v), **str_edgedata) P.add_edge(edge) return P
KeyError
dataset/ETHPy150Open openstack/fuel-web/nailgun/nailgun/orchestrator/graph_visualization.py/to_pydot
5,762
def test_module_import(self): """ Import base functionality. """ try: from split_settings import __version__ as _version from split_settings.tools import include as _include from split_settings.tools import optional as _optional self._assert_types(_version, _include, _optional) except __HOLE__ as import_error: self.fail(msg=import_error)
ImportError
dataset/ETHPy150Open sobolevn/django-split-settings/example/tests/test_import.py/TestModuleImport.test_module_import
5,763
def test_wildcard_import(self): """ Imports all from all modules """ try: self._assert_types(__version__, include, optional) except __HOLE__ as import_error: self.fail(msg=import_error)
ImportError
dataset/ETHPy150Open sobolevn/django-split-settings/example/tests/test_import.py/TestModuleImport.test_wildcard_import
5,764
def run_mainloop_with(self, target): """Start the OS's main loop to process asyncronous BLE events and then run the specified target function in a background thread. Target function should be a function that takes no parameters and optionally return an integer response code. When the target function stops executing or returns with value then the main loop will be stopped and the program will exit with the returned code. Note that an OS main loop is required to process asyncronous BLE events and this function is provided as a convenience for writing simple tools and scripts that don't need to be full-blown GUI applications. If you are writing a GUI application that has a main loop (a GTK glib main loop on Linux, or a Cocoa main loop on OSX) then you don't need to call this function. """ # Create background thread to run user code. self._user_thread = threading.Thread(target=self._user_thread_main, args=(target,)) self._user_thread.daemon = True self._user_thread.start() # Run main loop. This call will never return! try: AppHelper.runConsoleEventLoop(installInterrupt=True) except __HOLE__: AppHelper.stopEventLoop() sys.exit(0)
KeyboardInterrupt
dataset/ETHPy150Open adafruit/Adafruit_Python_BluefruitLE/Adafruit_BluefruitLE/corebluetooth/provider.py/CoreBluetoothProvider.run_mainloop_with
5,765
def next_source_row(self, handle): """ Given a file handle, return the next row of data as a key value dict. Return None to denote the EOF Return False to skip this row of data entirely """ if not getattr(self, "detected_dialect", None): # Sniff for the dialect of the CSV file pos = handle.tell() handle.seek(0) readahead = handle.read(1024) handle.seek(pos) try: dialect = csv.Sniffer().sniff(readahead, ",") except csv.Error: # Fallback to excel format dialect = csv.excel dialect_attrs = [ "delimiter", "doublequote", "escapechar", "lineterminator", "quotechar", "quoting", "skipinitialspace" ] self.detected_dialect = {x: getattr(dialect, x) for x in dialect_attrs} if not getattr(self, "reader", None): self.reader = csv.reader(handle, **self.detected_dialect) if not getattr(self, "detected_columns", None): # On first iteration, the line will be the column headings, # store those and return False to skip processing columns = self.reader.next() self.detected_columns = columns return False cols = self.detected_columns try: values = self.reader.next() except __HOLE__: return None if not values: return None return dict(zip(cols, values))
StopIteration
dataset/ETHPy150Open potatolondon/osmosis/osmosis/models.py/AbstractImportTask.next_source_row
5,766
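A sketch of the EOF handling in next_source_row, in Python 3 spelling (next(reader) rather than reader.next()): csv readers signal end of input with StopIteration, which the method above converts into None.

import csv
import io

reader = csv.reader(io.StringIO("name,age\nada,36\n"))
header = next(reader)            # first row is the column headings
while True:
    try:
        row = next(reader)       # StopIteration signals end of file
    except StopIteration:
        break
    print(dict(zip(header, row)))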
def process(self): meta = self.task.get_meta() task_model = get_model(*self.task.model_path.split(".")) this = ImportShard.objects.get(pk=self.pk) # Reload, self is pickled source_data = json.loads(this.source_data_json) # If there are no rows to process mark_shard_complete = this.last_row_processed == this.total_rows - 1 or this.total_rows == 0 for i in xrange(this.last_row_processed, this.total_rows): # Always continue from the last processed row data = source_data[i] forms = [self.task.instantiate_form(form, data) for form in meta.forms] if all([form.is_valid() for form in forms]): # All forms are valid, let's process this shizzle cleaned_data = {} for form in forms: cleaned_data.update(form.cleaned_data) try: self.task.import_row(forms, cleaned_data) except __HOLE__, e: # We allow subclasses to raise a validation error on import_row errors = [] if hasattr(e, 'message_dict'): for name, errs in e.message_dict.items(): for err in errs: errors.append("{0}: {1}".format(name, err)) else: # Pre 1.6, ValidationError does not necessarily have a message_dict for err in e.messages: errors.append(err) self.handle_error(this.start_line_number + i, cleaned_data, errors) else: # We've encountered an error, call the error handler errors = [] for form in forms: for name, errs in form.errors.items(): for err in errs: errors.append("{0}: {1}".format(name, err)) self.handle_error(this.start_line_number + i, data, errors) # Now update the last processed row, transactionally @transactional def update_shard(_this): _this = ImportShard.objects.get(pk=_this.pk) _this.last_row_processed += 1 _this.save() return _this this = update_shard(this) # If this was the last iteration then mark as complete mark_shard_complete = i == this.total_rows - 1 if mark_shard_complete: @transactional def update_task(_this): if _this.complete: return task = task_model.objects.get(pk=_this.task_id) task.shards_processed += 1 task.save() _this.complete = True _this.save() update_task(this) deferred.defer(this._finalize_errors, _queue=self.task.get_meta().queue)
ValidationError
dataset/ETHPy150Open potatolondon/osmosis/osmosis/models.py/ImportShard.process
5,767
def import_idmap(idmapdb, samba3, logger): """Import idmap data. :param idmapdb: Samba4 IDMAP database :param samba3_idmap: Samba3 IDMAP database to import from :param logger: Logger object """ try: samba3_idmap = samba3.get_idmap_db() except __HOLE__, e: logger.warn('Cannot open idmap database, Ignoring: %s', str(e)) return currentxid = max(samba3_idmap.get_user_hwm(), samba3_idmap.get_group_hwm()) lowerbound = currentxid # FIXME: upperbound m = ldb.Message() m.dn = ldb.Dn(idmapdb, 'CN=CONFIG') m['lowerbound'] = ldb.MessageElement( str(lowerbound), ldb.FLAG_MOD_REPLACE, 'lowerBound') m['xidNumber'] = ldb.MessageElement( str(currentxid), ldb.FLAG_MOD_REPLACE, 'xidNumber') idmapdb.modify(m) for id_type, xid in samba3_idmap.ids(): if id_type == 'UID': xid_type = 'ID_TYPE_UID' elif id_type == 'GID': xid_type = 'ID_TYPE_GID' else: logger.warn('Wrong type of entry in idmap (%s), Ignoring', id_type) continue sid = samba3_idmap.get_sid(xid, id_type) add_idmap_entry(idmapdb, dom_sid(sid), xid, xid_type, logger)
IOError
dataset/ETHPy150Open byt3bl33d3r/pth-toolkit/lib/python2.7/site-packages/samba/upgrade.py/import_idmap
5,768
def upgrade_from_samba3(samba3, logger, targetdir, session_info=None, useeadb=False, dns_backend=None, use_ntvfs=False): """Upgrade from samba3 database to samba4 AD database :param samba3: samba3 object :param logger: Logger object :param targetdir: samba4 database directory :param session_info: Session information """ serverrole = samba3.lp.server_role() domainname = samba3.lp.get("workgroup") realm = samba3.lp.get("realm") netbiosname = samba3.lp.get("netbios name") if samba3.lp.get("ldapsam:trusted") is None: samba3.lp.set("ldapsam:trusted", "yes") # secrets db try: secrets_db = samba3.get_secrets_db() except IOError, e: raise ProvisioningError("Could not open '%s', the Samba3 secrets database: %s. Perhaps you specified the incorrect smb.conf, --testparm or --dbdir option?" % (samba3.privatedir_path("secrets.tdb"), str(e))) if not domainname: domainname = secrets_db.domains()[0] logger.warning("No workgroup specified in smb.conf file, assuming '%s'", domainname) if not realm: if serverrole == "ROLE_DOMAIN_BDC" or serverrole == "ROLE_DOMAIN_PDC": raise ProvisioningError("No realm specified in smb.conf file and being a DC. That upgrade path doesn't work! Please add a 'realm' directive to your old smb.conf to let us know which one you want to use (it is the DNS name of the AD domain you wish to create.") else: realm = domainname.upper() logger.warning("No realm specified in smb.conf file, assuming '%s'", realm) # Find machine account and password next_rid = 1000 try: machinepass = secrets_db.get_machine_password(netbiosname) except KeyError: machinepass = None if samba3.lp.get("passdb backend").split(":")[0].strip() == "ldapsam": base_dn = samba3.lp.get("ldap suffix") ldapuser = samba3.lp.get("ldap admin dn") ldappass = (secrets_db.get_ldap_bind_pw(ldapuser)).strip('\x00') ldap = True else: ldapuser = None ldappass = None ldap = False # We must close the direct pytdb database before the C code loads it secrets_db.close() # Connect to old password backend passdb.set_secrets_dir(samba3.lp.get("private dir")) s3db = samba3.get_sam_db() # Get domain sid try: domainsid = passdb.get_global_sam_sid() except passdb.error: raise Exception("Can't find domain sid for '%s', Exiting." 
            % domainname)

    # Get machine account, sid, rid
    try:
        machineacct = s3db.getsampwnam('%s$' % netbiosname)
    except passdb.error:
        machinerid = None
        machinesid = None
    else:
        machinesid, machinerid = machineacct.user_sid.split()

    # Export account policy
    logger.info("Exporting account policy")
    policy = s3db.get_account_policy()

    # Export groups from old passdb backend
    logger.info("Exporting groups")
    grouplist = s3db.enum_group_mapping()
    groupmembers = {}
    for group in grouplist:
        sid, rid = group.sid.split()
        if sid == domainsid:
            if rid >= next_rid:
                next_rid = rid + 1

        # Get members for each group/alias
        if group.sid_name_use == lsa.SID_NAME_ALIAS:
            try:
                members = s3db.enum_aliasmem(group.sid)
                groupmembers[str(group.sid)] = members
            except passdb.error, e:
                logger.warn("Ignoring group '%s' %s listed but then not found: %s", group.nt_name, group.sid, e)
                continue
        elif group.sid_name_use == lsa.SID_NAME_DOM_GRP:
            try:
                members = s3db.enum_group_members(group.sid)
                groupmembers[str(group.sid)] = members
            except passdb.error, e:
                logger.warn("Ignoring group '%s' %s listed but then not found: %s", group.nt_name, group.sid, e)
                continue
        elif group.sid_name_use == lsa.SID_NAME_WKN_GRP:
            (group_dom_sid, rid) = group.sid.split()
            if (group_dom_sid != security.dom_sid(security.SID_BUILTIN)):
                logger.warn("Ignoring 'well known' group '%s' (should already be in AD, and have no members)", group.nt_name)
                continue
            # A number of buggy databases mix up well known groups and aliases.
            try:
                members = s3db.enum_aliasmem(group.sid)
                groupmembers[str(group.sid)] = members
            except passdb.error, e:
                logger.warn("Ignoring group '%s' %s listed but then not found: %s", group.nt_name, group.sid, e)
                continue
        else:
            logger.warn("Ignoring group '%s' %s with sid_name_use=%d", group.nt_name, group.sid, group.sid_name_use)
            continue

    # Export users from old passdb backend
    logger.info("Exporting users")
    userlist = s3db.search_users(0)
    userdata = {}
    uids = {}
    admin_user = None
    for entry in userlist:
        if machinerid and machinerid == entry['rid']:
            continue
        username = entry['account_name']
        if entry['rid'] < 1000:
            logger.info(" Skipping wellknown rid=%d (for username=%s)", entry['rid'], username)
            continue
        if entry['rid'] >= next_rid:
            next_rid = entry['rid'] + 1
        user = s3db.getsampwnam(username)
        acct_type = (user.acct_ctrl & (samr.ACB_NORMAL|samr.ACB_WSTRUST|samr.ACB_SVRTRUST|samr.ACB_DOMTRUST))
        if (acct_type == samr.ACB_NORMAL or acct_type == samr.ACB_WSTRUST):
            pass
        elif acct_type == samr.ACB_SVRTRUST:
            logger.warn(" Demoting BDC account trust for %s, this DC must be elevated to an AD DC using 'samba-tool domain promote'" % username[:-1])
            user.acct_ctrl = (user.acct_ctrl & ~samr.ACB_SVRTRUST) | samr.ACB_WSTRUST
        elif acct_type == samr.ACB_DOMTRUST:
            logger.warn(" Skipping inter-domain trust from domain %s, this trust must be re-created as an AD trust" % username[:-1])
        elif acct_type == (samr.ACB_NORMAL|samr.ACB_WSTRUST) and username[-1] == '$':
            logger.warn(" Fixing account %s which had both ACB_NORMAL (U) and ACB_WSTRUST (W) set. Account will be marked as ACB_WSTRUST (W), i.e. as a domain member" % username)
            user.acct_ctrl = (user.acct_ctrl & ~samr.ACB_NORMAL)
        elif acct_type == (samr.ACB_NORMAL|samr.ACB_SVRTRUST) and username[-1] == '$':
            logger.warn(" Fixing account %s which had both ACB_NORMAL (U) and ACB_SVRTRUST (S) set. Account will be marked as ACB_WSTRUST (S), i.e. as a domain member" % username)
            user.acct_ctrl = (user.acct_ctrl & ~samr.ACB_NORMAL)
        else:
            raise ProvisioningError("""Failed to upgrade due to invalid account %s, account control flags 0x%08X must have exactly one of ACB_NORMAL (N, 0x%08X), ACB_WSTRUST (W 0x%08X), ACB_SVRTRUST (S 0x%08X) or ACB_DOMTRUST (D 0x%08X). Please fix this account before attempting to upgrade again """ % (user.acct_flags, username, samr.ACB_NORMAL, samr.ACB_WSTRUST, samr.ACB_SVRTRUST, samr.ACB_DOMTRUST))

        userdata[username] = user
        try:
            uids[username] = s3db.sid_to_id(user.user_sid)[0]
        except passdb.error:
            try:
                uids[username] = pwd.getpwnam(username).pw_uid
            except __HOLE__:
                pass

        if not admin_user and username.lower() == 'root':
            admin_user = username
        if username.lower() == 'administrator':
            admin_user = username

        try:
            group_memberships = s3db.enum_group_memberships(user);
            for group in group_memberships:
                if str(group) in groupmembers:
                    if user.user_sid not in groupmembers[str(group)]:
                        groupmembers[str(group)].append(user.user_sid)
                else:
                    groupmembers[str(group)] = [user.user_sid];
        except passdb.error, e:
            logger.warn("Ignoring group memberships of '%s' %s: %s", username, user.user_sid, e)

    logger.info("Next rid = %d", next_rid)

    # Check for same username/groupname
    group_names = set([g.nt_name for g in grouplist])
    user_names = set([u['account_name'] for u in userlist])
    common_names = group_names.intersection(user_names)
    if common_names:
        logger.error("Following names are both user names and group names:")
        for name in common_names:
            logger.error(" %s" % name)
        raise ProvisioningError("Please remove common user/group names before upgrade.")

    # Check for same user sid/group sid
    group_sids = set([str(g.sid) for g in grouplist])
    if len(grouplist) != len(group_sids):
        raise ProvisioningError("Please remove duplicate group sid entries before upgrade.")
    user_sids = set(["%s-%u" % (domainsid, u['rid']) for u in userlist])
    if len(userlist) != len(user_sids):
        raise ProvisioningError("Please remove duplicate user sid entries before upgrade.")
    common_sids = group_sids.intersection(user_sids)
    if common_sids:
        logger.error("Following sids are both user and group sids:")
        for sid in common_sids:
            logger.error(" %s" % str(sid))
        raise ProvisioningError("Please remove duplicate sid entries before upgrade.")

    # Get posix attributes from ldap or the os
    homes = {}
    shells = {}
    pgids = {}
    if ldap:
        creds = Credentials()
        creds.guess(samba3.lp)
        creds.set_bind_dn(ldapuser)
        creds.set_password(ldappass)
        urls = samba3.lp.get("passdb backend").split(":",1)[1].strip('"')
        for url in urls.split():
            try:
                ldb_object = Ldb(url, credentials=creds)
            except ldb.LdbError, e:
                logger.warning("Could not open ldb connection to %s, the error message is: %s", url, e)
            else:
                break

    logger.info("Exporting posix attributes")
    userlist = s3db.search_users(0)
    for entry in userlist:
        username = entry['account_name']
        if username in uids.keys():
            try:
                if ldap:
                    homes[username] = get_posix_attr_from_ldap_backend(logger, ldb_object, base_dn, username, "homeDirectory")
                else:
                    homes[username] = pwd.getpwnam(username).pw_dir
            except KeyError:
                pass

            try:
                if ldap:
                    shells[username] = get_posix_attr_from_ldap_backend(logger, ldb_object, base_dn, username, "loginShell")
                else:
                    shells[username] = pwd.getpwnam(username).pw_shell
            except KeyError:
                pass

            try:
                if ldap:
                    pgids[username] = get_posix_attr_from_ldap_backend(logger, ldb_object, base_dn, username, "gidNumber")
                else:
                    pgids[username] = pwd.getpwnam(username).pw_gid
            except KeyError:
                pass

    logger.info("Reading WINS database")
    samba3_winsdb = None
    try:
        samba3_winsdb = samba3.get_wins_db()
    except IOError, e:
        logger.warn('Cannot open wins database, Ignoring: %s', str(e))

    if not (serverrole == "ROLE_DOMAIN_BDC" or serverrole == "ROLE_DOMAIN_PDC"):
        dns_backend = "NONE"

    # Do full provision
    result = provision(logger, session_info, None, targetdir=targetdir, realm=realm, domain=domainname, domainsid=str(domainsid), next_rid=next_rid, dc_rid=machinerid, dom_for_fun_level=dsdb.DS_DOMAIN_FUNCTION_2003, hostname=netbiosname.lower(), machinepass=machinepass, serverrole=serverrole, samdb_fill=FILL_FULL, useeadb=useeadb, dns_backend=dns_backend, use_rfc2307=True, use_ntvfs=use_ntvfs, skip_sysvolacl=True)
    result.report_logger(logger)

    # Import WINS database
    logger.info("Importing WINS database")
    if samba3_winsdb:
        import_wins(Ldb(result.paths.winsdb), samba3_winsdb)

    # Set Account policy
    logger.info("Importing Account policy")
    import_sam_policy(result.samdb, policy, logger)

    # Migrate IDMAP database
    logger.info("Importing idmap database")
    import_idmap(result.idmap, samba3, logger)

    # Set the s3 context for samba4 configuration
    new_lp_ctx = s3param.get_context()
    new_lp_ctx.load(result.lp.configfile)
    new_lp_ctx.set("private dir", result.lp.get("private dir"))
    new_lp_ctx.set("state directory", result.lp.get("state directory"))
    new_lp_ctx.set("lock directory", result.lp.get("lock directory"))

    # Connect to samba4 backend
    s4_passdb = passdb.PDB(new_lp_ctx.get("passdb backend"))

    # Export groups to samba4 backend
    logger.info("Importing groups")
    for g in grouplist:
        # Ignore uninitialized groups (gid = -1)
        if g.gid != -1:
            add_group_from_mapping_entry(result.samdb, g, logger)
            add_ad_posix_idmap_entry(result.samdb, g.sid, g.gid, "ID_TYPE_GID", logger)
            add_posix_attrs(samdb=result.samdb, sid=g.sid, name=g.nt_name, nisdomain=domainname.lower(), xid_type="ID_TYPE_GID", logger=logger)

    # Export users to samba4 backend
    logger.info("Importing users")
    for username in userdata:
        if username.lower() == 'administrator':
            if userdata[username].user_sid != dom_sid(str(domainsid) + "-500"):
                logger.error("User 'Administrator' in your existing directory has SID %s, expected it to be %s" % (userdata[username].user_sid, dom_sid(str(domainsid) + "-500")))
                raise ProvisioningError("User 'Administrator' in your existing directory does not have SID ending in -500")
        if username.lower() == 'root':
            if userdata[username].user_sid == dom_sid(str(domainsid) + "-500"):
                logger.warn('User root has been replaced by Administrator')
            else:
                logger.warn('User root has been kept in the directory, it should be removed in favour of the Administrator user')

        s4_passdb.add_sam_account(userdata[username])
        if username in uids:
            add_ad_posix_idmap_entry(result.samdb, userdata[username].user_sid, uids[username], "ID_TYPE_UID", logger)
            if (username in homes) and (homes[username] is not None) and \
               (username in shells) and (shells[username] is not None) and \
               (username in pgids) and (pgids[username] is not None):
                add_posix_attrs(samdb=result.samdb, sid=userdata[username].user_sid, name=username, nisdomain=domainname.lower(), xid_type="ID_TYPE_UID", home=homes[username], shell=shells[username], pgid=pgids[username], logger=logger)

    logger.info("Adding users to groups")
    for g in grouplist:
        if str(g.sid) in groupmembers:
            add_users_to_group(result.samdb, g, groupmembers[str(g.sid)], logger)

    # Set password for administrator
    if admin_user:
        logger.info("Setting password for administrator")
        admin_userdata = s4_passdb.getsampwnam("administrator")
        admin_userdata.nt_passwd = userdata[admin_user].nt_passwd
        if userdata[admin_user].lanman_passwd:
            admin_userdata.lanman_passwd = userdata[admin_user].lanman_passwd
        admin_userdata.pass_last_set_time = userdata[admin_user].pass_last_set_time
        if userdata[admin_user].pw_history:
            admin_userdata.pw_history = userdata[admin_user].pw_history
        s4_passdb.update_sam_account(admin_userdata)
        logger.info("Administrator password has been set to password of user '%s'", admin_user)

    if result.server_role == "active directory domain controller":
        setsysvolacl(result.samdb, result.paths.netlogon, result.paths.sysvol, result.paths.root_uid, result.paths.root_gid, security.dom_sid(result.domainsid), result.names.dnsdomain, result.names.domaindn, result.lp, use_ntvfs)

    # FIXME: import_registry(registry.Registry(), samba3.get_registry())
    # FIXME: shares
KeyError
dataset/ETHPy150Open byt3bl33d3r/pth-toolkit/lib/python2.7/site-packages/samba/upgrade.py/upgrade_from_samba3
5,769
def main(): try: ascii_art = """\ ____ _ _ _ | _ \ | | | | | | | |_) | __ _ ___| |__ | |__ _ _| |__ ___ ___ _ __ ___ | _ < / _` / __| '_ \| '_ \| | | | '_ \ / __/ _ \| '_ ` _ \\ | |_) | (_| \__ \ | | | | | | |_| | |_) | (_| (_) | | | | | | |____/ \__,_|___/_| |_|_| |_|\__,_|_.__(_)___\___/|_| |_| |_| """ print(ascii_art) print("Welcome to bashhub setup!") is_new_user = query_yes_no("Are you a new user?") # Initialize variaous Credentials for logging in. username = None password = None access_token = None # If this is a new user walk them through the registration flow if is_new_user: register_user = get_new_user_information() register_result = rest_client.register_user(register_user) if register_result: print("Registered new user {0}\n".format( register_user.username)) # Set our credentials to login later username = register_user.username password = register_user.password else: print("Sorry, registering a new user failed.") print("You can rerun setup using 'bashhub setup' in a new " "terminal window.\n") sys.exit(0) (username, password, access_token) = get_user_information_and_login( username, password) if access_token == None: print("\nSorry looks like logging in failed.") print("If you forgot your password please reset it. " "https://bashhub.com/password-reset") print("You can rerun setup using 'bashhub setup' in a new " "terminal window.\n") sys.exit(0) # write out our user scoped access token config_write_result = write_to_config_file("access_token", access_token) if not config_write_result: print("Writing your config file failed.") sys.exit(1) (access_token, system_name) = handle_system_information(username, password) if access_token == None: print("Sorry looks like getting your info failed.\ Exiting...") sys.exit(0) # write out our system scoped token and the system name write_to_config_file("access_token", access_token) write_to_config_file("system_name", system_name) update_system_info() sys.exit(0) except Exception, err: sys.stderr.write('Setup Error:\n%s\n' % str(err)) traceback.print_exc() sys.exit(1) except __HOLE__: # To allow Ctrl+C (^C). Print a new line to drop the prompt. print sys.exit()
KeyboardInterrupt
dataset/ETHPy150Open rcaloras/bashhub-client/bashhub/bashhub_setup.py/main
5,770
def test_nonexistent(self): 'Test trying to use a genome with no Ensembl data at UCSC' badname = 'Nonexistent.Fake.Bogus' try: badiface = UCSCEnsemblInterface(badname) except __HOLE__: return raise ValueError("Bad sequence name %s has failed to return an error" % badname)
KeyError
dataset/ETHPy150Open cjlee112/pygr/tests/apps_ucscensembl_test.py/UCSCEnsembl_Test.test_nonexistent
5,771
def main(): """ Script main, parses arguments and invokes Dummy.setup indirectly. """ parser = ArgumentParser(description='Utility to read setup.py values from cmake macros. Creates a file with CMake set commands setting variables.') parser.add_argument('package_name', help='Name of catkin package') parser.add_argument('setupfile_path', help='Full path to setup.py') parser.add_argument('outfile', help='Where to write result to') args = parser.parse_args() # print("%s" % sys.argv) # PACKAGE_NAME = sys.argv[1] # OUTFILE = sys.argv[3] # print("Interrogating setup.py for package %s into %s " % (PACKAGE_NAME, OUTFILE), # file=sys.stderr) # print("executing %s" % args.setupfile_path) # be sure you're in the directory containing # setup.py so the sys.path manipulation works, # so the import of __version__ works os.chdir(os.path.dirname(os.path.abspath(args.setupfile_path))) # patch setup() function of distutils and setuptools for the # context of evaluating setup.py try: fake_setup = _create_mock_setup_function(package_name=args.package_name, outfile=args.outfile) distutils_backup = distutils.core.setup distutils.core.setup = fake_setup try: setuptools_backup = setuptools.setup setuptools.setup = fake_setup except __HOLE__: pass with open(args.setupfile_path, 'r') as fh: local_vars = { '__doc__': None, '__file__': os.path.abspath(args.setupfile_path), '__name__': '__main__', '__package__': None, } exec(fh.read(), {}, local_vars) finally: distutils.core.setup = distutils_backup try: setuptools.setup = setuptools_backup except NameError: pass
NameError
dataset/ETHPy150Open ros/catkin/cmake/interrogate_setup_dot_py.py/main
5,772
def _mathfun_real(f_real, f_complex): def f(x, **kwargs): if type(x) is float: return f_real(x) if type(x) is complex: return f_complex(x) try: x = float(x) return f_real(x) except (TypeError, __HOLE__): x = complex(x) return f_complex(x) f.__name__ = f_real.__name__ return f
ValueError
dataset/ETHPy150Open fredrik-johansson/mpmath/mpmath/math2.py/_mathfun_real
5,773
def _mathfun(f_real, f_complex): def f(x, **kwargs): if type(x) is complex: return f_complex(x) try: return f_real(float(x)) except (TypeError, __HOLE__): return f_complex(complex(x)) f.__name__ = f_real.__name__ return f
ValueError
dataset/ETHPy150Open fredrik-johansson/mpmath/mpmath/math2.py/_mathfun
5,774
def _mathfun_n(f_real, f_complex): def f(*args, **kwargs): try: return f_real(*(float(x) for x in args)) except (TypeError, __HOLE__): return f_complex(*(complex(x) for x in args)) f.__name__ = f_real.__name__ return f # Workaround for non-raising log and sqrt in Python 2.5 and 2.4 # on Unix system
ValueError
dataset/ETHPy150Open fredrik-johansson/mpmath/mpmath/math2.py/_mathfun_n
5,775
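The three _mathfun* wrappers above share one dispatch idea: try the float path first, fall back to complex when coercion fails. A minimal standalone distillation (hypothetical smart_sqrt; note the originals catch only the conversion errors, while this sketch also catches math.sqrt's ValueError for negative inputs):

import cmath
import math

def smart_sqrt(x):
    """Float path when possible, complex fallback otherwise."""
    try:
        return math.sqrt(float(x))
    except (TypeError, ValueError):
        return cmath.sqrt(complex(x))

print(smart_sqrt(2))        # 1.4142135623730951
print(smart_sqrt(-1))       # 1j  (math.sqrt raised ValueError)
print(smart_sqrt(1 + 1j))   # complex result (float() raised TypeError)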
def nthroot(x, n): r = 1./n try: return float(x) ** r except (ValueError, __HOLE__): return complex(x) ** r
TypeError
dataset/ETHPy150Open fredrik-johansson/mpmath/mpmath/math2.py/nthroot
5,776
def loggamma(x): if type(x) not in (float, complex): try: x = float(x) except (ValueError, TypeError): x = complex(x) try: xreal = x.real ximag = x.imag except AttributeError: # py2.5 xreal = x ximag = 0.0 # Reflection formula # http://functions.wolfram.com/GammaBetaErf/LogGamma/16/01/01/0003/ if xreal < 0.0: if abs(x) < 0.5: v = log(gamma(x)) if ximag == 0: v = v.conjugate() return v z = 1-x try: re = z.real im = z.imag except __HOLE__: # py2.5 re = z im = 0.0 refloor = floor(re) if im == 0.0: imsign = 0 elif im < 0.0: imsign = -1 else: imsign = 1 return (-pi*1j)*abs(refloor)*(1-abs(imsign)) + logpi - \ log(sinpi(z-refloor)) - loggamma(z) + 1j*pi*refloor*imsign if x == 1.0 or x == 2.0: return x*0 p = 0. while abs(x) < 11: p -= log(x) x += 1.0 s = 0.918938533204672742 + (x-0.5)*log(x) - x r = 1./x r2 = r*r s += 0.083333333333333333333*r; r *= r2 s += -0.0027777777777777777778*r; r *= r2 s += 0.00079365079365079365079*r; r *= r2 s += -0.0005952380952380952381*r; r *= r2 s += 0.00084175084175084175084*r; r *= r2 s += -0.0019175269175269175269*r; r *= r2 s += 0.0064102564102564102564*r; r *= r2 s += -0.02955065359477124183*r return s + p
AttributeError
dataset/ETHPy150Open fredrik-johansson/mpmath/mpmath/math2.py/loggamma
5,777
def ei(z, _e1=False): typez = type(z) if typez not in (float, complex): try: z = float(z) typez = float except (TypeError, __HOLE__): z = complex(z) typez = complex if not z: return -INF absz = abs(z) if absz > EI_ASYMP_CONVERGENCE_RADIUS: return ei_asymp(z, _e1) elif absz <= 2.0 or (typez is float and z > 0.0): return ei_taylor(z, _e1) # Integrate, starting from whichever is smaller of a Taylor # series value or an asymptotic series value if typez is complex and z.real > 0.0: zref = z / absz ref = ei_taylor(zref, _e1) else: zref = EI_ASYMP_CONVERGENCE_RADIUS * z / absz ref = ei_asymp(zref, _e1) C = (zref-z)*0.5 D = (zref+z)*0.5 s = 0.0 if type(z) is complex: _exp = cmath.exp else: _exp = math.exp for x,w in gauss42: t = C*x+D s += w*_exp(t)/t ref -= C*s return ref
ValueError
dataset/ETHPy150Open fredrik-johansson/mpmath/mpmath/math2.py/ei
5,778
def e1(z): # hack to get consistent signs if the imaginary part if 0 # and signed typez = type(z) if type(z) not in (float, complex): try: z = float(z) typez = float except (TypeError, __HOLE__): z = complex(z) typez = complex if typez is complex and not z.imag: z = complex(z.real, 0.0) # end hack return -ei(-z, _e1=True)
ValueError
dataset/ETHPy150Open fredrik-johansson/mpmath/mpmath/math2.py/e1
5,779
def zeta(s): """ Riemann zeta function, real argument """ if not isinstance(s, (float, int)): try: s = float(s) except (__HOLE__, TypeError): try: s = complex(s) if not s.imag: return complex(zeta(s.real)) except (ValueError, TypeError): pass raise NotImplementedError if s == 1: raise ValueError("zeta(1) pole") if s >= 27: return 1.0 + 2.0**(-s) + 3.0**(-s) n = int(s) if n == s: if n >= 0: return _zeta_int[n] if not (n % 2): return 0.0 if s <= 0.0: return 2.**s*pi**(s-1)*_sinpi_real(0.5*s)*_gamma_real(1-s)*zeta(1-s) if s <= 2.0: if s <= 1.0: return _polyval(_zeta_0,s)/(s-1) return _polyval(_zeta_1,s)/(s-1) z = _polyval(_zeta_P,s) / _polyval(_zeta_Q,s) return 1.0 + 2.0**(-s) + 3.0**(-s) + 4.0**(-s)*z
ValueError
dataset/ETHPy150Open fredrik-johansson/mpmath/mpmath/math2.py/zeta
5,780
def _get_addons(request, addons, addon_id, action): """Create a list of ``MenuItem``s for the activity feed.""" items = [] a = MenuItem() a.selected = (not addon_id) (a.text, a.url) = (_('All My Add-ons'), reverse('devhub.feed_all')) if action: a.url += '?action=' + action items.append(a) for addon in addons: item = MenuItem() try: item.selected = (addon_id and addon.id == int(addon_id)) except __HOLE__: pass # We won't get here... EVER url = reverse('devhub.feed', args=[addon.slug]) if action: url += '?action=' + action item.text, item.url = addon.name, url items.append(item) return items
ValueError
dataset/ETHPy150Open mozilla/addons-server/src/olympia/devhub/views.py/_get_addons
5,781
@dev_required @submit_step(7) def submit_done(request, addon_id, addon, step): # Bounce to the versions page if they don't have any versions. if not addon.versions.exists(): return redirect(addon.get_dev_url('versions')) sp = addon.current_version.supported_platforms is_platform_specific = sp != [amo.PLATFORM_ALL] try: author = addon.authors.all()[0] except __HOLE__: # This should never happen. author = None if author: submitted_addons = (author.addons .exclude(status=amo.STATUS_NULL).count()) if submitted_addons == 1: # We can use locale-prefixed URLs because the submitter probably # speaks the same language by the time he/she reads the email. context = { 'app': unicode(request.APP.pretty), 'detail_url': absolutify(addon.get_url_path()), 'version_url': absolutify(addon.get_dev_url('versions')), 'edit_url': absolutify(addon.get_dev_url('edit')), 'full_review': addon.status == amo.STATUS_NOMINATED } tasks.send_welcome_email.delay(addon.id, [author.email], context) return render(request, 'devhub/addons/submit/done.html', {'addon': addon, 'step': step, 'is_platform_specific': is_platform_specific})
IndexError
dataset/ETHPy150Open mozilla/addons-server/src/olympia/devhub/views.py/submit_done
5,782
def update_badges(overwrite=False): from django.utils.importlib import import_module for app in settings.INSTALLED_APPS: mod = import_module(app) try: badges_mod = import_module('%s.badges' % app) fixture_label = '%s_badges' % app.replace('.','_') call_command('loaddata', fixture_label, verbosity=1) if hasattr(badges_mod, 'badges'): badger.utils.update_badges(badges_mod.badges, overwrite) if hasattr(badges_mod, 'update_badges'): badges_mod.update_badges(overwrite) except __HOLE__: if module_has_submodule(mod, 'badges'): raise
ImportError
dataset/ETHPy150Open mozilla/django-badger/badger/management/__init__.py/update_badges
5,783
def parseStreamPacket(self): try: packet = mirror.Packet.Packet(self.rfile) except __HOLE__: return self.closeConnection() # TODO: Really? except EOFError: return self.closeConnection() except socket.timeout, e: self.log_error("Request timed out: %r", e) return self.closeConnection() self.handlePacket(packet)
IOError
dataset/ETHPy150Open tzwenn/PyOpenAirMirror/mirror/service.py/MirrorService.parseStreamPacket
5,784
def user_data(self, access_token, *args, **kwargs): """Loads user data from service""" data = self._user_data(access_token) if not data.get('email'): try: emails = self._user_data(access_token, '/emails') except (HTTPError, __HOLE__, TypeError): emails = [] if emails: email = emails[0] primary_emails = [ e for e in emails if not isinstance(e, dict) or e.get('primary') ] if primary_emails: email = primary_emails[0] if isinstance(email, dict): email = email.get('email', '') data['email'] = email return data
ValueError
dataset/ETHPy150Open omab/python-social-auth/social/backends/github.py/GithubOAuth2.user_data
5,785
def user_data(self, access_token, *args, **kwargs): """Loads user data from service""" user_data = super(GithubMemberOAuth2, self).user_data( access_token, *args, **kwargs ) try: self.request(self.member_url(user_data), params={ 'access_token': access_token }) except __HOLE__ as err: # if the user is a member of the organization, response code # will be 204, see http://bit.ly/ZS6vFl if err.response.status_code != 204: raise AuthFailed(self, 'User doesn\'t belong to the organization') return user_data
HTTPError
dataset/ETHPy150Open omab/python-social-auth/social/backends/github.py/GithubMemberOAuth2.user_data
5,786
def definition_to_api_objects(definition): if 'objects' not in definition: raise PipelineDefinitionError('Missing "objects" key', definition) api_elements = [] # To convert to the structure expected by the service, # we convert the existing structure to a list of dictionaries. # Each dictionary has a 'fields', 'id', and 'name' key. for element in definition['objects']: try: element_id = element.pop('id') except __HOLE__: raise PipelineDefinitionError('Missing "id" key of element: %s' % json.dumps(element), definition) api_object = {'id': element_id} # If a name is provided, then we use that for the name, # otherwise the id is used for the name. name = element.pop('name', element_id) api_object['name'] = name # Now we need the field list. Each element in the field list is a dict # with a 'key', 'stringValue'|'refValue' fields = [] for key, value in sorted(element.items()): fields.extend(_parse_each_field(key, value)) api_object['fields'] = fields api_elements.append(api_object) return api_elements
KeyError
dataset/ETHPy150Open aws/aws-cli/awscli/customizations/datapipeline/translator.py/definition_to_api_objects
5,787
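A sketch of the guard in definition_to_api_objects: dict.pop(key) without a default raises KeyError, which the function converts into a schema error (hypothetical helper; ValueError stands in for PipelineDefinitionError):

def require_id(element):
    """Remove and return the 'id' field, failing loudly if it is absent."""
    try:
        return element.pop("id")
    except KeyError:
        raise ValueError("element has no 'id' key: %r" % element)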
def definition_to_api_parameters(definition): if 'parameters' not in definition: return None parameter_objects = [] for element in definition['parameters']: try: parameter_id = element.pop('id') except __HOLE__: raise PipelineDefinitionError('Missing "id" key of parameter: %s' % json.dumps(element), definition) parameter_object = {'id': parameter_id} # Now we need the attribute list. Each element in the attribute list # is a dict with a 'key', 'stringValue' attributes = [] for key, value in sorted(element.items()): attributes.extend(_parse_each_field(key, value)) parameter_object['attributes'] = attributes parameter_objects.append(parameter_object) return parameter_objects
KeyError
dataset/ETHPy150Open aws/aws-cli/awscli/customizations/datapipeline/translator.py/definition_to_api_parameters
5,788
def warmup(request): """ Provides default procedure for handling warmup requests on App Engine. Just add this view to your main urls.py. """ for app in settings.INSTALLED_APPS: for name in ('urls', 'views', 'models'): try: import_module('%s.%s' % (app, name)) except __HOLE__: pass content_type = 'text/plain; charset=%s' % settings.DEFAULT_CHARSET return HttpResponse('Warmup done', content_type=content_type)
ImportError
dataset/ETHPy150Open adieu/djangoappengine/views.py/warmup
5,789
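The warmup view's optional-import idiom in isolation (hypothetical try_import helper; "myapp.urls" is a made-up module name):

from importlib import import_module

def try_import(dotted_name):
    """Return the module, or None if it does not exist."""
    try:
        return import_module(dotted_name)
    except ImportError:
        return None

urls = try_import("myapp.urls")   # None when the app has no urls module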
def clean_doi(input_doi): input_doi = remove_nonprinting_characters(input_doi) try: input_doi = input_doi.lower() if input_doi.startswith("http"): match = re.match("^https*://(dx\.)*doi.org/(10\..+)", input_doi) doi = match.group(2) elif "doi.org" in input_doi: match = re.match("^(dx\.)*doi.org/(10\..+)", input_doi) doi = match.group(2) elif input_doi.startswith("doi:"): match = re.match("^doi:(10\..+)", input_doi) doi = match.group(1) elif input_doi.startswith("10."): doi = input_doi elif "10." in input_doi: match = re.match(".*(10\.\d+.+)", input_doi, re.DOTALL) doi = match.group(1) else: doi = None try: logger.debug(u"MALFORMED DOI {input_doi}".format( input_doi=input_doi)) except: logger.debug(u"MALFORMED DOI, can't print doi") except __HOLE__: doi = None return doi
AttributeError
dataset/ETHPy150Open Impactstory/total-impact-core/totalimpact/providers/crossref.py/clean_doi
5,790
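A sketch of the failure mode clean_doi guards against: re.match returns None on a miss, so calling .group() on the result raises AttributeError (hypothetical extract_doi helper):

import re

def extract_doi(text):
    try:
        # On a miss, re.match returns None and None.group(1) raises.
        return re.match(r".*(10\.\d+.+)", text, re.DOTALL).group(1)
    except AttributeError:
        return None

print(extract_doi("doi:10.1234/abcd"))    # 10.1234/abcd
print(extract_doi("no identifier here"))  # None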
def _lookup_issn_from_doi(self, id, url, cache_enabled): # try to get a response from the data provider response = self.http_get(url, cache_enabled=cache_enabled, allow_redirects=True, headers={"Accept": "application/json", "User-Agent": "impactstory.org"}) if response.status_code != 200: self.logger.info(u"%s status_code=%i" % (self.provider_name, response.status_code)) if response.status_code == 404: #not found return {} elif response.status_code == 403: #forbidden return {} elif (response.status_code == 406) or (response.status_code == 500): #this call isn't supported for datacite dois return {} elif ((response.status_code >= 300) and (response.status_code < 400)): #redirect return {} else: self._get_error(response.status_code, response) # extract the aliases try: biblio_dict = self._extract_biblio_issn(response.text, id) except (AttributeError, __HOLE__): biblio_dict = {} return biblio_dict
TypeError
dataset/ETHPy150Open Impactstory/total-impact-core/totalimpact/providers/crossref.py/Crossref._lookup_issn_from_doi
5,791
def _lookup_biblio_from_doi(self, id, url, cache_enabled): # try to get a response from the data provider response = self.http_get(url, cache_enabled=cache_enabled, allow_redirects=True, headers={"Accept": "application/vnd.citationstyles.csl+json", "User-Agent": "impactstory.org"}) if response.status_code != 200: self.logger.info(u"%s status_code=%i" % (self.provider_name, response.status_code)) if response.status_code == 404: #not found return {} elif response.status_code == 403: #forbidden return {} elif ((response.status_code >= 300) and (response.status_code < 400)): #redirect return {} else: self._get_error(response.status_code, response) # extract the aliases try: biblio_dict = self._extract_biblio(response.text, id) except (__HOLE__, TypeError): biblio_dict = {} return biblio_dict
AttributeError
dataset/ETHPy150Open Impactstory/total-impact-core/totalimpact/providers/crossref.py/Crossref._lookup_biblio_from_doi
5,792
def _extract_biblio(self, page, id=None): dict_of_keylists = { 'title' : ['title'], 'year' : ['issued'], 'repository' : ['publisher'], 'journal' : ['container-title'], 'authors_literal' : ['author'] } biblio_dict = provider._extract_from_json(page, dict_of_keylists) if not biblio_dict: return {} try: surname_list = [author["family"] for author in biblio_dict["authors_literal"]] if surname_list: biblio_dict["authors"] = u", ".join(surname_list) del biblio_dict["authors_literal"] except (IndexError, KeyError): try: literal_list = [author["literal"] for author in biblio_dict["authors_literal"]] if literal_list: biblio_dict["authors_literal"] = u"; ".join(literal_list) except (IndexError, KeyError): pass try: if "year" in biblio_dict: if "raw" in biblio_dict["year"]: biblio_dict["year"] = str(biblio_dict["year"]["raw"]) elif "date-parts" in biblio_dict["year"]: biblio_dict["year"] = str(biblio_dict["year"]["date-parts"][0][0]) biblio_dict["year"] = re.sub("\D", "", biblio_dict["year"]) if not biblio_dict["year"]: del biblio_dict["year"] except __HOLE__: logger.info(u"/biblio_print could not parse year {biblio_dict}".format( biblio_dict=biblio_dict)) del biblio_dict["year"] # replace many white spaces and \n with just one space try: biblio_dict["title"] = re.sub(u"\s+", u" ", biblio_dict["title"]) except KeyError: pass return biblio_dict #overriding default # if no doi, try to get doi from biblio # after that, if doi, get url and biblio
IndexError
dataset/ETHPy150Open Impactstory/total-impact-core/totalimpact/providers/crossref.py/Crossref._extract_biblio
5,793
def _lookup_doi_from_biblio(self, biblio, cache_enabled): if not biblio: return [] try: if (biblio["journal"] == ""): # need to have journal or can't look up with current api call logger.info(u"%20s /biblio_print NO DOI because no journal in %s" % ( self.provider_name, biblio)) return [] query_string = (u"|%s|%s|%s|%s|%s|%s||%s|" % ( biblio.get("journal", ""), biblio.get("first_author", biblio.get("authors", "").split(",")[0].strip()), biblio.get("volume", ""), biblio.get("number", ""), biblio.get("first_page", ""), biblio.get("year", ""), "ImpactStory" )) except __HOLE__: logger.info(u"%20s /biblio_print NO DOI because missing needed attribute in %s" % ( self.provider_name, biblio)) return [] # for more info on crossref spec, see # http://ftp.crossref.org/02publishers/25query_spec.html url = "http://doi.crossref.org/servlet/query?pid=totalimpactdev@gmail.com&qdata=%s" % query_string try: logger.debug(u"%20s /biblio_print calling crossref at %s" % (self.provider_name, url)) # doi-lookup call to crossref can take a while, give it a long timeout response = self.http_get(url, timeout=30, cache_enabled=cache_enabled) except ProviderTimeout: raise ProviderTimeout("CrossRef timeout") if response.status_code != 200: raise ProviderServerError("CrossRef status code was not 200") if not biblio["journal"].lower() in response.text.lower(): raise ProviderServerError("CrossRef returned invalid text response") response_lines = response.text.split("\n") split_lines = [line.split("|") for line in response_lines if line] line_keys = [line[-2].strip() for line in split_lines] dois = [line[-1].strip() for line in split_lines] for key, doi in zip(line_keys, dois): if not doi: try: logger.debug(u"%20s /biblio_print NO DOI from %s, %s" %(self.provider_name, biblio, key)) except KeyError: logger.debug(u"%20s /biblio_print NO DOI from %s, %s" %(self.provider_name, "", key)) return doi # overriding default # gets url and biblio from doi
KeyError
dataset/ETHPy150Open Impactstory/total-impact-core/totalimpact/providers/crossref.py/Crossref._lookup_doi_from_biblio
5,794
def _lookup_urls_from_doi(self, doi, provider_url_template=None, cache_enabled=True): self.logger.debug(u"%s getting aliases for %s" % (self.provider_name, doi)) if not provider_url_template: provider_url_template = self.aliases_url_template # make it this way because don't want escaping doi_url = provider_url_template % doi # add url for http://dx.doi.org/ without escaping new_aliases = [("url", doi_url)] # add biblio biblio_dict = self.biblio([("doi", doi)]) if biblio_dict: new_aliases += [("biblio", biblio_dict)] # try to get the redirect as well response = self.http_get(doi_url, cache_enabled=cache_enabled, allow_redirects=True) if response.status_code >= 400: self.logger.info(u"%s http_get status code=%i" % (self.provider_name, response.status_code)) #raise provider.ProviderServerError("doi resolve") else: try: # url the doi resolved to redirected_url = response.url # remove session stuff at the end url_to_store = redirected_url.split(";jsessionid")[0] new_aliases += [("url", url_to_store)] except (TypeError, __HOLE__): pass return new_aliases # overriding because don't need to look up
AttributeError
dataset/ETHPy150Open Impactstory/total-impact-core/totalimpact/providers/crossref.py/Crossref._lookup_urls_from_doi
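Sketch of why the redirect handling catches (TypeError, AttributeError): if the HTTP client yields no usable final URL, .split() is called on a non-string. The values below are hypothetical stand-ins for a bad response.url:

for redirected_url in (None, 12345):  # hypothetical bad response.url values
    try:
        url_to_store = redirected_url.split(";jsessionid")[0]
    except (TypeError, AttributeError):
        url_to_store = None  # the redirect alias is simply skipped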
5,795
def parse(self, parse_until=None):
    if parse_until is None:
        parse_until = []
    nodelist = self.create_nodelist()
    while self.tokens:
        token = self.next_token()
        # We only need to parse var and block tokens.
        if token.token_type == TOKEN_VAR:
            if not token.contents:
                self.empty_variable(token)
            filter_expression = self.compile_filter(token.contents)
            var_node = self.create_variable_node(filter_expression)
            self.extend_nodelist(nodelist, var_node, token)
        elif token.token_type == TOKEN_BLOCK:
            if token.contents in parse_until:
                # put token back on token list so calling code knows why it terminated
                self.prepend_token(token)
                return nodelist
            try:
                command = token.contents.split()[0]
            except __HOLE__:
                self.empty_block_tag(token)
            if (self.allow and command not in self.allow) or (self.disallow and command in self.disallow):
                self.disallowed_tag(command)
            self.enter_command(command, token)
            try:
                compile_func = self.tags[command]
            except KeyError:
                self.invalid_block_tag(token, command, parse_until)
            try:
                compiled_result = compile_func(self, token)
            except TemplateSyntaxError, e:
                if not self.compile_function_error(token, e):
                    raise
            self.extend_nodelist(nodelist, compiled_result, token)
            self.exit_command()
    if parse_until:
        self.unclosed_block_tag(parse_until)
    return nodelist
IndexError
dataset/ETHPy150Open ithinksw/philo/philo/validators.py/TemplateValidationParser.parse
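Why the masked exception is IndexError: str.split() on blank contents returns an empty list, so indexing [0] fails. A standalone sketch of the empty-block-tag case:

token_contents = "   "  # contents of an empty {% %} block tag
try:
    command = token_contents.split()[0]
except IndexError:
    command = None  # the parser calls self.empty_block_tag(token) instead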
5,796
def __call__(self, value): try: self.validate_template(value) except __HOLE__: raise except Exception, e: if hasattr(e, 'source') and isinstance(e, TemplateSyntaxError): origin, (start, end) = e.source template_source = origin.reload() upto = 0 for num, next in enumerate(linebreak_iter(template_source)): if start >= upto and end <= next: raise ValidationError(mark_safe("Template code invalid: \"%s\" (%s:%d).<br />%s" % (escape(template_source[start:end]), origin.loadname, num, e))) upto = next raise ValidationError("Template code invalid. Error was: %s: %s" % (e.__class__.__name__, e))
ValidationError
dataset/ETHPy150Open ithinksw/philo/philo/validators.py/TemplateValidator.__call__
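The masked except-and-raise is the pass-through idiom: genuine ValidationErrors are re-raised untouched so that only unexpected exceptions get wrapped. A stripped-down sketch (requires Django; validate_template stands in for any validation callable):

from django.core.exceptions import ValidationError

def run_validator(value, validate_template):
    try:
        validate_template(value)
    except ValidationError:
        raise  # already the right type; let it propagate unchanged
    except Exception as e:
        # wrap anything else so callers see a uniform error type
        raise ValidationError("Template code invalid. Error was: %s: %s"
                              % (e.__class__.__name__, e))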
5,797
def visualize(self, filename=None, tables_to_show=None):
    """Visualize databases and create an er-diagram

    Args:
        filename(str): filepath for saving the er-diagram
        tables_to_show(list): A list of tables to actually visualize.
            Tables not included in this list will not be visualized,
            but their foreign keys will be visualized if they refer
            to a table in this list
    """
    # Import pygraphviz for plotting the graphs
    try:
        import pygraphviz
    except __HOLE__:
        logger.error('Install pygraphviz for visualizing databases')
        raise

    if filename is None:
        raise DatabaseInputError(
            'Filename must be provided for visualization')

    logger.info('Creating a visualization of the database')
    graph = pygraphviz.AGraph(name='Database', label='Database')

    tables = [r for r in self.relations() if isinstance(r, Table)]
    if tables_to_show is None:
        tables_to_show = [table.full_name for table in tables]

    # Add nodes
    for table in tables:
        if table.full_name in tables_to_show:
            graph.add_node(table.full_name,
                           shape='none',
                           label=self._make_node_label(table))

    # Add edges
    for table in tables:
        for cols, ref_table, ref_cols in table.foreign_key_references():
            if table.full_name in tables_to_show or \
                    ref_table in tables_to_show:
                graph.add_edge(
                    ref_table,
                    table.full_name,
                    tailport=ref_cols[0],
                    headport=cols[0],
                    dir='both',
                    arrowhead='crow',
                    arrowtail='dot',
                )

    # Plot the graph with the dot layout
    graph.layout(prog='dot')
    graph.draw(filename)
ImportError
dataset/ETHPy150Open coursera/dataduct/dataduct/database/database.py/Database.visualize
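The masked exception is the optional-dependency guard: import inside the function, log a helpful message, and re-raise if the package is absent. A minimal sketch of the same pattern:

import logging
logger = logging.getLogger(__name__)

def require_pygraphviz():
    try:
        import pygraphviz
    except ImportError:
        logger.error('Install pygraphviz for visualizing databases')
        raise
    return pygraphviz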
5,798
def get_datetime_now(): """ Returns datetime object with current point in time. In Django 1.4+ it uses Django's django.utils.timezone.now() which returns an aware or naive datetime that represents the current point in time when ``USE_TZ`` in project's settings is True or False respectively. In older versions of Django it uses datetime.datetime.now(). """ try: from django.utils import timezone return timezone.now() # pragma: no cover except __HOLE__: # pragma: no cover return datetime.datetime.now() # Django 1.5 compatibility utilities, providing support for custom User models. # Since get_user_model() causes a circular import if called when app models are # being loaded, the user_model_label should be used when possible, with calls # to get_user_model deferred to execution time
ImportError
dataset/ETHPy150Open clione/django-kanban/src/core/userena/utils.py/get_datetime_now
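Sketch of the version-fallback idiom behind the masked exception: django.utils.timezone only exists from Django 1.4 onward, so the import itself raises ImportError on older versions and the function degrades to a naive datetime:

import datetime

def now():
    try:
        from django.utils import timezone  # Django >= 1.4 only
        return timezone.now()              # aware or naive, per USE_TZ
    except ImportError:
        return datetime.datetime.now()     # naive fallback for old Django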
5,799
def is_date(string): """ Check if input string is date-formatted. :param string: Input date :type string: ``str`` :rtype: ``bool`` """ try: dateutil.parser.parse(string) return True except __HOLE__: return False
ValueError
dataset/ETHPy150Open StackStorm/st2contrib/packs/trello/sensors/list_actions_sensor.py/is_date
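dateutil.parser.parse raises ValueError on strings it cannot interpret as dates, which is exactly what the masked except clause converts into a False return:

import dateutil.parser

dateutil.parser.parse("2016-05-01")     # -> datetime.datetime(2016, 5, 1, 0, 0)
try:
    dateutil.parser.parse("not a date")
except ValueError:
    pass                                # is_date() returns False here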