"""
This reader opens images taken using an Eiger detector at NSLS-II.
It expects a "master file" and a "data file" in the same directory.
It requires h5py and the hdf5-lz4 library from Dectris. The latter
has been packaged for conda and is available on the NSLS-II internal
conda channels and externally from:
conda install -c danielballan hdf5-lz4
"""
import re
import os
import numpy as np
import h5py
from pims import FramesSequence, Frame
class EigerImages(FramesSequence):
pattern = re.compile('(.*)master.*')
def __init__(self, master_filepath):
# The 'master' file points to data in other files.
# Construct a list of those filepaths and check that they exist.
self.master_filepath = master_filepath
m = self.pattern.match(os.path.basename(master_filepath))
if m is None:
raise ValueError("This reader expects filenames containing "
"the word 'master'. If the file was renamed, "
"revert to the original name given by the "
"detector.")
prefix = m.group(1)
with h5py.File(master_filepath) as f:
self.keys = [k for k in f['entry'].keys() if k.startswith('data')]
lengths = [f['entry'][key].shape[0] for key in self.keys]
for k in self.keys:
filename = prefix + k + '.h5'
filepath = os.path.join(os.path.dirname(master_filepath), filename)
if not os.path.isfile(filepath):
raise IOError("Cannot locate expected data file: {0}".format(
filepath))
# Table of contents: each entry is a pair:
# self._toc[5] -> [which file, which element in that file]
self._toc = np.concatenate(
[zip(i*np.ones(length, dtype=int),
np.arange(length, dtype=int))
for i, length in enumerate(lengths)])
def get_frame(self, i):
key_number, elem_number = self._toc[i]
key = self.keys[key_number]
with h5py.File(self.master_filepath) as f:
img = f['entry'][key][elem_number]
return Frame(img, frame_no=i)
def __len__(self):
return len(self._toc)
@property
def frame_shape(self):
return self[0].shape
@property
def pixel_type(self):
return self[0].dtype
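# A minimal usage sketch (not part of the original module; the master-file
# path below is hypothetical):
#     images = EigerImages('/data/scan_01_master.h5')
#     print(len(images), images.frame_shape, images.pixel_type)
#     first = images[0]  # a pims Frame carrying frame_no metadata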
| {
"content_hash": "df338b14b6b20aacdfe865fe92ddda3c",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 79,
"avg_line_length": 36.53846153846154,
"alnum_prop": 0.5970526315789474,
"repo_name": "brunoseivam/chxtools",
"id": "00975413b29cac52f1973a9dfd08f6299284ef32",
"size": "2375",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chxtools/pims_readers/eiger.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "102907"
},
{
"name": "Shell",
"bytes": "38"
}
],
"symlink_target": ""
} |
import Axon
import Axon.Ipc as Ipc
import re
class Sandbox(Axon.Component.component):
"""\
Component Sandbox
Rather like a kind of graphline where components can be added and removed at runtime
by sending commands to the inbox.
("ADD", "importpath:factorycmd", ...)
("DEL", id)
("LINK", introspector-outbox-id, introspector-inbox-id)
("UNLINK", introspector-outbox-id, introspector-inbox-id)
Eventually need to add a way to replace components, e.g. by specifying the id
"""
Inboxes = { "inbox" : "Commands to drive the sandbox",
"control" : "NOT USED",
}
Outboxes = { "outbox" : "NOT USED",
"signal" : "NOT USED",
}
def __init__(self):
super(Sandbox,self).__init__()
self.linkages = {}
def main(self):
yield 1
while 1:
yield 1
self.childrenDone() # clean up any children we've lost!
while self.dataReady("inbox"):
cmd = self.recv("inbox")
if cmd[0] == "ADD":
self.makeComponent(spec=cmd[2],uid=cmd[1])
elif cmd[0] == "DEL":
self.destroyComponent(uid=cmd[1])
elif cmd[0] == "UPDATE_NAME":
if cmd[1] == "NODE":
if self.destroyComponent(uid=cmd[2]):
self.makeComponent(spec=cmd[3],uid=cmd[2])
elif cmd[0] == "LINK":
self.makeLink( cmd[1], cmd[2] )
elif cmd[0] == "UNLINK":
self.unmakeLink( cmd[1], cmd[2] )
elif cmd[0] == "GO":
yield self.go()
self.pause()
def makeComponent(self, spec, uid=None):
"""\
Takes spec of the form:
"importname:classname(arguments)"
and constructs it, eg
"Kamaelia.Util.Console:consoleEchoer()"
"""
match = re.match("^([^:]*):([^(]*)(.*)$", spec)
(modulename, classname, arguments) = match.groups()
module = __import__(modulename, [], [], [classname])
try:
thecomponent = eval("module."+classname+arguments) ### XXX Probably a gaping security hole!!!
except Exception as e:
print "Couldn't instantiate component: ", str(e)
if uid is not None:
thecomponent.id = eval(uid)
thecomponent.name = spec + "_" + str(thecomponent.id)
self.addChildren(thecomponent)
return thecomponent
def destroyComponent(self, uid):
for c in self.childComponents():
if str(c.id) == uid:
c.stop()
self.removeChild(c)
return True
return False
def makeLink(self, src, dst):
# get right way around if back to front
src, dst = eval(src), eval(dst) # XXX SECURITY RISK
print src
if src[1] == "i" and dst[1] == "o":
src, dst = dst, src
sid, sboxtype, sbox = src
did, dboxtype, dbox = dst
if sboxtype == "o" and dboxtype == "i":
passthrough = 0
elif sboxtype == "i" and dboxtype == "i":
passthrough = 1
elif sboxtype == "o" and dboxtype == "o":
passthrough = 2
else:
raise "Unrecognised box types!"
components = self.childComponents()[:]
components.append(self)
source = [c for c in components if c.id == sid]
dest = [c for c in components if c.id == did]
linkage = self.link( (source[0], sbox), (dest[0], dbox), passthrough=passthrough )
self.linkages[ (src,dst) ] = linkage
def unmakeLink(self, src, dst):
# get right way around if back to front
src, dst = eval(src), eval(dst) # XXX SECURITY RISK
print src
if src[1] == "i" and dst[1] == "o":
src, dst = dst, src
linkage = self.linkages.get((src,dst),None)
if linkage:
self.unlink(thelinkage=linkage)
del self.linkages[(src,dst)]
def go(self):
return Ipc.newComponent(*[c for c in self.childComponents()])
def childrenDone(self):
"""\
Unplugs any children that have terminated, and returns true if there are no
running child components left (ie. their microprocesses have finished)
"""
for child in self.childComponents():
if child._isStopped():
self.removeChild(child) # deregisters linkages for us
return 0==len(self.childComponents())
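# A rough sketch of the command protocol expected on "inbox" (the component
# ids here are hypothetical; ids and box specs are passed as strings because
# makeComponent/makeLink eval() them):
#     ("ADD", "1", "Kamaelia.Util.Console:consoleEchoer()")
#     ("LINK", "('2', 'o', 'outbox')", "('1', 'i', 'inbox')")
#     ("UNLINK", "('2', 'o', 'outbox')", "('1', 'i', 'inbox')")
#     ("DEL", "1")
#     ("GO",)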
| {
"content_hash": "d7039a6c80b83203b4db91b7325aba7f",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 107,
"avg_line_length": 33.645833333333336,
"alnum_prop": 0.5021671826625387,
"repo_name": "sparkslabs/kamaelia",
"id": "542bf6357a2d6653f5c708ffde76247e3e2bf153",
"size": "5670",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "Sketches/MH/Editor2/Sandbox.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3814"
},
{
"name": "C",
"bytes": "212854"
},
{
"name": "C++",
"bytes": "327546"
},
{
"name": "CSS",
"bytes": "114434"
},
{
"name": "ChucK",
"bytes": "422"
},
{
"name": "HTML",
"bytes": "1288960"
},
{
"name": "Java",
"bytes": "31832"
},
{
"name": "JavaScript",
"bytes": "829491"
},
{
"name": "M4",
"bytes": "12224"
},
{
"name": "Makefile",
"bytes": "150947"
},
{
"name": "NSIS",
"bytes": "18867"
},
{
"name": "OCaml",
"bytes": "643"
},
{
"name": "PHP",
"bytes": "49059"
},
{
"name": "Perl",
"bytes": "504"
},
{
"name": "Processing",
"bytes": "2885"
},
{
"name": "Python",
"bytes": "18900785"
},
{
"name": "Ruby",
"bytes": "4165"
},
{
"name": "Shell",
"bytes": "707588"
}
],
"symlink_target": ""
} |
"""
Views for blob-migrator tool.
"""
import os
import json
import cloudstorage
import jinja2
from google.appengine.api import app_identity
from google.appengine.api import modules
from google.appengine.api import users
import webapp2
from app import config
from app import migrator
from app import progress
from app import scrubber
import appengine_config
JINJA_ENVIRONMENT = jinja2.Environment(
loader=jinja2.FileSystemLoader(
os.path.join(os.path.dirname(__file__), '..', 'templates')
),
extensions=['jinja2.ext.autoescape'],
autoescape=True,
)
class UserView(webapp2.RequestHandler):
"""A user-facing view."""
def render_response(self, template_name, **context):
self.response.headers['Content-Type'] = 'text/html'
template = JINJA_ENVIRONMENT.get_template(template_name)
context['user'] = users.get_current_user()
context['application_id'] = app_identity.get_application_id()
context['module_id'] = modules.get_current_module_name()
context['version_id'] = modules.get_current_version_name()
context['IS_DEVSERVER'] = appengine_config.IS_DEVSERVER
self.response.write(template.render(**context))
class JsonHandler(webapp2.RequestHandler):
"""A JSON-emitting handler."""
def emit_json(self, data):
self.response.headers['Content-Type'] = 'application/json'
self.response.write(json.dumps(data))
class IndexView(UserView):
"""Main migration tool entry point."""
def _get_base_context(self):
"""Returns context common to GET and POST."""
context = {
'service_account': (app_identity.get_service_account_name() or
'[unknown service account on dev_appserver]'),
'mapping_kind': config.config.MAPPING_DATASTORE_KIND_NAME,
'config': config.config,
'config_keys': config.CONFIGURATION_KEYS_FOR_INDEX,
}
return context
def get(self):
"""GET"""
context = self._get_base_context()
context['bucket'] = app_identity.get_default_gcs_bucket_name() or ''
self.render_response('index.html', **context)
def post(self):
"""
POST
'bucket' is required.
"""
context = self._get_base_context()
bucket = self.request.POST.get('bucket', '').strip()
context['bucket'] = bucket
errors = []
if not bucket:
errors.append('Bucket name is required.')
if bucket:
try:
cloudstorage.validate_bucket_name(bucket)
except ValueError as e:
bucket = None
errors.append('Invalid bucket name. %s' % e.message)
# try to write a small file
if not errors:
try:
migrator.write_test_file(bucket)
except Exception as e:
errors.append('Could not write a file to <code>%s</code>. '
'Ensure that <code>%s</code> '
'has Writer access. Message: <code>%s</code>' % (
bucket,
context['service_account'],
e.message))
if errors:
context['errors'] = errors
self.render_response('index.html', **context)
return
pipeline = migrator.MigrateAllBlobsPipeline(bucket)
pipeline.start(queue_name=config.config.QUEUE_NAME)
context['root_pipeline_id'] = pipeline.root_pipeline_id
self.render_response('started.html', **context)
class DeleteMappingEntitiesView(UserView):
"""Forms to delete the Blobstore->GCS mapping entities from Datastore.
DO NOT USE THIS AFTER DELETING SOURCE BLOBS!!!
"""
def _get_base_context(self):
"""Generates a context for both GET and POST."""
context = {
'mapping_kind': config.config.MAPPING_DATASTORE_KIND_NAME,
}
return context
def get(self):
"""GET"""
context = self._get_base_context()
self.render_response('delete-mappings.html', **context)
def post(self):
"""POST"""
context = self._get_base_context()
confirm = 'confirm' in self.request.POST
errors = []
if not confirm:
errors.append('You must select the checkbox if you want to delete the ' +
'Blobstore to Cloud Storage mapping entities.')
if errors:
context['errors'] = errors
else:
pipeline = scrubber.DeleteBlobstoreToGcsFilenameMappings()
pipeline.start(queue_name=config.config.QUEUE_NAME)
context['pipeline_id'] = pipeline.root_pipeline_id
self.render_response('delete-mappings.html', **context)
class DeleteSourceBlobsView(UserView):
"""Forms to delete the source blobs.
VERIFY THAT ALL BLOBS CORRECTLY MIGRATED BEFORE USING THIS!!!
"""
def _get_base_context(self):
"""Generates a context for both GET and POST."""
context = {
'mapping_kind': config.config.MAPPING_DATASTORE_KIND_NAME,
}
return context
def get(self):
"""GET"""
context = self._get_base_context()
self.render_response('delete-blobs.html', **context)
def post(self):
"""POST"""
context = self._get_base_context()
confirm = 'confirm' in self.request.POST
errors = []
if not confirm:
errors.append('You must select the checkbox if you want to delete the ' +
'source blobs.')
if errors:
context['errors'] = errors
else:
pipeline = scrubber.DeleteBlobstoreBlobs()
pipeline.start(queue_name=config.config.QUEUE_NAME)
context['pipeline_id'] = pipeline.root_pipeline_id
self.render_response('delete-blobs.html', **context)
class StatusInfoHandler(JsonHandler):
def get(self):
pipeline_id = self.request.GET['pipelineId'].strip()
status = progress.get_status(pipeline_id)
self.emit_json(status)
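# A wiring sketch for these handlers (assumed, not part of this module; the
# URL paths are hypothetical):
#     app = webapp2.WSGIApplication([
#         ('/', IndexView),
#         ('/delete-mappings', DeleteMappingEntitiesView),
#         ('/delete-blobs', DeleteSourceBlobsView),
#         ('/status-info', StatusInfoHandler),
#     ])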
| {
"content_hash": "f52f71b2027cb2dff39a11211b57d4f3",
"timestamp": "",
"source": "github",
"line_count": 189,
"max_line_length": 79,
"avg_line_length": 29.74074074074074,
"alnum_prop": 0.6514855008005693,
"repo_name": "squee1945/appengine-blobstoremigrator-python",
"id": "9106ac969a03f118696bf642ed599dcd344f7526",
"size": "6218",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/app/views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "581"
},
{
"name": "HTML",
"bytes": "18827"
},
{
"name": "Python",
"bytes": "142696"
},
{
"name": "Shell",
"bytes": "1104"
}
],
"symlink_target": ""
} |
"""
Integrat HTTP USSD API.
"""
from vumi.transports.integrat.integrat import IntegratTransport
__all__ = ['IntegratTransport']
| {
"content_hash": "7f1913b7d573c53fa812283d775651ab",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 63,
"avg_line_length": 16.375,
"alnum_prop": 0.732824427480916,
"repo_name": "vishwaprakashmishra/xmatrix",
"id": "f1f6d7514c5b77eb26f8ae124ad54cdd6273e796",
"size": "131",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "vumi/transports/integrat/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Erlang",
"bytes": "29735"
},
{
"name": "JavaScript",
"bytes": "5556"
},
{
"name": "Puppet",
"bytes": "2557"
},
{
"name": "Python",
"bytes": "2968329"
},
{
"name": "Shell",
"bytes": "3435"
}
],
"symlink_target": ""
} |
__author__ = 'nuok'
from location import Location
class City(Location):
def __init__(self, config):
Location.__init__(self, config)
self.inventory_price = config['inventory_price']
self.extra_price = config['extra_price']
def sell(self, player):
player_inventory = player.get_inventory()
extra = 0
if len(player_inventory.keys()) > 2:
extra = self.extra_price
for inventory in player_inventory:
player.add_money(self.get_total_price(inventory, player_inventory[inventory]))
player.add_money(extra)
player.clear_inventory()
def get_total_price(self, inventory, quantity):
return self.inventory_price[inventory] * quantity
def get_action(self):
return {'sell': self.sell}
if __name__ == '__main__':
main_city = {'name': 'main_city',
'neighbor': ['dun_1', 'dun_2'],
'inventory_price': {'apple': 10, 'banana': 15, 'orange': 20},
'extra_price': 30}
city = City(main_city)
print city.name
print city.get_movable_location()
import json
json.dump(main_city, open('main_city.config', 'w')) | {
"content_hash": "99d788ddd5231810eceb684bbab648da",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 90,
"avg_line_length": 32.270270270270274,
"alnum_prop": 0.5862646566164154,
"repo_name": "chaiso-krit/reinforcement_learning",
"id": "3bf6bb3a4831e402a7e738ea679c8a753cd31b74",
"size": "1194",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "the_dungeon/city.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "12716"
}
],
"symlink_target": ""
} |
"""
idx2numpy package provides a tool for converting files from IDX format to
numpy.ndarray. You may encounter files in IDX format, e.g. when reading
to read the MNIST database of handwritten digits provided by Yann LeCun at
http://yann.lecun.com/exdb/mnist/
The description of IDX format also can be found on this page.
"""
from __future__ import absolute_import
from .converters import convert_from_string
from .converters import convert_from_file
from .converters import convert_to_string
from .converters import convert_to_file
from .FormatError import FormatError
from .version import __version__
__all__ = ['convert_from_string', 'convert_from_file', 'FormatError']
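# A minimal usage sketch (assumes a locally downloaded, already-unpacked
# MNIST file):
#     import idx2numpy
#     images = idx2numpy.convert_from_file('train-images-idx3-ubyte')
#     # images is a numpy.ndarray of shape (60000, 28, 28)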
| {
"content_hash": "9450f91df8a654d1ba925e1f0e0d2d4e",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 74,
"avg_line_length": 35.578947368421055,
"alnum_prop": 0.7751479289940828,
"repo_name": "ivanyu/idx2numpy",
"id": "9a9c38871d60de966b73f47339ca108d651f8be2",
"size": "701",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "idx2numpy/__init__.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "298"
},
{
"name": "Python",
"bytes": "19182"
}
],
"symlink_target": ""
} |
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
orm['samples.Cohort'].objects.filter(autocreated=True, batch__isnull=False).delete()
def backwards(self, orm):
"Write your backwards methods here."
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'avocado.datacontext': {
'Meta': {'object_name': 'DataContext'},
'archived': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'composite': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'count': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'_count'"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'json': ('jsonfield.fields.JSONField', [], {'default': '{}', 'null': 'True', 'blank': 'True'}),
'keywords': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'session': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'datacontext+'", 'null': 'True', 'to': "orm['auth.User']"})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'genome.chromosome': {
'Meta': {'ordering': "['order']", 'object_name': 'Chromosome', 'db_table': "'chromosome'"},
'code': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '2', 'db_index': 'True'})
},
'genome.genome': {
'Meta': {'object_name': 'Genome', 'db_table': "'genome'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'released': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'genome.genotype': {
'Meta': {'object_name': 'Genotype', 'db_table': "'genotype'"},
'code': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '3'})
},
'literature.pubmed': {
'Meta': {'object_name': 'PubMed', 'db_table': "'pubmed'"},
'pmid': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'})
},
'phenotypes.phenotype': {
'Meta': {'object_name': 'Phenotype', 'db_table': "'phenotype'"},
'articles': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['literature.PubMed']", 'symmetrical': 'False'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'hpo_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'term': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '1000'})
},
'samples.batch': {
'Meta': {'ordering': "('project', 'label')", 'unique_together': "(('project', 'name'),)", 'object_name': 'Batch', 'db_table': "'batch'"},
'count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'investigator': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'batches'", 'to': "orm['samples.Project']"}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'samples.cohort': {
'Meta': {'ordering': "('-order', 'name')", 'object_name': 'Cohort', 'db_table': "'cohort'"},
'allele_freq_modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'autocreated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'batch': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['samples.Batch']", 'null': 'True', 'blank': 'True'}),
'context': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['avocado.DataContext']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['samples.Project']", 'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'samples': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['samples.Sample']", 'through': "orm['samples.CohortSample']", 'symmetrical': 'False'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'samples.cohortsample': {
'Meta': {'unique_together': "(('object_set', 'set_object'),)", 'object_name': 'CohortSample', 'db_table': "'cohort_sample'"},
'added': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_set': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['samples.Cohort']", 'db_column': "'cohort_id'"}),
'removed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'set_object': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['samples.Sample']", 'db_column': "'sample_id'"})
},
'samples.cohortvariant': {
'Meta': {'unique_together': "(('variant', 'cohort'),)", 'object_name': 'CohortVariant', 'db_table': "'cohort_variant'"},
'af': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}),
'cohort': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['samples.Cohort']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'variant': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'cohort_details'", 'to': "orm['variants.Variant']"})
},
'samples.person': {
'Meta': {'object_name': 'Person', 'db_table': "'person'"},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'mrn': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'proband': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'relations': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['samples.Person']", 'through': "orm['samples.Relation']", 'symmetrical': 'False'}),
'sex': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'})
},
'samples.project': {
'Meta': {'unique_together': "(('name',),)", 'object_name': 'Project', 'db_table': "'project'"},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'samples.relation': {
'Meta': {'ordering': "('person', '-generation')", 'object_name': 'Relation', 'db_table': "'relation'"},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'generation': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'family'", 'to': "orm['samples.Person']"}),
'relative': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'relative_of'", 'to': "orm['samples.Person']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '20'})
},
'samples.result': {
'Meta': {'unique_together': "(('sample', 'variant'),)", 'object_name': 'Result', 'db_table': "'sample_result'"},
'base_counts': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'baseq_rank_sum': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'coverage_alt': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'coverage_ref': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'downsampling': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'fisher_strand': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'genotype': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genome.Genotype']", 'null': 'True', 'blank': 'True'}),
'genotype_quality': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'haplotype_score': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'homopolymer_run': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_dbsnp': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'mq': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'mq0': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'mq_rank_sum': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'phred_scaled_likelihood': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'quality': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'quality_by_depth': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'raw_read_depth': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'read_depth': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'read_pos_rank_sum': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'sample': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'results'", 'to': "orm['samples.Sample']"}),
'spanning_deletions': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'strand_bias': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'variant': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['variants.Variant']"})
},
'samples.sample': {
'Meta': {'ordering': "('project', 'batch', 'label')", 'unique_together': "(('batch', 'name', 'version'),)", 'object_name': 'Sample', 'db_table': "'sample'"},
'batch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'samples'", 'to': "orm['samples.Batch']"}),
'bio_sample': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'md5': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'samples'", 'null': 'True', 'to': "orm['samples.Person']"}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'samples'", 'to': "orm['samples.Project']"}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'version': ('django.db.models.fields.IntegerField', [], {})
},
'samples.samplerun': {
'Meta': {'object_name': 'SampleRun', 'db_table': "'sample_run'"},
'completed': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'genome': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genome.Genome']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'sample': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['samples.Sample']"})
},
'variants.variant': {
'Meta': {'unique_together': "(('chr', 'pos', 'ref', 'alt'),)", 'object_name': 'Variant', 'db_table': "'variant'"},
'alt': ('django.db.models.fields.TextField', [], {'db_index': 'True'}),
'articles': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['literature.PubMed']", 'db_table': "'variant_pubmed'", 'symmetrical': 'False'}),
'chr': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genome.Chromosome']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'liftover': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'md5': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'phenotypes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['phenotypes.Phenotype']", 'through': "orm['variants.VariantPhenotype']", 'symmetrical': 'False'}),
'pos': ('django.db.models.fields.IntegerField', [], {}),
'ref': ('django.db.models.fields.TextField', [], {'db_index': 'True'}),
'rsid': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['variants.VariantType']", 'null': 'True'})
},
'variants.variantphenotype': {
'Meta': {'object_name': 'VariantPhenotype', 'db_table': "'variant_phenotype'"},
'hgmd_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'phenotype': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['phenotypes.Phenotype']"}),
'variant': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'variant_phenotypes'", 'to': "orm['variants.Variant']"})
},
'variants.varianttype': {
'Meta': {'ordering': "['order']", 'object_name': 'VariantType', 'db_table': "'variant_type'"},
'code': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '20'})
}
}
complete_apps = ['samples']
symmetrical = True
| {
"content_hash": "842ede7f3ff82263fdda618fdbc95447",
"timestamp": "",
"source": "github",
"line_count": 271,
"max_line_length": 192,
"avg_line_length": 86.05166051660517,
"alnum_prop": 0.5438679245283019,
"repo_name": "chop-dbhi/varify-data-warehouse",
"id": "e5c457a1438df79cd84354e742d32674686998a7",
"size": "23344",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vdw/samples/migrations/0026_delete_autocreated_batch_cohorts.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Puppet",
"bytes": "14864"
},
{
"name": "Python",
"bytes": "1796480"
},
{
"name": "Shell",
"bytes": "37"
}
],
"symlink_target": ""
} |
import os
# import sys
# sys.path.append("..")
from source.mySpider import XJTUSpider
if __name__ == '__main__':
mySpider = XJTUSpider('ssfw')
if mySpider.login(username=os.environ.get('username'), password=os.environ.get('password')) == 0:
mySpider.schedule('20162')
mySpider.logout()
| {
"content_hash": "b59a53aa86daabba281de4c1809fd974",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 101,
"avg_line_length": 31.1,
"alnum_prop": 0.6527331189710611,
"repo_name": "Macsnow14/XJTU-API",
"id": "dabe6c2da58224013bfe34eca16ff6cd4dfac056",
"size": "459",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "18275"
}
],
"symlink_target": ""
} |
"""mysite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
from mysite import views as mysite_views
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
url(r'^$', mysite_views.login_redirect, name = 'login_redirect'),
url(r'^jet/', include('jet.urls', 'jet')), #jet app
url(r'^jet/dashboard/', include('jet.dashboard.urls', 'jet-dashboard')), # Django JET dashboard URLS
url(r'^admin/', admin.site.urls),
url(r'^polls/', include('polls.urls')),
url(r'^froala_editor/', include('froala_editor.urls')),
]+ static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| {
"content_hash": "69a4346e738fee682f5150552fbb8a75",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 105,
"avg_line_length": 43.96666666666667,
"alnum_prop": 0.6838514025777104,
"repo_name": "dinhkute/Incisive-AIESEC",
"id": "12c2fe20b17e100ea5bb61619e9c0779be67cac2",
"size": "1319",
"binary": false,
"copies": "1",
"ref": "refs/heads/development",
"path": "webroot/Pj/mysite/mysite/mysite/urls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "13330"
},
{
"name": "HTML",
"bytes": "126457"
},
{
"name": "JavaScript",
"bytes": "80058"
},
{
"name": "Python",
"bytes": "137221"
}
],
"symlink_target": ""
} |
from flask import url_for
from psi.app import const
from psi.app.utils import db_util
from tests import fixture
from tests.base_test_case import BaseTestCase
from tests.object_faker import object_faker
from tests.views.organization.base_organization_test import BaseOrganizationTestCase
class TestEditOrganization(BaseOrganizationTestCase):
def test_edit(self):
from psi.app.models import EnumValues
type_id = EnumValues.get(const.DIRECT_SELLING_STORE_ORG_TYPE_KEY).id
with self.test_client:
fixture.login_as_admin(self.test_client)
desc = object_faker.faker.text(max_nb_chars=20)
name = object_faker.faker.name()
self.assertPageRendered(
endpoint=self.edit_endpoint(view='organization'),
method=self.test_client.post,
expect_contents=[name, desc],
data={
"type": type_id,
"name": name,
"description": desc,
"parent": u'__None'
})
self.create_organization(type_id, parent_id=1)
self.create_organization(type_id, parent_id=2)
self.create_organization(type_id, parent_id=3)
desc = object_faker.faker.text(max_nb_chars=20)
name = object_faker.faker.name()
self.assertPageRendered(
endpoint=url_for(
'organization.edit_view',
id=4,
url=url_for('organization.index_view')),
method=self.test_client.post,
expect_contents=[name, desc],
data={
"type": type_id,
"name": name,
"description": desc,
"parent": 1
})
| {
"content_hash": "d27e484945fc6c2b0f4476fad749030a",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 84,
"avg_line_length": 36.05882352941177,
"alnum_prop": 0.5492115280043501,
"repo_name": "betterlife/psi",
"id": "f9d0d66e14302d571b0d57df96e0068923722fac",
"size": "1854",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/views/organization/edit_organization_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "14410"
},
{
"name": "HTML",
"bytes": "52928"
},
{
"name": "JavaScript",
"bytes": "493605"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "528554"
}
],
"symlink_target": ""
} |
import pytest
from pyconll import load_from_string, load_from_file, iter_from_string, iter_from_file
from tests.util import fixture_location
from tests.unit.util import assert_token_members
def test_load_from_string():
"""
Test that a CoNLL file can properly be loaded from a string.
"""
with open(fixture_location('basic.conll'), encoding='utf-8') as f:
contents = f.read()
c = load_from_string(contents)
sent = c[1]
assert len(c) == 4
assert len(sent) == 14
assert sent['10'].form == 'donc'
def test_load_from_file():
"""
Test that a CoNLL file can properly be loaded from a filename.
"""
c = load_from_file(fixture_location('basic.conll'))
sent = c[1]
assert len(c) == 4
assert len(sent) == 14
assert sent['10'].form == 'donc'
def test_load_from_file_and_string_equivalence():
"""
Test that the Conll object created from a string and file is the same if
the underlying source is the same.
"""
with open(fixture_location('long.conll'), encoding='utf-8') as f:
contents = f.read()
str_c = load_from_string(contents)
file_c = load_from_file(fixture_location('long.conll'))
assert len(str_c) == len(file_c)
for i in range(len(str_c)):
assert str_c[i].id == file_c[i].id
assert str_c[i].text == file_c[i].text
print(str_c[i].conll())
print(file_c[i].conll())
for str_token in str_c[i]:
file_token = file_c[i][str_token.id]
assert_token_members(str_token, file_token.id, file_token.form,
file_token.lemma, file_token.upos,
file_token.xpos, file_token.feats,
file_token.head, file_token.deprel,
file_token.deps, file_token.misc)
def test_iter_from_string():
"""
Test that CoNLL files in string form can be iterated over without loading
everything into memory.
"""
with open(fixture_location('basic.conll'), encoding='utf-8') as f:
contents = f.read()
expected_ids = ['fr-ud-dev_0000{}'.format(i) for i in range(1, 5)]
actual_ids = [sent.id for sent in iter_from_string(contents)]
assert expected_ids == actual_ids
def test_iter_from_file():
"""
Test that CoNLL files can be iterated over lazily, given the filename,
without loading everything into memory.
"""
expected_ids = ['fr-ud-dev_0000{}'.format(i) for i in range(1, 5)]
actual_ids = [
sent.id for sent in iter_from_file(fixture_location('basic.conll'))
]
assert expected_ids == actual_ids
| {
"content_hash": "eb21fbdef8ed872979b3e5ce74ccbb5b",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 86,
"avg_line_length": 30.904761904761905,
"alnum_prop": 0.599768875192604,
"repo_name": "pyconll/pyconll",
"id": "29c6d39dd18483f9f96673bd3c5fa878d225b353",
"size": "2596",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_load.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1449"
},
{
"name": "Python",
"bytes": "178508"
},
{
"name": "Shell",
"bytes": "465"
}
],
"symlink_target": ""
} |
from automat import MethodicalMachine
class Door(object):
def unlock(self):
print("Opening the door so you can get your food.")
def lock(self):
print("Locking the door so you can't steal the food.")
class Light(object):
def on(self):
print("Need some food over here.")
def off(self):
print("We're good on food for now.")
class FoodSlot(object):
"""
Automats were a popular kind of business in the 1950s and 60s; a sort of
restaurant-sized vending machine that served cooked food out of a
coin-operated dispenser.
This class represents the logic associated with a single food slot.
"""
machine = MethodicalMachine()
def __init__(self, door, light):
self._door = door
self._light = light
self.start()
@machine.state(initial=True)
def initial(self):
"""
The initial state when we are constructed.
Note that applications never see this state, because the constructor
provides an input to transition out of it immediately.
"""
@machine.state()
def empty(self):
"""
The machine is empty (and the light asking for food is on).
"""
@machine.input()
def start(self):
"""
A private input, for transitioning from the initial blank state to
'empty', making sure the door and light are properly configured.
"""
@machine.state()
def ready(self):
"""
We've got some food and we're ready to serve it.
"""
@machine.state()
def serving(self):
"""
The door is open, we're serving food.
"""
@machine.input()
def coin(self):
"""
A coin (of the appropriate denomination) was inserted.
"""
@machine.input()
def food(self):
"""
Food was prepared and inserted into the back of the machine.
"""
@machine.output()
def turnOnFoodLight(self):
"""
Turn on the 'we need food' light.
"""
self._light.on()
@machine.output()
def turnOffFoodLight(self):
"""
Turn off the 'we need food' light.
"""
self._light.off()
@machine.output()
def lockDoor(self):
"""
Lock the door, we don't need food.
"""
self._door.lock()
@machine.output()
def unlockDoor(self):
"""
Unlock the door so the food can be served.
"""
self._door.unlock()
@machine.input()
def closeDoor(self):
"""
The door was closed.
"""
initial.upon(start, enter=empty, outputs=[lockDoor, turnOnFoodLight])
empty.upon(food, enter=ready, outputs=[turnOffFoodLight])
ready.upon(coin, enter=serving, outputs=[unlockDoor])
serving.upon(closeDoor, enter=empty, outputs=[lockDoor,
turnOnFoodLight])
slot = FoodSlot(Door(), Light())
if __name__ == '__main__':
import sys
# sys.stdout.writelines(FoodSlot.machine.asDigraph())
slot = FoodSlot(Door(), Light())
# raw_input("Hit enter to make some food and put it in the slot: ")
slot.food()
# raw_input("Hit enter to insert a coin: ")
slot.coin()
# raw_input("Hit enter to retrieve the food and close the door: ")
slot.closeDoor()
# raw_input("Hit enter to make some more food: ")
slot.food()
| {
"content_hash": "b7b92fcba3de65e49cd4478bbf6990b3",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 76,
"avg_line_length": 25.544776119402986,
"alnum_prop": 0.5761028337715455,
"repo_name": "dingzhi86/zim",
"id": "1ffc1e2c0503486f3b8cfc0e5374887d113a8a34",
"size": "3423",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "zim/main.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "9982"
},
{
"name": "Shell",
"bytes": "449"
}
],
"symlink_target": ""
} |
import json
import optparse
import os
import polib
import re
import string
import sys
parser = optparse.OptionParser(usage="usage: %prog [options] pofile...")
parser.add_option("--callback", default="_.setTranslation", dest="callback", help="callback function to call with data")
parser.add_option("--quiet", action="store_false", default=True, dest="verbose", help="don't print status messages to stdout")
(options, args) = parser.parse_args()
if args == None or len(args) == 0:
print("ERROR: you must specify at least one po file to translate");
sys.exit(1)
paramFix = re.compile("(\\(([0-9])\\))")
for srcfile in args:
destfile = os.path.splitext(srcfile)[0] + ".js"
if options.verbose:
print("INFO: converting %s to %s" % (srcfile, destfile))
xlate_map = {}
po = polib.pofile(srcfile, autodetect_encoding=False, encoding="utf-8", wrapwidth=-1)
for entry in po:
if entry.obsolete or entry.msgstr == '':
continue
xlate_map[entry.msgid] = entry.msgstr;
dest = open(destfile, "w")
dest.write('i18n = ')
encoder = json.JSONEncoder()
for part in encoder.iterencode(xlate_map):
if part.startswith('"function('):
dest.write(part[1:-1]);
else:
dest.write(part);
dest.write(";\n")
dest.close()
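# Example invocation (a sketch; the .po filename is hypothetical):
#     python po2js.py --quiet messages.po
# which writes the translations from messages.po into messages.js as an
# "i18n = {...};" assignment.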
| {
"content_hash": "1efead2c2d10924b2d88db7a43159d7c",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 126,
"avg_line_length": 25.6875,
"alnum_prop": 0.6853203568532036,
"repo_name": "ToureNPlaner/tourenplaner-web",
"id": "9cb477852e933518dd329a94b4d8f6c336558c90",
"size": "1277",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "js/lang/po2js.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "187509"
},
{
"name": "JavaScript",
"bytes": "1244883"
},
{
"name": "Python",
"bytes": "1277"
},
{
"name": "Shell",
"bytes": "554"
}
],
"symlink_target": ""
} |
from ajenti.api import *
from ajenti.plugins import *
info = PluginInfo(
title='NGINX',
icon='globe',
dependencies=[
PluginDependency('webserver_common'),
BinaryDependency('nginx'),
],
)
def init():
import main
| {
"content_hash": "09648ad516d030489419673c3b8de95b",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 45,
"avg_line_length": 15.6875,
"alnum_prop": 0.6215139442231076,
"repo_name": "lupyuen/RaspberryPiImage",
"id": "91779a597a80ccbe35629ab6c489a8d0fa7c21ea",
"size": "251",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "usr/share/pyshared/ajenti/plugins/nginx/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Arduino",
"bytes": "82308"
},
{
"name": "C",
"bytes": "3197439"
},
{
"name": "C#",
"bytes": "33056"
},
{
"name": "C++",
"bytes": "1020255"
},
{
"name": "CSS",
"bytes": "208338"
},
{
"name": "CoffeeScript",
"bytes": "87200"
},
{
"name": "Eagle",
"bytes": "1632170"
},
{
"name": "Go",
"bytes": "3646"
},
{
"name": "Groff",
"bytes": "286691"
},
{
"name": "HTML",
"bytes": "41527"
},
{
"name": "JavaScript",
"bytes": "403603"
},
{
"name": "Makefile",
"bytes": "33808"
},
{
"name": "Objective-C",
"bytes": "69457"
},
{
"name": "Perl",
"bytes": "96047"
},
{
"name": "Processing",
"bytes": "1304"
},
{
"name": "Python",
"bytes": "13358098"
},
{
"name": "Shell",
"bytes": "68795"
},
{
"name": "TeX",
"bytes": "4317"
}
],
"symlink_target": ""
} |
"""Add activity tables
Revision ID: 19f590834366
Revises: 43cda5e14cf0
Create Date: 2012-11-14 23:31:56.202053
"""
# revision identifiers, used by Alembic.
revision = '19f590834366'
down_revision = '43cda5e14cf0'
from alembic import op
import sqlalchemy as db
def upgrade():
op.create_table('activities',
db.Column('id', db.Integer, primary_key=True),
db.Column('user_id', db.Integer, db.ForeignKey('users.id')),
db.Column('title', db.String(255)),
db.Column('content', db.Text),
db.Column('slug', db.String(255)),
db.Column('start_time', db.DateTime),
db.Column('end_time', db.DateTime),
db.Column('address', db.String(255)),
db.Column('longitude', db.Numeric(10, 7)),
db.Column('latitude', db.Numeric(10, 7)),
db.Column('created_time', db.DateTime),
db.Column('modified_time', db.DateTime))
op.create_table('activity_users',
db.Column('activity_id', db.Integer, db.ForeignKey('activities.id'), primary_key=True),
db.Column('user_id', db.Integer, db.ForeignKey('users.id'), primary_key=True))
op.create_table('resources',
db.Column('id', db.Integer, primary_key=True),
db.Column('cser_id', db.Integer, db.ForeignKey('users.id')),
db.Column('filetype', db.String(50)),
db.Column('url', db.String(255)),
db.Column('created_time', db.DateTime),
db.Column('modified_time', db.DateTime))
op.create_table('activity_resources',
db.Column('activity_id', db.Integer, db.ForeignKey('activities.id'), primary_key=True),
db.Column('resource_id', db.Integer, db.ForeignKey('resources.id'), primary_key=True))
op.create_table('activity_comments',
db.Column('id', db.Integer, primary_key=True),
db.Column('author_name', db.String(50)),
db.Column('author_email', db.String(255)),
db.Column('author_site', db.String(255)),
db.Column('content', db.Text, nullable=False),
db.Column('created_time', db.DateTime),
db.Column('modified_time', db.DateTime),
db.Column('parent_id', db.Integer, db.ForeignKey('activity_comments.id')),
db.Column('user_id', db.Integer, db.ForeignKey('users.id')))
op.create_table('topics',
db.Column('id', db.Integer, primary_key=True),
db.Column('name', db.String(255)),
db.Column('inro', db.Text),
db.Column('rate_count', db.Integer, default=0),
db.Column('user_id', db.Integer, db.ForeignKey('users.id'), nullable=False))
op.create_table('topic_resources',
db.Column('topic_id', db.Integer, db.ForeignKey('topics.id'), primary_key=True),
db.Column('resource_id', db.Integer, db.ForeignKey('resources.id'), primary_key=True))
op.create_table('topic_users',
db.Column('topic_id', db.Integer, db.ForeignKey('topics.id'), primary_key=True),
db.Column('user_id', db.Integer, db.ForeignKey('users.id'), primary_key=True))
def downgrade():
op.drop_table('topic_users')
op.drop_table('topic_resources')
op.drop_table('topics')
op.drop_table('activity_comments')
op.drop_table('activity_resources')
op.drop_table('resources')
op.drop_table('activity_users')
op.drop_table('activities')
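# To apply or revert this migration with Alembic (a sketch; assumes the
# project's alembic configuration is in place):
#     alembic upgrade 19f590834366
#     alembic downgrade 43cda5e14cf0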
| {
"content_hash": "4795d3983cfa4d7a9b93a77f1d403b87",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 95,
"avg_line_length": 40.0609756097561,
"alnum_prop": 0.634703196347032,
"repo_name": "chenmingd/ScriptFan.com",
"id": "013bbc325df37f57f6df4410d882d16ca0d95e1d",
"size": "3285",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "website/migrate/versions/19f590834366_add_activity_tables.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1143"
},
{
"name": "HTML",
"bytes": "16199"
},
{
"name": "JavaScript",
"bytes": "6997"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "34554"
}
],
"symlink_target": ""
} |
from w1thermsensor import W1ThermSensor
def get_temperature_celsius():
sensor = W1ThermSensor()
return sensor.get_temperature() | {
"content_hash": "155f173baac67f499f4998e9734c1090",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 39,
"avg_line_length": 27.2,
"alnum_prop": 0.7720588235294118,
"repo_name": "Andruschenko/lora-remote-server",
"id": "9efd55fc9def0f2c375154f9d58f414499cf7b8a",
"size": "136",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/sensor.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2106"
}
],
"symlink_target": ""
} |
def sqlQuery_StyleWithInventory():
import sqlalchemy, sys, os
orcl_engine = sqlalchemy.create_engine('oracle+cx_oracle://prod_team_ro:9thfl00r@borac101-vip.l3.bluefly.com:1521/bfyprd11')
connection = orcl_engine.connect()
query_marketplace_inprog ="""select
distinct POMGR.PRODUCT_COLOR.id as COLORSTYLE,
POMGR.INVENTORY.CURR_ON_HAND
FROM
POMGR.PRODUCT_COLOR
INNER JOIN POMGR.SKU
ON
POMGR.SKU.PRODUCT_COLOR_ID = POMGR.PRODUCT_COLOR.ID
LEFT JOIN POMGR.INVENTORY
ON
POMGR.SKU.PRODUCT_COLOR_ID = POMGR.INVENTORY.PRODUCT_COLOR_ID
WHERE
POMGR.PRODUCT_COLOR.PRODUCTION_COMPLETE_DT >= TRUNC(sysdate - 30)
--AND SUBSTR(POMGR.SKU.SKU_CODE, 1, 1) = '8'
group by
POMGR.PRODUCT_COLOR.id ,
POMGR.PRODUCT_COLOR.PRODUCTION_COMPLETE_DT,
POMGR.SKU.SKU_CODE,
POMGR.INVENTORY.TOTAL_ON_HAND,
POMGR.INVENTORY.AVAL_ON_HAND,
POMGR.INVENTORY.CURR_ON_HAND
having POMGR.INVENTORY.AVAL_ON_HAND > 0
or POMGR.INVENTORY.TOTAL_ON_HAND > 0
or POMGR.INVENTORY.CURR_ON_HAND > 0
order by
colorstyle DESC"""
## WHERE POMGR.PO_LINE.PO_HDR_ID = '" + ponum + "'"
## AND POMGR.PRODUCT_COLOR.COPY_READY_DT IS NOT NULL
##
result = connection.execute(query_marketplace_inprog)
styles = {}
filepaths = []
for row in result:
styledata = {}
styledata['colorstyle'] = row['colorstyle']
fname = str(row['colorstyle']) + '.png'
filepath = os.path.join('/mnt/netsrv101.l3.bluefly.com', fname[:4], fname)
#consigstyle['vendor_style'] = row['vendor_style']
filepaths.append(filepath)
#styles[style_alt] = styledata
#print consigstyles
connection.close()
return filepaths
## Walk Root Directory and Return List of all Files in all Subdirs too
def recursive_dirlist(rootdir):
import os,re
regex_bflyfile = re.compile(r'^(.*?/?)?.*?([0-9]{9})((_[1-7xX])|(_alt0[1-6]))?(\.[jpngJPNG]{3,4})?$')
walkedlist = []
for dirname, subdirnames, filenames in os.walk(rootdir):
# append path of all filenames to walkedlist
for filename in filenames:
file_path = os.path.abspath(os.path.join(dirname, filename))
if os.path.isfile(file_path) and regex_bflyfile.findall(file_path):
walkedlist.append(file_path)
# Advanced usage:
# editing the 'dirnames' list will stop os.walk() from recursing into there.
#if '.git' in dirnames:
# don't go into any .git directories.
# dirnames.remove('.git')
    walkedset = sorted(set(walkedlist))
return walkedset
def get_exif_all_data(image_filepath):
import exiftool
with exiftool.ExifTool() as et:
metadata = et.get_metadata(image_filepath)#['XMP:DateCreated'][:10].replace(':','-')
return metadata
## Returns False if file is Zero KB, True if file is valid - does not catch corrupt files greater than 1KB
def zero_byte_file_filter(image_filepath,error_dir=None):
import os, shutil
if not error_dir:
imagedir = os.path.dirname(image_filepath)
rootdir = os.path.dirname(imagedir)
error_root = os.path.join(rootdir,'zero_byte_errors')
error_details_drop_dir = os.path.join(error_root, 'originated_in_' + imagedir.split('/')[-1])
mdata = get_exif_all_data(os.path.abspath(image_filepath))
if mdata.get('File:FileSize') <= 1:
try:
os.makedirs(error_details_drop_dir, 16877)
except:
pass
error_file_stored = os.path.join(error_details_drop_dir, os.path.basename(image_filepath))
if os.path.isfile(error_file_stored):
os.remove(error_file_stored)
shutil.move(image_filepath, error_file_stored)
else:
shutil.move(image_filepath, error_file_stored)
return False
else:
return True
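## Usage sketch: combine the two helpers above to keep only non-empty image files.
## The root path here is hypothetical; pass whatever mount point you actually walk.
##   image_files = [f for f in recursive_dirlist('/mnt/images') if zero_byte_file_filter(f)]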
def getparse_metadata_from_imagefile(image_filepath):
import os, re
from collections import defaultdict
image_filepath = os.path.abspath(image_filepath)
mdata = get_exif_all_data(image_filepath)
mdatainsert = {} #defaultdict(list)
groupdict = defaultdict(list)
for k,v in mdata.iteritems():
try:
mgroup, mtag = k.split(':')
mvalue = v
metakvpairs = {mtag: mvalue}
groupdict[mgroup].append(metakvpairs)
#print mgroup, mtag, mvalue, '----_----', metagroupdict, '----\n----',groupdict
#metagroupdict[mgroup].append(metatagval)
except ValueError:
pass
#print groupdict datagroupkey, datagroupvalues = groupdict.popitem()
mdatainsert[image_filepath] = groupdict #.items()
return mdatainsert
def insert_gridfs_extract_metadata(image_filepath):
from mongo_gridfs_insert_file import insert_file_gridfs_file7
import os,sys
try:
db_name = sys.argv[2]
except IndexError:
db_name='gridfs_bfly'
metadata = getparse_metadata_from_imagefile(image_filepath).items()[0][1]
print image_filepath, metadata
insert_record = insert_file_gridfs_file7(filepath=image_filepath, metadata=metadata, db_name=db_name)
return #insert_record
def update_gridfs_extract_metadata(image_filepath):
from mongo_gridfs_insert_file import update_file_gridfs_file7
import os,sys
try:
db_name = sys.argv[2]
except IndexError:
db_name='gridfs_file7'
metadata = getparse_metadata_from_imagefile(image_filepath).items()[0][1]
print image_filepath, metadata
    update_record = update_file_gridfs_file7(filepath=image_filepath, metadata=metadata, db_name=db_name)
    return #update_record
if __name__ == '__main__':
import sys,os
try:
#directory = sys.argv[1]
dirfileslist = sqlQuery_StyleWithInventory() #recursive_dirlist(directory)
for f in dirfileslist:
insert_gridfs_extract_metadata(f)
#print dirfileslist
except IndexError:
print 'FAILED INDEX ERROR'
pass
| {
"content_hash": "7694e7895775b08c98e9d7157211a253",
"timestamp": "",
"source": "github",
"line_count": 162,
"max_line_length": 128,
"avg_line_length": 38.56172839506173,
"alnum_prop": 0.6252601248599328,
"repo_name": "relic7/prodimages",
"id": "7b0de93b0254b57e31712bfd2f4d393c0239d581",
"size": "6294",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/gridfs_import_Bluefly_withinventory.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "16783"
},
{
"name": "HTML",
"bytes": "88323"
},
{
"name": "JavaScript",
"bytes": "158855"
},
{
"name": "PHP",
"bytes": "70412"
},
{
"name": "PLSQL",
"bytes": "72767"
},
{
"name": "Perl",
"bytes": "7143"
},
{
"name": "Python",
"bytes": "4922301"
},
{
"name": "Shell",
"bytes": "423422"
},
{
"name": "Smarty",
"bytes": "571"
},
{
"name": "VimL",
"bytes": "6045"
}
],
"symlink_target": ""
} |
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.core.cache import cache
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth import logout
from django.contrib.messages import debug, info, success, warning, error, add_message
from django.http import (
HttpResponse, HttpResponseForbidden, Http404, HttpResponseNotAllowed,
HttpResponseRedirect, HttpResponsePermanentRedirect, HttpResponseNotModified,
HttpResponseBadRequest, HttpResponseNotFound, HttpResponseGone,
HttpResponseServerError
)
from mailchimp.settings import API_KEY, SECURE, REAL_CACHE, CACHE_TIMEOUT
import re
import warnings
try:
from django.utils import simplejson
except ImportError:
import json as simplejson
class KeywordArguments(dict):
def __getattr__(self, attr):
return self[attr]
class Cache(object):
def __init__(self, prefix=''):
self._data = {}
self._clear_lock = False
self._prefix = prefix
if REAL_CACHE:
self._set = getattr(self, '_real_set')
self._get = getattr(self, '_real_get')
self._del = getattr(self, '_real_del')
else:
self._set = getattr(self, '_fake_set')
self._get = getattr(self, '_fake_get')
self._del = getattr(self, '_fake_del')
def get(self, key, obj, *args, **kwargs):
if self._clear_lock:
self.flush(key)
self._clear_lock = False
value = self._get(key)
if value is None:
value = obj(*args, **kwargs) if callable(obj) else obj
self._set(key, value)
return value
def _real_set(self, key, value):
cache.set(key, value, CACHE_TIMEOUT)
def _real_get(self, key):
return cache.get(key, None)
def _real_del(self, key):
cache.delete(key)
def _fake_set(self, key, value):
self._data[key] = value
def _fake_get(self, key):
return self._data.get(key, None)
def _fake_del(self, key):
if key in self._data:
del self._data[key]
def get_child_cache(self, key):
return Cache('%s_%s_' % (self._prefix, key))
def flush(self, *keys):
for key in keys:
if key in self._data:
self._del(key)
def lock(self):
self._clear_lock = True
def clear(self, call):
self.lock()
return call()
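# A minimal usage sketch of the Cache helper above; the key, factory function and
# keyword argument below are hypothetical, not part of this module:
#
#   cache = Cache(prefix='lists')
#   members = cache.get('members', fetch_members, list_id=42)  # computed once, then cached
#   cache.flush('members')                                      # drop the cached entry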
def wrap(base, parent, name, *baseargs, **basekwargs):
def _wrapped(*args, **kwargs):
fullargs = baseargs + args
kwargs.update(basekwargs)
return getattr(parent, '%s_%s' % (base, name))(*fullargs, **kwargs)
return _wrapped
def build_dict(master, klass, data, key='id'):
return dict([(info[key], klass(master, info)) for info in data])
def _convert(name):
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
class Bullet(object):
def __init__(self, number, link, active):
self.number = number
self.link = link
self.active = active
class Paginator(object):
def __init__(self, objects, page, get_link, per_page=20, bullets=5):
page = int(page)
self.page = page
self.get_link = get_link
self.all_objects = objects
self.objects_count = objects.count()
per_page = per_page() if callable(per_page) else per_page
self.pages_count = int(float(self.objects_count) / float(per_page)) + 1
        self.bullets_count = bullets
self.per_page = per_page
self.start = (page - 1) * per_page
self.end = page * per_page
self.is_first = page == 1
self.first_bullet = Bullet(1, self.get_link(1), False)
self.is_last = page == self.pages_count
self.last_bullet = Bullet(self.pages_count, self.get_link(self.pages_count), False)
self.has_pages = self.pages_count != 1
self._objects = None
self._bullets = None
@property
def bullets(self):
if self._bullets is None:
pre = int(float(self.bullets_count) / 2)
bullets = [Bullet(self.page, self.get_link(self.page), True)]
diff = 0
for i in range(1, pre + 1):
this = self.page - i
if this:
bullets.insert(0, Bullet(this, self.get_link(this), False))
else:
diff = pre - this
break
for i in range(1, pre + 1 + diff):
this = self.page + i
if this <= self.pages_count:
bullets.append(Bullet(this, self.get_link(this), False))
else:
break
self._bullets = bullets
return self._bullets
@property
def objects(self):
if self._objects is None:
self._objects = self.all_objects[self.start:self.end]
return self._objects
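# A minimal usage sketch of Paginator; the queryset and link builder are hypothetical
# (BaseView.paginate below wires it up the same way):
#
#   pager = Paginator(Campaign.objects.all(), page=2,
#                     get_link=lambda p: '?page=%s' % p)
#   pager.objects   # the slice of results for page 2
#   pager.bullets   # Bullet instances for rendering the pager links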
class InternalRequest(object):
def __init__(self, request, args, kwargs):
self.request = request
self.args = args
self.kwargs = kwargs
def contribute_to_class(self, cls):
cls.request = self.request
cls.args = self.args
cls.kwargs = self.kwargs
class BaseView(object):
"""
A base class to create class based views.
    It will automatically check allowed methods if a list of allowed methods is
given. It also automatically tries to route to 'handle_`method`' methods if
they're available. So if for example you define a 'handle_post' method and
the request method is 'POST', this one will be called instead of 'handle'.
For each request a new instance of this class will be created and it will get
three attributes set: request, args and kwargs.
"""
# A list of allowed methods (if empty any method will be allowed)
allowed_methods = []
# The template to use in the render_to_response helper
template = 'base.html'
# Only allow access to logged in users
login_required = False
# Only allow access to users with certain permissions
required_permissions = []
# Only allow access to superusers
superuser_required = False
# Response to send when request is automatically declined
auto_decline_response = 'not_found'
#===========================================================================
# Dummy Attributes (DO NOT OVERWRITE)
#===========================================================================
request = None
args = tuple()
kwargs = {}
#===========================================================================
# Internal Methods
#===========================================================================
def __init__(self, *args, **kwargs):
# Preserve args and kwargs
self._initial_args = args
self._initial_kwargs = kwargs
@property
def __name__(self):
"""
INTERNAL: required by django
"""
return self.get_view_name()
def __call__(self, request, *args, **kwargs):
"""
INTERNAL: Called by django when a request should be handled by this view.
        Creates a new instance of this class to sandbox the request, args and kwargs.
"""
if self.allowed_methods and request.method not in self.allowed_methods:
return getattr(self, self.auto_decline_response)()
if self.login_required and not request.user.is_authenticated():
return getattr(self, self.auto_decline_response)()
if self.superuser_required and not request.user.is_superuser:
return getattr(self, self.auto_decline_response)()
if self.required_permissions and not request.user.has_perms(self.required_permissions):
return getattr(self, self.auto_decline_response)()
handle_func_name = 'handle_%s' % request.method.lower()
if not hasattr(self, handle_func_name):
handle_func_name = 'handle'
# Create a sandbox instance of this class to safely set the request, args and kwargs attributes
sandbox = self.__class__(*self._initial_args, **self._initial_kwargs)
sandbox.args = args
sandbox.kwargs = kwargs
sandbox.request = request
return getattr(sandbox, handle_func_name)()
#===========================================================================
# Misc Helpers
#===========================================================================
def get_view_name(self):
"""
Returns the name of this view
"""
return self.__class__.__name__
def get_template(self):
return self.template
def logout(self):
logout(self.request)
def get_page_link(self, page):
return '%s?page=%s' % (self.request.path, page)
def paginate(self, objects, page):
return Paginator(objects, page, self.get_page_link, 20, 5)
def reverse(self, view_name, *args, **kwargs):
return reverse(view_name, args=args or (), kwargs=kwargs or {})
#===========================================================================
# Handlers
#===========================================================================
def handle(self):
"""
Write your view logic here
"""
pass
#===========================================================================
# Response Helpers
#===========================================================================
def not_allowed(self, data=''):
return HttpResponseNotAllowed(data)
def forbidden(self, data=''):
return HttpResponseForbidden(data)
def redirect(self, url):
return HttpResponseRedirect(url)
def named_redirect(self, viewname, urlconf=None, args=None, kwargs=None,
prefix=None, current_app=None):
        return self.redirect(reverse(viewname, urlconf, args, kwargs, prefix, current_app))
def permanent_redirect(self, url):
return HttpResponsePermanentRedirect(url)
def named_permanent_redirect(self, viewname, urlconf=None, args=None,
kwargs=None, prefix=None, current_app=None):
        return self.permanent_redirect(reverse(viewname, urlconf, args, kwargs, prefix, current_app))
def not_modified(self, data=''):
return HttpResponseNotModified(data)
def bad_request(self, data=''):
return HttpResponseBadRequest(data)
def not_found(self, data=''):
return HttpResponseNotFound(data)
def gone(self, data=''):
return HttpResponseGone(data)
def server_error(self, data=''):
return HttpResponseServerError(data)
def simplejson(self, data):
return HttpResponse(simplejson.dumps(data), content_type='application/json')
def response(self, data):
return HttpResponse(data)
def render_to_response(self, data, request_context=True):
if request_context:
return render_to_response(self.get_template(), data, RequestContext(self.request))
return render_to_response(self.get_template(), data)
#===========================================================================
# Message Helpers
#===========================================================================
def message_debug(self, message):
debug(self.request, message)
def message_info(self, message):
info(self.request, message)
def message_success(self, message):
success(self.request, message)
def message_warning(self, message):
warning(self.request, message)
def message_error(self, message):
error(self.request, message)
def add_message(self, msgtype, message):
add_message(self.request, msgtype, message)
class WarningProxy(object):
__stuff = {}
def __init__(self, logger, obj):
WarningProxy.__stuff[self] = {}
WarningProxy.__stuff[self]['logger'] = logger
WarningProxy.__stuff[self]['obj'] = obj
def __getattr__(self, attr):
WarningProxy.__stuff[self]['logger'].lock()
val = getattr(WarningProxy.__stuff[self]['obj'], attr)
WarningProxy.__stuff[self]['logger'].release()
return WarningProxy(WarningProxy.__stuff[self]['logger'], val)
def __setattr__(self, attr, value):
WarningProxy.__stuff[self]['logger'].lock()
        setattr(WarningProxy.__stuff[self]['obj'], attr, value)
WarningProxy.__stuff[self]['logger'].release()
def __call__(self, *args, **kwargs):
WarningProxy.__stuff[self]['logger'].lock()
val = WarningProxy.__stuff[self]['obj'](*args, **kwargs)
WarningProxy.__stuff[self]['logger'].release()
return val
class WarningLogger(object):
def __init__(self):
self.proxies = []
self.queue = []
self._old = warnings.showwarning
def proxy(self, obj):
return WarningProxy(self, obj)
def lock(self):
warnings.showwarning = self._showwarning
def _showwarning(self, message, category, filename, lineno, fileobj=None):
self.queue.append((message, category, filename, lineno))
self._old(message, category, filename, lineno, fileobj)
def release(self):
warnings.showwarning = self._old
def get(self):
queue = list(self.queue)
self.queue = []
return queue
def reset(self):
self.queue = []
class Lazy(object):
def __init__(self, real):
self.__real = real
self.__cache = {}
def __getattr__(self, attr):
if attr not in self.__cache:
self.__cache[attr] = getattr(self.__real, attr)
return self.__cache[attr]
def dequeue(limit=None):
from mailchimp.models import Queue
for camp in Queue.objects.dequeue(limit):
yield camp
def is_queued_or_sent(object):
from mailchimp.models import Queue, Campaign
object_id = object.pk
content_type = ContentType.objects.get_for_model(object)
q = Queue.objects.filter(content_type=content_type, object_id=object_id)
if q.count():
return q[0]
c = Campaign.objects.filter(content_type=content_type, object_id=object_id)
if c.count():
return c[0]
return False
# this has to be down here to prevent circular imports
from mailchimp.chimp import Connection
# open a non-connected connection (lazily connect on first get_connection call)
CONNECTION = Connection(secure=SECURE)
def get_connection():
if not CONNECTION.is_connected:
CONNECTION.connect(API_KEY)
return CONNECTION
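# A minimal usage sketch (assumes a valid API_KEY in mailchimp.settings):
#
#   conn = get_connection()       # connects lazily on the first call
#   conn is get_connection()      # True: the module-level CONNECTION is reused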
| {
"content_hash": "a31881acce300d1ab7b1606b96057c69",
"timestamp": "",
"source": "github",
"line_count": 443,
"max_line_length": 103,
"avg_line_length": 33.95033860045147,
"alnum_prop": 0.5626994680851064,
"repo_name": "extertioner/django-mailchimp",
"id": "c1bc6001814304ef7c9838ef20c7767a2473ff97",
"size": "15040",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mailchimp/utils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "6972"
},
{
"name": "Python",
"bytes": "125489"
}
],
"symlink_target": ""
} |
from traitlets import Unicode
from .base import BasePlugin
from ..api import MissingEntry
class ExportPlugin(BasePlugin):
"""Base class for export plugins."""
to = Unicode("", config=True, help="destination to export to")
def export(self, gradebook):
"""Export grades to another format.
This method MUST be implemented by subclasses. Users should be able to
pass the ``--to`` flag on the command line, which will set the
``self.to`` variable. By default, this variable will be an empty string,
which allows you to specify whatever default you would like.
Arguments
---------
gradebook: :class:`nbgrader.api.Gradebook`
An instance of the gradebook
"""
raise NotImplementedError
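# A minimal sketch of a custom exporter (hypothetical, not shipped with this package;
# CsvExportPlugin below is the real, fuller example):
#
#   class StudentIdExportPlugin(ExportPlugin):
#       def export(self, gradebook):
#           dest = self.to or "student_ids.txt"   # ``--to`` overrides this default
#           with open(dest, "w") as fh:
#               for student in gradebook.students:
#                   fh.write(str(student.id) + "\n")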
class CsvExportPlugin(ExportPlugin):
"""CSV exporter plugin."""
def export(self, gradebook):
if self.to == "":
dest = "grades.csv"
else:
dest = self.to
self.log.info("Exporting grades to %s", dest)
fh = open(dest, "w")
keys = [
"assignment",
"duedate",
"timestamp",
"student_id",
"last_name",
"first_name",
"email",
"raw_score",
"late_submission_penalty",
"score",
"max_score"
]
fh.write(",".join(keys) + "\n")
fmt = ",".join(["{" + x + "}" for x in keys]) + "\n"
# Loop over each assignment in the database
for assignment in gradebook.assignments:
# Loop over each student in the database
for student in gradebook.students:
# Create a dictionary that will store information about this
# student's submitted assignment
score = {}
score['assignment'] = assignment.name
score['duedate'] = assignment.duedate
score['student_id'] = student.id
score['last_name'] = student.last_name
score['first_name'] = student.first_name
score['email'] = student.email
score['max_score'] = assignment.max_score
# Try to find the submission in the database. If it doesn't
# exist, the `MissingEntry` exception will be raised, which
# means the student didn't submit anything, so we assign them a
# score of zero.
try:
submission = gradebook.find_submission(
assignment.name, student.id)
except MissingEntry:
score['timestamp'] = ''
score['raw_score'] = 0.0
score['late_submission_penalty'] = 0.0
score['score'] = 0.0
else:
penalty = submission.late_submission_penalty
score['timestamp'] = submission.timestamp
score['raw_score'] = submission.score
score['late_submission_penalty'] = penalty
score['score'] = max(0.0, submission.score - penalty)
for key in score:
if score[key] is None:
score[key] = ''
if not isinstance(score[key], str):
score[key] = str(score[key])
fh.write(fmt.format(**score))
fh.close()
| {
"content_hash": "acb7853719685729e07f7d4819225c3f",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 80,
"avg_line_length": 34.46534653465346,
"alnum_prop": 0.514220051709279,
"repo_name": "ellisonbg/nbgrader",
"id": "a7cce9a386633ec46e30b73f6aba14e24c9e4e9c",
"size": "3481",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nbgrader/plugins/export.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "7909"
},
{
"name": "CSS",
"bytes": "6267"
},
{
"name": "HTML",
"bytes": "1053323"
},
{
"name": "JavaScript",
"bytes": "193443"
},
{
"name": "Jupyter Notebook",
"bytes": "841138"
},
{
"name": "Makefile",
"bytes": "7317"
},
{
"name": "Python",
"bytes": "685770"
},
{
"name": "Shell",
"bytes": "537"
},
{
"name": "Smarty",
"bytes": "25996"
}
],
"symlink_target": ""
} |
import _plotly_utils.basevalidators
class ColorValidator(_plotly_utils.basevalidators.ColorValidator):
def __init__(
self, plotly_name="color", parent_name="sunburst.hoverlabel.font", **kwargs
):
super(ColorValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "none"),
role=kwargs.pop("role", "style"),
**kwargs
)
| {
"content_hash": "86a2c11ff8e130e74f7cc5d5598d7010",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 83,
"avg_line_length": 34.46666666666667,
"alnum_prop": 0.5880077369439072,
"repo_name": "plotly/python-api",
"id": "1ceab2c94fbebf24bd5e3b202bad5b555fcdf414",
"size": "517",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/sunburst/hoverlabel/font/_color.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "6870"
},
{
"name": "Makefile",
"bytes": "1708"
},
{
"name": "Python",
"bytes": "823245"
},
{
"name": "Shell",
"bytes": "3238"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import json
import io
import zipfile
from decimal import Decimal
import boto
import boto.cloudformation
import boto.datapipeline
import boto.ec2
import boto.ec2.autoscale
import boto.ec2.elb
from boto.exception import BotoServerError
import boto.iam
import boto.rds
import boto.redshift
import boto.sns
import boto.sqs
import boto.vpc
import boto3
import sure # noqa
from string import Template
from moto import (
mock_autoscaling_deprecated,
mock_autoscaling,
mock_cloudformation,
mock_cloudformation_deprecated,
mock_datapipeline_deprecated,
mock_dynamodb2,
mock_ec2,
mock_ec2_deprecated,
mock_elb_deprecated,
mock_events,
mock_iam_deprecated,
mock_kms,
mock_lambda,
mock_logs,
mock_rds_deprecated,
mock_rds2,
mock_redshift_deprecated,
mock_route53_deprecated,
mock_s3,
mock_sns_deprecated,
mock_sqs_deprecated,
mock_elbv2,
)
from moto.core import ACCOUNT_ID
from tests import EXAMPLE_AMI_ID, EXAMPLE_AMI_ID2
from tests.test_cloudformation.fixtures import (
ec2_classic_eip,
fn_join,
rds_mysql_with_db_parameter_group,
rds_mysql_with_read_replica,
redshift,
route53_ec2_instance_with_public_ip,
route53_health_check,
route53_roundrobin,
single_instance_with_ebs_volume,
vpc_eip,
vpc_single_instance_in_subnet,
)
@mock_cloudformation_deprecated()
def test_stack_sqs_integration():
sqs_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"QueueGroup": {
"Type": "AWS::SQS::Queue",
"Properties": {"QueueName": "my-queue", "VisibilityTimeout": 60},
}
},
}
sqs_template_json = json.dumps(sqs_template)
conn = boto.cloudformation.connect_to_region("us-west-1")
conn.create_stack("test_stack", template_body=sqs_template_json)
stack = conn.describe_stacks()[0]
queue = stack.describe_resources()[0]
queue.resource_type.should.equal("AWS::SQS::Queue")
queue.logical_resource_id.should.equal("QueueGroup")
queue.physical_resource_id.should.equal("my-queue")
@mock_cloudformation_deprecated()
def test_stack_list_resources():
sqs_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"QueueGroup": {
"Type": "AWS::SQS::Queue",
"Properties": {"QueueName": "my-queue", "VisibilityTimeout": 60},
}
},
}
sqs_template_json = json.dumps(sqs_template)
conn = boto.cloudformation.connect_to_region("us-west-1")
conn.create_stack("test_stack", template_body=sqs_template_json)
resources = conn.list_stack_resources("test_stack")
assert len(resources) == 1
queue = resources[0]
queue.resource_type.should.equal("AWS::SQS::Queue")
queue.logical_resource_id.should.equal("QueueGroup")
queue.physical_resource_id.should.equal("my-queue")
@mock_cloudformation_deprecated()
@mock_sqs_deprecated()
def test_update_stack():
sqs_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"QueueGroup": {
"Type": "AWS::SQS::Queue",
"Properties": {"QueueName": "my-queue", "VisibilityTimeout": 60},
}
},
}
sqs_template_json = json.dumps(sqs_template)
conn = boto.cloudformation.connect_to_region("us-west-1")
conn.create_stack("test_stack", template_body=sqs_template_json)
sqs_conn = boto.sqs.connect_to_region("us-west-1")
queues = sqs_conn.get_all_queues()
queues.should.have.length_of(1)
queues[0].get_attributes("VisibilityTimeout")["VisibilityTimeout"].should.equal(
"60"
)
sqs_template["Resources"]["QueueGroup"]["Properties"]["VisibilityTimeout"] = 100
sqs_template_json = json.dumps(sqs_template)
conn.update_stack("test_stack", sqs_template_json)
queues = sqs_conn.get_all_queues()
queues.should.have.length_of(1)
queues[0].get_attributes("VisibilityTimeout")["VisibilityTimeout"].should.equal(
"100"
)
@mock_cloudformation_deprecated()
@mock_sqs_deprecated()
def test_update_stack_and_remove_resource():
sqs_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"QueueGroup": {
"Type": "AWS::SQS::Queue",
"Properties": {"QueueName": "my-queue", "VisibilityTimeout": 60},
}
},
}
sqs_template_json = json.dumps(sqs_template)
conn = boto.cloudformation.connect_to_region("us-west-1")
conn.create_stack("test_stack", template_body=sqs_template_json)
sqs_conn = boto.sqs.connect_to_region("us-west-1")
queues = sqs_conn.get_all_queues()
queues.should.have.length_of(1)
sqs_template["Resources"].pop("QueueGroup")
sqs_template_json = json.dumps(sqs_template)
conn.update_stack("test_stack", sqs_template_json)
queues = sqs_conn.get_all_queues()
queues.should.have.length_of(0)
@mock_cloudformation_deprecated()
@mock_sqs_deprecated()
def test_update_stack_and_add_resource():
sqs_template = {"AWSTemplateFormatVersion": "2010-09-09", "Resources": {}}
sqs_template_json = json.dumps(sqs_template)
conn = boto.cloudformation.connect_to_region("us-west-1")
conn.create_stack("test_stack", template_body=sqs_template_json)
sqs_conn = boto.sqs.connect_to_region("us-west-1")
queues = sqs_conn.get_all_queues()
queues.should.have.length_of(0)
sqs_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"QueueGroup": {
"Type": "AWS::SQS::Queue",
"Properties": {"QueueName": "my-queue", "VisibilityTimeout": 60},
}
},
}
sqs_template_json = json.dumps(sqs_template)
conn.update_stack("test_stack", sqs_template_json)
queues = sqs_conn.get_all_queues()
queues.should.have.length_of(1)
@mock_ec2_deprecated()
@mock_cloudformation_deprecated()
def test_stack_ec2_integration():
ec2_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"WebServerGroup": {
"Type": "AWS::EC2::Instance",
"Properties": {"ImageId": EXAMPLE_AMI_ID, "UserData": "some user data"},
}
},
}
ec2_template_json = json.dumps(ec2_template)
conn = boto.cloudformation.connect_to_region("us-west-1")
conn.create_stack("ec2_stack", template_body=ec2_template_json)
ec2_conn = boto.ec2.connect_to_region("us-west-1")
reservation = ec2_conn.get_all_reservations()[0]
ec2_instance = reservation.instances[0]
stack = conn.describe_stacks()[0]
instance = stack.describe_resources()[0]
instance.resource_type.should.equal("AWS::EC2::Instance")
instance.logical_resource_id.should.contain("WebServerGroup")
instance.physical_resource_id.should.equal(ec2_instance.id)
@mock_ec2_deprecated()
@mock_elb_deprecated()
@mock_cloudformation_deprecated()
def test_stack_elb_integration_with_attached_ec2_instances():
elb_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"MyELB": {
"Type": "AWS::ElasticLoadBalancing::LoadBalancer",
"Properties": {
"Instances": [{"Ref": "Ec2Instance1"}],
"LoadBalancerName": "test-elb",
"AvailabilityZones": ["us-east-1"],
"Listeners": [
{
"InstancePort": "80",
"LoadBalancerPort": "80",
"Protocol": "HTTP",
}
],
},
},
"Ec2Instance1": {
"Type": "AWS::EC2::Instance",
"Properties": {"ImageId": EXAMPLE_AMI_ID, "UserData": "some user data"},
},
},
}
elb_template_json = json.dumps(elb_template)
conn = boto.cloudformation.connect_to_region("us-west-1")
conn.create_stack("elb_stack", template_body=elb_template_json)
elb_conn = boto.ec2.elb.connect_to_region("us-west-1")
load_balancer = elb_conn.get_all_load_balancers()[0]
ec2_conn = boto.ec2.connect_to_region("us-west-1")
reservation = ec2_conn.get_all_reservations()[0]
ec2_instance = reservation.instances[0]
load_balancer.instances[0].id.should.equal(ec2_instance.id)
list(load_balancer.availability_zones).should.equal(["us-east-1"])
@mock_elb_deprecated()
@mock_cloudformation_deprecated()
def test_stack_elb_integration_with_health_check():
elb_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"MyELB": {
"Type": "AWS::ElasticLoadBalancing::LoadBalancer",
"Properties": {
"LoadBalancerName": "test-elb",
"AvailabilityZones": ["us-west-1"],
"HealthCheck": {
"HealthyThreshold": "3",
"Interval": "5",
"Target": "HTTP:80/healthcheck",
"Timeout": "4",
"UnhealthyThreshold": "2",
},
"Listeners": [
{
"InstancePort": "80",
"LoadBalancerPort": "80",
"Protocol": "HTTP",
}
],
},
}
},
}
elb_template_json = json.dumps(elb_template)
conn = boto.cloudformation.connect_to_region("us-west-1")
conn.create_stack("elb_stack", template_body=elb_template_json)
elb_conn = boto.ec2.elb.connect_to_region("us-west-1")
load_balancer = elb_conn.get_all_load_balancers()[0]
health_check = load_balancer.health_check
health_check.healthy_threshold.should.equal(3)
health_check.interval.should.equal(5)
health_check.target.should.equal("HTTP:80/healthcheck")
health_check.timeout.should.equal(4)
health_check.unhealthy_threshold.should.equal(2)
@mock_elb_deprecated()
@mock_cloudformation_deprecated()
def test_stack_elb_integration_with_update():
elb_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"MyELB": {
"Type": "AWS::ElasticLoadBalancing::LoadBalancer",
"Properties": {
"LoadBalancerName": "test-elb",
"AvailabilityZones": ["us-west-1a"],
"Listeners": [
{
"InstancePort": "80",
"LoadBalancerPort": "80",
"Protocol": "HTTP",
}
],
"Policies": {"Ref": "AWS::NoValue"},
},
}
},
}
elb_template_json = json.dumps(elb_template)
conn = boto.cloudformation.connect_to_region("us-west-1")
conn.create_stack("elb_stack", template_body=elb_template_json)
elb_conn = boto.ec2.elb.connect_to_region("us-west-1")
load_balancer = elb_conn.get_all_load_balancers()[0]
load_balancer.availability_zones[0].should.equal("us-west-1a")
elb_template["Resources"]["MyELB"]["Properties"]["AvailabilityZones"] = [
"us-west-1b"
]
elb_template_json = json.dumps(elb_template)
conn.update_stack("elb_stack", template_body=elb_template_json)
load_balancer = elb_conn.get_all_load_balancers()[0]
load_balancer.availability_zones[0].should.equal("us-west-1b")
@mock_ec2_deprecated()
@mock_redshift_deprecated()
@mock_cloudformation_deprecated()
def test_redshift_stack():
redshift_template_json = json.dumps(redshift.template)
vpc_conn = boto.vpc.connect_to_region("us-west-2")
conn = boto.cloudformation.connect_to_region("us-west-2")
conn.create_stack(
"redshift_stack",
template_body=redshift_template_json,
parameters=[
("DatabaseName", "mydb"),
("ClusterType", "multi-node"),
("NumberOfNodes", 2),
("NodeType", "dw1.xlarge"),
("MasterUsername", "myuser"),
("MasterUserPassword", "mypass"),
("InboundTraffic", "10.0.0.1/16"),
("PortNumber", 5439),
],
)
redshift_conn = boto.redshift.connect_to_region("us-west-2")
cluster_res = redshift_conn.describe_clusters()
clusters = cluster_res["DescribeClustersResponse"]["DescribeClustersResult"][
"Clusters"
]
clusters.should.have.length_of(1)
cluster = clusters[0]
cluster["DBName"].should.equal("mydb")
cluster["NumberOfNodes"].should.equal(2)
cluster["NodeType"].should.equal("dw1.xlarge")
cluster["MasterUsername"].should.equal("myuser")
cluster["Port"].should.equal(5439)
cluster["VpcSecurityGroups"].should.have.length_of(1)
security_group_id = cluster["VpcSecurityGroups"][0]["VpcSecurityGroupId"]
groups = vpc_conn.get_all_security_groups(group_ids=[security_group_id])
groups.should.have.length_of(1)
group = groups[0]
group.rules.should.have.length_of(1)
group.rules[0].grants[0].cidr_ip.should.equal("10.0.0.1/16")
@mock_ec2_deprecated()
@mock_cloudformation_deprecated()
def test_stack_security_groups():
security_group_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"my-security-group": {
"Type": "AWS::EC2::SecurityGroup",
"Properties": {"GroupDescription": "My other group"},
},
"Ec2Instance2": {
"Type": "AWS::EC2::Instance",
"Properties": {
"SecurityGroups": [{"Ref": "InstanceSecurityGroup"}],
"ImageId": EXAMPLE_AMI_ID,
},
},
"InstanceSecurityGroup": {
"Type": "AWS::EC2::SecurityGroup",
"Properties": {
"GroupDescription": "My security group",
"Tags": [{"Key": "bar", "Value": "baz"}],
"SecurityGroupIngress": [
{
"IpProtocol": "tcp",
"FromPort": "22",
"ToPort": "22",
"CidrIp": "123.123.123.123/32",
},
{
"IpProtocol": "tcp",
"FromPort": "80",
"ToPort": "8000",
"SourceSecurityGroupId": {"Ref": "my-security-group"},
},
],
},
},
},
}
security_group_template_json = json.dumps(security_group_template)
conn = boto.cloudformation.connect_to_region("us-west-1")
conn.create_stack(
"security_group_stack",
template_body=security_group_template_json,
tags={"foo": "bar"},
)
ec2_conn = boto.ec2.connect_to_region("us-west-1")
instance_group = ec2_conn.get_all_security_groups(
filters={"description": ["My security group"]}
)[0]
other_group = ec2_conn.get_all_security_groups(
filters={"description": ["My other group"]}
)[0]
reservation = ec2_conn.get_all_reservations()[0]
ec2_instance = reservation.instances[0]
ec2_instance.groups[0].id.should.equal(instance_group.id)
instance_group.description.should.equal("My security group")
instance_group.tags.should.have.key("foo").which.should.equal("bar")
instance_group.tags.should.have.key("bar").which.should.equal("baz")
rule1, rule2 = instance_group.rules
int(rule1.to_port).should.equal(22)
int(rule1.from_port).should.equal(22)
rule1.grants[0].cidr_ip.should.equal("123.123.123.123/32")
rule1.ip_protocol.should.equal("tcp")
int(rule2.to_port).should.equal(8000)
int(rule2.from_port).should.equal(80)
rule2.ip_protocol.should.equal("tcp")
rule2.grants[0].group_id.should.equal(other_group.id)
@mock_autoscaling_deprecated()
@mock_elb_deprecated()
@mock_cloudformation_deprecated()
@mock_ec2_deprecated()
def test_autoscaling_group_with_elb():
web_setup_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"my-as-group": {
"Type": "AWS::AutoScaling::AutoScalingGroup",
"Properties": {
"AvailabilityZones": ["us-east-1a"],
"LaunchConfigurationName": {"Ref": "my-launch-config"},
"MinSize": "2",
"MaxSize": "2",
"DesiredCapacity": "2",
"LoadBalancerNames": [{"Ref": "my-elb"}],
"Tags": [
{
"Key": "propagated-test-tag",
"Value": "propagated-test-tag-value",
"PropagateAtLaunch": True,
},
{
"Key": "not-propagated-test-tag",
"Value": "not-propagated-test-tag-value",
"PropagateAtLaunch": False,
},
],
},
},
"my-launch-config": {
"Type": "AWS::AutoScaling::LaunchConfiguration",
"Properties": {
"ImageId": EXAMPLE_AMI_ID,
"InstanceType": "t2.medium",
"UserData": "some user data",
},
},
"my-elb": {
"Type": "AWS::ElasticLoadBalancing::LoadBalancer",
"Properties": {
"AvailabilityZones": ["us-east-1a"],
"Listeners": [
{
"LoadBalancerPort": "80",
"InstancePort": "80",
"Protocol": "HTTP",
}
],
"LoadBalancerName": "my-elb",
"HealthCheck": {
"Target": "HTTP:80",
"HealthyThreshold": "3",
"UnhealthyThreshold": "5",
"Interval": "30",
"Timeout": "5",
},
},
},
},
}
web_setup_template_json = json.dumps(web_setup_template)
conn = boto.cloudformation.connect_to_region("us-east-1")
conn.create_stack("web_stack", template_body=web_setup_template_json)
autoscale_conn = boto.ec2.autoscale.connect_to_region("us-east-1")
autoscale_group = autoscale_conn.get_all_groups()[0]
autoscale_group.launch_config_name.should.contain("my-launch-config")
autoscale_group.load_balancers[0].should.equal("my-elb")
# Confirm the Launch config was actually created
autoscale_conn.get_all_launch_configurations().should.have.length_of(1)
# Confirm the ELB was actually created
elb_conn = boto.ec2.elb.connect_to_region("us-east-1")
elb_conn.get_all_load_balancers().should.have.length_of(1)
stack = conn.describe_stacks()[0]
resources = stack.describe_resources()
as_group_resource = [
resource
for resource in resources
if resource.resource_type == "AWS::AutoScaling::AutoScalingGroup"
][0]
as_group_resource.physical_resource_id.should.contain("my-as-group")
launch_config_resource = [
resource
for resource in resources
if resource.resource_type == "AWS::AutoScaling::LaunchConfiguration"
][0]
launch_config_resource.physical_resource_id.should.contain("my-launch-config")
elb_resource = [
resource
for resource in resources
if resource.resource_type == "AWS::ElasticLoadBalancing::LoadBalancer"
][0]
elb_resource.physical_resource_id.should.contain("my-elb")
# confirm the instances were created with the right tags
ec2_conn = boto.ec2.connect_to_region("us-east-1")
reservations = ec2_conn.get_all_reservations()
len(reservations).should.equal(1)
reservation = reservations[0]
len(reservation.instances).should.equal(2)
for instance in reservation.instances:
instance.tags["propagated-test-tag"].should.equal("propagated-test-tag-value")
instance.tags.keys().should_not.contain("not-propagated-test-tag")
@mock_autoscaling_deprecated()
@mock_cloudformation_deprecated()
@mock_ec2_deprecated()
def test_autoscaling_group_update():
asg_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"my-as-group": {
"Type": "AWS::AutoScaling::AutoScalingGroup",
"Properties": {
"AvailabilityZones": ["us-west-1a"],
"LaunchConfigurationName": {"Ref": "my-launch-config"},
"MinSize": "2",
"MaxSize": "2",
"DesiredCapacity": "2",
},
},
"my-launch-config": {
"Type": "AWS::AutoScaling::LaunchConfiguration",
"Properties": {
"ImageId": EXAMPLE_AMI_ID,
"InstanceType": "t2.medium",
"UserData": "some user data",
},
},
},
}
asg_template_json = json.dumps(asg_template)
conn = boto.cloudformation.connect_to_region("us-west-1")
conn.create_stack("asg_stack", template_body=asg_template_json)
autoscale_conn = boto.ec2.autoscale.connect_to_region("us-west-1")
asg = autoscale_conn.get_all_groups()[0]
asg.min_size.should.equal(2)
asg.max_size.should.equal(2)
asg.desired_capacity.should.equal(2)
asg_template["Resources"]["my-as-group"]["Properties"]["MaxSize"] = 3
asg_template["Resources"]["my-as-group"]["Properties"]["Tags"] = [
{
"Key": "propagated-test-tag",
"Value": "propagated-test-tag-value",
"PropagateAtLaunch": True,
},
{
"Key": "not-propagated-test-tag",
"Value": "not-propagated-test-tag-value",
"PropagateAtLaunch": False,
},
]
asg_template_json = json.dumps(asg_template)
conn.update_stack("asg_stack", template_body=asg_template_json)
asg = autoscale_conn.get_all_groups()[0]
asg.min_size.should.equal(2)
asg.max_size.should.equal(3)
asg.desired_capacity.should.equal(2)
# confirm the instances were created with the right tags
ec2_conn = boto.ec2.connect_to_region("us-west-1")
reservations = ec2_conn.get_all_reservations()
running_instance_count = 0
for res in reservations:
for instance in res.instances:
if instance.state == "running":
running_instance_count += 1
instance.tags["propagated-test-tag"].should.equal(
"propagated-test-tag-value"
)
instance.tags.keys().should_not.contain("not-propagated-test-tag")
running_instance_count.should.equal(2)
@mock_ec2_deprecated()
@mock_cloudformation_deprecated()
def test_vpc_single_instance_in_subnet():
template_json = json.dumps(vpc_single_instance_in_subnet.template)
conn = boto.cloudformation.connect_to_region("us-west-1")
conn.create_stack(
"test_stack", template_body=template_json, parameters=[("KeyName", "my_key")]
)
vpc_conn = boto.vpc.connect_to_region("us-west-1")
vpc = vpc_conn.get_all_vpcs(filters={"cidrBlock": "10.0.0.0/16"})[0]
vpc.cidr_block.should.equal("10.0.0.0/16")
# Add this once we implement the endpoint
# vpc_conn.get_all_internet_gateways().should.have.length_of(1)
subnet = vpc_conn.get_all_subnets(filters={"vpcId": vpc.id})[0]
subnet.vpc_id.should.equal(vpc.id)
ec2_conn = boto.ec2.connect_to_region("us-west-1")
reservation = ec2_conn.get_all_reservations()[0]
instance = reservation.instances[0]
instance.tags["Foo"].should.equal("Bar")
    # Check that the EIP is attached to the EC2 instance
eip = ec2_conn.get_all_addresses()[0]
eip.domain.should.equal("vpc")
eip.instance_id.should.equal(instance.id)
security_group = ec2_conn.get_all_security_groups(filters={"vpc_id": [vpc.id]})[0]
security_group.vpc_id.should.equal(vpc.id)
stack = conn.describe_stacks()[0]
vpc.tags.should.have.key("Application").which.should.equal(stack.stack_id)
resources = stack.describe_resources()
vpc_resource = [
resource for resource in resources if resource.resource_type == "AWS::EC2::VPC"
][0]
vpc_resource.physical_resource_id.should.equal(vpc.id)
subnet_resource = [
resource
for resource in resources
if resource.resource_type == "AWS::EC2::Subnet"
][0]
subnet_resource.physical_resource_id.should.equal(subnet.id)
eip_resource = [
resource for resource in resources if resource.resource_type == "AWS::EC2::EIP"
][0]
eip_resource.physical_resource_id.should.equal(eip.public_ip)
@mock_cloudformation()
@mock_ec2()
@mock_rds2()
def test_rds_db_parameter_groups():
ec2_conn = boto3.client("ec2", region_name="us-west-1")
ec2_conn.create_security_group(
GroupName="application", Description="Our Application Group"
)
template_json = json.dumps(rds_mysql_with_db_parameter_group.template)
cf_conn = boto3.client("cloudformation", "us-west-1")
cf_conn.create_stack(
StackName="test_stack",
TemplateBody=template_json,
Parameters=[
{"ParameterKey": key, "ParameterValue": value}
for key, value in [
("DBInstanceIdentifier", "master_db"),
("DBName", "my_db"),
("DBUser", "my_user"),
("DBPassword", "my_password"),
("DBAllocatedStorage", "20"),
("DBInstanceClass", "db.m1.medium"),
("EC2SecurityGroup", "application"),
("MultiAZ", "true"),
]
],
)
rds_conn = boto3.client("rds", region_name="us-west-1")
db_parameter_groups = rds_conn.describe_db_parameter_groups()
len(db_parameter_groups["DBParameterGroups"]).should.equal(1)
db_parameter_group_name = db_parameter_groups["DBParameterGroups"][0][
"DBParameterGroupName"
]
found_cloudformation_set_parameter = False
for db_parameter in rds_conn.describe_db_parameters(
DBParameterGroupName=db_parameter_group_name
)["Parameters"]:
if (
db_parameter["ParameterName"] == "BACKLOG_QUEUE_LIMIT"
and db_parameter["ParameterValue"] == "2048"
):
found_cloudformation_set_parameter = True
found_cloudformation_set_parameter.should.equal(True)
@mock_cloudformation_deprecated()
@mock_ec2_deprecated()
@mock_rds_deprecated()
def test_rds_mysql_with_read_replica():
ec2_conn = boto.ec2.connect_to_region("us-west-1")
ec2_conn.create_security_group("application", "Our Application Group")
template_json = json.dumps(rds_mysql_with_read_replica.template)
conn = boto.cloudformation.connect_to_region("us-west-1")
conn.create_stack(
"test_stack",
template_body=template_json,
parameters=[
("DBInstanceIdentifier", "master_db"),
("DBName", "my_db"),
("DBUser", "my_user"),
("DBPassword", "my_password"),
("DBAllocatedStorage", "20"),
("DBInstanceClass", "db.m1.medium"),
("EC2SecurityGroup", "application"),
("MultiAZ", "true"),
],
)
rds_conn = boto.rds.connect_to_region("us-west-1")
primary = rds_conn.get_all_dbinstances("master_db")[0]
primary.master_username.should.equal("my_user")
primary.allocated_storage.should.equal(20)
primary.instance_class.should.equal("db.m1.medium")
primary.multi_az.should.equal(True)
list(primary.read_replica_dbinstance_identifiers).should.have.length_of(1)
replica_id = primary.read_replica_dbinstance_identifiers[0]
replica = rds_conn.get_all_dbinstances(replica_id)[0]
replica.instance_class.should.equal("db.m1.medium")
security_group_name = primary.security_groups[0].name
security_group = rds_conn.get_all_dbsecurity_groups(security_group_name)[0]
security_group.ec2_groups[0].name.should.equal("application")
@mock_cloudformation_deprecated()
@mock_ec2_deprecated()
@mock_rds_deprecated()
def test_rds_mysql_with_read_replica_in_vpc():
template_json = json.dumps(rds_mysql_with_read_replica.template)
conn = boto.cloudformation.connect_to_region("eu-central-1")
conn.create_stack(
"test_stack",
template_body=template_json,
parameters=[
("DBInstanceIdentifier", "master_db"),
("DBName", "my_db"),
("DBUser", "my_user"),
("DBPassword", "my_password"),
("DBAllocatedStorage", "20"),
("DBInstanceClass", "db.m1.medium"),
("MultiAZ", "true"),
],
)
rds_conn = boto.rds.connect_to_region("eu-central-1")
primary = rds_conn.get_all_dbinstances("master_db")[0]
subnet_group_name = primary.subnet_group.name
subnet_group = rds_conn.get_all_db_subnet_groups(subnet_group_name)[0]
subnet_group.description.should.equal("my db subnet group")
@mock_autoscaling_deprecated()
@mock_iam_deprecated()
@mock_cloudformation_deprecated()
def test_iam_roles():
iam_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"my-launch-config": {
"Properties": {
"IamInstanceProfile": {"Ref": "my-instance-profile-with-path"},
"ImageId": EXAMPLE_AMI_ID,
"InstanceType": "t2.medium",
},
"Type": "AWS::AutoScaling::LaunchConfiguration",
},
"my-instance-profile-with-path": {
"Properties": {
"Path": "my-path",
"Roles": [{"Ref": "my-role-with-path"}],
},
"Type": "AWS::IAM::InstanceProfile",
},
"my-instance-profile-no-path": {
"Properties": {"Roles": [{"Ref": "my-role-no-path"}]},
"Type": "AWS::IAM::InstanceProfile",
},
"my-role-with-path": {
"Properties": {
"AssumeRolePolicyDocument": {
"Statement": [
{
"Action": ["sts:AssumeRole"],
"Effect": "Allow",
"Principal": {"Service": ["ec2.amazonaws.com"]},
}
]
},
"Path": "/my-path/",
"Policies": [
{
"PolicyDocument": {
"Statement": [
{
"Action": [
"ec2:CreateTags",
"ec2:DescribeInstances",
"ec2:DescribeTags",
],
"Effect": "Allow",
"Resource": ["*"],
}
],
"Version": "2012-10-17",
},
"PolicyName": "EC2_Tags",
},
{
"PolicyDocument": {
"Statement": [
{
"Action": ["sqs:*"],
"Effect": "Allow",
"Resource": ["*"],
}
],
"Version": "2012-10-17",
},
"PolicyName": "SQS",
},
],
},
"Type": "AWS::IAM::Role",
},
"my-role-no-path": {
"Properties": {
"RoleName": "my-role-no-path-name",
"AssumeRolePolicyDocument": {
"Statement": [
{
"Action": ["sts:AssumeRole"],
"Effect": "Allow",
"Principal": {"Service": ["ec2.amazonaws.com"]},
}
]
},
},
"Type": "AWS::IAM::Role",
},
},
}
iam_template_json = json.dumps(iam_template)
conn = boto.cloudformation.connect_to_region("us-west-1")
conn.create_stack("test_stack", template_body=iam_template_json)
iam_conn = boto.iam.connect_to_region("us-west-1")
role_results = iam_conn.list_roles()["list_roles_response"]["list_roles_result"][
"roles"
]
role_name_to_id = {}
for role_result in role_results:
role = iam_conn.get_role(role_result.role_name)
# Role name is not specified, so randomly generated - can't check exact name
if "with-path" in role.role_name:
role_name_to_id["with-path"] = role.role_id
role.path.should.equal("/my-path/")
else:
role_name_to_id["no-path"] = role.role_id
role.role_name.should.equal("my-role-no-path-name")
role.path.should.equal("/")
instance_profile_responses = iam_conn.list_instance_profiles()[
"list_instance_profiles_response"
]["list_instance_profiles_result"]["instance_profiles"]
instance_profile_responses.should.have.length_of(2)
instance_profile_names = []
for instance_profile_response in instance_profile_responses:
instance_profile = iam_conn.get_instance_profile(
instance_profile_response.instance_profile_name
)
instance_profile_names.append(instance_profile.instance_profile_name)
instance_profile.instance_profile_name.should.contain("my-instance-profile")
if "with-path" in instance_profile.instance_profile_name:
instance_profile.path.should.equal("my-path")
instance_profile.role_id.should.equal(role_name_to_id["with-path"])
else:
instance_profile.instance_profile_name.should.contain("no-path")
instance_profile.role_id.should.equal(role_name_to_id["no-path"])
instance_profile.path.should.equal("/")
autoscale_conn = boto.ec2.autoscale.connect_to_region("us-west-1")
launch_config = autoscale_conn.get_all_launch_configurations()[0]
launch_config.instance_profile_name.should.contain("my-instance-profile-with-path")
stack = conn.describe_stacks()[0]
resources = stack.describe_resources()
instance_profile_resources = [
resource
for resource in resources
if resource.resource_type == "AWS::IAM::InstanceProfile"
]
{ip.physical_resource_id for ip in instance_profile_resources}.should.equal(
set(instance_profile_names)
)
role_resources = [
resource for resource in resources if resource.resource_type == "AWS::IAM::Role"
]
{r.physical_resource_id for r in role_resources}.should.equal(
set(role_name_to_id.values())
)
@mock_ec2_deprecated()
@mock_cloudformation_deprecated()
def test_single_instance_with_ebs_volume():
template_json = json.dumps(single_instance_with_ebs_volume.template)
conn = boto.cloudformation.connect_to_region("us-west-1")
conn.create_stack(
"test_stack", template_body=template_json, parameters=[("KeyName", "key_name")]
)
ec2_conn = boto.ec2.connect_to_region("us-west-1")
reservation = ec2_conn.get_all_reservations()[0]
ec2_instance = reservation.instances[0]
volumes = ec2_conn.get_all_volumes()
# Grab the mounted drive
volume = [volume for volume in volumes if volume.attach_data.device == "/dev/sdh"][
0
]
volume.volume_state().should.equal("in-use")
volume.attach_data.instance_id.should.equal(ec2_instance.id)
stack = conn.describe_stacks()[0]
resources = stack.describe_resources()
ebs_volumes = [
resource
for resource in resources
if resource.resource_type == "AWS::EC2::Volume"
]
ebs_volumes[0].physical_resource_id.should.equal(volume.id)
@mock_cloudformation_deprecated()
def test_create_template_without_required_param():
template_json = json.dumps(single_instance_with_ebs_volume.template)
conn = boto.cloudformation.connect_to_region("us-west-1")
conn.create_stack.when.called_with(
"test_stack", template_body=template_json
).should.throw(BotoServerError)
@mock_ec2_deprecated()
@mock_cloudformation_deprecated()
def test_classic_eip():
template_json = json.dumps(ec2_classic_eip.template)
conn = boto.cloudformation.connect_to_region("us-west-1")
conn.create_stack("test_stack", template_body=template_json)
ec2_conn = boto.ec2.connect_to_region("us-west-1")
eip = ec2_conn.get_all_addresses()[0]
stack = conn.describe_stacks()[0]
resources = stack.describe_resources()
cfn_eip = [
resource for resource in resources if resource.resource_type == "AWS::EC2::EIP"
][0]
cfn_eip.physical_resource_id.should.equal(eip.public_ip)
@mock_ec2_deprecated()
@mock_cloudformation_deprecated()
def test_vpc_eip():
template_json = json.dumps(vpc_eip.template)
conn = boto.cloudformation.connect_to_region("us-west-1")
conn.create_stack("test_stack", template_body=template_json)
ec2_conn = boto.ec2.connect_to_region("us-west-1")
eip = ec2_conn.get_all_addresses()[0]
stack = conn.describe_stacks()[0]
resources = stack.describe_resources()
cfn_eip = [
resource for resource in resources if resource.resource_type == "AWS::EC2::EIP"
][0]
cfn_eip.physical_resource_id.should.equal(eip.public_ip)
@mock_ec2_deprecated()
@mock_cloudformation_deprecated()
def test_fn_join():
template_json = json.dumps(fn_join.template)
conn = boto.cloudformation.connect_to_region("us-west-1")
conn.create_stack("test_stack", template_body=template_json)
ec2_conn = boto.ec2.connect_to_region("us-west-1")
eip = ec2_conn.get_all_addresses()[0]
stack = conn.describe_stacks()[0]
fn_join_output = stack.outputs[0]
fn_join_output.value.should.equal("test eip:{0}".format(eip.public_ip))
@mock_cloudformation_deprecated()
@mock_sqs_deprecated()
def test_conditional_resources():
sqs_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Parameters": {
"EnvType": {"Description": "Environment type.", "Type": "String"}
},
"Conditions": {"CreateQueue": {"Fn::Equals": [{"Ref": "EnvType"}, "prod"]}},
"Resources": {
"QueueGroup": {
"Condition": "CreateQueue",
"Type": "AWS::SQS::Queue",
"Properties": {"QueueName": "my-queue", "VisibilityTimeout": 60},
}
},
}
sqs_template_json = json.dumps(sqs_template)
conn = boto.cloudformation.connect_to_region("us-west-1")
conn.create_stack(
"test_stack_without_queue",
template_body=sqs_template_json,
parameters=[("EnvType", "staging")],
)
sqs_conn = boto.sqs.connect_to_region("us-west-1")
list(sqs_conn.get_all_queues()).should.have.length_of(0)
conn = boto.cloudformation.connect_to_region("us-west-1")
conn.create_stack(
"test_stack_with_queue",
template_body=sqs_template_json,
parameters=[("EnvType", "prod")],
)
sqs_conn = boto.sqs.connect_to_region("us-west-1")
list(sqs_conn.get_all_queues()).should.have.length_of(1)
@mock_cloudformation_deprecated()
@mock_ec2_deprecated()
def test_conditional_if_handling():
dummy_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Conditions": {"EnvEqualsPrd": {"Fn::Equals": [{"Ref": "ENV"}, "prd"]}},
"Parameters": {
"ENV": {
"Default": "dev",
"Description": "Deployment environment for the stack (dev/prd)",
"Type": "String",
}
},
"Description": "Stack 1",
"Resources": {
"App1": {
"Properties": {
"ImageId": {
"Fn::If": ["EnvEqualsPrd", EXAMPLE_AMI_ID, EXAMPLE_AMI_ID2]
}
},
"Type": "AWS::EC2::Instance",
}
},
}
dummy_template_json = json.dumps(dummy_template)
conn = boto.cloudformation.connect_to_region("us-west-1")
conn.create_stack("test_stack1", template_body=dummy_template_json)
ec2_conn = boto.ec2.connect_to_region("us-west-1")
reservation = ec2_conn.get_all_reservations()[0]
ec2_instance = reservation.instances[0]
ec2_instance.image_id.should.equal(EXAMPLE_AMI_ID2)
ec2_instance.terminate()
conn = boto.cloudformation.connect_to_region("us-west-2")
conn.create_stack(
"test_stack1", template_body=dummy_template_json, parameters=[("ENV", "prd")]
)
ec2_conn = boto.ec2.connect_to_region("us-west-2")
reservation = ec2_conn.get_all_reservations()[0]
ec2_instance = reservation.instances[0]
ec2_instance.image_id.should.equal(EXAMPLE_AMI_ID)
@mock_cloudformation_deprecated()
@mock_ec2_deprecated()
def test_cloudformation_mapping():
dummy_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Mappings": {
"RegionMap": {
"us-east-1": {"32": EXAMPLE_AMI_ID, "64": EXAMPLE_AMI_ID2},
"us-west-1": {"32": EXAMPLE_AMI_ID, "64": EXAMPLE_AMI_ID2},
"eu-west-1": {"32": EXAMPLE_AMI_ID, "64": EXAMPLE_AMI_ID2},
"ap-southeast-1": {"32": EXAMPLE_AMI_ID, "64": EXAMPLE_AMI_ID2},
"ap-northeast-1": {"32": EXAMPLE_AMI_ID, "64": EXAMPLE_AMI_ID2},
}
},
"Resources": {
"WebServer": {
"Type": "AWS::EC2::Instance",
"Properties": {
"ImageId": {
"Fn::FindInMap": ["RegionMap", {"Ref": "AWS::Region"}, "32"]
},
"InstanceType": "m1.small",
},
}
},
}
dummy_template_json = json.dumps(dummy_template)
conn = boto.cloudformation.connect_to_region("us-east-1")
conn.create_stack("test_stack1", template_body=dummy_template_json)
ec2_conn = boto.ec2.connect_to_region("us-east-1")
reservation = ec2_conn.get_all_reservations()[0]
ec2_instance = reservation.instances[0]
ec2_instance.image_id.should.equal(EXAMPLE_AMI_ID)
conn = boto.cloudformation.connect_to_region("us-west-1")
conn.create_stack("test_stack1", template_body=dummy_template_json)
ec2_conn = boto.ec2.connect_to_region("us-west-1")
reservation = ec2_conn.get_all_reservations()[0]
ec2_instance = reservation.instances[0]
ec2_instance.image_id.should.equal(EXAMPLE_AMI_ID)
@mock_cloudformation_deprecated()
@mock_route53_deprecated()
def test_route53_roundrobin():
route53_conn = boto.connect_route53()
template_json = json.dumps(route53_roundrobin.template)
conn = boto.cloudformation.connect_to_region("us-west-1")
stack = conn.create_stack("test_stack", template_body=template_json)
zones = route53_conn.get_all_hosted_zones()["ListHostedZonesResponse"][
"HostedZones"
]
list(zones).should.have.length_of(1)
zone_id = zones[0]["Id"]
zone_id = zone_id.split("/")
zone_id = zone_id[2]
rrsets = route53_conn.get_all_rrsets(zone_id)
rrsets.hosted_zone_id.should.equal(zone_id)
rrsets.should.have.length_of(2)
record_set1 = rrsets[0]
record_set1.name.should.equal("test_stack.us-west-1.my_zone.")
record_set1.identifier.should.equal("test_stack AWS")
record_set1.type.should.equal("CNAME")
record_set1.ttl.should.equal("900")
record_set1.weight.should.equal("3")
record_set1.resource_records[0].should.equal("aws.amazon.com")
record_set2 = rrsets[1]
record_set2.name.should.equal("test_stack.us-west-1.my_zone.")
record_set2.identifier.should.equal("test_stack Amazon")
record_set2.type.should.equal("CNAME")
record_set2.ttl.should.equal("900")
record_set2.weight.should.equal("1")
record_set2.resource_records[0].should.equal("www.amazon.com")
stack = conn.describe_stacks()[0]
output = stack.outputs[0]
output.key.should.equal("DomainName")
output.value.should.equal("arn:aws:route53:::hostedzone/{0}".format(zone_id))
@mock_cloudformation_deprecated()
@mock_ec2_deprecated()
@mock_route53_deprecated()
def test_route53_ec2_instance_with_public_ip():
route53_conn = boto.connect_route53()
ec2_conn = boto.ec2.connect_to_region("us-west-1")
template_json = json.dumps(route53_ec2_instance_with_public_ip.template)
conn = boto.cloudformation.connect_to_region("us-west-1")
conn.create_stack("test_stack", template_body=template_json)
instance_id = ec2_conn.get_all_reservations()[0].instances[0].id
zones = route53_conn.get_all_hosted_zones()["ListHostedZonesResponse"][
"HostedZones"
]
list(zones).should.have.length_of(1)
zone_id = zones[0]["Id"]
zone_id = zone_id.split("/")
zone_id = zone_id[2]
rrsets = route53_conn.get_all_rrsets(zone_id)
rrsets.should.have.length_of(1)
record_set1 = rrsets[0]
record_set1.name.should.equal("{0}.us-west-1.my_zone.".format(instance_id))
record_set1.identifier.should.equal(None)
record_set1.type.should.equal("A")
record_set1.ttl.should.equal("900")
record_set1.weight.should.equal(None)
record_set1.resource_records[0].should.equal("10.0.0.25")
@mock_cloudformation_deprecated()
@mock_route53_deprecated()
def test_route53_associate_health_check():
route53_conn = boto.connect_route53()
template_json = json.dumps(route53_health_check.template)
conn = boto.cloudformation.connect_to_region("us-west-1")
conn.create_stack("test_stack", template_body=template_json)
checks = route53_conn.get_list_health_checks()["ListHealthChecksResponse"][
"HealthChecks"
]
list(checks).should.have.length_of(1)
check = checks[0]
health_check_id = check["Id"]
config = check["HealthCheckConfig"]
config["FailureThreshold"].should.equal("3")
config["IPAddress"].should.equal("10.0.0.4")
config["Port"].should.equal("80")
config["RequestInterval"].should.equal("10")
config["ResourcePath"].should.equal("/")
config["Type"].should.equal("HTTP")
zones = route53_conn.get_all_hosted_zones()["ListHostedZonesResponse"][
"HostedZones"
]
list(zones).should.have.length_of(1)
zone_id = zones[0]["Id"]
zone_id = zone_id.split("/")
zone_id = zone_id[2]
rrsets = route53_conn.get_all_rrsets(zone_id)
rrsets.should.have.length_of(1)
record_set = rrsets[0]
record_set.health_check.should.equal(health_check_id)
@mock_cloudformation_deprecated()
@mock_route53_deprecated()
def test_route53_with_update():
route53_conn = boto.connect_route53()
template_json = json.dumps(route53_health_check.template)
cf_conn = boto.cloudformation.connect_to_region("us-west-1")
cf_conn.create_stack("test_stack", template_body=template_json)
zones = route53_conn.get_all_hosted_zones()["ListHostedZonesResponse"][
"HostedZones"
]
list(zones).should.have.length_of(1)
zone_id = zones[0]["Id"]
zone_id = zone_id.split("/")
zone_id = zone_id[2]
rrsets = route53_conn.get_all_rrsets(zone_id)
rrsets.should.have.length_of(1)
record_set = rrsets[0]
record_set.resource_records.should.equal(["my.example.com"])
route53_health_check.template["Resources"]["myDNSRecord"]["Properties"][
"ResourceRecords"
] = ["my_other.example.com"]
template_json = json.dumps(route53_health_check.template)
cf_conn.update_stack("test_stack", template_body=template_json)
zones = route53_conn.get_all_hosted_zones()["ListHostedZonesResponse"][
"HostedZones"
]
list(zones).should.have.length_of(1)
zone_id = zones[0]["Id"]
zone_id = zone_id.split("/")
zone_id = zone_id[2]
rrsets = route53_conn.get_all_rrsets(zone_id)
rrsets.should.have.length_of(1)
record_set = rrsets[0]
record_set.resource_records.should.equal(["my_other.example.com"])
@mock_cloudformation_deprecated()
@mock_sns_deprecated()
def test_sns_topic():
dummy_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"MySNSTopic": {
"Type": "AWS::SNS::Topic",
"Properties": {
"Subscription": [
{"Endpoint": "https://example.com", "Protocol": "https"}
],
"TopicName": "my_topics",
},
}
},
"Outputs": {
"topic_name": {"Value": {"Fn::GetAtt": ["MySNSTopic", "TopicName"]}},
"topic_arn": {"Value": {"Ref": "MySNSTopic"}},
},
}
template_json = json.dumps(dummy_template)
conn = boto.cloudformation.connect_to_region("us-west-1")
stack = conn.create_stack("test_stack", template_body=template_json)
sns_conn = boto.sns.connect_to_region("us-west-1")
topics = sns_conn.get_all_topics()["ListTopicsResponse"]["ListTopicsResult"][
"Topics"
]
topics.should.have.length_of(1)
topic_arn = topics[0]["TopicArn"]
topic_arn.should.contain("my_topics")
subscriptions = sns_conn.get_all_subscriptions()["ListSubscriptionsResponse"][
"ListSubscriptionsResult"
]["Subscriptions"]
subscriptions.should.have.length_of(1)
subscription = subscriptions[0]
subscription["TopicArn"].should.equal(topic_arn)
subscription["Protocol"].should.equal("https")
subscription["SubscriptionArn"].should.contain(topic_arn)
subscription["Endpoint"].should.equal("https://example.com")
stack = conn.describe_stacks()[0]
topic_name_output = [x for x in stack.outputs if x.key == "topic_name"][0]
topic_name_output.value.should.equal("my_topics")
topic_arn_output = [x for x in stack.outputs if x.key == "topic_arn"][0]
topic_arn_output.value.should.equal(topic_arn)
@mock_cloudformation_deprecated
@mock_ec2_deprecated
def test_vpc_gateway_attachment_creation_should_attach_itself_to_vpc():
template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"internetgateway": {"Type": "AWS::EC2::InternetGateway"},
"testvpc": {
"Type": "AWS::EC2::VPC",
"Properties": {
"CidrBlock": "10.0.0.0/16",
"EnableDnsHostnames": "true",
"EnableDnsSupport": "true",
"InstanceTenancy": "default",
},
},
"vpcgatewayattachment": {
"Type": "AWS::EC2::VPCGatewayAttachment",
"Properties": {
"InternetGatewayId": {"Ref": "internetgateway"},
"VpcId": {"Ref": "testvpc"},
},
},
},
}
template_json = json.dumps(template)
cf_conn = boto.cloudformation.connect_to_region("us-west-1")
cf_conn.create_stack("test_stack", template_body=template_json)
vpc_conn = boto.vpc.connect_to_region("us-west-1")
vpc = vpc_conn.get_all_vpcs(filters={"cidrBlock": "10.0.0.0/16"})[0]
igws = vpc_conn.get_all_internet_gateways(filters={"attachment.vpc-id": vpc.id})
igws.should.have.length_of(1)
@mock_cloudformation_deprecated
@mock_ec2_deprecated
def test_vpc_peering_creation():
vpc_conn = boto.vpc.connect_to_region("us-west-1")
vpc_source = vpc_conn.create_vpc("10.0.0.0/16")
peer_vpc = vpc_conn.create_vpc("10.1.0.0/16")
template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"vpcpeeringconnection": {
"Type": "AWS::EC2::VPCPeeringConnection",
"Properties": {"PeerVpcId": peer_vpc.id, "VpcId": vpc_source.id},
}
},
}
template_json = json.dumps(template)
cf_conn = boto.cloudformation.connect_to_region("us-west-1")
cf_conn.create_stack("test_stack", template_body=template_json)
peering_connections = vpc_conn.get_all_vpc_peering_connections()
peering_connections.should.have.length_of(1)
@mock_cloudformation_deprecated
@mock_ec2_deprecated
def test_multiple_security_group_ingress_separate_from_security_group_by_id():
template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"test-security-group1": {
"Type": "AWS::EC2::SecurityGroup",
"Properties": {
"GroupDescription": "test security group",
"Tags": [{"Key": "sg-name", "Value": "sg1"}],
},
},
"test-security-group2": {
"Type": "AWS::EC2::SecurityGroup",
"Properties": {
"GroupDescription": "test security group",
"Tags": [{"Key": "sg-name", "Value": "sg2"}],
},
},
"test-sg-ingress": {
"Type": "AWS::EC2::SecurityGroupIngress",
"Properties": {
"GroupId": {"Ref": "test-security-group1"},
"IpProtocol": "tcp",
"FromPort": "80",
"ToPort": "8080",
"SourceSecurityGroupId": {"Ref": "test-security-group2"},
},
},
},
}
template_json = json.dumps(template)
cf_conn = boto.cloudformation.connect_to_region("us-west-1")
cf_conn.create_stack("test_stack", template_body=template_json)
ec2_conn = boto.ec2.connect_to_region("us-west-1")
security_group1 = ec2_conn.get_all_security_groups(filters={"tag:sg-name": "sg1"})[
0
]
security_group2 = ec2_conn.get_all_security_groups(filters={"tag:sg-name": "sg2"})[
0
]
security_group1.rules.should.have.length_of(1)
security_group1.rules[0].grants.should.have.length_of(1)
security_group1.rules[0].grants[0].group_id.should.equal(security_group2.id)
security_group1.rules[0].ip_protocol.should.equal("tcp")
security_group1.rules[0].from_port.should.equal("80")
security_group1.rules[0].to_port.should.equal("8080")
@mock_cloudformation_deprecated
@mock_ec2_deprecated
def test_security_group_ingress_separate_from_security_group_by_id():
ec2_conn = boto.ec2.connect_to_region("us-west-1")
ec2_conn.create_security_group("test-security-group1", "test security group")
template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"test-security-group2": {
"Type": "AWS::EC2::SecurityGroup",
"Properties": {
"GroupDescription": "test security group",
"Tags": [{"Key": "sg-name", "Value": "sg2"}],
},
},
"test-sg-ingress": {
"Type": "AWS::EC2::SecurityGroupIngress",
"Properties": {
"GroupName": "test-security-group1",
"IpProtocol": "tcp",
"FromPort": "80",
"ToPort": "8080",
"SourceSecurityGroupId": {"Ref": "test-security-group2"},
},
},
},
}
template_json = json.dumps(template)
cf_conn = boto.cloudformation.connect_to_region("us-west-1")
cf_conn.create_stack("test_stack", template_body=template_json)
security_group1 = ec2_conn.get_all_security_groups(
groupnames=["test-security-group1"]
)[0]
security_group2 = ec2_conn.get_all_security_groups(filters={"tag:sg-name": "sg2"})[
0
]
security_group1.rules.should.have.length_of(1)
security_group1.rules[0].grants.should.have.length_of(1)
security_group1.rules[0].grants[0].group_id.should.equal(security_group2.id)
security_group1.rules[0].ip_protocol.should.equal("tcp")
security_group1.rules[0].from_port.should.equal("80")
security_group1.rules[0].to_port.should.equal("8080")
@mock_cloudformation_deprecated
@mock_ec2_deprecated
def test_security_group_ingress_separate_from_security_group_by_id_using_vpc():
vpc_conn = boto.vpc.connect_to_region("us-west-1")
vpc = vpc_conn.create_vpc("10.0.0.0/16")
template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"test-security-group1": {
"Type": "AWS::EC2::SecurityGroup",
"Properties": {
"GroupDescription": "test security group",
"VpcId": vpc.id,
"Tags": [{"Key": "sg-name", "Value": "sg1"}],
},
},
"test-security-group2": {
"Type": "AWS::EC2::SecurityGroup",
"Properties": {
"GroupDescription": "test security group",
"VpcId": vpc.id,
"Tags": [{"Key": "sg-name", "Value": "sg2"}],
},
},
"test-sg-ingress": {
"Type": "AWS::EC2::SecurityGroupIngress",
"Properties": {
"GroupId": {"Ref": "test-security-group1"},
"VpcId": vpc.id,
"IpProtocol": "tcp",
"FromPort": "80",
"ToPort": "8080",
"SourceSecurityGroupId": {"Ref": "test-security-group2"},
},
},
},
}
template_json = json.dumps(template)
cf_conn = boto.cloudformation.connect_to_region("us-west-1")
cf_conn.create_stack("test_stack", template_body=template_json)
security_group1 = vpc_conn.get_all_security_groups(filters={"tag:sg-name": "sg1"})[
0
]
security_group2 = vpc_conn.get_all_security_groups(filters={"tag:sg-name": "sg2"})[
0
]
security_group1.rules.should.have.length_of(1)
security_group1.rules[0].grants.should.have.length_of(1)
security_group1.rules[0].grants[0].group_id.should.equal(security_group2.id)
security_group1.rules[0].ip_protocol.should.equal("tcp")
security_group1.rules[0].from_port.should.equal("80")
security_group1.rules[0].to_port.should.equal("8080")
@mock_cloudformation_deprecated
@mock_ec2_deprecated
def test_security_group_with_update():
vpc_conn = boto.vpc.connect_to_region("us-west-1")
vpc1 = vpc_conn.create_vpc("10.0.0.0/16")
template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"test-security-group": {
"Type": "AWS::EC2::SecurityGroup",
"Properties": {
"GroupDescription": "test security group",
"VpcId": vpc1.id,
"Tags": [{"Key": "sg-name", "Value": "sg"}],
},
}
},
}
template_json = json.dumps(template)
cf_conn = boto.cloudformation.connect_to_region("us-west-1")
cf_conn.create_stack("test_stack", template_body=template_json)
security_group = vpc_conn.get_all_security_groups(filters={"tag:sg-name": "sg"})[0]
security_group.vpc_id.should.equal(vpc1.id)
vpc2 = vpc_conn.create_vpc("10.1.0.0/16")
template["Resources"]["test-security-group"]["Properties"]["VpcId"] = vpc2.id
template_json = json.dumps(template)
cf_conn.update_stack("test_stack", template_body=template_json)
security_group = vpc_conn.get_all_security_groups(filters={"tag:sg-name": "sg"})[0]
security_group.vpc_id.should.equal(vpc2.id)
@mock_cloudformation_deprecated
@mock_ec2_deprecated
def test_subnets_should_be_created_with_availability_zone():
vpc_conn = boto.vpc.connect_to_region("us-west-1")
vpc = vpc_conn.create_vpc("10.0.0.0/16")
subnet_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"testSubnet": {
"Type": "AWS::EC2::Subnet",
"Properties": {
"VpcId": vpc.id,
"CidrBlock": "10.0.0.0/24",
"AvailabilityZone": "us-west-1b",
},
}
},
}
cf_conn = boto.cloudformation.connect_to_region("us-west-1")
template_json = json.dumps(subnet_template)
cf_conn.create_stack("test_stack", template_body=template_json)
subnet = vpc_conn.get_all_subnets(filters={"cidrBlock": "10.0.0.0/24"})[0]
subnet.availability_zone.should.equal("us-west-1b")
@mock_cloudformation_deprecated
@mock_datapipeline_deprecated
def test_datapipeline():
dp_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"dataPipeline": {
"Properties": {
"Activate": "true",
"Name": "testDataPipeline",
"PipelineObjects": [
{
"Fields": [
{
"Key": "failureAndRerunMode",
"StringValue": "CASCADE",
},
{"Key": "scheduleType", "StringValue": "cron"},
{"Key": "schedule", "RefValue": "DefaultSchedule"},
{
"Key": "pipelineLogUri",
"StringValue": "s3://bucket/logs",
},
{"Key": "type", "StringValue": "Default"},
],
"Id": "Default",
"Name": "Default",
},
{
"Fields": [
{
"Key": "startDateTime",
"StringValue": "1970-01-01T01:00:00",
},
{"Key": "period", "StringValue": "1 Day"},
{"Key": "type", "StringValue": "Schedule"},
],
"Id": "DefaultSchedule",
"Name": "RunOnce",
},
],
"PipelineTags": [],
},
"Type": "AWS::DataPipeline::Pipeline",
}
},
}
cf_conn = boto.cloudformation.connect_to_region("us-east-1")
template_json = json.dumps(dp_template)
stack_id = cf_conn.create_stack("test_stack", template_body=template_json)
dp_conn = boto.datapipeline.connect_to_region("us-east-1")
data_pipelines = dp_conn.list_pipelines()
data_pipelines["pipelineIdList"].should.have.length_of(1)
data_pipelines["pipelineIdList"][0]["name"].should.equal("testDataPipeline")
stack_resources = cf_conn.list_stack_resources(stack_id)
stack_resources.should.have.length_of(1)
stack_resources[0].physical_resource_id.should.equal(
data_pipelines["pipelineIdList"][0]["id"]
)
@mock_cloudformation
@mock_lambda
def test_lambda_function():
    # Use Python source here because the mocked Lambda backend only supports Python execution.
lambda_code = """
def lambda_handler(event, context):
return (event, context)
"""
template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"lambdaTest": {
"Type": "AWS::Lambda::Function",
"Properties": {
"Code": {
# CloudFormation expects a string as ZipFile, not a ZIP file base64-encoded
"ZipFile": {"Fn::Join": ["\n", lambda_code.splitlines()]}
},
"Handler": "lambda_function.handler",
"Description": "Test function",
"MemorySize": 128,
"Role": {"Fn::GetAtt": ["MyRole", "Arn"]},
"Runtime": "python2.7",
"Environment": {"Variables": {"TEST_ENV_KEY": "test-env-val"}},
"ReservedConcurrentExecutions": 10,
},
},
"MyRole": {
"Type": "AWS::IAM::Role",
"Properties": {
"AssumeRolePolicyDocument": {
"Statement": [
{
"Action": ["sts:AssumeRole"],
"Effect": "Allow",
"Principal": {"Service": ["ec2.amazonaws.com"]},
}
]
}
},
},
},
}
template_json = json.dumps(template)
cf_conn = boto3.client("cloudformation", "us-east-1")
cf_conn.create_stack(StackName="test_stack", TemplateBody=template_json)
conn = boto3.client("lambda", "us-east-1")
result = conn.list_functions()
result["Functions"].should.have.length_of(1)
result["Functions"][0]["Description"].should.equal("Test function")
result["Functions"][0]["Handler"].should.equal("lambda_function.handler")
result["Functions"][0]["MemorySize"].should.equal(128)
result["Functions"][0]["Runtime"].should.equal("python2.7")
result["Functions"][0]["Environment"].should.equal(
{"Variables": {"TEST_ENV_KEY": "test-env-val"}}
)
function_name = result["Functions"][0]["FunctionName"]
result = conn.get_function(FunctionName=function_name)
result["Concurrency"]["ReservedConcurrentExecutions"].should.equal(10)
def _make_zipfile(func_str):
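    # Build an in-memory ZIP archive containing the given source as
    # lambda_function.py and return its raw bytes.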
zip_output = io.BytesIO()
zip_file = zipfile.ZipFile(zip_output, "w", zipfile.ZIP_DEFLATED)
zip_file.writestr("lambda_function.py", func_str)
zip_file.close()
zip_output.seek(0)
return zip_output.read()
@mock_cloudformation
@mock_s3
@mock_lambda
def test_lambda_layer():
    # Use Python source here because the mocked Lambda backend only supports Python execution.
layer_code = """
def lambda_handler(event, context):
return (event, context)
"""
region = "us-east-1"
bucket_name = "test_bucket"
s3_conn = boto3.client("s3", region)
s3_conn.create_bucket(Bucket=bucket_name)
zip_content = _make_zipfile(layer_code)
s3_conn.put_object(Bucket=bucket_name, Key="test.zip", Body=zip_content)
template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"lambdaTest": {
"Type": "AWS::Lambda::LayerVersion",
"Properties": {
"Content": {"S3Bucket": bucket_name, "S3Key": "test.zip",},
"LayerName": "testLayer",
"Description": "Test Layer",
"CompatibleRuntimes": ["python2.7", "python3.6"],
"LicenseInfo": "MIT",
},
},
},
}
template_json = json.dumps(template)
cf_conn = boto3.client("cloudformation", region)
cf_conn.create_stack(StackName="test_stack", TemplateBody=template_json)
lambda_conn = boto3.client("lambda", region)
result = lambda_conn.list_layers()
layer_name = result["Layers"][0]["LayerName"]
result = lambda_conn.list_layer_versions(LayerName=layer_name)
result["LayerVersions"][0].pop("CreatedDate")
result["LayerVersions"].should.equal(
[
{
"Version": 1,
"LayerVersionArn": "arn:aws:lambda:{}:{}:layer:{}:1".format(
region, ACCOUNT_ID, layer_name
),
"CompatibleRuntimes": ["python2.7", "python3.6"],
"Description": "Test Layer",
"LicenseInfo": "MIT",
}
]
)
@mock_cloudformation
@mock_ec2
def test_nat_gateway():
ec2_conn = boto3.client("ec2", "us-east-1")
vpc_id = ec2_conn.create_vpc(CidrBlock="10.0.0.0/16")["Vpc"]["VpcId"]
subnet_id = ec2_conn.create_subnet(CidrBlock="10.0.1.0/24", VpcId=vpc_id)["Subnet"][
"SubnetId"
]
route_table_id = ec2_conn.create_route_table(VpcId=vpc_id)["RouteTable"][
"RouteTableId"
]
template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"NAT": {
"DependsOn": "vpcgatewayattachment",
"Type": "AWS::EC2::NatGateway",
"Properties": {
"AllocationId": {"Fn::GetAtt": ["EIP", "AllocationId"]},
"SubnetId": subnet_id,
},
},
"EIP": {"Type": "AWS::EC2::EIP", "Properties": {"Domain": "vpc"}},
"Route": {
"Type": "AWS::EC2::Route",
"Properties": {
"RouteTableId": route_table_id,
"DestinationCidrBlock": "0.0.0.0/0",
"NatGatewayId": {"Ref": "NAT"},
},
},
"internetgateway": {"Type": "AWS::EC2::InternetGateway"},
"vpcgatewayattachment": {
"Type": "AWS::EC2::VPCGatewayAttachment",
"Properties": {
"InternetGatewayId": {"Ref": "internetgateway"},
"VpcId": vpc_id,
},
},
},
}
cf_conn = boto3.client("cloudformation", "us-east-1")
cf_conn.create_stack(StackName="test_stack", TemplateBody=json.dumps(template))
stack_resources = cf_conn.list_stack_resources(StackName="test_stack")
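    # Pick out the NAT gateway and route resources from the stack resource
    # summaries returned above.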
nat_gateway_resource = stack_resources.get("StackResourceSummaries")[0]
for resource in stack_resources["StackResourceSummaries"]:
if resource["ResourceType"] == "AWS::EC2::NatGateway":
nat_gateway_resource = resource
elif resource["ResourceType"] == "AWS::EC2::Route":
route_resource = resource
result = ec2_conn.describe_nat_gateways()
result["NatGateways"].should.have.length_of(1)
result["NatGateways"][0]["VpcId"].should.equal(vpc_id)
result["NatGateways"][0]["SubnetId"].should.equal(subnet_id)
result["NatGateways"][0]["State"].should.equal("available")
result["NatGateways"][0]["NatGatewayId"].should.equal(
nat_gateway_resource.get("PhysicalResourceId")
)
route_resource.get("PhysicalResourceId").should.contain("rtb-")
@mock_cloudformation()
@mock_kms()
def test_stack_kms():
kms_key_template = {
"Resources": {
"kmskey": {
"Properties": {
"Description": "A kms key",
"EnableKeyRotation": True,
"Enabled": True,
"KeyPolicy": "a policy",
},
"Type": "AWS::KMS::Key",
}
}
}
kms_key_template_json = json.dumps(kms_key_template)
cf_conn = boto3.client("cloudformation", "us-east-1")
cf_conn.create_stack(StackName="test_stack", TemplateBody=kms_key_template_json)
kms_conn = boto3.client("kms", "us-east-1")
keys = kms_conn.list_keys()["Keys"]
len(keys).should.equal(1)
result = kms_conn.describe_key(KeyId=keys[0]["KeyId"])
result["KeyMetadata"]["Enabled"].should.equal(True)
result["KeyMetadata"]["KeyUsage"].should.equal("ENCRYPT_DECRYPT")
@mock_cloudformation()
@mock_ec2()
def test_stack_spot_fleet():
conn = boto3.client("ec2", "us-east-1")
vpc = conn.create_vpc(CidrBlock="10.0.0.0/16")["Vpc"]
subnet = conn.create_subnet(
VpcId=vpc["VpcId"], CidrBlock="10.0.0.0/16", AvailabilityZone="us-east-1a"
)["Subnet"]
subnet_id = subnet["SubnetId"]
spot_fleet_template = {
"Resources": {
"SpotFleet": {
"Type": "AWS::EC2::SpotFleet",
"Properties": {
"SpotFleetRequestConfigData": {
"IamFleetRole": "arn:aws:iam::{}:role/fleet".format(ACCOUNT_ID),
"SpotPrice": "0.12",
"TargetCapacity": 6,
"AllocationStrategy": "diversified",
"LaunchSpecifications": [
{
"EbsOptimized": "false",
"InstanceType": "t2.small",
"ImageId": EXAMPLE_AMI_ID,
"SubnetId": subnet_id,
"WeightedCapacity": "2",
"SpotPrice": "0.13",
},
{
"EbsOptimized": "true",
"InstanceType": "t2.large",
"ImageId": EXAMPLE_AMI_ID,
"Monitoring": {"Enabled": "true"},
"SecurityGroups": [{"GroupId": "sg-123"}],
"SubnetId": subnet_id,
"IamInstanceProfile": {
"Arn": "arn:aws:iam::{}:role/fleet".format(
ACCOUNT_ID
)
},
"WeightedCapacity": "4",
"SpotPrice": "10.00",
},
],
}
},
}
}
}
spot_fleet_template_json = json.dumps(spot_fleet_template)
cf_conn = boto3.client("cloudformation", "us-east-1")
stack_id = cf_conn.create_stack(
StackName="test_stack", TemplateBody=spot_fleet_template_json
)["StackId"]
stack_resources = cf_conn.list_stack_resources(StackName=stack_id)
stack_resources["StackResourceSummaries"].should.have.length_of(1)
spot_fleet_id = stack_resources["StackResourceSummaries"][0]["PhysicalResourceId"]
spot_fleet_requests = conn.describe_spot_fleet_requests(
SpotFleetRequestIds=[spot_fleet_id]
)["SpotFleetRequestConfigs"]
len(spot_fleet_requests).should.equal(1)
spot_fleet_request = spot_fleet_requests[0]
spot_fleet_request["SpotFleetRequestState"].should.equal("active")
spot_fleet_config = spot_fleet_request["SpotFleetRequestConfig"]
spot_fleet_config["SpotPrice"].should.equal("0.12")
spot_fleet_config["TargetCapacity"].should.equal(6)
spot_fleet_config["IamFleetRole"].should.equal(
"arn:aws:iam::{}:role/fleet".format(ACCOUNT_ID)
)
spot_fleet_config["AllocationStrategy"].should.equal("diversified")
spot_fleet_config["FulfilledCapacity"].should.equal(6.0)
len(spot_fleet_config["LaunchSpecifications"]).should.equal(2)
launch_spec = spot_fleet_config["LaunchSpecifications"][0]
launch_spec["EbsOptimized"].should.equal(False)
launch_spec["ImageId"].should.equal(EXAMPLE_AMI_ID)
launch_spec["InstanceType"].should.equal("t2.small")
launch_spec["SubnetId"].should.equal(subnet_id)
launch_spec["SpotPrice"].should.equal("0.13")
launch_spec["WeightedCapacity"].should.equal(2.0)
@mock_cloudformation()
@mock_ec2()
def test_stack_spot_fleet_should_figure_out_default_price():
conn = boto3.client("ec2", "us-east-1")
vpc = conn.create_vpc(CidrBlock="10.0.0.0/16")["Vpc"]
subnet = conn.create_subnet(
VpcId=vpc["VpcId"], CidrBlock="10.0.0.0/16", AvailabilityZone="us-east-1a"
)["Subnet"]
subnet_id = subnet["SubnetId"]
spot_fleet_template = {
"Resources": {
"SpotFleet1": {
"Type": "AWS::EC2::SpotFleet",
"Properties": {
"SpotFleetRequestConfigData": {
"IamFleetRole": "arn:aws:iam::{}:role/fleet".format(ACCOUNT_ID),
"TargetCapacity": 6,
"AllocationStrategy": "diversified",
"LaunchSpecifications": [
{
"EbsOptimized": "false",
"InstanceType": "t2.small",
"ImageId": EXAMPLE_AMI_ID,
"SubnetId": subnet_id,
"WeightedCapacity": "2",
},
{
"EbsOptimized": "true",
"InstanceType": "t2.large",
"ImageId": EXAMPLE_AMI_ID,
"Monitoring": {"Enabled": "true"},
"SecurityGroups": [{"GroupId": "sg-123"}],
"SubnetId": subnet_id,
"IamInstanceProfile": {
"Arn": "arn:aws:iam::{}:role/fleet".format(
ACCOUNT_ID
)
},
"WeightedCapacity": "4",
},
],
}
},
}
}
}
spot_fleet_template_json = json.dumps(spot_fleet_template)
cf_conn = boto3.client("cloudformation", "us-east-1")
stack_id = cf_conn.create_stack(
StackName="test_stack", TemplateBody=spot_fleet_template_json
)["StackId"]
stack_resources = cf_conn.list_stack_resources(StackName=stack_id)
stack_resources["StackResourceSummaries"].should.have.length_of(1)
spot_fleet_id = stack_resources["StackResourceSummaries"][0]["PhysicalResourceId"]
spot_fleet_requests = conn.describe_spot_fleet_requests(
SpotFleetRequestIds=[spot_fleet_id]
)["SpotFleetRequestConfigs"]
len(spot_fleet_requests).should.equal(1)
spot_fleet_request = spot_fleet_requests[0]
spot_fleet_request["SpotFleetRequestState"].should.equal("active")
spot_fleet_config = spot_fleet_request["SpotFleetRequestConfig"]
assert "SpotPrice" not in spot_fleet_config
len(spot_fleet_config["LaunchSpecifications"]).should.equal(2)
launch_spec1 = spot_fleet_config["LaunchSpecifications"][0]
launch_spec2 = spot_fleet_config["LaunchSpecifications"][1]
assert "SpotPrice" not in launch_spec1
assert "SpotPrice" not in launch_spec2
@mock_ec2
@mock_elbv2
@mock_cloudformation
def test_stack_elbv2_resources_integration():
alb_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Outputs": {
"albdns": {
"Description": "Load balanacer DNS",
"Value": {"Fn::GetAtt": ["alb", "DNSName"]},
},
"albname": {
"Description": "Load balancer name",
"Value": {"Fn::GetAtt": ["alb", "LoadBalancerName"]},
},
"canonicalhostedzoneid": {
"Description": "Load balancer canonical hosted zone ID",
"Value": {"Fn::GetAtt": ["alb", "CanonicalHostedZoneID"]},
},
},
"Resources": {
"alb": {
"Type": "AWS::ElasticLoadBalancingV2::LoadBalancer",
"Properties": {
"Name": "myelbv2",
"Scheme": "internet-facing",
"Subnets": [{"Ref": "mysubnet"}],
"SecurityGroups": [{"Ref": "mysg"}],
"Type": "application",
"IpAddressType": "ipv4",
},
},
"mytargetgroup1": {
"Type": "AWS::ElasticLoadBalancingV2::TargetGroup",
"Properties": {
"HealthCheckIntervalSeconds": 30,
"HealthCheckPath": "/status",
"HealthCheckPort": 80,
"HealthCheckProtocol": "HTTP",
"HealthCheckTimeoutSeconds": 5,
"HealthyThresholdCount": 30,
"UnhealthyThresholdCount": 5,
"Matcher": {"HttpCode": "200,201"},
"Name": "mytargetgroup1",
"Port": 80,
"Protocol": "HTTP",
"TargetType": "instance",
"Targets": [{"Id": {"Ref": "ec2instance", "Port": 80}}],
"VpcId": {"Ref": "myvpc"},
},
},
"mytargetgroup2": {
"Type": "AWS::ElasticLoadBalancingV2::TargetGroup",
"Properties": {
"HealthCheckIntervalSeconds": 30,
"HealthCheckPath": "/status",
"HealthCheckPort": 8080,
"HealthCheckProtocol": "HTTP",
"HealthCheckTimeoutSeconds": 5,
"HealthyThresholdCount": 30,
"UnhealthyThresholdCount": 5,
"Name": "mytargetgroup2",
"Port": 8080,
"Protocol": "HTTP",
"TargetType": "instance",
"Targets": [{"Id": {"Ref": "ec2instance", "Port": 8080}}],
"VpcId": {"Ref": "myvpc"},
},
},
"listener": {
"Type": "AWS::ElasticLoadBalancingV2::Listener",
"Properties": {
"DefaultActions": [
{"Type": "forward", "TargetGroupArn": {"Ref": "mytargetgroup1"}}
],
"LoadBalancerArn": {"Ref": "alb"},
"Port": "80",
"Protocol": "HTTP",
},
},
"myvpc": {
"Type": "AWS::EC2::VPC",
"Properties": {"CidrBlock": "10.0.0.0/16"},
},
"mysubnet": {
"Type": "AWS::EC2::Subnet",
"Properties": {"CidrBlock": "10.0.0.0/27", "VpcId": {"Ref": "myvpc"}},
},
"mysg": {
"Type": "AWS::EC2::SecurityGroup",
"Properties": {
"GroupName": "mysg",
"GroupDescription": "test security group",
"VpcId": {"Ref": "myvpc"},
},
},
"ec2instance": {
"Type": "AWS::EC2::Instance",
"Properties": {"ImageId": EXAMPLE_AMI_ID, "UserData": "some user data"},
},
},
}
alb_template_json = json.dumps(alb_template)
cfn_conn = boto3.client("cloudformation", "us-west-1")
cfn_conn.create_stack(StackName="elb_stack", TemplateBody=alb_template_json)
elbv2_conn = boto3.client("elbv2", "us-west-1")
load_balancers = elbv2_conn.describe_load_balancers()["LoadBalancers"]
len(load_balancers).should.equal(1)
load_balancers[0]["LoadBalancerName"].should.equal("myelbv2")
load_balancers[0]["Scheme"].should.equal("internet-facing")
load_balancers[0]["Type"].should.equal("application")
load_balancers[0]["IpAddressType"].should.equal("ipv4")
target_groups = sorted(
elbv2_conn.describe_target_groups()["TargetGroups"],
key=lambda tg: tg["TargetGroupName"],
) # sort to do comparison with indexes
len(target_groups).should.equal(2)
target_groups[0]["HealthCheckIntervalSeconds"].should.equal(30)
target_groups[0]["HealthCheckPath"].should.equal("/status")
target_groups[0]["HealthCheckPort"].should.equal("80")
target_groups[0]["HealthCheckProtocol"].should.equal("HTTP")
target_groups[0]["HealthCheckTimeoutSeconds"].should.equal(5)
target_groups[0]["HealthyThresholdCount"].should.equal(30)
target_groups[0]["UnhealthyThresholdCount"].should.equal(5)
target_groups[0]["Matcher"].should.equal({"HttpCode": "200,201"})
target_groups[0]["TargetGroupName"].should.equal("mytargetgroup1")
target_groups[0]["Port"].should.equal(80)
target_groups[0]["Protocol"].should.equal("HTTP")
target_groups[0]["TargetType"].should.equal("instance")
target_groups[1]["HealthCheckIntervalSeconds"].should.equal(30)
target_groups[1]["HealthCheckPath"].should.equal("/status")
target_groups[1]["HealthCheckPort"].should.equal("8080")
target_groups[1]["HealthCheckProtocol"].should.equal("HTTP")
target_groups[1]["HealthCheckTimeoutSeconds"].should.equal(5)
target_groups[1]["HealthyThresholdCount"].should.equal(30)
target_groups[1]["UnhealthyThresholdCount"].should.equal(5)
target_groups[1]["Matcher"].should.equal({"HttpCode": "200"})
target_groups[1]["TargetGroupName"].should.equal("mytargetgroup2")
target_groups[1]["Port"].should.equal(8080)
target_groups[1]["Protocol"].should.equal("HTTP")
target_groups[1]["TargetType"].should.equal("instance")
listeners = elbv2_conn.describe_listeners(
LoadBalancerArn=load_balancers[0]["LoadBalancerArn"]
)["Listeners"]
len(listeners).should.equal(1)
listeners[0]["LoadBalancerArn"].should.equal(load_balancers[0]["LoadBalancerArn"])
listeners[0]["Port"].should.equal(80)
listeners[0]["Protocol"].should.equal("HTTP")
listeners[0]["DefaultActions"].should.equal(
[{"Type": "forward", "TargetGroupArn": target_groups[0]["TargetGroupArn"]}]
)
# test outputs
stacks = cfn_conn.describe_stacks(StackName="elb_stack")["Stacks"]
len(stacks).should.equal(1)
dns = list(
filter(lambda item: item["OutputKey"] == "albdns", stacks[0]["Outputs"])
)[0]
name = list(
filter(lambda item: item["OutputKey"] == "albname", stacks[0]["Outputs"])
)[0]
dns["OutputValue"].should.equal(load_balancers[0]["DNSName"])
name["OutputValue"].should.equal(load_balancers[0]["LoadBalancerName"])
@mock_dynamodb2
@mock_cloudformation
def test_stack_dynamodb_resources_integration():
dynamodb_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"myDynamoDBTable": {
"Type": "AWS::DynamoDB::Table",
"Properties": {
"AttributeDefinitions": [
{"AttributeName": "Album", "AttributeType": "S"},
{"AttributeName": "Artist", "AttributeType": "S"},
{"AttributeName": "Sales", "AttributeType": "N"},
{"AttributeName": "NumberOfSongs", "AttributeType": "N"},
],
"KeySchema": [
{"AttributeName": "Album", "KeyType": "HASH"},
{"AttributeName": "Artist", "KeyType": "RANGE"},
],
"ProvisionedThroughput": {
"ReadCapacityUnits": "5",
"WriteCapacityUnits": "5",
},
"TableName": "myTableName",
"GlobalSecondaryIndexes": [
{
"IndexName": "myGSI",
"KeySchema": [
{"AttributeName": "Sales", "KeyType": "HASH"},
{"AttributeName": "Artist", "KeyType": "RANGE"},
],
"Projection": {
"NonKeyAttributes": ["Album", "NumberOfSongs"],
"ProjectionType": "INCLUDE",
},
"ProvisionedThroughput": {
"ReadCapacityUnits": "5",
"WriteCapacityUnits": "5",
},
},
{
"IndexName": "myGSI2",
"KeySchema": [
{"AttributeName": "NumberOfSongs", "KeyType": "HASH"},
{"AttributeName": "Sales", "KeyType": "RANGE"},
],
"Projection": {
"NonKeyAttributes": ["Album", "Artist"],
"ProjectionType": "INCLUDE",
},
"ProvisionedThroughput": {
"ReadCapacityUnits": "5",
"WriteCapacityUnits": "5",
},
},
],
"LocalSecondaryIndexes": [
{
"IndexName": "myLSI",
"KeySchema": [
{"AttributeName": "Album", "KeyType": "HASH"},
{"AttributeName": "Sales", "KeyType": "RANGE"},
],
"Projection": {
"NonKeyAttributes": ["Artist", "NumberOfSongs"],
"ProjectionType": "INCLUDE",
},
}
],
"StreamSpecification": {"StreamViewType": "KEYS_ONLY"},
},
}
},
}
dynamodb_template_json = json.dumps(dynamodb_template)
cfn_conn = boto3.client("cloudformation", "us-east-1")
cfn_conn.create_stack(
StackName="dynamodb_stack", TemplateBody=dynamodb_template_json
)
dynamodb_client = boto3.client("dynamodb", region_name="us-east-1")
table_desc = dynamodb_client.describe_table(TableName="myTableName")["Table"]
table_desc["StreamSpecification"].should.equal(
{"StreamEnabled": True, "StreamViewType": "KEYS_ONLY",}
)
dynamodb_conn = boto3.resource("dynamodb", region_name="us-east-1")
table = dynamodb_conn.Table("myTableName")
table.name.should.equal("myTableName")
table.put_item(
Item={"Album": "myAlbum", "Artist": "myArtist", "Sales": 10, "NumberOfSongs": 5}
)
response = table.get_item(Key={"Album": "myAlbum", "Artist": "myArtist"})
response["Item"]["Album"].should.equal("myAlbum")
response["Item"]["Sales"].should.equal(Decimal("10"))
response["Item"]["NumberOfSongs"].should.equal(Decimal("5"))
response["Item"]["Album"].should.equal("myAlbum")
@mock_cloudformation
@mock_logs
@mock_s3
def test_create_log_group_using_fntransform():
s3_resource = boto3.resource("s3")
s3_resource.create_bucket(
Bucket="owi-common-cf",
CreateBucketConfiguration={"LocationConstraint": "us-west-2"},
)
s3_resource.Object("owi-common-cf", "snippets/test.json").put(
Body=json.dumps({"lgname": {"name": "some-log-group"}})
)
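    # The JSON snippet uploaded above is pulled into the template's Mappings
    # section below via the AWS::Include transform.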
template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Mappings": {
"EnvironmentMapping": {
"Fn::Transform": {
"Name": "AWS::Include",
"Parameters": {"Location": "s3://owi-common-cf/snippets/test.json"},
}
}
},
"Resources": {
"LogGroup": {
"Properties": {
"LogGroupName": {
"Fn::FindInMap": ["EnvironmentMapping", "lgname", "name"]
},
"RetentionInDays": 90,
},
"Type": "AWS::Logs::LogGroup",
}
},
}
cf_conn = boto3.client("cloudformation", "us-west-2")
cf_conn.create_stack(StackName="test_stack", TemplateBody=json.dumps(template))
logs_conn = boto3.client("logs", region_name="us-west-2")
log_group = logs_conn.describe_log_groups()["logGroups"][0]
log_group["logGroupName"].should.equal("some-log-group")
log_group["retentionInDays"].should.be.equal(90)
@mock_cloudformation
@mock_events
def test_stack_events_create_rule_integration():
events_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"Event": {
"Type": "AWS::Events::Rule",
"Properties": {
"Name": "quick-fox",
"State": "ENABLED",
"ScheduleExpression": "rate(5 minutes)",
},
}
},
}
cf_conn = boto3.client("cloudformation", "us-west-2")
cf_conn.create_stack(
StackName="test_stack", TemplateBody=json.dumps(events_template)
)
rules = boto3.client("events", "us-west-2").list_rules()
rules["Rules"].should.have.length_of(1)
rules["Rules"][0]["Name"].should.equal("quick-fox")
rules["Rules"][0]["State"].should.equal("ENABLED")
rules["Rules"][0]["ScheduleExpression"].should.equal("rate(5 minutes)")
@mock_cloudformation
@mock_events
def test_stack_events_delete_rule_integration():
events_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"Event": {
"Type": "AWS::Events::Rule",
"Properties": {
"Name": "quick-fox",
"State": "ENABLED",
"ScheduleExpression": "rate(5 minutes)",
},
}
},
}
cf_conn = boto3.client("cloudformation", "us-west-2")
cf_conn.create_stack(
StackName="test_stack", TemplateBody=json.dumps(events_template)
)
rules = boto3.client("events", "us-west-2").list_rules()
rules["Rules"].should.have.length_of(1)
cf_conn.delete_stack(StackName="test_stack")
rules = boto3.client("events", "us-west-2").list_rules()
rules["Rules"].should.have.length_of(0)
@mock_cloudformation
@mock_events
def test_stack_events_create_rule_without_name_integration():
events_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"Event": {
"Type": "AWS::Events::Rule",
"Properties": {
"State": "ENABLED",
"ScheduleExpression": "rate(5 minutes)",
},
}
},
}
cf_conn = boto3.client("cloudformation", "us-west-2")
cf_conn.create_stack(
StackName="test_stack", TemplateBody=json.dumps(events_template)
)
rules = boto3.client("events", "us-west-2").list_rules()
rules["Rules"][0]["Name"].should.contain("test_stack-Event-")
@mock_cloudformation
@mock_events
@mock_logs
def test_stack_events_create_rule_as_target():
events_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"SecurityGroup": {
"Type": "AWS::Logs::LogGroup",
"Properties": {
"LogGroupName": {"Fn::GetAtt": ["Event", "Arn"]},
"RetentionInDays": 3,
},
},
"Event": {
"Type": "AWS::Events::Rule",
"Properties": {
"State": "ENABLED",
"ScheduleExpression": "rate(5 minutes)",
},
},
},
}
cf_conn = boto3.client("cloudformation", "us-west-2")
cf_conn.create_stack(
StackName="test_stack", TemplateBody=json.dumps(events_template)
)
rules = boto3.client("events", "us-west-2").list_rules()
log_groups = boto3.client("logs", "us-west-2").describe_log_groups()
rules["Rules"][0]["Name"].should.contain("test_stack-Event-")
log_groups["logGroups"][0]["logGroupName"].should.equal(rules["Rules"][0]["Arn"])
log_groups["logGroups"][0]["retentionInDays"].should.equal(3)
@mock_cloudformation
@mock_events
def test_stack_events_update_rule_integration():
events_template = Template(
"""{
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"Event": {
"Type": "AWS::Events::Rule",
"Properties": {
"Name": "$Name",
"State": "$State",
"ScheduleExpression": "rate(5 minutes)",
},
}
},
} """
)
cf_conn = boto3.client("cloudformation", "us-west-2")
original_template = events_template.substitute(Name="Foo", State="ENABLED")
cf_conn.create_stack(StackName="test_stack", TemplateBody=original_template)
rules = boto3.client("events", "us-west-2").list_rules()
rules["Rules"].should.have.length_of(1)
rules["Rules"][0]["Name"].should.equal("Foo")
rules["Rules"][0]["State"].should.equal("ENABLED")
update_template = events_template.substitute(Name="Bar", State="DISABLED")
cf_conn.update_stack(StackName="test_stack", TemplateBody=update_template)
rules = boto3.client("events", "us-west-2").list_rules()
rules["Rules"].should.have.length_of(1)
rules["Rules"][0]["Name"].should.equal("Bar")
rules["Rules"][0]["State"].should.equal("DISABLED")
@mock_cloudformation
@mock_autoscaling
def test_autoscaling_propagate_tags():
autoscaling_group_with_tags = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"AutoScalingGroup": {
"Type": "AWS::AutoScaling::AutoScalingGroup",
"Properties": {
"AutoScalingGroupName": "test-scaling-group",
"DesiredCapacity": 1,
"MinSize": 1,
"MaxSize": 50,
"LaunchConfigurationName": "test-launch-config",
"AvailabilityZones": ["us-east-1a"],
"Tags": [
{
"Key": "test-key-propagate",
"Value": "test",
"PropagateAtLaunch": True,
},
{
"Key": "test-key-no-propagate",
"Value": "test",
"PropagateAtLaunch": False,
},
],
},
"DependsOn": "LaunchConfig",
},
"LaunchConfig": {
"Type": "AWS::AutoScaling::LaunchConfiguration",
"Properties": {
"LaunchConfigurationName": "test-launch-config",
"ImageId": EXAMPLE_AMI_ID,
"InstanceType": "t2.medium",
},
},
},
}
boto3.client("cloudformation", "us-east-1").create_stack(
StackName="propagate_tags_test",
TemplateBody=json.dumps(autoscaling_group_with_tags),
)
autoscaling = boto3.client("autoscaling", "us-east-1")
autoscaling_group_tags = autoscaling.describe_auto_scaling_groups()[
"AutoScalingGroups"
][0]["Tags"]
propagation_dict = {
tag["Key"]: tag["PropagateAtLaunch"] for tag in autoscaling_group_tags
}
assert propagation_dict["test-key-propagate"]
assert not propagation_dict["test-key-no-propagate"]
@mock_cloudformation
@mock_events
def test_stack_eventbus_create_from_cfn_integration():
eventbus_template = """{
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"EventBus": {
"Type": "AWS::Events::EventBus",
"Properties": {
"Name": "MyCustomEventBus"
},
}
},
}"""
cf_conn = boto3.client("cloudformation", "us-west-2")
cf_conn.create_stack(StackName="test_stack", TemplateBody=eventbus_template)
event_buses = boto3.client("events", "us-west-2").list_event_buses(
NamePrefix="MyCustom"
)
event_buses["EventBuses"].should.have.length_of(1)
event_buses["EventBuses"][0]["Name"].should.equal("MyCustomEventBus")
@mock_cloudformation
@mock_events
def test_stack_events_delete_eventbus_integration():
eventbus_template = """{
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"EventBus": {
"Type": "AWS::Events::EventBus",
"Properties": {
"Name": "MyCustomEventBus"
},
}
},
}"""
cf_conn = boto3.client("cloudformation", "us-west-2")
cf_conn.create_stack(StackName="test_stack", TemplateBody=eventbus_template)
event_buses = boto3.client("events", "us-west-2").list_event_buses(
NamePrefix="MyCustom"
)
event_buses["EventBuses"].should.have.length_of(1)
cf_conn.delete_stack(StackName="test_stack")
event_buses = boto3.client("events", "us-west-2").list_event_buses(
NamePrefix="MyCustom"
)
event_buses["EventBuses"].should.have.length_of(0)
@mock_cloudformation
@mock_events
def test_stack_events_delete_from_cfn_integration():
eventbus_template = Template(
"""{
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"$resource_name": {
"Type": "AWS::Events::EventBus",
"Properties": {
"Name": "$name"
},
}
},
}"""
)
cf_conn = boto3.client("cloudformation", "us-west-2")
original_template = eventbus_template.substitute(
{"resource_name": "original", "name": "MyCustomEventBus"}
)
cf_conn.create_stack(StackName="test_stack", TemplateBody=original_template)
original_event_buses = boto3.client("events", "us-west-2").list_event_buses(
NamePrefix="MyCustom"
)
original_event_buses["EventBuses"].should.have.length_of(1)
original_eventbus = original_event_buses["EventBuses"][0]
updated_template = eventbus_template.substitute(
{"resource_name": "updated", "name": "AnotherEventBus"}
)
cf_conn.update_stack(StackName="test_stack", TemplateBody=updated_template)
update_event_buses = boto3.client("events", "us-west-2").list_event_buses(
NamePrefix="AnotherEventBus"
)
update_event_buses["EventBuses"].should.have.length_of(1)
update_event_buses["EventBuses"][0]["Arn"].shouldnt.equal(original_eventbus["Arn"])
@mock_cloudformation
@mock_events
def test_stack_events_update_from_cfn_integration():
eventbus_template = Template(
"""{
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"EventBus": {
"Type": "AWS::Events::EventBus",
"Properties": {
"Name": "$name"
},
}
},
}"""
)
cf_conn = boto3.client("cloudformation", "us-west-2")
original_template = eventbus_template.substitute({"name": "MyCustomEventBus"})
cf_conn.create_stack(StackName="test_stack", TemplateBody=original_template)
original_event_buses = boto3.client("events", "us-west-2").list_event_buses(
NamePrefix="MyCustom"
)
original_event_buses["EventBuses"].should.have.length_of(1)
original_eventbus = original_event_buses["EventBuses"][0]
updated_template = eventbus_template.substitute({"name": "NewEventBus"})
cf_conn.update_stack(StackName="test_stack", TemplateBody=updated_template)
update_event_buses = boto3.client("events", "us-west-2").list_event_buses(
NamePrefix="NewEventBus"
)
update_event_buses["EventBuses"].should.have.length_of(1)
update_event_buses["EventBuses"][0]["Name"].should.equal("NewEventBus")
update_event_buses["EventBuses"][0]["Arn"].shouldnt.equal(original_eventbus["Arn"])
@mock_cloudformation
@mock_events
def test_stack_events_get_attribute_integration():
eventbus_template = """{
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"EventBus": {
"Type": "AWS::Events::EventBus",
"Properties": {
"Name": "MyEventBus"
},
}
},
"Outputs": {
"bus_arn": {"Value": {"Fn::GetAtt": ["EventBus", "Arn"]}},
"bus_name": {"Value": {"Fn::GetAtt": ["EventBus", "Name"]}},
}
}"""
cf = boto3.client("cloudformation", "us-west-2")
events = boto3.client("events", "us-west-2")
cf.create_stack(StackName="test_stack", TemplateBody=eventbus_template)
stack = cf.describe_stacks(StackName="test_stack")["Stacks"][0]
outputs = stack["Outputs"]
output_arn = list(filter(lambda item: item["OutputKey"] == "bus_arn", outputs))[0]
output_name = list(filter(lambda item: item["OutputKey"] == "bus_name", outputs))[0]
event_bus = events.list_event_buses(NamePrefix="MyEventBus")["EventBuses"][0]
output_arn["OutputValue"].should.equal(event_bus["Arn"])
output_name["OutputValue"].should.equal(event_bus["Name"])
@mock_cloudformation
@mock_dynamodb2
def test_dynamodb_table_creation():
CFN_TEMPLATE = {
"Outputs": {"MyTableName": {"Value": {"Ref": "MyTable"}},},
"Resources": {
"MyTable": {
"Type": "AWS::DynamoDB::Table",
"Properties": {
"KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}],
"AttributeDefinitions": [
{"AttributeName": "id", "AttributeType": "S"}
],
"BillingMode": "PAY_PER_REQUEST",
},
},
},
}
stack_name = "foobar"
cfn = boto3.client("cloudformation", "us-west-2")
cfn.create_stack(StackName=stack_name, TemplateBody=json.dumps(CFN_TEMPLATE))
# Wait until moto creates the stack
waiter = cfn.get_waiter("stack_create_complete")
waiter.wait(StackName=stack_name)
# Verify the TableName is part of the outputs
stack = cfn.describe_stacks(StackName=stack_name)["Stacks"][0]
outputs = stack["Outputs"]
outputs.should.have.length_of(1)
outputs[0]["OutputKey"].should.equal("MyTableName")
outputs[0]["OutputValue"].should.contain("foobar")
# Assert the table is created
ddb = boto3.client("dynamodb", "us-west-2")
table_names = ddb.list_tables()["TableNames"]
table_names.should.equal([outputs[0]["OutputValue"]])
| {
"content_hash": "2b56dded4b802a3e54601ebcdd4c68d9",
"timestamp": "",
"source": "github",
"line_count": 2902,
"max_line_length": 99,
"avg_line_length": 36.936250861474846,
"alnum_prop": 0.5476028323801883,
"repo_name": "william-richard/moto",
"id": "43248669fcedd83d82b83d886dee1894e358dba5",
"size": "107189",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_cloudformation/test_cloudformation_stack_integration.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "443"
},
{
"name": "HTML",
"bytes": "5848"
},
{
"name": "Java",
"bytes": "1688"
},
{
"name": "JavaScript",
"bytes": "756"
},
{
"name": "Makefile",
"bytes": "1213"
},
{
"name": "Python",
"bytes": "6637538"
},
{
"name": "Ruby",
"bytes": "188"
},
{
"name": "Scala",
"bytes": "782"
},
{
"name": "Shell",
"bytes": "797"
}
],
"symlink_target": ""
} |
"""Tests for email dashboard handler."""
from core.domain import config_services
from core.platform import models
from core.tests import test_utils
import feconf
(user_models,) = models.Registry.import_models([models.NAMES.user])
taskqueue_services = models.Registry.import_taskqueue_services()
class EmailDashboardDataHandlerTests(test_utils.GenericTestBase):
SUBMITTER_EMAIL = 'submit@example.com'
SUBMITTER_USERNAME = 'submit'
USER_A_EMAIL = 'a@example.com'
USER_A_USERNAME = 'a'
def setUp(self):
super(EmailDashboardDataHandlerTests, self).setUp()
self.signup(self.SUBMITTER_EMAIL, self.SUBMITTER_USERNAME)
self.submitter_id = self.get_user_id_from_email(
self.SUBMITTER_EMAIL)
self.signup(self.USER_A_EMAIL, self.USER_A_USERNAME)
self.user_a_id = self.get_user_id_from_email(
self.USER_A_EMAIL)
config_services.set_property(
self.submitter_id, 'whitelisted_email_senders',
[self.SUBMITTER_USERNAME])
def test_that_handler_works_correctly(self):
self.login(self.SUBMITTER_EMAIL)
csrf_token = self.get_csrf_token_from_response(
self.testapp.get('/emaildashboard'))
self.post_json(
'/emaildashboarddatahandler', {
'data': {
'has_not_logged_in_for_n_days': 2,
'inactive_in_last_n_days': 5,
'created_at_least_n_exps': 1,
'created_fewer_than_n_exps': None,
'edited_at_least_n_exps': None,
'edited_fewer_than_n_exps': 2
}}, csrf_token)
self.logout()
query_models = user_models.UserQueryModel.query().fetch()
# Check that model is stored.
self.assertEqual(len(query_models), 1)
query_model = query_models[0]
# Check that correct information is stored in model.
self.assertEqual(query_model.has_not_logged_in_for_n_days, 2)
self.assertEqual(query_model.inactive_in_last_n_days, 5)
self.assertEqual(query_model.created_at_least_n_exps, 1)
self.assertEqual(query_model.edited_fewer_than_n_exps, 2)
self.assertIsNone(query_model.edited_at_least_n_exps)
self.assertIsNone(query_model.created_fewer_than_n_exps)
self.assertEqual(query_model.submitter_id, self.submitter_id)
# Check that MR job has been enqueued.
self.assertEqual(
self.count_jobs_in_taskqueue(
queue_name=taskqueue_services.QUEUE_NAME_DEFAULT), 1)
with self.swap(feconf, 'CAN_SEND_EMAILS', True):
self.process_and_flush_pending_tasks()
def test_that_page_is_accessible_to_authorised_users_only(self):
# Make sure that only authorised users can access query pages.
self.login(self.USER_A_EMAIL)
with self.assertRaisesRegexp(Exception, '401 Unauthorized'):
self.testapp.get('/emaildashboard')
with self.assertRaisesRegexp(Exception, '401 Unauthorized'):
self.testapp.get('/querystatuscheck')
self.logout()
def test_that_exception_is_raised_for_invalid_input(self):
self.login(self.SUBMITTER_EMAIL)
csrf_token = self.get_csrf_token_from_response(
self.testapp.get('/emaildashboard'))
with self.assertRaisesRegexp(Exception, '400 Invalid input for query.'):
self.post_json(
'/emaildashboarddatahandler', {
'data': {
'has_not_logged_in_for_n_days': 2,
'inactive_in_last_n_days': 5,
'created_at_least_n_exps': 1,
'created_fewer_than_n_exps': None,
'edited_at_least_n_exps': None,
'fake_key': 2
}}, csrf_token)
with self.assertRaisesRegexp(Exception, '400 Invalid input for query.'):
self.post_json(
'/emaildashboarddatahandler', {
'data': {
'has_not_logged_in_for_n_days': 2,
'inactive_in_last_n_days': 5,
'created_at_least_n_exps': 'invalid_value',
'created_fewer_than_n_exps': 'None',
'edited_at_least_n_exps': None
}}, csrf_token)
self.logout()
| {
"content_hash": "00084403a2d58eb6bddeb2b95f3a27c3",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 80,
"avg_line_length": 42.161904761904765,
"alnum_prop": 0.5861757397786311,
"repo_name": "edallison/oppia",
"id": "449c572455284a0cb1eeae219bc760859c3f16e6",
"size": "5032",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "core/controllers/email_dashboard_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "89408"
},
{
"name": "HTML",
"bytes": "734706"
},
{
"name": "JavaScript",
"bytes": "2359899"
},
{
"name": "Python",
"bytes": "2675686"
},
{
"name": "Shell",
"bytes": "46217"
}
],
"symlink_target": ""
} |
"""Widget Pool for the ``django-metrics-dashboard`` app."""
from django.core.exceptions import ImproperlyConfigured
from django_load.core import load
from metrics_dashboard.exceptions import WidgetAlreadyRegistered
from metrics_dashboard.widget_base import DashboardWidgetBase
class DashboardWidgetPool(object):
"""
A pool of registered DashboardWidgets.
This class should only be instantiated at the end of this file, therefore
serving as a singleton. All other files should just import the instance
created in this file.
Inspired by
https://github.com/divio/django-cms/blob/develop/cms/plugin_pool.py
"""
def __init__(self):
self.widgets = {}
self.discovered = False
def discover_widgets(self):
"""
Searches for widgets in all INSTALLED_APPS.
This will be called when you call ``get_all_widgets`` for the first
time.
"""
if self.discovered:
return
load('dashboard_widgets')
self.discovered = True
def get_widgets(self):
"""Discovers all widgets and returns them."""
self.discover_widgets()
return self.widgets
def register_widget(self, widget_cls):
"""
Registers the given widget.
Widgets must inherit ``DashboardWidgetBase`` and you cannot register
the same widget twice.
:widget_cls: A class that inherits ``DashboardWidgetBase``.
"""
if not issubclass(widget_cls, DashboardWidgetBase):
raise ImproperlyConfigured(
'DashboardWidgets must be subclasses of DashboardWidgetBase,'
' {0} is not.'.format(widget_cls))
widget = widget_cls()
widget_name = widget.get_name()
if widget_name in self.widgets:
raise WidgetAlreadyRegistered(
                'Cannot register {0}, a plugin with the name {1} is already '
'registered.'.format(widget_cls, widget_name))
self.widgets[widget_name] = widget
def unregister_widget(self, widget_cls):
"""Unregisters the given widget."""
if widget_cls.__name__ in self.widgets:
del self.widgets[widget_cls().get_name()]
dashboard_widget_pool = DashboardWidgetPool()
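# Usage sketch (illustrative, not part of the original module): an app would
# typically declare and register its widgets in its own dashboard_widgets.py
# module, which discover_widgets() loads via django-load, e.g.:
#
#     from metrics_dashboard.widget_base import DashboardWidgetBase
#     from metrics_dashboard.widget_pool import dashboard_widget_pool
#
#     class SignupsWidget(DashboardWidgetBase):
#         """Hypothetical example widget; a real one would implement whatever
#         hooks DashboardWidgetBase requires."""
#
#     dashboard_widget_pool.register_widget(SignupsWidget)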
| {
"content_hash": "3565559ee507939b08e6d6a235ee92c2",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 78,
"avg_line_length": 30.743243243243242,
"alnum_prop": 0.6426373626373626,
"repo_name": "bitmazk/django-metrics-dashboard",
"id": "784c05b0fd36e7a19fed852cc4d1bc0b3f693703",
"size": "2275",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "metrics_dashboard/widget_pool.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "38836"
},
{
"name": "Shell",
"bytes": "5137"
}
],
"symlink_target": ""
} |
import re
def is_hex_string(s):
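    # Accept only strings of the form "0x" followed by one or more hex digits;
    # re.match anchors at the start and the trailing "$" anchors at the end.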
pattern = "0x([0-9a-fA-F]+)$"
return bool(re.match(pattern, s))
def validate_proof_response_fields(res):
for o in res:
for s in o["proofArray"]:
assert is_hex_string(s)
assert is_hex_string(o["rootHash"])
assert is_hex_string(o["leaf"])
assert is_hex_string(o["payee"])
assert is_hex_string(o["tokenAddress"])
def check_duplicates_for_roots(models):
payment_cycles = set()
roots = set()
for root in models:
payment_cycles.add(root.paymentCycle)
roots.add(root.rootHash)
assert len(models) == len(roots)
assert len(models) == len(payment_cycles)
def check_duplicates_for_proofs(res):
leafs = set()
for proof in res:
leafs.add(proof["leaf"])
assert len(res) == len(leafs)
| {
"content_hash": "92dda4e194592deaa2ea400a28a85a18",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 47,
"avg_line_length": 25.151515151515152,
"alnum_prop": 0.6096385542168675,
"repo_name": "cardstack/cardstack",
"id": "58ba2c0d46414489eeb22d9c4286073a7b666e62",
"size": "830",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "packages/cardpay-reward-indexer/tests/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "327952"
},
{
"name": "Dockerfile",
"bytes": "5072"
},
{
"name": "HCL",
"bytes": "53170"
},
{
"name": "HTML",
"bytes": "12660"
},
{
"name": "Handlebars",
"bytes": "309845"
},
{
"name": "JavaScript",
"bytes": "678790"
},
{
"name": "PLpgSQL",
"bytes": "57036"
},
{
"name": "Procfile",
"bytes": "48"
},
{
"name": "Python",
"bytes": "207635"
},
{
"name": "Ruby",
"bytes": "1753"
},
{
"name": "Shell",
"bytes": "5425"
},
{
"name": "TypeScript",
"bytes": "3614447"
}
],
"symlink_target": ""
} |
import os
import sys
import math
import optparse
import dukutil
def read_unicode_data(unidata, catsinc, catsexc, filterfunc):
"Read UnicodeData.txt, including lines matching catsinc unless excluded by catsexc or filterfunc."
res = []
f = open(unidata, 'rb')
def filter_none(cp):
return True
if filterfunc is None:
filterfunc = filter_none
# The Unicode parsing is slow enough to warrant some speedups.
exclude_cat_exact = {}
for cat in catsexc:
exclude_cat_exact[cat] = True
include_cat_exact = {}
for cat in catsinc:
include_cat_exact[cat] = True
for line in f:
#line = line.strip()
parts = line.split(';')
codepoint = parts[0]
if not filterfunc(long(codepoint, 16)):
continue
category = parts[2]
if exclude_cat_exact.has_key(category):
continue # quick reject
rejected = False
for cat in catsexc:
if category.startswith(cat) or codepoint == cat:
rejected = True
break
if rejected:
continue
if include_cat_exact.has_key(category):
res.append(line)
continue
accepted = False
for cat in catsinc:
if category.startswith(cat) or codepoint == cat:
accepted = True
break
if accepted:
res.append(line)
f.close()
# Sort based on Unicode codepoint
def mycmp(a,b):
t1 = a.split(';')
t2 = b.split(';')
n1 = long(t1[0], 16)
n2 = long(t2[0], 16)
return cmp(n1, n2)
res.sort(cmp=mycmp)
return res
def scan_ranges(lines):
"Scan continuous ranges from (filtered) UnicodeData.txt lines."
ranges = []
range_start = None
prev = None
for line in lines:
t = line.split(';')
n = long(t[0], 16)
if range_start is None:
range_start = n
else:
if n == prev + 1:
# continue range
pass
else:
ranges.append((range_start, prev))
range_start = n
prev = n
if range_start is not None:
ranges.append((range_start, prev))
return ranges
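# Added illustration: scan_ranges() collapses consecutive codepoints into
# inclusive ranges, e.g. lines for U+0041, U+0042 and U+0044 would yield
# [(0x41, 0x42), (0x44, 0x44)].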
def generate_png(lines, fname):
"Generate an illustrative PNG of the character set."
from PIL import Image
m = {}
for line in lines:
t = line.split(';')
n = long(t[0], 16)
m[n] = 1
codepoints = 0x10ffff + 1
width = int(256)
height = int(math.ceil(float(codepoints) / float(width)))
im = Image.new('RGB', (width, height))
black = (0,0,0)
white = (255,255,255)
for cp in xrange(codepoints):
y = cp / width
x = cp % width
if m.has_key(long(cp)):
im.putpixel((x,y), black)
else:
im.putpixel((x,y), white)
im.save(fname)
def generate_match_table1(ranges):
"Unused match table format."
# This is an earlier match table format which is no longer used.
# IdentifierStart-UnicodeLetter has 445 ranges and generates a
# match table of 2289 bytes.
data = []
prev_re = None
def genrange(rs, re):
if (rs > re):
raise Exception('assumption failed: rs=%d re=%d' % (rs, re))
while True:
now = re - rs + 1
if now > 255:
now = 255
data.append(now) # range now
data.append(0) # skip 0
rs = rs + now
else:
data.append(now) # range now
break
def genskip(ss, se):
if (ss > se):
raise Exception('assumption failed: ss=%d se=%s' % (ss, se))
while True:
now = se - ss + 1
if now > 255:
now = 255
data.append(now) # skip now
data.append(0) # range 0
ss = ss + now
else:
data.append(now) # skip now
break
for rs, re in ranges:
if prev_re is not None:
genskip(prev_re + 1, rs - 1)
genrange(rs, re)
prev_re = re
num_entries = len(data)
# header: start of first range
# num entries
hdr = []
hdr.append(ranges[0][0] >> 8) # XXX: check that not 0x10000 or over
    hdr.append(ranges[0][0] & 0xff)
hdr.append(num_entries >> 8)
hdr.append(num_entries & 0xff)
return hdr + data
def generate_match_table2(ranges):
"Unused match table format."
# Another attempt at a match table which is also unused.
# Total tables for all current classes is now 1472 bytes.
data = []
def enc(x):
while True:
if x < 0x80:
data.append(x)
break
data.append(0x80 + (x & 0x7f))
x = x >> 7
prev_re = 0
for rs, re in ranges:
r1 = rs - prev_re # 1 or above (no unjoined ranges)
r2 = re - rs # 0 or above
enc(r1)
enc(r2)
prev_re = re
enc(0) # end marker
return data
def generate_match_table3(ranges):
"Current match table format."
# Yet another attempt, similar to generate_match_table2 except
# in packing format.
#
# Total match size now (at time of writing): 1194 bytes.
#
# This is the current encoding format used in duk_lexer.c.
be = dukutil.BitEncoder()
freq = [0] * (0x10ffff + 1) # informative
def enc(x):
freq[x] += 1
if x <= 0x0e:
# 4-bit encoding
be.bits(x, 4)
return
x -= 0x0e + 1
if x <= 0xfd:
# 12-bit encoding
be.bits(0x0f, 4)
be.bits(x, 8)
return
x -= 0xfd + 1
if x <= 0xfff:
# 24-bit encoding
be.bits(0x0f, 4)
be.bits(0xfe, 8)
be.bits(x, 12)
return
x -= 0xfff + 1
if True:
# 36-bit encoding
be.bits(0x0f, 4)
be.bits(0xff, 8)
be.bits(x, 24)
return
raise Exception('cannot encode')
prev_re = 0
for rs, re in ranges:
r1 = rs - prev_re # 1 or above (no unjoined ranges)
r2 = re - rs # 0 or above
enc(r1)
enc(r2)
prev_re = re
enc(0) # end marker
data, nbits = be.getBytes(), be.getNumBits()
return data, freq
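# Added worked example of the enc() packing above (informative only):
#   value 5     -> 4 bits: 0101
#   value 20    -> 0xF marker + 8 bits for (20 - 15) = 5
#   value 300   -> 0xF, 0xFE markers + 12 bits for (300 - 15 - 254) = 31
#   value 10000 -> 0xF, 0xFF markers + 24 bits for (10000 - 15 - 254 - 4096) = 5635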
def main():
parser = optparse.OptionParser()
parser.add_option('--unicode-data', dest='unicode_data') # UnicodeData.txt
parser.add_option('--special-casing', dest='special_casing') # SpecialCasing.txt
parser.add_option('--include-categories', dest='include_categories')
parser.add_option('--exclude-categories', dest='exclude_categories', default='NONE')
parser.add_option('--out-source', dest='out_source')
parser.add_option('--out-header', dest='out_header')
parser.add_option('--out-png', dest='out_png')
parser.add_option('--table-name', dest='table_name', default='match_table')
(opts, args) = parser.parse_args()
unidata = opts.unicode_data
catsinc = []
if opts.include_categories != '':
catsinc = opts.include_categories.split(',')
catsexc = []
if opts.exclude_categories != 'NONE':
catsexc = opts.exclude_categories.split(',')
print 'CATSEXC: %s' % repr(catsexc)
print 'CATSINC: %s' % repr(catsinc)
# pseudocategories
filter_ascii = ('ASCII' in catsexc)
filter_nonbmp = ('NONBMP' in catsexc)
# Read raw result
def filter1(x):
if filter_ascii and x <= 0x7f:
# exclude ascii
return False
if filter_nonbmp and x >= 0x10000:
# exclude non-bmp
return False
return True
print('read unicode data')
uni_filtered = read_unicode_data(unidata, catsinc, catsexc, filter1)
print('done reading unicode data')
# Raw output
#print('RAW OUTPUT:')
#print('===========')
#print('\n'.join(uni_filtered))
# Scan ranges
#print('')
#print('RANGES:')
#print('=======')
ranges = scan_ranges(uni_filtered)
#for i in ranges:
# if i[0] == i[1]:
# print('0x%04x' % i[0])
# else:
# print('0x%04x ... 0x%04x' % (i[0], i[1]))
#print('')
print('%d ranges total' % len(ranges))
# Generate match table
#print('')
#print('MATCH TABLE:')
#print('============')
#matchtable1 = generate_match_table1(ranges)
#matchtable2 = generate_match_table2(ranges)
matchtable3, freq = generate_match_table3(ranges)
#print 'match table: %s' % repr(matchtable3)
print 'match table length: %d bytes' % len(matchtable3)
print 'encoding freq:'
for i in xrange(len(freq)):
if freq[i] == 0:
continue
print ' %6d: %d' % (i, freq[i])
print('')
print('MATCH C TABLE -> file %s' % repr(opts.out_header))
# Create C source and header files
genc = dukutil.GenerateC()
genc.emitHeader('extract_chars.py')
genc.emitArray(matchtable3, opts.table_name, size=len(matchtable3), typename='duk_uint8_t', intvalues=True, const=True)
if opts.out_source is not None:
f = open(opts.out_source, 'wb')
f.write(genc.getString())
f.close()
genc = dukutil.GenerateC()
genc.emitHeader('extract_chars.py')
genc.emitLine('extern const duk_uint8_t %s[%d];' % (opts.table_name, len(matchtable3)))
if opts.out_header is not None:
f = open(opts.out_header, 'wb')
f.write(genc.getString())
f.close()
# Image (for illustrative purposes only)
if opts.out_png is not None:
generate_png(uni_filtered, opts.out_png)
if __name__ == '__main__':
main()
| {
"content_hash": "b1ada1fc05cbd34f11434292cf61b022",
"timestamp": "",
"source": "github",
"line_count": 368,
"max_line_length": 123,
"avg_line_length": 26.842391304347824,
"alnum_prop": 0.5355335088074509,
"repo_name": "markand/duktape",
"id": "861fc930aa5601262acf488d5d2fff111070bb03",
"size": "10712",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "tools/extract_chars.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "2859797"
},
{
"name": "C++",
"bytes": "18532"
},
{
"name": "CSS",
"bytes": "32733"
},
{
"name": "CoffeeScript",
"bytes": "1029"
},
{
"name": "HTML",
"bytes": "4438824"
},
{
"name": "Java",
"bytes": "3043"
},
{
"name": "JavaScript",
"bytes": "8666999"
},
{
"name": "Lua",
"bytes": "19160"
},
{
"name": "Makefile",
"bytes": "47049"
},
{
"name": "Perl",
"bytes": "177"
},
{
"name": "Perl6",
"bytes": "22748"
},
{
"name": "Python",
"bytes": "291537"
},
{
"name": "Ruby",
"bytes": "18384"
},
{
"name": "Shell",
"bytes": "24267"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, print_function, unicode_literals
import os
import io
import unittest
import subprocess
import tempfile
import shutil
import re
from vcstools.svn import SvnClient
class SvnClientTestSetups(unittest.TestCase):
@classmethod
def setUpClass(self):
self.root_directory = tempfile.mkdtemp()
self.directories = dict(setUp=self.root_directory)
self.remote_path = os.path.join(self.root_directory, "remote")
self.init_path = os.path.join(self.root_directory, "init")
# create a "remote" repo
subprocess.check_call("svnadmin create %s" % self.remote_path, shell=True, cwd=self.root_directory)
self.local_root_url = "file://localhost" + self.remote_path
self.local_url = self.local_root_url + "/trunk"
# create an "init" repo to populate remote repo
subprocess.check_call("svn checkout %s %s" % (self.local_root_url, self.init_path), shell=True, cwd=self.root_directory)
for cmd in [
"mkdir trunk",
"mkdir branches",
"mkdir tags",
"svn add trunk branches tags",
"touch trunk/fixed.txt",
"svn add trunk/fixed.txt",
"svn commit -m initial"]:
subprocess.check_call(cmd, shell=True, cwd=self.init_path)
self.local_version_init = "-r1"
# files to be modified in "local" repo
for cmd in [
"touch trunk/modified.txt",
"touch trunk/modified-fs.txt",
"svn add trunk/modified.txt trunk/modified-fs.txt",
"svn commit -m initial"]:
subprocess.check_call(cmd, shell=True, cwd=self.init_path)
self.local_version_second = "-r2"
for cmd in [
"touch trunk/deleted.txt",
"touch trunk/deleted-fs.txt",
"svn add trunk/deleted.txt trunk/deleted-fs.txt",
"svn commit -m modified"]:
subprocess.check_call(cmd, shell=True, cwd=self.init_path)
self.local_path = os.path.join(self.root_directory, "local")
@classmethod
def tearDownClass(self):
for d in self.directories:
shutil.rmtree(self.directories[d])
def tearDown(self):
if os.path.exists(self.local_path):
shutil.rmtree(self.local_path)
class SvnClientTest(SvnClientTestSetups):
def test_get_url_by_reading(self):
client = SvnClient(self.local_path)
client.checkout(self.local_url)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(self.local_url, client.get_url())
#self.assertEqual(client.get_version(), self.local_version)
self.assertEqual(client.get_version("PREV"), "-r2")
self.assertEqual(client.get_version("2"), "-r2")
self.assertEqual(client.get_version("-r2"), "-r2")
        # test invalid client and repo without url
client = SvnClient(os.path.join(self.remote_path, 'foo'))
self.assertEqual(None, client.get_url())
def test_get_type_name(self):
local_path = "/tmp/dummy"
client = SvnClient(local_path)
self.assertEqual(client.get_vcs_type_name(), 'svn')
def test_get_url_nonexistant(self):
local_path = "/tmp/dummy"
client = SvnClient(local_path)
self.assertEqual(client.get_url(), None)
def test_checkout(self):
url = self.local_url
client = SvnClient(self.local_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(client.get_path(), self.local_path)
self.assertEqual(client.get_url(), url)
def test_checkout_dir_exists(self):
url = self.local_url
client = SvnClient(self.local_path)
self.assertFalse(client.path_exists())
os.makedirs(self.local_path)
self.assertTrue(client.checkout(url))
# non-empty
self.assertFalse(client.checkout(url))
def test_checkout_emptyversion(self):
url = self.local_url
client = SvnClient(self.local_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url, version=''))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(client.get_path(), self.local_path)
self.assertEqual(client.get_url(), url)
self.assertTrue(client.update(None))
self.assertTrue(client.update(""))
def test_checkout_specific_version_and_update_short(self):
"using just a number as version"
url = self.local_url
version = "3"
client = SvnClient(self.local_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url, version))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(client.get_version(), "-r3")
new_version = '2'
self.assertTrue(client.update(new_version))
self.assertEqual(client.get_version(), "-r2")
def testDiffClean(self):
client = SvnClient(self.remote_path)
self.assertEquals('', client.get_diff())
def testStatusClean(self):
client = SvnClient(self.remote_path)
self.assertEquals('', client.get_status())
def test_get_environment_metadata(self):
# Verify that metadata is generated
directory = tempfile.mkdtemp()
self.directories['local'] = directory
local_path = os.path.join(directory, "local")
client = SvnClient(local_path)
self.assertTrue('version' in client.get_environment_metadata())
class SvnClientLogTest(SvnClientTestSetups):
@classmethod
def setUpClass(self):
SvnClientTestSetups.setUpClass()
client = SvnClient(self.local_path)
client.checkout(self.local_url)
def test_get_log_defaults(self):
client = SvnClient(self.local_path)
client.checkout(self.local_url)
log = client.get_log()
self.assertEquals(3, len(log))
self.assertEquals('modified', log[0]['message'])
for key in ['id', 'author', 'date', 'message']:
self.assertTrue(log[0][key] is not None, key)
# svn logs don't have email, but key should be in dict
self.assertTrue(log[0]['email'] is None)
def test_get_log_limit(self):
client = SvnClient(self.local_path)
client.checkout(self.local_url)
log = client.get_log(limit=1)
self.assertEquals(1, len(log))
self.assertEquals('modified', log[0]['message'])
def test_get_log_path(self):
client = SvnClient(self.local_path)
client.checkout(self.local_url)
log = client.get_log(relpath='fixed.txt')
self.assertEquals('initial', log[0]['message'])
class SvnDiffStatClientTest(SvnClientTestSetups):
@classmethod
def setUpClass(self):
SvnClientTestSetups.setUpClass()
client = SvnClient(self.local_path)
client.checkout(self.local_url)
# after setting up "local" repo, change files and make some changes
subprocess.check_call("rm deleted-fs.txt", shell=True, cwd=self.local_path)
subprocess.check_call("svn rm deleted.txt", shell=True, cwd=self.local_path)
f = io.open(os.path.join(self.local_path, "modified.txt"), 'a')
f.write('0123456789abcdef')
f.close()
f = io.open(os.path.join(self.local_path, "modified-fs.txt"), 'a')
f.write('0123456789abcdef')
f.close()
f = io.open(os.path.join(self.local_path, "added-fs.txt"), 'w')
f.write('0123456789abcdef')
f.close()
f = io.open(os.path.join(self.local_path, "added.txt"), 'w')
f.write('0123456789abcdef')
f.close()
subprocess.check_call("svn add added.txt", shell=True, cwd=self.local_path)
def tearDown(self):
pass
def assertStatusListEqual(self, listexpect, listactual):
"""helper fun to check scm status output while discarding file ordering differences"""
lines_expect = listexpect.splitlines()
lines_actual = listactual.splitlines()
for line in lines_expect:
self.assertTrue(line in lines_actual, 'Missing entry %s in output %s' % (line, listactual))
for line in lines_actual:
            self.assertTrue(line in lines_expect, 'Superfluous entry %s in output %s' % (line, listactual))
def assertEqualDiffs(self, expected, actual):
"True if actual is similar enough to expected, minus svn properties"
def filter_block(block):
"""removes property information that varies between systems, not relevant fo runit test"""
newblock = []
for line in block.splitlines():
if re.search("[=+-\\@ ].*", line) == None:
break
else:
# new svn versions use different labels for added
# files (working copy) vs (revision x)
fixedline = re.sub('\(revision [0-9]+\)', '(working copy)', line)
newblock.append(fixedline)
return "\n".join(newblock)
filtered_actual_blocks = []
# A block starts with \nIndex, and the actual diff goes up to the first line starting with [a-zA-Z], e.g. "Properties changed:"
for block in actual.split("\nIndex: "):
if filtered_actual_blocks != []:
# restore "Index: " removed by split()
block = "Index: " + block
block = filter_block(block)
filtered_actual_blocks.append(block)
expected_blocks = []
for block in expected.split("\nIndex: "):
if expected_blocks != []:
block = "Index: " + block
block = filter_block(block)
expected_blocks.append(block)
filtered = "\n".join(filtered_actual_blocks)
self.assertEquals(set(expected_blocks), set(filtered_actual_blocks))
def test_diff(self):
client = SvnClient(self.local_path)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqualDiffs('Index: added.txt\n===================================================================\n--- added.txt\t(revision 0)\n+++ added.txt\t(revision 0)\n@@ -0,0 +1 @@\n+0123456789abcdef\n\\ No newline at end of file\nIndex: modified-fs.txt\n===================================================================\n--- modified-fs.txt\t(revision 3)\n+++ modified-fs.txt\t(working copy)\n@@ -0,0 +1 @@\n+0123456789abcdef\n\\ No newline at end of file\nIndex: modified.txt\n===================================================================\n--- modified.txt\t(revision 3)\n+++ modified.txt\t(working copy)\n@@ -0,0 +1 @@\n+0123456789abcdef\n\\ No newline at end of file',
client.get_diff().rstrip())
def test_diff_relpath(self):
client = SvnClient(self.local_path)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqualDiffs('Index: local/added.txt\n===================================================================\n--- local/added.txt\t(revision 0)\n+++ local/added.txt\t(revision 0)\n@@ -0,0 +1 @@\n+0123456789abcdef\n\\ No newline at end of file\nIndex: local/modified-fs.txt\n===================================================================\n--- local/modified-fs.txt\t(revision 3)\n+++ local/modified-fs.txt\t(working copy)\n@@ -0,0 +1 @@\n+0123456789abcdef\n\\ No newline at end of file\nIndex: local/modified.txt\n===================================================================\n--- local/modified.txt\t(revision 3)\n+++ local/modified.txt\t(working copy)\n@@ -0,0 +1 @@\n+0123456789abcdef\n\\ No newline at end of file', client.get_diff(basepath=os.path.dirname(self.local_path)).rstrip())
def test_status(self):
client = SvnClient(self.local_path)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertStatusListEqual('A added.txt\nD deleted.txt\nM modified-fs.txt\n! deleted-fs.txt\nM modified.txt\n', client.get_status())
def test_status_relpath(self):
client = SvnClient(self.local_path)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertStatusListEqual('A local/added.txt\nD local/deleted.txt\nM local/modified-fs.txt\n! local/deleted-fs.txt\nM local/modified.txt\n', client.get_status(basepath=os.path.dirname(self.local_path)))
def test_status_untracked(self):
client = SvnClient(self.local_path)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertStatusListEqual('? added-fs.txt\nA added.txt\nD deleted.txt\nM modified-fs.txt\n! deleted-fs.txt\nM modified.txt\n', client.get_status(untracked=True))
class SvnExportRepositoryClientTest(SvnClientTestSetups):
@classmethod
def setUpClass(self):
SvnClientTestSetups.setUpClass()
client = SvnClient(self.local_path)
client.checkout(self.local_url)
self.basepath_export = os.path.join(self.root_directory, 'export')
def tearDown(self):
pass
def test_export_repository(self):
client = SvnClient(self.local_path)
self.assertTrue(
client.export_repository('',
self.basepath_export)
)
self.assertTrue(os.path.exists(self.basepath_export + '.tar.gz'))
self.assertFalse(os.path.exists(self.basepath_export + '.tar'))
self.assertFalse(os.path.exists(self.basepath_export))
| {
"content_hash": "69afe0c1c4c7ea2131044728cc63fafe",
"timestamp": "",
"source": "github",
"line_count": 321,
"max_line_length": 812,
"avg_line_length": 44.177570093457945,
"alnum_prop": 0.6110288414075171,
"repo_name": "k-okada/vcstools",
"id": "342a441892b0e9c8c9224c24e7a358417f772105",
"size": "15808",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/test_svn.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
import logging
ROUTER_STATIC_FILE = 'file'
ROUTER_STATIC_FILES = 'files'
ROUTER_STATIC_DIR = 'dir'
ROUTER_HEADER = 'headers'
ROUTER_PATH = "path"
ROUTER = 'routes'
logger = logging.getLogger('pfrock.static')
| {
"content_hash": "388e095fba135c5efe8dd3588d0875e5",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 43,
"avg_line_length": 19.181818181818183,
"alnum_prop": 0.7203791469194313,
"repo_name": "knightliao/pfrock",
"id": "f2257d76c2271fafc6a17681f2d6e866ac596461",
"size": "249",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pfrock-plugins/pfrock-static-plugin/pfrock_static_plugin/handlers/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "17141"
}
],
"symlink_target": ""
} |
from netforce.model import Model, fields, get_model
class ServiceType(Model):
_name = "service.type"
_string = "Service Type"
_key = ["code"]
_fields = {
"name": fields.Char("Name", required=True, search=True),
"code": fields.Char("Code", search=True),
"description": fields.Text("Description", search=True),
"parent_id": fields.Many2One("service.type", "Parent"),
}
_order = "name"
ServiceType.register()
| {
"content_hash": "341266d755e63a438db5ccff4d2643f8",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 64,
"avg_line_length": 29.0625,
"alnum_prop": 0.6129032258064516,
"repo_name": "bank-netforce/netforce",
"id": "c104f5831a8f4185d9272efa4f7aaa1c32014e19",
"size": "1570",
"binary": false,
"copies": "1",
"ref": "refs/heads/stable-3.1",
"path": "netforce_service/netforce_service/models/service_type.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "73"
},
{
"name": "CSS",
"bytes": "407336"
},
{
"name": "HTML",
"bytes": "478918"
},
{
"name": "Java",
"bytes": "11870"
},
{
"name": "JavaScript",
"bytes": "3712147"
},
{
"name": "Makefile",
"bytes": "353"
},
{
"name": "PHP",
"bytes": "2274"
},
{
"name": "Python",
"bytes": "3469514"
},
{
"name": "Roff",
"bytes": "15858"
},
{
"name": "Shell",
"bytes": "117"
}
],
"symlink_target": ""
} |
from theano import function, config, shared, sandbox
import theano.tensor as T
import numpy
import time
vlen = 10 * 30 * 768 # 10 x #cores x # threads per core
iters = 1000
rng = numpy.random.RandomState(22)
x = shared(numpy.asarray(rng.rand(vlen), config.floatX))
f = function([], T.exp(x))
print(f.maker.fgraph.toposort())
t0 = time.time()
for i in range(iters):
r = f()
t1 = time.time()
print("Looping %d times took %f seconds" % (iters, t1 - t0))
print("Result is %s" % (r,))
if numpy.any([isinstance(x.op, T.Elemwise) for x in f.maker.fgraph.toposort()]):
print('Used the cpu')
else:
print('Used the gpu') | {
"content_hash": "2b3ade62ffa4feefb44fbeaa8bc3ae4d",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 80,
"avg_line_length": 28.40909090909091,
"alnum_prop": 0.672,
"repo_name": "gabrielilharco/sketch-bot",
"id": "985e28c4e2958039c9e84c023445cf2fd64c24b5",
"size": "625",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "src/test_gpu.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "10447"
}
],
"symlink_target": ""
} |
"Base queue class"
# Things to think about:
# - timeout/visibility timeout (boto)
class BaseQueue(object):
"""
Abstract base class for queue backends.
"""
def read(self):
raise NotImplementedError
def write(self, message):
raise NotImplementedError
def __len__(self):
raise NotImplementedError
def create_queue():
raise NotImplementedError
def delete_queue(name):
raise NotImplementedError
def get_list():
raise NotImplementedError | {
"content_hash": "b6f90e4cf21703db9eb2a6feb5693f39",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 43,
"avg_line_length": 18.62962962962963,
"alnum_prop": 0.6739562624254473,
"repo_name": "danner/queues",
"id": "f5777647e0df5c6ce1dd319cc21d8bf38f238401",
"size": "503",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "queues/backends/base.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "15438"
}
],
"symlink_target": ""
} |
import os
from flask import Flask,render_template,url_for,request,session,redirect
from flask_login import LoginManager
from flask_bootstrap import Bootstrap
from flask_script import Manager,Shell
from flask_sqlalchemy import SQLAlchemy
from flask_mail import Mail
from flask_moment import Moment
from flask_socketio import SocketIO
from flask_gravatar import Gravatar
app = Flask(__name__)
basedir = os.path.abspath(os.path.dirname(__file__))
app.config['SQLALCHEMY_DATABASE_URI'] =\
'sqlite:///' + os.path.join(basedir, 'data.sqlite')
#app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://sql6140009:Y1912zwYwC@sql6.freemysqlhosting.net/sql6140009'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['SECRET_KEY'] = 'hard to guess string'
app.config['MAIL_SERVER'] = 'smtp.googlemail.com'
app.config['MAIL_PORT'] = 587
app.config['MAIL_USE_TLS'] = True
app.config['MAIL_USERNAME'] = os.environ.get('MAIL_USERNAME')
app.config['MAIL_PASSWORD'] = os.environ.get('MAIL_PASSWORD')
manager = Manager(app)
bootstrap = Bootstrap()
db = SQLAlchemy(app)
mail = Mail(app)
moment = Moment(app)
socketio = SocketIO(app)
gravatar = Gravatar(app)
login_manager = LoginManager()
login_manager.session_protection = 'strong'
login_manager.login_view = 'auth.login'
# app.config['SECRET_KEY'] = 'hard to guess string'
# app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
#app = create_app('DEVELOPMENT')
bootstrap.init_app(app)
#db.init_app(app)
login_manager.init_app(app)
from app import models
@app.route('/')
def index():
return render_template('index.html')
from app.auth.views import admin
app.register_blueprint(admin, url_prefix='/authentication')
from app.main.views import welcome
app.register_blueprint(welcome, url_prefix='/welcome')
from app.twitterAPI.views import api
app.register_blueprint(api, url_prefix='/api')
| {
"content_hash": "a9735eefc6ff4807441825a5f2811599",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 117,
"avg_line_length": 32.62068965517241,
"alnum_prop": 0.7563424947145877,
"repo_name": "sumedh123/debatify",
"id": "89d78b1a48e585a5353b33fa5344659ba9f8770a",
"size": "1892",
"binary": false,
"copies": "1",
"ref": "refs/heads/UI",
"path": "app/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5939"
},
{
"name": "CSS",
"bytes": "347155"
},
{
"name": "HTML",
"bytes": "102503"
},
{
"name": "JavaScript",
"bytes": "608373"
},
{
"name": "Python",
"bytes": "8393673"
},
{
"name": "Shell",
"bytes": "3298"
}
],
"symlink_target": ""
} |
import os
import imath
import IECore
import IECoreScene
import IECoreVDB
import GafferTest
import GafferScene
import GafferVDB
import GafferVDBTest
class LevelSetToMeshTest( GafferVDBTest.VDBTestCase ) :
def setUp( self ) :
GafferVDBTest.VDBTestCase.setUp( self )
self.sourcePath = os.path.join( self.dataDir, "sphere.vdb" )
self.sceneInterface = IECoreScene.SceneInterface.create( self.sourcePath, IECore.IndexedIO.OpenMode.Read )
def testCanConvertLevelSetToMesh( self ) :
sphere = GafferScene.Sphere()
meshToLevelSet = GafferVDB.MeshToLevelSet()
self.setFilter( meshToLevelSet, path='/sphere' )
meshToLevelSet["voxelSize"].setValue( 0.05 )
meshToLevelSet["in"].setInput( sphere["out"] )
obj = meshToLevelSet["out"].object( "sphere" )
self.assertTrue( isinstance( obj, IECoreVDB.VDBObject ) )
self.assertEqual( obj.gridNames(), ['surface'] )
grid = obj.findGrid( "surface" )
levelSetToMesh = GafferVDB.LevelSetToMesh()
self.setFilter( levelSetToMesh, path='/sphere' )
levelSetToMesh["in"].setInput( meshToLevelSet["out"] )
mesh = levelSetToMesh["out"].object( "sphere" )
self.assertTrue( isinstance( mesh, IECoreScene.MeshPrimitive) )
def testChangingIsoValueUpdatesBounds ( self ) :
sphere = GafferScene.Sphere()
sphere["radius"].setValue( 5 )
meshToLevelSet = GafferVDB.MeshToLevelSet()
self.setFilter( meshToLevelSet, path='/sphere' )
meshToLevelSet["voxelSize"].setValue( 0.05 )
meshToLevelSet["interiorBandwidth"].setValue( 100 )
meshToLevelSet["in"].setInput( sphere["out"] )
levelSetToMesh = GafferVDB.LevelSetToMesh()
self.setFilter( levelSetToMesh, path='/sphere' )
levelSetToMesh["in"].setInput( meshToLevelSet["out"] )
self.assertSceneValid( levelSetToMesh["out"] )
self.assertEqual( levelSetToMesh["adjustBounds"].getValue(), False )
self.assertEqual( levelSetToMesh["out"].bound( "/sphere" ), levelSetToMesh["in"].bound( "/sphere" ) )
levelSetToMesh["adjustBounds"].setValue( True )
self.assertSceneValid( levelSetToMesh["out"] )
self.assertEqual(
levelSetToMesh["out"].bound( "/sphere" ),
levelSetToMesh["out"].object( "/sphere" ).bound()
)
bound = levelSetToMesh["out"].bound( "/sphere" )
levelSetToMesh["isoValue"].setValue( -0.5 ) # Shrinks the output mesh
self.assertSceneValid( levelSetToMesh["out"] )
self.assertEqual(
levelSetToMesh["out"].bound( "/sphere" ),
levelSetToMesh["out"].object( "/sphere" ).bound()
)
self.assertTrue( bound.intersects( levelSetToMesh["out"].bound( "/sphere" ).min() ) )
self.assertTrue( bound.intersects( levelSetToMesh["out"].bound( "/sphere" ).max() ) )
def testIncreasingAdapativityDecreasesPolyCount( self ) :
sphere = GafferScene.Sphere()
sphere["radius"].setValue( 5 )
meshToLevelSet = GafferVDB.MeshToLevelSet()
self.setFilter( meshToLevelSet, path='/sphere' )
meshToLevelSet["voxelSize"].setValue( 0.05 )
meshToLevelSet["exteriorBandwidth"].setValue( 4.0 )
meshToLevelSet["interiorBandwidth"].setValue( 4.0 )
meshToLevelSet["in"].setInput( sphere["out"] )
levelSetToMesh = GafferVDB.LevelSetToMesh()
self.setFilter( levelSetToMesh, path='/sphere')
levelSetToMesh["in"].setInput( meshToLevelSet["out"] )
levelSetToMesh['adaptivity'].setValue(0.0)
self.assertTrue( 187000 <= len( levelSetToMesh['out'].object( "sphere" ).verticesPerFace ) <= 188000 )
levelSetToMesh['adaptivity'].setValue(1.0)
self.assertTrue( 2800 <= len( levelSetToMesh['out'].object( "sphere" ).verticesPerFace ) <= 3200 )
| {
"content_hash": "e88713dbb54cf2ed2ee8a25af59194db",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 108,
"avg_line_length": 36.103092783505154,
"alnum_prop": 0.7264420331239292,
"repo_name": "GafferHQ/gaffer",
"id": "a8d9bf23f2359d8d638368b1aa99dd38276a4847",
"size": "5317",
"binary": false,
"copies": "5",
"ref": "refs/heads/main",
"path": "python/GafferVDBTest/LevelSetToMeshTest.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "5790"
},
{
"name": "C",
"bytes": "61993"
},
{
"name": "C++",
"bytes": "9572701"
},
{
"name": "CMake",
"bytes": "85201"
},
{
"name": "GLSL",
"bytes": "6208"
},
{
"name": "Python",
"bytes": "10280178"
},
{
"name": "Ruby",
"bytes": "419"
},
{
"name": "Shell",
"bytes": "14580"
}
],
"symlink_target": ""
} |
from optparse import OptionParser
import myjson
from verbose import set_v
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-g", "--get", dest="get", default=None, help="get data from myjson.com")
parser.add_option("-u", "--update", dest="update", default=None, help="put data from myjson.com")
parser.add_option("-v", "--verbose", dest="verbose", action="store_true", \
default=False, help="set verbose output")
parser.add_option("-f", "--file", dest="file", default=None, help="file to upload binary data via base64, "
"also used for output directory in get")
parser.add_option("-i", "--input", dest="input", default=False, \
help="input gathered from arg instead of std input")
(options, args) = parser.parse_args()
set_v(options.verbose)
if options.get:
call = myjson.Get()
elif options.update:
call = myjson.Put()
else:
call = myjson.Post()
call.route(options)
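# Added example invocations (illustrative; exact ids/urls depend on myjson.py):
#   python myjsonstore.py -i '{"key": "value"}'         # POST new data
#   python myjsonstore.py -g <json-id> -f ./out_dir     # GET stored data
#   python myjsonstore.py -u <json-id> -i '{"k": "v"}'  # PUT/update existing data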
| {
"content_hash": "84a1ee5aa2a9c258513bdef7cbf76c0f",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 111,
"avg_line_length": 44.75,
"alnum_prop": 0.5782122905027933,
"repo_name": "lowrey/myjsonstore",
"id": "0c978d2e2502da9393bb9fbfae5918a298ed1ed0",
"size": "1107",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "myjsonstore.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "6122"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import logging, os, sets
from rapid_app import settings_app
from rapid_app.models import ManualDbHandler, PrintTitleDev
log = logging.getLogger(__name__)
class UpdateTitlesHelper( object ):
""" Manages views.update_production_easyA_titles() work. """
def __init__(self):
self.db_handler = ManualDbHandler()
def run_update( self, request ):
""" Calls the backup and update code.
Called by views.update_production_easyA_titles() """
log.debug( 'calling update_older_backup()' )
self.update_older_backup()
log.debug( 'calling update_backup()' )
self.update_backup()
log.debug( 'calling update_production_table()' )
self.update_production_table()
return
def update_older_backup( self ):
""" Copies data from backup table to older backup table.
Called by run_update() """
result = self.db_handler.run_sql( sql=unicode(os.environ['RAPID__BACKUP_COUNT_SQL']), connection_url=settings_app.DB_CONNECTION_URL )
if result[0][0] > 10000: # result is like `[(27010,)]`; don't backup if the count is way off
self.db_handler.run_sql(
sql=unicode(os.environ['RAPID__BACKUP_OLDER_DELETE_SQL']), connection_url=settings_app.DB_CONNECTION_URL )
if 'sqlite' in settings_app.DB_CONNECTION_URL:
self.db_handler.run_sql( sql='VACUUM;', connection_url=settings_app.DB_CONNECTION_URL )
self.db_handler.run_sql(
sql=unicode(os.environ['RAPID__BACKUP_OLDER_INSERT_SQL']), connection_url=settings_app.DB_CONNECTION_URL )
else:
log.info( 'not backing up because count is only, ```{}```'.format(result) )
return
def update_backup( self ):
""" Copies data from production table to backup table.
Called by run_update() """
result = self.db_handler.run_sql( sql=unicode(os.environ['RAPID__PRODUCTION_COUNT_SQL']), connection_url=settings_app.DB_CONNECTION_URL )
if result[0][0] > 10000: # result is like `[(27010,)]`; don't backup if the count is way off
self.db_handler.run_sql(
sql=unicode(os.environ['RAPID__BACKUP_DELETE_SQL']), connection_url=settings_app.DB_CONNECTION_URL )
if 'sqlite' in settings_app.DB_CONNECTION_URL:
self.db_handler.run_sql( sql='VACUUM;', connection_url=settings_app.DB_CONNECTION_URL )
self.db_handler.run_sql(
sql=unicode(os.environ['RAPID__BACKUP_INSERT_SQL']), connection_url=settings_app.DB_CONNECTION_URL )
else:
log.info( 'not backing up because count is only, ```{}```'.format(result) )
return
def update_production_table( self ):
""" Runs update-production sql.
Called by run_update() """
( rapid_keys, easya_keys, key_int ) = self._setup_vars() # setup
rapid_keys = self._populate_rapid_keys( rapid_keys ) # get rapid keys
easya_keys = self._populate_easya_keys( easya_keys, key_int ) # get easyA keys
( rapid_not_in_easya, easya_not_in_rapid ) = self._intersect_keys( rapid_keys, easya_keys) # intersect sets
self._add_rapid_entries( rapid_not_in_easya ) # insert new rapid records
self._remove_easya_entries( easya_not_in_rapid ) # run easyA deletions
return
def _setup_vars( self ):
""" Preps vars.
Called by update_production_table() """
rapid_keys = []
easya_keys = []
tuple_keys = { 'key': 0, 'issn': 1, 'start': 2, 'end': 3, 'location': 4, 'call_number': 5 }
key_int = tuple_keys['key'] # only using zero now, might use other tuple-elements later
return ( rapid_keys, easya_keys, key_int )
def _populate_rapid_keys( self, rapid_keys ):
""" Preps list of rapid keys.
Called by update_production_table() """
for title in PrintTitleDev.objects.all():
rapid_keys.append( title.key )
log.debug( 'len rapid_keys, {}'.format(len(rapid_keys)) )
return rapid_keys
def _populate_easya_keys( self, easya_keys, key_int ):
""" Preps list of easya keys.
Called by update_production_table() """
sql = 'SELECT * FROM `{}`'.format( unicode(os.environ['RAPID__TITLES_TABLE_NAME']) )
result = self.db_handler.run_sql( sql=sql, connection_url=settings_app.DB_CONNECTION_URL )
for row_tuple in result:
easya_keys.append( row_tuple[key_int] )
log.debug( 'len easya_keys, {}'.format(len(easya_keys)) )
return easya_keys
def _intersect_keys( self, rapid_keys, easya_keys):
""" Runs set work.
Called by update_production_table() """
rapid_not_in_easya = list( sets.Set(rapid_keys) - sets.Set(easya_keys) )
easya_not_in_rapid = list( sets.Set(easya_keys) - sets.Set(rapid_keys) )
log.debug( 'rapid_not_in_easya, {}'.format(rapid_not_in_easya) )
log.debug( 'easya_not_in_rapid, {}'.format(easya_not_in_rapid) )
return ( rapid_not_in_easya, easya_not_in_rapid )
def _add_rapid_entries( self, rapid_not_in_easya ):
""" Runs inserts of new records.
Called by update_production_table() """
for rapid_key in rapid_not_in_easya:
rapid_title = PrintTitleDev.objects.get( key=rapid_key )
sql = '''
INSERT INTO `{destination_table}` ( `key`, `issn`, `start`, `end`, `location`, `call_number` )
VALUES ( '{key}', '{issn}', '{start}', '{end}', '{building}', '{call_number}' );
'''.format( destination_table=unicode(os.environ['RAPID__TITLES_TABLE_NAME']), key=rapid_title.key, issn=rapid_title.issn, start=rapid_title.start, end=rapid_title.end, building=rapid_title.building, call_number=rapid_title.call_number )
self.db_handler.run_sql( sql=sql, connection_url=settings_app.DB_CONNECTION_URL )
log.debug( 'rapid additions to easyA complete' )
return
def _remove_easya_entries( self, easya_not_in_rapid ):
""" Runs deletion of old records.
Called by update_production_table() """
for easya_key in easya_not_in_rapid:
sql = '''
DELETE FROM `{destination_table}`
WHERE `key` = '{easya_key}'
LIMIT 1;
'''.format( destination_table=unicode(os.environ['RAPID__TITLES_TABLE_NAME']), easya_key=easya_key )
self.db_handler.run_sql( sql=sql, connection_url=settings_app.DB_CONNECTION_URL )
log.debug( 'easyA deletions complete' )
return
# def update_production_table( self ):
# """ Runs update-production sql.
# TODO: a more elegant way to do this would be to query both tables, do a set intersection, and then do the appropriate small loop of additions and deletes.
# Called by run_update() """
# ## load all new data to memory
# titles = PrintTitleDev.objects.all()
# ## iterate through source-set adding new records if needed
# for entry in titles:
# sql = '''
# SELECT * FROM `{destination_table}`
# WHERE `key` = '{key}'
# AND `issn` = '{issn}'
# AND `start` = {start}
# AND `end` = {end}
# AND `location` = '{location}'
# AND `call_number` = '{call_number}';
# '''.format( destination_table=unicode(os.environ['RAPID__TITLES_TABLE_NAME']), key=entry.key, issn=entry.issn, start=entry.start, end=entry.end, location=entry.location, call_number=entry.call_number )
# result = self.db_handler.run_sql( sql=sql, connection_url=settings_app.DB_CONNECTION_URL )
# if result == None:
# sql = '''
# INSERT INTO `{destination_table}` ( `key`, `issn`, `start`, `end`, `location`, `call_number` )
# VALUES ( '{key}', '{issn}', '{start}', '{end}', '{location}', '{call_number}' );
# '''.format( destination_table=unicode(os.environ['RAPID__TITLES_TABLE_NAME']), key=entry.key, issn=entry.issn, start=entry.start, end=entry.end, location=entry.location, call_number=entry.call_number )
# self.db_handler.run_sql( sql=sql, connection_url=settings_app.DB_CONNECTION_URL )
# ## iterate through destination-set deleting records if they're not in the source
# sql = '''SELECT * FROM `{}`;'''.format( unicode(os.environ['RAPID__TITLES_TABLE_NAME']) )
# result = self.db_handler.run_sql( sql=sql, connection_url=settings_app.DB_CONNECTION_URL )
# tuple_keys = {
# 'key': 0, 'issn': 1, 'start': 2, 'end': 3, 'location': 4, 'call_number': 5 }
# for tuple_entry in result:
# match = PrintTitleDev.objects.filter(
# key=tuple_keys['key'], issn=tuple_keys['issn'], start=int(tuple_keys['start']), end=int(tuple_keys['end']), building=tuple_keys['location'], call_number=tuple_keys['call_number'] )
# if match == []:
# sql = '''
# DELETE * FROM `{destination_table}`
# WHERE `key` = '{key}'
# AND `issn` = '{issn}'
# AND `start` = {start}
# AND `end` = {end}
# AND `location` = '{location}'
# AND `call_number` = '{call_number}'
# LIMIT 1;
# '''.format( destination_table=unicode(os.environ['RAPID__TITLES_TABLE_NAME']), key=entry.key, issn=entry.issn, start=entry.start, end=entry.end, location=entry.location, call_number=entry.call_number )
# return
# end class UpdateTitlesHelper
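# Added illustration of the _intersect_keys() logic above (informative only):
#   rapid_keys = ['a', 'b', 'c']; easya_keys = ['b', 'c', 'd']
#   rapid_not_in_easya -> ['a']   (new RAPID titles to INSERT into easyA)
#   easya_not_in_rapid -> ['d']   (stale easyA titles to DELETE)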
| {
"content_hash": "d644c1674d292f0cc30a4ee3da61a639",
"timestamp": "",
"source": "github",
"line_count": 175,
"max_line_length": 253,
"avg_line_length": 56.41142857142857,
"alnum_prop": 0.5842787682333873,
"repo_name": "birkin/rapid_exports",
"id": "9b2e8749d8a19e01eb7c453d5d7bbcbf8392e699",
"size": "9897",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rapid_app/lib/viewhelper_updatedb.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "4483"
},
{
"name": "Python",
"bytes": "94201"
}
],
"symlink_target": ""
} |
from urllib.request import urlopen
from urllib.error import HTTPError
from bs4 import BeautifulSoup
import datetime
import random
import re
import json
random.seed(datetime.datetime.now())
def getLinks(articleUrl):
html = urlopen("http://en.wikipedia.org"+articleUrl)
bsObj = BeautifulSoup(html,'html5lib')
return bsObj.find("div", {"id":"bodyContent"}).findAll("a",href=re.compile("^(/wiki/)((?!:).)*$"))
def getHistoryIPs(pageUrl):
    # The edit-history page URL format is:
    # http://en.wikipedia.org/w/index.php?title=Title_in_URL&action=history
    pageUrl = pageUrl.replace("/wiki/", "")
    historyUrl = "http://en.wikipedia.org/w/index.php?title="+pageUrl+"&action=history"
print("history url is: "+historyUrl)
html = urlopen(historyUrl)
bsObj = BeautifulSoup(html,'html5lib')
    # Find the links whose class attribute is "mw-userlink mw-anonuserlink";
    # these revisions use IP addresses in place of usernames.
ipAddresses = bsObj.findAll("a", {"class":"mw-userlink mw-anonuserlink"})
addressList = set()
for ipAddress in ipAddresses:
addressList.add(ipAddress.get_text())
return addressList
def getCountry(ipAddress):
try:
response = urlopen("http://freegeoip.net/json/"+ipAddress).read().decode('utf-8')
except HTTPError:
return None
responseJson = json.loads(response)
return responseJson.get('country_code')
links = getLinks("/wiki/Python_(programming_language)")
while(len(links) > 0):
for link in links:
print("-------------------")
historyIPs = getHistoryIPs(link.attrs["href"])
for historyIP in historyIPs:
country = getCountry(historyIP)
if country is not None:
print(historyIP+' is from '+country)
newLink = links[random.randint(0, len(links)-1)].attrs["href"]
links = getLinks(newLink)
| {
"content_hash": "874590d28f491d69f7b4f2c1de308e19",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 111,
"avg_line_length": 31.21153846153846,
"alnum_prop": 0.7147258163894024,
"repo_name": "ChenBooming/python_pycharm",
"id": "37232253dbf75fbbb26ca5800a5cb8c7f866dea5",
"size": "1681",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Python/getwikiIP.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "21957"
},
{
"name": "Roff",
"bytes": "65536"
}
],
"symlink_target": ""
} |
import sys
try:
import boto3
except ImportError:
name = 'django_distill.backends.amazon_s3'
pipm = 'boto3'
sys.stdout.write('{} backend requires {}:\n'.format(name, pipm))
sys.stdout.write('$ pip install {}\n\n'.format(pipm))
raise
from django_distill.errors import DistillPublishError
from django_distill.backends import BackendBase
class AmazonS3Backend(BackendBase):
'''
Publisher for Amazon S3. Implements the BackendBase.
'''
REQUIRED_OPTIONS = ('ENGINE', 'PUBLIC_URL', 'ACCESS_KEY_ID',
'SECRET_ACCESS_KEY', 'BUCKET')
    def _get_object(self, name):
        # Return a boto3 Object resource so callers can use .e_tag and .delete()
        return self.d['bucket'].Object(name)
def account_username(self):
return self.options.get('ACCESS_KEY_ID', '')
def account_container(self):
return self.options.get('BUCKET', '')
def authenticate(self, calling_format=None):
access_key_id = self.account_username()
secret_access_key = self.options.get('SECRET_ACCESS_KEY', '')
bucket = self.account_container()
        self.d['connection'] = boto3.resource('s3', aws_access_key_id=access_key_id,
                                              aws_secret_access_key=secret_access_key)
        self.d['bucket'] = self.d['connection'].Bucket(bucket)
def list_remote_files(self):
rtn = set()
for obj in self.d['bucket'].objects.all():
rtn.add(obj.key)
return rtn
def delete_remote_file(self, remote_name):
obj = self._get_object(remote_name)
return obj.delete()
def compare_file(self, local_name, remote_name):
obj = self._get_object(remote_name)
local_hash = self._get_local_file_hash(local_name)
return local_hash == obj.e_tag[1:-1]
def upload_file(self, local_name, remote_name):
return self.d['bucket'].upload_file(local_name, remote_name)
def create_remote_dir(self, remote_dir_name):
# not required for S3 buckets
return True
backend_class = AmazonS3Backend
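# Added illustration (hypothetical values): the option keys this backend
# expects via REQUIRED_OPTIONS, as they might appear in a settings dict.
#   {
#       'ENGINE': 'django_distill.backends.amazon_s3',
#       'PUBLIC_URL': 'https://my-bucket.s3.amazonaws.com/',
#       'ACCESS_KEY_ID': '...',
#       'SECRET_ACCESS_KEY': '...',
#       'BUCKET': 'my-bucket',
#   }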
| {
"content_hash": "532a1c28cea4b58706cc483e7c4d9e54",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 84,
"avg_line_length": 31.29850746268657,
"alnum_prop": 0.6237482117310443,
"repo_name": "mgrp/django-distill",
"id": "9a327b2263365afce8e751123e9c42e43c032ad8",
"size": "2097",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django_distill/backends/amazon_s3.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "48463"
}
],
"symlink_target": ""
} |
"""
set_fake_passwords.py
Reset all user passwords to a common value. Useful for testing in a
development environment. As such, this command is only available when
setting.DEBUG is True.
"""
from typing import List
from django.conf import settings
from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand, CommandError
from django_extensions.management.utils import signalcommand
DEFAULT_FAKE_PASSWORD = 'password'
class Command(BaseCommand):
help = 'DEBUG only: sets all user passwords to a common value ("%s" by default)' % (DEFAULT_FAKE_PASSWORD, )
requires_system_checks: List[str] = []
def add_arguments(self, parser):
super().add_arguments(parser)
parser.add_argument(
'--prompt', dest='prompt_passwd', default=False,
action='store_true',
help='Prompts for the new password to apply to all users'
)
parser.add_argument(
'--password', dest='default_passwd', default=DEFAULT_FAKE_PASSWORD,
help='Use this as default password.'
)
@signalcommand
def handle(self, *args, **options):
if not settings.DEBUG:
raise CommandError('Only available in debug mode')
if options['prompt_passwd']:
from getpass import getpass
passwd = getpass('Password: ')
if not passwd:
raise CommandError('You must enter a valid password')
else:
passwd = options['default_passwd']
User = get_user_model()
user = User()
user.set_password(passwd)
count = User.objects.all().update(password=user.password)
print('Reset %d passwords' % count)
| {
"content_hash": "97a254b1a9c55ce276a69fe6db1c6080",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 112,
"avg_line_length": 32.22222222222222,
"alnum_prop": 0.6408045977011494,
"repo_name": "django-extensions/django-extensions",
"id": "1d03fe037bb5c661038efc581fc7f1090e66d4ee",
"size": "1764",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "django_extensions/management/commands/set_fake_passwords.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "740"
},
{
"name": "HTML",
"bytes": "2126"
},
{
"name": "JavaScript",
"bytes": "41410"
},
{
"name": "Makefile",
"bytes": "1257"
},
{
"name": "Python",
"bytes": "826197"
}
],
"symlink_target": ""
} |
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from dataclasses import dataclass
from collections.abc import Callable
from string import Template
import re
import flask
from enum import Enum
class ContextCategory(Enum):
ALL = 0
innerHTML = 1
JS = 2
Angular = 3
HTMLAttribute = 4
HTMLComment = 5
HTMLTag = 6
HTMLAttributeSrc = 7
HTMLAttributeHref = 8
@dataclass
class Context:
"""Representation of an XSS Context.
This dataclass represents XSS contexts. The contexts define where the XSS
source is mapped to in terms of browser parsing evaluation.
Attributes:
name: A short and unique string used to identify the context using request
arguments.
description: A string describing the context for humans.
implementation: A string implementing the context. Contains $payload
placeholder to fill with sink.
category: The ContextCategory enum categorizing the context on a high level.
"""
name: str
description: str
implementation: str
category: ContextCategory
def make_implementation(self, flask_request) -> str:
return self.implementation
@dataclass
class ComplexContext(Context):
"""A complex context is a context which requires more logic than simply returning a template string.
Some functionality like slightly switching the context based on a request
argument require a method to handle the implementation.
To model those functions performing transformations of the payload before
returning it, this class is added.
Instead of returning a template string for the implementation,
make_implementation does its thing to transform the payload and return the
result.
"""
implementation: Callable
base_template: str
def make_implementation(self, flask_request) -> str:
return self.implementation(self.base_template, flask_request)
# Default context
body_context = Context(
name="body",
description="The HTML body",
implementation="<body>$payload</body>",
category=ContextCategory.innerHTML,
)
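# Added usage sketch: a Context's implementation is a string.Template body in
# which the serving code substitutes the reflected payload for $payload, e.g.
#   Template(body_context.implementation).substitute(payload="<b>x</b>")
#   -> '<body><b>x</b></body>'
# (The actual substitution happens outside this module.)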
script_context = Context(
name="script",
description="Inside a script tag",
implementation="<script>$payload</script>",
category=ContextCategory.JS,
)
# Source: https://www.w3schools.com/tags/ref_eventattributes.asp
body_events = [
"onafterprint",
"onbeforeprint",
"onbeforeunload",
"onerror",
"onhashchange",
"onload",
"onmessage",
"onoffline",
"ononline",
"onpagehide",
"onpageshow",
"onpopstate",
"onresize",
"onstorage",
"onunload",
]
form_events = [
"onblur",
"onchange",
"oncontextmenu",
"onfocus",
"oninput",
"oninvalid",
"onreset",
"onsearch",
"onselect",
"onsubmit",
]
keyboard_events = [
"onkeydown",
"onkeypress",
"onkeyup",
]
mouse_events = [
"onclick",
"ondblclick",
"onmousedown",
"onmousemove",
"onmouseout",
"onmouseover",
"onmouseup",
"onwheel",
]
drag_events = [
"ondrag",
"ondragend",
"ondragenter",
"ondragleave",
"ondragover",
"ondragstart",
"ondrop",
"onscroll",
]
clipboard_events = [
"oncopy",
"oncut",
"onpaste",
]
event_contexts = [
Context(
name=f"body.{event}",
description=f"Inside an HTML attribute event handler for {event} of the body tag",
implementation=f"<body {event}=$payload></body>",
category=ContextCategory.JS,
) for event in body_events
] + [
Context(
name=f"form.{event}",
description=f"Inside an HTML attribute event handler for {event} of a form tag with one input and a submit button",
implementation=f"<form {event}=$payload><input type=text /><input type=submit /></form>",
category=ContextCategory.JS,
) for event in form_events
] + [
Context(
name=f"input.{event}",
description=f"Inside an HTML attribute event handler for {event} of an input tag",
implementation=f"<input {event}=$payload />",
category=ContextCategory.JS,
) for event in keyboard_events + clipboard_events
] + [
Context(
name=f"button.{event}",
description=f"Inside an HTML attribute event handler for {event} of a button tag",
implementation=f"<button {event}=$payload>Click or drag</button>",
category=ContextCategory.JS,
) for event in mouse_events + drag_events
] + [
Context(
name="details.ontoggle",
description="Inside an HTML attribute event handler for ontoggle of a details tag",
implementation="<details ontoggle=$payload>Reveal</details>",
category=ContextCategory.JS,
)
]
post_message_context = Context(
name="postMessage",
description="In a postMessage handler inside a script tag. To be used with postMessage source",
implementation=(
"<script>"
"const postMessageHandler = (msg) => {"
"const msgData = msg.data;"
"$payload"
"};"
"""window.addEventListener("message", postMessageHandler, false);"""
"</script>"),
category=ContextCategory.JS,
)
def _create_angular_context(base_template, flask_request) -> str:
# Default to Angular version 1.6.0
angular_version = flask_request.args.get("angular", "1.6.0")
if not re.match(r"^(\d+\.)?(\d+\.)?(\d+)?$", angular_version):
flask.abort(
400,
"Invalid Angular version. Use dot separated version string e.g. 1.6.0")
return Template(base_template).substitute(version=angular_version)
angular_contexts = [
ComplexContext(
name="angular",
description="Inside a script tag with Angular loaded and an Angular app element",
base_template=(
"""<script src="//ajax.googleapis.com/ajax/libs/angularjs/$version/angular.js"></script>"""
"<script>$$payload</script>"
"""<div ng-app="test" ng-controller="VulnerableController" class="ng-scope"></div>"""
),
implementation=_create_angular_context,
category=ContextCategory.JS,
),
ComplexContext(
name="angular.ng-class",
description="As the ng-class attribute of a generic tag with loaded Angular ",
base_template=(
"""<script src="//ajax.googleapis.com/ajax/libs/angularjs/$version/angular.js"></script>"""
"<body ng-app><tag ng-class=$$payload /></body>"),
implementation=_create_angular_context,
category=ContextCategory.Angular,
),
ComplexContext(
name="angular.attribute.interpolation",
description=("Inside interpolation symbols in a "
"generic attribute of a generic tag with loaded Angular"),
base_template=(
"""<script src="//ajax.googleapis.com/ajax/libs/angularjs/$version/angular.js"></script>"""
"<body ng-app><tag class={{$$payload}} /></body>"),
implementation=_create_angular_context,
category=ContextCategory.Angular,
),
ComplexContext(
name="angular.interpolation",
description="Into interpolation symbols inside the body tag with Angular loaded",
base_template=(
"""<script src="//ajax.googleapis.com/ajax/libs/angularjs/$version/angular.js"></script>"""
"""<body ng-app><div>{{$$payload}}</div></body>"""),
implementation=_create_angular_context,
category=ContextCategory.Angular,
),
ComplexContext(
name="angular.interpolation.altsymbols",
description="Into alternate interpolation symbols [[]] inside the body tag with Angular loaded",
base_template=(
"""<script src="//ajax.googleapis.com/ajax/libs/angularjs/$version/angular.js"></script>"""
"""<script>"""
"""angular.module('ng').config(function($$$$interpolateProvider) {"""
"""$$$$interpolateProvider.startSymbol('[[').endSymbol(']]');});"""
"""</script>"""
"""<body ng-app>[[$$payload]]</body>"""),
implementation=_create_angular_context,
category=ContextCategory.Angular,
),
ComplexContext(
name="angular.body",
description="Inside the body tag with Angular loaded",
base_template=(
"""<script src="//ajax.googleapis.com/ajax/libs/angularjs/$version/angular.js"></script>"""
"""<body ng-app>$$payload</body>"""),
implementation=_create_angular_context,
category=ContextCategory.Angular,
),
ComplexContext(
name="angular.body.altsymbols",
description="Inside the body tag with Angular loaded using alternate interpolation symbols [[]]",
base_template=(
"""<script src="//ajax.googleapis.com/ajax/libs/angularjs/$version/angular.js"></script>"""
"""<script>"""
"""angular.module('ng').config(function($$$$interpolateProvider) {"""
"""$$$$interpolateProvider.startSymbol('[[').endSymbol(']]');});"""
"""</script>"""
"""<body ng-app>$$payload</body>"""),
implementation=_create_angular_context,
category=ContextCategory.Angular,
),
]
ClientContextsList = [
script_context,
post_message_context,
] + event_contexts + angular_contexts
ServerContextsList = [
body_context,
script_context,
post_message_context,
Context(
name="body.comment",
description="A comment inside the HTML body",
implementation="<body><!-- $payload --></body>",
category=ContextCategory.HTMLComment,
),
Context(
name="script.comment.block",
description="Inside a javascript comment block in a script tag",
implementation="<script>/* $payload */</script>",
category=ContextCategory.JS,
),
Context(
name="script.comment.line",
description="Inside a javascript line comment in a script tag",
implementation="<script>// $payload</script>",
category=ContextCategory.JS,
),
Context(
name="script.comment.html",
description="Inside an HTML comment block in a script tag",
implementation="<script><!-- $payload </script>",
category=ContextCategory.JS,
),
Context(
name="script.assignment",
description="In a javascript variable assignment",
implementation="""<script>var a = $payload </script>""",
category=ContextCategory.JS,
),
Context(
name="script.src",
description="As the src attribute of a script tag",
implementation="""<script src=$payload />""",
category=ContextCategory.HTMLAttributeSrc,
),
Context(
name="script.function.parameter",
description="As a function parameter to a JS function call inside a script tag",
implementation="""<script>const f = (a) => { return a }; f($payload); </script>""",
category=ContextCategory.JS,
),
Context(
name="attribute.name",
description="As an attribute name of a generic tag",
implementation="""<tag $payload=""/>""",
category=ContextCategory.HTMLAttribute,
),
Context(
name="attribute.generic",
description="As the value of a generic attribute in a generic tag",
implementation="""<tag attribute=$payload />""",
category=ContextCategory.HTMLAttribute,
),
Context(
name="tag.inner",
description="Inside a generic tag",
implementation="<tag>$payload</tag>",
category=ContextCategory.innerHTML,
),
Context(
name="tag.name.self_closing",
description="As a name of a self-closing tag",
implementation="<$payload />",
category=ContextCategory.HTMLTag,
),
Context(
name="tag.name",
description="As a tag name",
implementation="<$payload></$payload>",
category=ContextCategory.HTMLTag,
),
Context(
name="a.href",
description="As an href attribute of an anchor",
implementation="""<a href=$payload>Link</a>""",
category=ContextCategory.HTMLAttributeHref,
),
Context(
name="area.href",
description="As an href of the area tag",
implementation="""<area href=$payload>Link</area>""",
category=ContextCategory.HTMLAttributeHref,
),
Context(
name="link.href",
description="As an href attribute of a link tag",
implementation="""<head><link href=$payload /></head>""",
category=ContextCategory.HTMLAttributeHref,
),
Context(
name="embed.src",
description="As a src attribute of an embed tag",
implementation="""<embed src=$payload />""",
category=ContextCategory.HTMLAttributeSrc,
),
Context(
name="form.action",
description="As an action attribute of a form",
implementation="""<form action=$payload><button type=submit>Submit</button></form>""",
category=ContextCategory.HTMLAttributeHref,
),
Context(
name="iframe.src",
description="As a src attribute of an iframe",
implementation="""<iframe src=$payload />""",
category=ContextCategory.HTMLAttributeSrc,
),
Context(
name="iframe.srcdoc",
description="As a srcdoc attribute of an iframe",
implementation="""<iframe srcdoc=$payload />""",
category=ContextCategory.HTMLAttributeSrc,
),
Context(
name="iframe.attribute",
description="As a generic attribute of an iframe",
implementation="""<iframe src="data:text/html, <h1>This is an iframe!</h1>" attribute=$payload />""",
category=ContextCategory.HTMLAttributeSrc,
),
Context(
name="input.formaction",
description="As a formaction attribute of an input",
implementation="""<form><input value="Submit" type="submit" formaction=$payload /></form>""",
category=ContextCategory.HTMLAttributeHref,
),
Context(
name="button.formaction",
description="As a formaction attribute of an button",
implementation="""<form><button formaction=$payload>Start</button></form>""",
category=ContextCategory.HTMLAttributeHref,
),
Context(
name="meta.content",
description="As a content attribute of a meta tag",
implementation="""<meta content=$payload />""",
category=ContextCategory.HTMLAttribute,
),
Context(
name="object.data",
description="As a data attribute of an object tag",
implementation="""<object data=$payload />""",
category=ContextCategory.HTMLAttributeSrc,
),
Context(
name="textarea",
description="Inside a textarea",
implementation="""<textarea>$payload</textarea>""",
category=ContextCategory.innerHTML,
),
Context(
name="textarea.value",
description="As the value attribute of a textarea",
implementation="""<textarea value=$payload>></textarea>""",
category=ContextCategory.HTMLAttribute,
),
Context(
name="head",
description="Inside the HTML head tag",
implementation="<head>$payload</head>",
category=ContextCategory.innerHTML,
),
Context(
name="head.title",
description="Inside the HTML title",
implementation="<head><title>$payload</title></head>",
category=ContextCategory.innerHTML,
),
Context(
name="head.style",
description="Inside a style tag in the HTML head",
implementation="<head><style>$payload</style></head>",
category=ContextCategory.innerHTML,
),
Context(
name="head.style.comment",
description="Inside a CSS comment in a style tag in the HTML head",
implementation="<head><style>/* $payload */</style></head>",
category=ContextCategory.innerHTML,
),
Context(
name="noscript",
description="In a noscript element",
implementation="<noscript>$payload</noscript>",
category=ContextCategory.innerHTML,
),
Context(
name="noembed",
description="In a noembed element",
implementation="<noembed>$payload</noembed>",
category=ContextCategory.innerHTML,
),
Context(
name="select.option",
description="As an option to a select element",
implementation="<select><option>$payload</option></select>",
category=ContextCategory.innerHTML,
),
Context(
name="frameset",
description="Inside a frameset element",
implementation="<frameset>$payload</frameset>",
category=ContextCategory.innerHTML,
),
Context(
name="frameset.frame.src",
description="As the src attribute of a frame element inside a frameset",
implementation="""<frameset><frame src=$payload></frameset>""",
category=ContextCategory.HTMLAttributeSrc,
),
Context(
name="template",
description="Inside a template element",
implementation="<template>$payload</template>",
category=ContextCategory.innerHTML,
),
Context(
name="object.param.code",
description="As a value to a code param inside an object element",
implementation=("<object>"
"""<param name="code" value=$payload />"""
"</object>"),
category=ContextCategory.HTMLAttribute,
),
Context(
name="object.param.movie",
description="As a value to a movie param inside an object element",
implementation=("<object>"
"""<param name="movie" value=$payload />"""
"</object>"),
category=ContextCategory.HTMLAttribute,
),
Context(
name="object.param.src",
description="As a value to a src param inside an object element",
implementation=("<object>"
"""<param name="src" value=$payload />"""
"</object>"),
category=ContextCategory.HTMLAttribute,
),
Context(
name="object.param.url",
description="As a value to a url param inside an object element",
implementation=("<object>"
"""<param name="url" value=$payload />"""
"</object>"),
category=ContextCategory.HTMLAttribute,
),
Context(
name="svg.script.xlink-href",
description="As an xlink:href attribute of a script inside an xlink-svg element",
implementation=("""<svg xmlns:xlink="http://www.w3.org/1999/xlink">"""
"""<script xlink:href=$payload></script>"""
"</svg>"),
category=ContextCategory.HTMLAttributeHref,
),
Context(
name="svg.a.xlink-href",
description="As an xlink:href attribute of an anchor inside an xlink-svg element",
implementation=(
"""<svg xmlns:xlink="http://www.w3.org/1999/xlink" width="500" height="500">"""
"""<a xlink:href=$payload>Link</a>"""
"</svg>"),
category=ContextCategory.HTMLAttributeHref,
),
Context(
name="script.setInnerHTML",
description="Inside a script tag as an assignment to an element's innerHTML",
implementation=(
"<div id='d'></div>"
"<script>"
"""document.getElementById('d').innerHTML = $payload ;"""
"</script>"),
category=ContextCategory.JS,
),
] + event_contexts + angular_contexts
ClientContexts = {context.name: context for context in ClientContextsList}
ServerContexts = {context.name: context for context in ServerContextsList}
| {
"content_hash": "8afaa719e41a36197894a0b372e9dbee",
"timestamp": "",
"source": "github",
"line_count": 587,
"max_line_length": 123,
"avg_line_length": 35.310051107325386,
"alnum_prop": 0.6213634389926184,
"repo_name": "google/security-testbeds",
"id": "eff0b4ab5e34947fce49ca9e9868731b03a3154b",
"size": "20727",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "archery_range/xss/components/context.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "7221"
},
{
"name": "Python",
"bytes": "80197"
}
],
"symlink_target": ""
} |
"""
Feature extractor class for LayoutLMv3.
"""
from ...utils import logging
from .image_processing_layoutlmv3 import LayoutLMv3ImageProcessor
logger = logging.get_logger(__name__)
LayoutLMv3FeatureExtractor = LayoutLMv3ImageProcessor
| {
"content_hash": "b8abedef26f960a07493729e6c561f7e",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 65,
"avg_line_length": 20,
"alnum_prop": 0.7958333333333333,
"repo_name": "huggingface/transformers",
"id": "d742c068fc730753a25d6b7dc5d1714ca9d30b2a",
"size": "845",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/transformers/models/layoutlmv3/feature_extraction_layoutlmv3.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "6021"
},
{
"name": "C++",
"bytes": "12959"
},
{
"name": "Cuda",
"bytes": "175419"
},
{
"name": "Dockerfile",
"bytes": "18218"
},
{
"name": "Jsonnet",
"bytes": "937"
},
{
"name": "Makefile",
"bytes": "3430"
},
{
"name": "Python",
"bytes": "35742012"
},
{
"name": "Shell",
"bytes": "30374"
}
],
"symlink_target": ""
} |
from ._models_py3 import ActionDetail
from ._models_py3 import ActionGroupList
from ._models_py3 import ActionGroupPatchBody
from ._models_py3 import ActionGroupResource
from ._models_py3 import ArmRoleReceiver
from ._models_py3 import AutomationRunbookReceiver
from ._models_py3 import AzureAppPushReceiver
from ._models_py3 import AzureFunctionReceiver
from ._models_py3 import AzureResource
from ._models_py3 import Context
from ._models_py3 import EmailReceiver
from ._models_py3 import EnableRequest
from ._models_py3 import ErrorResponse
from ._models_py3 import EventHubReceiver
from ._models_py3 import ItsmReceiver
from ._models_py3 import LogicAppReceiver
from ._models_py3 import NotificationRequestBody
from ._models_py3 import SmsReceiver
from ._models_py3 import TestNotificationDetailsResponse
from ._models_py3 import VoiceReceiver
from ._models_py3 import WebhookReceiver
from ._monitor_management_client_enums import ReceiverStatus
from ._patch import __all__ as _patch_all
from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import
from ._patch import patch_sdk as _patch_sdk
__all__ = [
"ActionDetail",
"ActionGroupList",
"ActionGroupPatchBody",
"ActionGroupResource",
"ArmRoleReceiver",
"AutomationRunbookReceiver",
"AzureAppPushReceiver",
"AzureFunctionReceiver",
"AzureResource",
"Context",
"EmailReceiver",
"EnableRequest",
"ErrorResponse",
"EventHubReceiver",
"ItsmReceiver",
"LogicAppReceiver",
"NotificationRequestBody",
"SmsReceiver",
"TestNotificationDetailsResponse",
"VoiceReceiver",
"WebhookReceiver",
"ReceiverStatus",
]
__all__.extend([p for p in _patch_all if p not in __all__])
_patch_sdk()
| {
"content_hash": "98bed2a52e68756093563ded109454a0",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 78,
"avg_line_length": 32.77358490566038,
"alnum_prop": 0.7576280944156591,
"repo_name": "Azure/azure-sdk-for-python",
"id": "c2c49e9b3b64eead95c99699a3daa7ac109fc660",
"size": "2205",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "sdk/monitor/azure-mgmt-monitor/azure/mgmt/monitor/v2022_06_01/models/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('futuschedule', '0007_auto_20180227_1126'),
]
operations = [
migrations.AddField(
model_name='futuuser',
name='personio_id',
field=models.IntegerField(unique=True, null=True, blank=True),
),
]
| {
"content_hash": "5d3fe6cb0ca6cd81e7f0de252bca9b84",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 74,
"avg_line_length": 22.88888888888889,
"alnum_prop": 0.6092233009708737,
"repo_name": "futurice/schedule",
"id": "24a53f32e39e8836ed11535a13b1e8ac2bd1f10e",
"size": "436",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "schedulesite/futuschedule/migrations/0008_futuuser_personioid.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "4704"
},
{
"name": "Dockerfile",
"bytes": "1367"
},
{
"name": "HTML",
"bytes": "5543"
},
{
"name": "JavaScript",
"bytes": "165591"
},
{
"name": "Python",
"bytes": "94435"
},
{
"name": "Shell",
"bytes": "1748"
}
],
"symlink_target": ""
} |
import warnings
import torch
from ..constraints import Positive
from ..lazy import MatmulLazyTensor, RootLazyTensor
from .kernel import Kernel
class LinearKernel(Kernel):
r"""
Computes a covariance matrix based on the Linear kernel
between inputs :math:`\mathbf{x_1}` and :math:`\mathbf{x_2}`:
.. math::
\begin{equation*}
k_\text{Linear}(\mathbf{x_1}, \mathbf{x_2}) = v\mathbf{x_1}^\top
\mathbf{x_2}.
\end{equation*}
where
* :math:`v` is a :attr:`variance` parameter.
.. note::
To implement this efficiently, we use a :obj:`gpytorch.lazy.RootLazyTensor` during training and a
:class:`gpytorch.lazy.MatmulLazyTensor` during test. These lazy tensors represent matrices of the form
:math:`K = XX^{\top}` and :math:`K = XZ^{\top}`. This makes inference
efficient because a matrix-vector product :math:`Kv` can be computed as
:math:`Kv=X(X^{\top}v)`, where the base multiply :math:`Xv` takes only
:math:`O(nd)` time and space.
Args:
:attr:`variance_prior` (:class:`gpytorch.priors.Prior`):
Prior over the variance parameter (default `None`).
:attr:`variance_constraint` (Constraint, optional):
Constraint to place on variance parameter. Default: `Positive`.
:attr:`active_dims` (list):
List of data dimensions to operate on.
`len(active_dims)` should equal `num_dimensions`.
"""
def __init__(self, num_dimensions=None, offset_prior=None, variance_prior=None, variance_constraint=None, **kwargs):
super(LinearKernel, self).__init__(**kwargs)
if variance_constraint is None:
variance_constraint = Positive()
if num_dimensions is not None:
# Remove after 1.0
warnings.warn("The `num_dimensions` argument is deprecated and no longer used.", DeprecationWarning)
self.register_parameter(name="offset", parameter=torch.nn.Parameter(torch.zeros(1, 1, num_dimensions)))
if offset_prior is not None:
# Remove after 1.0
warnings.warn("The `offset_prior` argument is deprecated and no longer used.", DeprecationWarning)
self.register_parameter(name="raw_variance", parameter=torch.nn.Parameter(torch.zeros(*self.batch_shape, 1, 1)))
if variance_prior is not None:
self.register_prior(
"variance_prior", variance_prior, lambda: self.variance, lambda v: self._set_variance(v)
)
self.register_constraint("raw_variance", variance_constraint)
@property
def variance(self):
return self.raw_variance_constraint.transform(self.raw_variance)
@variance.setter
def variance(self, value):
self._set_variance(value)
def _set_variance(self, value):
if not torch.is_tensor(value):
value = torch.as_tensor(value).to(self.raw_variance)
self.initialize(raw_variance=self.raw_variance_constraint.inverse_transform(value))
def forward(self, x1, x2, diag=False, last_dim_is_batch=False, **params):
x1_ = x1 * self.variance.sqrt()
if last_dim_is_batch:
x1_ = x1_.transpose(-1, -2).unsqueeze(-1)
if x1.size() == x2.size() and torch.equal(x1, x2):
# Use RootLazyTensor when x1 == x2 for efficiency when composing
# with other kernels
prod = RootLazyTensor(x1_)
else:
x2_ = x2 * self.variance.sqrt()
if last_dim_is_batch:
x2_ = x2_.transpose(-1, -2).unsqueeze(-1)
prod = MatmulLazyTensor(x1_, x2_.transpose(-2, -1))
if diag:
return prod.diag()
else:
return prod
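# --- Illustrative usage sketch (not part of the original module) ---
# A minimal, hedged example of the kernel defined above: with identical
# inputs the forward pass returns a RootLazyTensor, so a matrix-vector
# product K v is computed as X (X^T v) as described in the class docstring.
# The tensor shapes below are arbitrary placeholders.
if __name__ == "__main__":
    x = torch.randn(10, 3)
    kernel = LinearKernel()
    covar = kernel.forward(x, x)   # RootLazyTensor, since x1 is x2
    print(covar.diag().shape)      # torch.Size([10])
    v = torch.randn(10, 1)
    print(covar.matmul(v).shape)   # torch.Size([10, 1]), computed lazily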
| {
"content_hash": "e464c342c9248c0264b3acd59577afc8",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 120,
"avg_line_length": 38.265306122448976,
"alnum_prop": 0.6181333333333333,
"repo_name": "jrg365/gpytorch",
"id": "aa1633a1f35c35128bf943e3b285926d966c256f",
"size": "3774",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gpytorch/kernels/linear_kernel.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "6005"
},
{
"name": "C++",
"bytes": "242"
},
{
"name": "Python",
"bytes": "338860"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import time, sys, signal, atexit
from upm import pyupm_adafruitms1438 as upmAdafruitms1438
def main():
# Import header values
I2CBus = upmAdafruitms1438.ADAFRUITMS1438_I2C_BUS
I2CAddr = upmAdafruitms1438.ADAFRUITMS1438_DEFAULT_I2C_ADDR
M12Motor = upmAdafruitms1438.AdafruitMS1438.STEPMOTOR_M12
MotorDirCW = upmAdafruitms1438.AdafruitMS1438.DIR_CW
MotorDirCCW = upmAdafruitms1438.AdafruitMS1438.DIR_CCW
# Instantiate an Adafruit MS 1438 on I2C bus 0
myMotorShield = upmAdafruitms1438.AdafruitMS1438(I2CBus, I2CAddr)
## Exit handlers ##
# This stops python from printing a stacktrace when you hit control-C
def SIGINTHandler(signum, frame):
raise SystemExit
# This function lets you run code on exit,
# including functions from myMotorShield
def exitHandler():
myMotorShield.disableStepper(M12Motor)
print("Exiting")
sys.exit(0)
# Register exit handlers
atexit.register(exitHandler)
signal.signal(signal.SIGINT, SIGINTHandler)
# Setup for use with a stepper motor connected to the M1 & M2 ports
# set a PWM period of 50Hz
# disable first, to be safe
myMotorShield.disableStepper(M12Motor)
# configure for a NEMA-17, 200 steps per revolution
myMotorShield.stepConfig(M12Motor, 200)
# set speed at 10 RPM's
    myMotorShield.setStepperSpeed(M12Motor, 10)
myMotorShield.setStepperDirection(M12Motor, MotorDirCW)
# enable
print("Enabling...")
myMotorShield.enableStepper(M12Motor)
print("Rotating 1 full revolution at 10 RPM speed.")
myMotorShield.stepperSteps(M12Motor, 200)
print("Sleeping for 2 seconds...")
time.sleep(2)
print("Rotating 1/2 revolution in opposite direction at 10 RPM speed.")
myMotorShield.setStepperDirection(M12Motor, MotorDirCCW)
myMotorShield.stepperSteps(M12Motor, 100)
print("Disabling...")
# exitHandler runs automatically
if __name__ == '__main__':
main()
| {
"content_hash": "033a30b656a47ae544f68aacc17c8ab9",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 75,
"avg_line_length": 30.606060606060606,
"alnum_prop": 0.7207920792079208,
"repo_name": "g-vidal/upm",
"id": "543c535d36dd1f8830dfeb58e9d386220f4d016c",
"size": "3180",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "examples/python/adafruitms1438-stepper.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "3305722"
},
{
"name": "C++",
"bytes": "3740016"
},
{
"name": "CMake",
"bytes": "160214"
},
{
"name": "CSS",
"bytes": "18714"
},
{
"name": "HTML",
"bytes": "32376"
},
{
"name": "JavaScript",
"bytes": "7727"
},
{
"name": "Objective-C",
"bytes": "4075"
},
{
"name": "Python",
"bytes": "39715"
},
{
"name": "Shell",
"bytes": "10703"
}
],
"symlink_target": ""
} |
'''Module containing Abstract Syntax Tree (AST) constructors.'''
from abc import ABCMeta, abstractmethod
from math import exp, log, cos, sin, tan, factorial
from operator import add, sub, mul, truediv, pow, neg
# Binary operation lookup table (prevents looking at cases later).
bin_ops = {
'+': add,
'-': sub,
'*': mul,
'/': truediv,
'^': pow
}
# Function lookup table (also prevents looking at cases later).
functions = {
'-': neg,
'abs': abs,
'exp': exp,
'log': log,
'cos': cos,
'sin': sin,
'tan': tan,
'!': factorial
}
class AST(metaclass=ABCMeta):
'''Abstract AST class.'''
@abstractmethod
def evaluate(self):
'''Traverse the tree and return the value that the tree represents.'''
# Implemented in subclass.
pass
@abstractmethod
def set_vars(self, variables):
'''Assign values to variables in the leaf nodes.'''
# Implemented in subclass.
pass
@abstractmethod
def postfix(self):
'''Return a postfix (postorder) representation of the tree.'''
# Implemented in subclass.
pass
def __repr__(self):
'''Convert the tree to a string.'''
return self.postfix()
class Branch(AST, metaclass=ABCMeta):
'''A branch of the AST. The value of a branch is a function. Children of the
branch are ASTs which represent arguments to the function.'''
def __init__(self, f, identifier, *args):
self.f = f
self.identifier = identifier
self.args = args
def evaluate(self):
'''Evaluate the children, then apply the function to the results.'''
return self.f(*(arg.evaluate() for arg in self.args))
def set_vars(self, variables):
return all(arg.set_vars(variables) for arg in self.args)
def postfix(self):
arguments = ' '.join(arg.postfix() for arg in self.args)
return '(' + arguments + ') ' + self.identifier
class BinaryOperation(Branch):
'''A type of AST Branch where the node is a binary operation and there are
two children.'''
def __init__(self, op_symbol, left, right):
# Check if bin_op is one of the available binary operations.
if op_symbol in bin_ops:
bin_op = bin_ops[op_symbol]
super().__init__(bin_op, op_symbol, left, right)
else:
raise ValueError('Illegal binary operation: ' + op_symbol)
class UnaryFunction(Branch):
'''A type of AST Branch where the node is a unary function and there is only
one child AST.'''
def __init__(self, function_name, argument):
# Check if function_name is one of the available functions.
if function_name in functions:
function = functions[function_name]
super().__init__(function, function_name, argument)
else:
raise ValueError('Illegal function: ' + function_name)
class Leaf(AST, metaclass=ABCMeta):
'''A node on an AST with no children.'''
def __init__(self, name, value):
self.name = name
self.value = value
def evaluate(self):
return self.value
def set_vars(self, variables):
return True
def postfix(self):
return self.name
class Value(Leaf):
'''A leaf with a constant numeric value.'''
def __init__(self, value):
super().__init__(str(value), value)
class Variable(Leaf):
'''A leaf with a variable value.'''
def __init__(self, name):
super().__init__(name, None)
def evaluate(self):
# Check if the variable name is assigned to a value.
if self.value is not None:
return self.value
else:
message = 'The variable ' + self.name + ' has no value.'
raise UnboundLocalError(message)
def set_vars(self, variables):
# Try to assign a value to the variable name.
if self.name in variables:
self.value = variables[self.name]
return True
else:
return False
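# --- Illustrative usage sketch (not part of the original module) ---
# A minimal, hedged example of the classes above: build the expression
# (x + 2) * 3 as a tree, bind the variable, then inspect and evaluate it.
if __name__ == '__main__':
    expr = BinaryOperation('*',
                           BinaryOperation('+', Variable('x'), Value(2)),
                           Value(3))
    expr.set_vars({'x': 5})    # assign x = 5 in the leaf nodes
    print(expr.postfix())      # ((x 2) + 3) *
    print(expr.evaluate())     # 21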
| {
"content_hash": "e57a30592749f445c3941c164e89a6d7",
"timestamp": "",
"source": "github",
"line_count": 139,
"max_line_length": 80,
"avg_line_length": 29.906474820143885,
"alnum_prop": 0.5828722636516719,
"repo_name": "artemmavrin/pycalc",
"id": "498be53db4e4a88501f54eef613cf3cec4c83faa",
"size": "4157",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pycalc/tree.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "26105"
}
],
"symlink_target": ""
} |
import contextlib
import mock
import webob.exc as wexc
from neutron.api.v2 import base
from neutron.common import constants as n_const
from neutron import context
from neutron.extensions import portbindings
from neutron import manager
from neutron.openstack.common import log as logging
from neutron.plugins.common import constants as p_const
from neutron.plugins.ml2 import config as ml2_config
from neutron.plugins.ml2 import driver_api as api
from neutron.plugins.ml2 import driver_context
from neutron.plugins.ml2.drivers.cisco.nexus import config as cisco_config
from neutron.plugins.ml2.drivers.cisco.nexus import exceptions as c_exc
from neutron.plugins.ml2.drivers.cisco.nexus import mech_cisco_nexus
from neutron.plugins.ml2.drivers.cisco.nexus import nexus_db_v2
from neutron.plugins.ml2.drivers.cisco.nexus import nexus_network_driver
from neutron.plugins.ml2.drivers import type_vlan as vlan_config
from neutron.tests.unit import test_db_plugin
LOG = logging.getLogger(__name__)
ML2_PLUGIN = 'neutron.plugins.ml2.plugin.Ml2Plugin'
PHYS_NET = 'physnet1'
COMP_HOST_NAME = 'testhost'
COMP_HOST_NAME_2 = 'testhost_2'
VLAN_START = 1000
VLAN_END = 1100
NEXUS_IP_ADDR = '1.1.1.1'
NETWORK_NAME = 'test_network'
NETWORK_NAME_2 = 'test_network_2'
NEXUS_INTERFACE = '1/1'
NEXUS_INTERFACE_2 = '1/2'
CIDR_1 = '10.0.0.0/24'
CIDR_2 = '10.0.1.0/24'
DEVICE_ID_1 = '11111111-1111-1111-1111-111111111111'
DEVICE_ID_2 = '22222222-2222-2222-2222-222222222222'
DEVICE_OWNER = 'compute:None'
BOUND_SEGMENT1 = {api.NETWORK_TYPE: p_const.TYPE_VLAN,
api.PHYSICAL_NETWORK: PHYS_NET,
api.SEGMENTATION_ID: VLAN_START}
BOUND_SEGMENT2 = {api.NETWORK_TYPE: p_const.TYPE_VLAN,
api.PHYSICAL_NETWORK: PHYS_NET,
api.SEGMENTATION_ID: VLAN_START + 1}
class CiscoML2MechanismTestCase(test_db_plugin.NeutronDbPluginV2TestCase):
def setUp(self):
"""Configure for end-to-end neutron testing using a mock ncclient.
This setup includes:
- Configure the ML2 plugin to use VLANs in the range of 1000-1100.
- Configure the Cisco mechanism driver to use an imaginary switch
at NEXUS_IP_ADDR.
- Create a mock NETCONF client (ncclient) for the Cisco mechanism
driver
"""
# Configure the ML2 mechanism drivers and network types
ml2_opts = {
'mechanism_drivers': ['cisco_nexus'],
'tenant_network_types': ['vlan'],
}
for opt, val in ml2_opts.items():
ml2_config.cfg.CONF.set_override(opt, val, 'ml2')
# Configure the ML2 VLAN parameters
phys_vrange = ':'.join([PHYS_NET, str(VLAN_START), str(VLAN_END)])
vlan_config.cfg.CONF.set_override('network_vlan_ranges',
[phys_vrange],
'ml2_type_vlan')
# Configure the Cisco Nexus mechanism driver
nexus_config = {
(NEXUS_IP_ADDR, 'username'): 'admin',
(NEXUS_IP_ADDR, 'password'): 'mySecretPassword',
(NEXUS_IP_ADDR, 'ssh_port'): 22,
(NEXUS_IP_ADDR, COMP_HOST_NAME): NEXUS_INTERFACE,
(NEXUS_IP_ADDR, COMP_HOST_NAME_2): NEXUS_INTERFACE_2}
nexus_patch = mock.patch.dict(
cisco_config.ML2MechCiscoConfig.nexus_dict,
nexus_config)
nexus_patch.start()
self.addCleanup(nexus_patch.stop)
# The NETCONF client module is not included in the DevStack
# distribution, so mock this module for unit testing.
self.mock_ncclient = mock.Mock()
mock.patch.object(nexus_network_driver.CiscoNexusDriver,
'_import_ncclient',
return_value=self.mock_ncclient).start()
# Mock port context values for bound_segments and 'status'.
self.mock_bound_segment = mock.patch.object(
driver_context.PortContext,
'bound_segment',
new_callable=mock.PropertyMock).start()
self.mock_bound_segment.return_value = BOUND_SEGMENT1
self.mock_original_bound_segment = mock.patch.object(
driver_context.PortContext,
'original_bound_segment',
new_callable=mock.PropertyMock).start()
self.mock_original_bound_segment.return_value = None
mock_status = mock.patch.object(
mech_cisco_nexus.CiscoNexusMechanismDriver,
'_is_status_active').start()
mock_status.return_value = n_const.PORT_STATUS_ACTIVE
super(CiscoML2MechanismTestCase, self).setUp(ML2_PLUGIN)
self.port_create_status = 'DOWN'
@contextlib.contextmanager
def _patch_ncclient(self, attr, value):
"""Configure an attribute on the mock ncclient module.
This method can be used to inject errors by setting a side effect
or a return value for an ncclient method.
:param attr: ncclient attribute (typically method) to be configured.
:param value: Value to be configured on the attribute.
"""
# Configure attribute.
config = {attr: value}
self.mock_ncclient.configure_mock(**config)
# Continue testing
yield
# Unconfigure attribute
config = {attr: None}
self.mock_ncclient.configure_mock(**config)
@staticmethod
def _config_dependent_side_effect(match_config, exc):
"""Generates a config-dependent side effect for ncclient edit_config.
This method generates a mock side-effect function which can be
configured on the mock ncclient module for the edit_config method.
This side effect will cause a given exception to be raised whenever
the XML config string that is passed to edit_config contains all
words in a given match config string.
:param match_config: String containing keywords to be matched
:param exc: Exception to be raised when match is found
:return: Side effect function for the mock ncclient module's
edit_config method.
"""
keywords = match_config.split()
def _side_effect_function(target, config):
if all(word in config for word in keywords):
raise exc
return _side_effect_function
def _is_in_nexus_cfg(self, words):
"""Check if any config sent to Nexus contains all words in a list."""
for call in (self.mock_ncclient.connect.return_value.
edit_config.mock_calls):
configlet = call[2]['config']
if all(word in configlet for word in words):
return True
return False
def _is_in_last_nexus_cfg(self, words):
"""Confirm last config sent to Nexus contains specified keywords."""
last_cfg = (self.mock_ncclient.connect.return_value.
edit_config.mock_calls[-1][2]['config'])
return all(word in last_cfg for word in words)
def _is_vlan_configured(self, vlan_creation_expected=True,
add_keyword_expected=False):
vlan_created = self._is_in_nexus_cfg(['vlan', 'vlan-name'])
add_appears = self._is_in_last_nexus_cfg(['add'])
return (self._is_in_last_nexus_cfg(['allowed', 'vlan']) and
vlan_created == vlan_creation_expected and
add_appears == add_keyword_expected)
def _is_vlan_unconfigured(self, vlan_deletion_expected=True):
vlan_deleted = self._is_in_last_nexus_cfg(
['no', 'vlan', 'vlan-id-create-delete'])
return (self._is_in_nexus_cfg(['allowed', 'vlan', 'remove']) and
vlan_deleted == vlan_deletion_expected)
class TestCiscoBasicGet(CiscoML2MechanismTestCase,
test_db_plugin.TestBasicGet):
pass
class TestCiscoV2HTTPResponse(CiscoML2MechanismTestCase,
test_db_plugin.TestV2HTTPResponse):
pass
class TestCiscoPortsV2(CiscoML2MechanismTestCase,
test_db_plugin.TestPortsV2):
@contextlib.contextmanager
def _create_resources(self, name=NETWORK_NAME, cidr=CIDR_1,
device_id=DEVICE_ID_1,
host_id=COMP_HOST_NAME):
"""Create network, subnet, and port resources for test cases.
Create a network, subnet, port and then update the port, yield the
result, then delete the port, subnet and network.
:param name: Name of network to be created.
:param cidr: cidr address of subnetwork to be created.
:param device_id: Device ID to use for port to be created/updated.
:param host_id: Host ID to use for port create/update.
"""
with self.network(name=name) as network:
with self.subnet(network=network, cidr=cidr) as subnet:
with self.port(subnet=subnet, cidr=cidr) as port:
data = {'port': {portbindings.HOST_ID: host_id,
'device_id': device_id,
'device_owner': 'compute:none',
'admin_state_up': True}}
req = self.new_update_request('ports', data,
port['port']['id'])
yield req.get_response(self.api)
def _assertExpectedHTTP(self, status, exc):
"""Confirm that an HTTP status corresponds to an expected exception.
Confirm that an HTTP status which has been returned for an
neutron API request matches the HTTP status corresponding
to an expected exception.
:param status: HTTP status
:param exc: Expected exception
"""
if exc in base.FAULT_MAP:
expected_http = base.FAULT_MAP[exc].code
else:
expected_http = wexc.HTTPInternalServerError.code
self.assertEqual(status, expected_http)
def test_create_ports_bulk_emulated_plugin_failure(self):
real_has_attr = hasattr
#ensures the API chooses the emulation code path
def fakehasattr(item, attr):
if attr.endswith('__native_bulk_support'):
return False
return real_has_attr(item, attr)
with mock.patch('__builtin__.hasattr',
new=fakehasattr):
plugin_obj = manager.NeutronManager.get_plugin()
orig = plugin_obj.create_port
with mock.patch.object(plugin_obj,
'create_port') as patched_plugin:
def side_effect(*args, **kwargs):
return self._do_side_effect(patched_plugin, orig,
*args, **kwargs)
patched_plugin.side_effect = side_effect
with self.network() as net:
res = self._create_port_bulk(self.fmt, 2,
net['network']['id'],
'test',
True)
# Expect an internal server error as we injected a fault
self._validate_behavior_on_bulk_failure(
res,
'ports',
wexc.HTTPInternalServerError.code)
def test_create_ports_bulk_native(self):
if self._skip_native_bulk:
self.skipTest("Plugin does not support native bulk port create")
def test_create_ports_bulk_emulated(self):
if self._skip_native_bulk:
self.skipTest("Plugin does not support native bulk port create")
def test_create_ports_bulk_native_plugin_failure(self):
if self._skip_native_bulk:
self.skipTest("Plugin does not support native bulk port create")
ctx = context.get_admin_context()
with self.network() as net:
plugin_obj = manager.NeutronManager.get_plugin()
orig = plugin_obj.create_port
with mock.patch.object(plugin_obj,
'create_port') as patched_plugin:
def side_effect(*args, **kwargs):
return self._do_side_effect(patched_plugin, orig,
*args, **kwargs)
patched_plugin.side_effect = side_effect
res = self._create_port_bulk(self.fmt, 2, net['network']['id'],
'test', True, context=ctx)
# We expect an internal server error as we injected a fault
self._validate_behavior_on_bulk_failure(
res,
'ports',
wexc.HTTPInternalServerError.code)
def test_nexus_enable_vlan_cmd(self):
"""Verify the syntax of the command to enable a vlan on an intf.
Confirm that for the first VLAN configured on a Nexus interface,
the command string sent to the switch does not contain the
keyword 'add'.
Confirm that for the second VLAN configured on a Nexus interface,
the command string sent to the switch contains the keyword 'add'.
"""
# First vlan should be configured without 'add' keyword
with self._create_resources():
self.assertTrue(self._is_vlan_configured(
vlan_creation_expected=True,
add_keyword_expected=False))
self.mock_ncclient.reset_mock()
self.mock_bound_segment.return_value = BOUND_SEGMENT2
# Second vlan should be configured with 'add' keyword
with self._create_resources(name=NETWORK_NAME_2,
device_id=DEVICE_ID_2,
cidr=CIDR_2):
self.assertTrue(self._is_vlan_configured(
vlan_creation_expected=True,
add_keyword_expected=True))
# Return to first segment for delete port calls.
self.mock_bound_segment.return_value = BOUND_SEGMENT1
def test_nexus_add_trunk(self):
"""Verify syntax to enable a vlan on an interface.
Test also verifies that the vlan interface is not created.
Test of the following ml2_conf_cisco_ini config:
[ml2_mech_cisco_nexus:1.1.1.1]
hostA=1/1
hostB=1/2
where vlan_id = 100
Confirm that for the first host configured on a Nexus interface,
the command string sent to the switch does not contain the
keyword 'add'.
Confirm that for the second host configured on a Nexus interface,
        the command string sent to the switch does not contain
the keyword 'name' [signifies vlan intf creation].
"""
with self._create_resources(name='net1', cidr=CIDR_1):
self.assertTrue(self._is_in_last_nexus_cfg(['allowed', 'vlan']))
self.assertFalse(self._is_in_last_nexus_cfg(['add']))
with self._create_resources(name='net2',
cidr=CIDR_2, host_id=COMP_HOST_NAME_2):
self.assertTrue(
self._is_in_last_nexus_cfg(['allowed', 'vlan']))
self.assertFalse(self._is_in_last_nexus_cfg(['name']))
def test_nexus_connect_fail(self):
"""Test failure to connect to a Nexus switch.
While creating a network, subnet, and port, simulate a connection
failure to a nexus switch. Confirm that the expected HTTP code
is returned for the create port operation.
"""
with self._patch_ncclient('connect.side_effect',
AttributeError):
with self._create_resources() as result:
self._assertExpectedHTTP(result.status_int,
c_exc.NexusConnectFailed)
def test_nexus_vlan_config_two_hosts(self):
"""Verify config/unconfig of vlan on two compute hosts."""
@contextlib.contextmanager
def _create_port_check_vlan(comp_host_name, device_id,
vlan_creation_expected=True):
with self.port(subnet=subnet, fmt=self.fmt) as port:
data = {'port': {portbindings.HOST_ID: comp_host_name,
'device_id': device_id,
'device_owner': DEVICE_OWNER,
'admin_state_up': True}}
req = self.new_update_request('ports', data,
port['port']['id'])
req.get_response(self.api)
self.assertTrue(self._is_vlan_configured(
vlan_creation_expected=vlan_creation_expected,
add_keyword_expected=False))
self.mock_ncclient.reset_mock()
yield
# Create network and subnet
with self.network(name=NETWORK_NAME) as network:
with self.subnet(network=network, cidr=CIDR_1) as subnet:
# Create an instance on first compute host
with _create_port_check_vlan(COMP_HOST_NAME, DEVICE_ID_1,
vlan_creation_expected=True):
# Create an instance on second compute host
with _create_port_check_vlan(COMP_HOST_NAME_2, DEVICE_ID_2,
vlan_creation_expected=False):
pass
# Instance on second host is now terminated.
# Vlan should be untrunked from port, but vlan should
# still exist on the switch.
self.assertTrue(self._is_vlan_unconfigured(
vlan_deletion_expected=False))
self.mock_ncclient.reset_mock()
# Instance on first host is now terminated.
# Vlan should be untrunked from port and vlan should have
# been deleted from the switch.
self.assertTrue(self._is_vlan_unconfigured(
vlan_deletion_expected=True))
def test_nexus_vm_migration(self):
"""Verify VM (live) migration.
Simulate the following:
Nova informs neutron of live-migration with port-update(new host).
This should trigger two update_port_pre/postcommit() calls.
The first one should only change the current host_id and remove the
binding resulting in the mechanism drivers receiving:
PortContext.original['binding:host_id']: previous value
PortContext.original_bound_segment: previous value
PortContext.current['binding:host_id']: current (new) value
PortContext.bound_segment: None
The second one binds the new host resulting in the mechanism
drivers receiving:
PortContext.original['binding:host_id']: previous value
PortContext.original_bound_segment: None
PortContext.current['binding:host_id']: previous value
PortContext.bound_segment: new value
"""
# Create network, subnet and port.
with self._create_resources() as result:
# Verify initial database entry.
# Use port_id to verify that 1st host name was used.
binding = nexus_db_v2.get_nexusvm_bindings(VLAN_START,
DEVICE_ID_1)[0]
intf_type, nexus_port = binding.port_id.split(':')
self.assertEqual(nexus_port, NEXUS_INTERFACE)
port = self.deserialize(self.fmt, result)
port_id = port['port']['id']
# Trigger update event to unbind segment.
# Results in port being deleted from nexus DB and switch.
data = {'port': {portbindings.HOST_ID: COMP_HOST_NAME_2}}
self.mock_bound_segment.return_value = None
self.mock_original_bound_segment.return_value = BOUND_SEGMENT1
self.new_update_request('ports', data,
port_id).get_response(self.api)
# Verify that port entry has been deleted.
self.assertRaises(c_exc.NexusPortBindingNotFound,
nexus_db_v2.get_nexusvm_bindings,
VLAN_START, DEVICE_ID_1)
# Trigger update event to bind segment with new host.
self.mock_bound_segment.return_value = BOUND_SEGMENT1
self.mock_original_bound_segment.return_value = None
self.new_update_request('ports', data,
port_id).get_response(self.api)
# Verify that port entry has been added using new host name.
# Use port_id to verify that 2nd host name was used.
binding = nexus_db_v2.get_nexusvm_bindings(VLAN_START,
DEVICE_ID_1)[0]
intf_type, nexus_port = binding.port_id.split(':')
self.assertEqual(nexus_port, NEXUS_INTERFACE_2)
def test_nexus_config_fail(self):
"""Test a Nexus switch configuration failure.
While creating a network, subnet, and port, simulate a nexus
switch configuration error. Confirm that the expected HTTP code
is returned for the create port operation.
"""
with self._patch_ncclient(
'connect.return_value.edit_config.side_effect',
AttributeError):
with self._create_resources() as result:
self._assertExpectedHTTP(result.status_int,
c_exc.NexusConfigFailed)
def test_nexus_extended_vlan_range_failure(self):
"""Test that extended VLAN range config errors are ignored.
Some versions of Nexus switch do not allow state changes for
the extended VLAN range (1006-4094), but these errors can be
ignored (default values are appropriate). Test that such errors
are ignored by the Nexus plugin.
"""
def mock_edit_config_a(target, config):
if all(word in config for word in ['state', 'active']):
raise Exception("Can't modify state for extended")
with self._patch_ncclient(
'connect.return_value.edit_config.side_effect',
mock_edit_config_a):
with self._create_resources() as result:
self.assertEqual(result.status_int, wexc.HTTPOk.code)
def mock_edit_config_b(target, config):
if all(word in config for word in ['no', 'shutdown']):
raise Exception("Command is only allowed on VLAN")
with self._patch_ncclient(
'connect.return_value.edit_config.side_effect',
mock_edit_config_b):
with self._create_resources() as result:
self.assertEqual(result.status_int, wexc.HTTPOk.code)
def test_nexus_vlan_config_rollback(self):
"""Test rollback following Nexus VLAN state config failure.
Test that the Cisco Nexus plugin correctly deletes the VLAN
on the Nexus switch when the 'state active' command fails (for
a reason other than state configuration change is rejected
for the extended VLAN range).
"""
vlan_state_configs = ['state active', 'no shutdown']
for config in vlan_state_configs:
with self._patch_ncclient(
'connect.return_value.edit_config.side_effect',
self._config_dependent_side_effect(config, ValueError)):
with self._create_resources() as result:
# Confirm that the last configuration sent to the Nexus
# switch was deletion of the VLAN.
self.assertTrue(
self._is_in_last_nexus_cfg(['<no>', '<vlan>'])
)
self._assertExpectedHTTP(result.status_int,
c_exc.NexusConfigFailed)
def test_nexus_host_not_configured(self):
"""Test handling of a NexusComputeHostNotConfigured exception.
Test the Cisco NexusComputeHostNotConfigured exception by using
a fictitious host name during port creation.
"""
with self._create_resources(host_id='fake_host') as result:
self._assertExpectedHTTP(result.status_int,
c_exc.NexusComputeHostNotConfigured)
def test_nexus_missing_fields(self):
"""Test handling of a NexusMissingRequiredFields exception.
Test the Cisco NexusMissingRequiredFields exception by using
empty host_id and device_id values during port creation.
"""
with self._create_resources(device_id='', host_id='') as result:
self._assertExpectedHTTP(result.status_int,
c_exc.NexusMissingRequiredFields)
class TestCiscoNetworksV2(CiscoML2MechanismTestCase,
test_db_plugin.TestNetworksV2):
def test_create_networks_bulk_emulated_plugin_failure(self):
real_has_attr = hasattr
def fakehasattr(item, attr):
if attr.endswith('__native_bulk_support'):
return False
return real_has_attr(item, attr)
plugin_obj = manager.NeutronManager.get_plugin()
orig = plugin_obj.create_network
        #ensures the API chooses the emulation code path
with mock.patch('__builtin__.hasattr',
new=fakehasattr):
with mock.patch.object(plugin_obj,
'create_network') as patched_plugin:
def side_effect(*args, **kwargs):
return self._do_side_effect(patched_plugin, orig,
*args, **kwargs)
patched_plugin.side_effect = side_effect
res = self._create_network_bulk(self.fmt, 2, 'test', True)
LOG.debug("response is %s" % res)
# We expect an internal server error as we injected a fault
self._validate_behavior_on_bulk_failure(
res,
'networks',
wexc.HTTPInternalServerError.code)
def test_create_networks_bulk_native_plugin_failure(self):
if self._skip_native_bulk:
self.skipTest("Plugin does not support native bulk network create")
plugin_obj = manager.NeutronManager.get_plugin()
orig = plugin_obj.create_network
with mock.patch.object(plugin_obj,
'create_network') as patched_plugin:
def side_effect(*args, **kwargs):
return self._do_side_effect(patched_plugin, orig,
*args, **kwargs)
patched_plugin.side_effect = side_effect
res = self._create_network_bulk(self.fmt, 2, 'test', True)
# We expect an internal server error as we injected a fault
self._validate_behavior_on_bulk_failure(
res,
'networks',
wexc.HTTPInternalServerError.code)
class TestCiscoSubnetsV2(CiscoML2MechanismTestCase,
test_db_plugin.TestSubnetsV2):
def test_create_subnets_bulk_emulated_plugin_failure(self):
real_has_attr = hasattr
        #ensures the API chooses the emulation code path
def fakehasattr(item, attr):
if attr.endswith('__native_bulk_support'):
return False
return real_has_attr(item, attr)
with mock.patch('__builtin__.hasattr',
new=fakehasattr):
plugin_obj = manager.NeutronManager.get_plugin()
orig = plugin_obj.create_subnet
with mock.patch.object(plugin_obj,
'create_subnet') as patched_plugin:
def side_effect(*args, **kwargs):
self._do_side_effect(patched_plugin, orig,
*args, **kwargs)
patched_plugin.side_effect = side_effect
with self.network() as net:
res = self._create_subnet_bulk(self.fmt, 2,
net['network']['id'],
'test')
# We expect an internal server error as we injected a fault
self._validate_behavior_on_bulk_failure(
res,
'subnets',
wexc.HTTPInternalServerError.code)
def test_create_subnets_bulk_native_plugin_failure(self):
if self._skip_native_bulk:
self.skipTest("Plugin does not support native bulk subnet create")
plugin_obj = manager.NeutronManager.get_plugin()
orig = plugin_obj.create_subnet
with mock.patch.object(plugin_obj,
'create_subnet') as patched_plugin:
def side_effect(*args, **kwargs):
return self._do_side_effect(patched_plugin, orig,
*args, **kwargs)
patched_plugin.side_effect = side_effect
with self.network() as net:
res = self._create_subnet_bulk(self.fmt, 2,
net['network']['id'],
'test')
# We expect an internal server error as we injected a fault
self._validate_behavior_on_bulk_failure(
res,
'subnets',
wexc.HTTPInternalServerError.code)
class TestCiscoPortsV2XML(TestCiscoPortsV2):
fmt = 'xml'
class TestCiscoNetworksV2XML(TestCiscoNetworksV2):
fmt = 'xml'
class TestCiscoSubnetsV2XML(TestCiscoSubnetsV2):
fmt = 'xml'
| {
"content_hash": "0e70e41ebb4cd00d6245affe267d8229",
"timestamp": "",
"source": "github",
"line_count": 700,
"max_line_length": 79,
"avg_line_length": 42.90857142857143,
"alnum_prop": 0.5775402849913437,
"repo_name": "onecloud/neutron",
"id": "55717ebe2cc1820da24180dd857eb9b4d4efb81d",
"size": "30627",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "neutron/tests/unit/ml2/drivers/cisco/nexus/test_cisco_mech.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
"""Social network login test."""
from urllib.parse import urlparse, parse_qs
import transaction
from pyramid import testing
from mock import MagicMock
from velruse import AuthenticationComplete
from pyramid_fullauth.views.social import SocialLoginViews
from pyramid_fullauth.models import User
from tests.views.conftest import mock_translate
def test_social_login_link(social_app):
"""Login:Form displayed social form."""
res = social_app.get("/login")
assert "Connect with facebook</a>" in res.body.decode("unicode_escape")
def test_social_click_link(social_app):
"""Click social login link."""
res = social_app.get("/login/facebook?scope=email%2Coffline_access", status=302)
redirect = urlparse(res.headers["Location"])
query = parse_qs(redirect.query)
assert redirect.netloc == "www.facebook.com", "We should redirect user to facebook"
assert redirect.path == "/dialog/oauth/", "Path should be oauth"
assert "redirect_uri" in query
assert "scope" in query
assert query["redirect_uri"] == ["http://localhost/login/facebook/callback"]
assert query["scope"] == ["email,offline_access"]
def test_social_login_register(social_config, db_session):
"""Register fresh user and logs him in."""
profile = {
"accounts": [{"domain": "facebook.com", "userid": "2343"}],
"displayName": "teddy",
"verifiedEmail": "we@po.pl",
"preferredUsername": "teddy",
"emails": [{"value": "aasd@bwwqwe.pl"}],
"name": "ted",
}
credentials = {"oauthAccessToken": "7897048593434"}
provider_name = "facebook"
provider_type = "facebook"
request = testing.DummyRequest()
request.user = None
request.registry = social_config.registry
request.remote_addr = "127.0.0.123"
request.context = AuthenticationComplete(profile, credentials, provider_name, provider_type)
request.login_perform = MagicMock(name="login_perform")
request.login_perform.return_value = {"status": True}
view = SocialLoginViews(request)
out = view()
assert out == {"status": True}
transaction.commit()
# read first new account
user = db_session.query(User).one()
assert user.is_active
assert user.provider_id("facebook") == profile["accounts"][0]["userid"]
def test_login_different_social_account(social_config, db_session, facebook_user): # pylint:disable=unused-argument
"""
Login with different social account than connected from same provider.
System should let user in, but not change connection.
"""
# profile mock response
profile = {
# facebook user id is different than user's
"accounts": [{"domain": "facebook.com", "userid": "2343"}],
"displayName": "teddy",
"verifiedEmail": facebook_user.email,
"preferredUsername": "teddy",
"emails": [{"value": "aasd@bwwqwe.pl"}],
"name": "ted",
}
request = testing.DummyRequest()
request.user = None
request.registry = social_config.registry
request.remote_addr = "127.0.0.123"
request.context = AuthenticationComplete(
profile,
{"oauthAccessToken": "7897048593434"},
"facebook",
"facebook",
)
request.login_perform = MagicMock(name="login_perform")
request.login_perform.return_value = {"status": True}
view = SocialLoginViews(request)
out = view()
# user should be authenticated recognized by email!
assert out["status"] is True
assert facebook_user.provider_id("facebook") is not profile["accounts"][0]["userid"]
def test_login_social_connect(social_config, active_user, db_session):
"""Connect and logs user in."""
user = db_session.merge(active_user)
profile = {
"accounts": [{"domain": "facebook.com", "userid": "2343"}],
"displayName": "teddy",
"preferredUsername": "teddy",
"emails": [{"value": user.email}],
"name": "ted",
}
credentials = {"oauthAccessToken": "7897048593434"}
provider_name = "facebook"
provider_type = "facebook"
user = None
request = testing.DummyRequest()
request.user = user
request.registry = social_config.registry
request.remote_addr = "127.0.0.123"
request.context = AuthenticationComplete(profile, credentials, provider_name, provider_type)
request.login_perform = MagicMock(name="login_perform")
request.login_perform.return_value = {"status": True}
view = SocialLoginViews(request)
out = view()
assert out == {"status": True}
def test_logged_social_connect_account(social_config, active_user, db_session):
"""Connect facebook account to logged in user."""
user = db_session.merge(active_user)
profile = {
"accounts": [{"domain": "facebook.com", "userid": "2343"}],
"displayName": "teddy",
"preferredUsername": "teddy",
"emails": [{"value": "aasd@basd.pl"}],
"name": "ted",
}
credentials = {"oauthAccessToken": "7897048593434"}
provider_name = "facebook"
provider_type = "facebook"
request = testing.DummyRequest()
request.user = user
request.registry = social_config.registry
request.remote_addr = "127.0.0.123"
request.context = AuthenticationComplete(profile, credentials, provider_name, provider_type)
request._ = mock_translate
request.login_perform = MagicMock(name="login_perform")
request.login_perform.return_value = {"status": True}
view = SocialLoginViews(request)
out = view()
assert out["status"] is True
transaction.commit()
user = db_session.merge(user)
assert user.provider_id("facebook") == profile["accounts"][0]["userid"]
def test_logged_social_connect_self(social_config, facebook_user, db_session):
"""Connect self."""
user = db_session.merge(facebook_user)
profile = {
"accounts": [
{
"domain": "facebook.com",
"userid": user.provider_id("facebook"),
}
],
"displayName": "teddy",
"preferredUsername": "teddy",
"emails": [{"value": user.email}],
"name": "ted",
}
credentials = {"oauthAccessToken": "7897048593434"}
provider_name = "facebook"
provider_type = "facebook"
request = testing.DummyRequest()
request.user = user
request.registry = social_config.registry
request.remote_addr = "127.0.0.123"
request.context = AuthenticationComplete(profile, credentials, provider_name, provider_type)
request._ = mock_translate
request.login_perform = MagicMock(name="login_perform")
request.login_perform.return_value = {"status": True}
view = SocialLoginViews(request)
out = view()
assert out["status"] is True
user = db_session.merge(facebook_user)
assert user.provider_id("facebook") == profile["accounts"][0]["userid"]
def test_logged_social_connect_second_account(social_config, facebook_user, db_session):
"""Connect second facebook account to logged in user."""
user = db_session.merge(facebook_user)
# mock request
profile = {
"accounts": [{"domain": "facebook.com", "userid": "2343"}],
"displayName": "teddy",
"preferredUsername": "teddy",
"emails": [{"value": "aasd@basd.pl"}],
"name": "ted",
}
credentials = {"oauthAccessToken": "7897048593434"}
provider_name = "facebook"
provider_type = "facebook"
request = testing.DummyRequest()
request.user = user
request.registry = social_config.registry
request.remote_addr = "127.0.0.123"
request.context = AuthenticationComplete(profile, credentials, provider_name, provider_type)
request._ = mock_translate
request.login_perform = MagicMock(name="login_perform")
request.login_perform.return_value = {"status": True}
view = SocialLoginViews(request)
out = view()
# status should be false
assert out["status"] is False
assert out["msg"] == "Your account is already connected to other ${provider} account."
assert user.provider_id("facebook") is not profile["accounts"][0]["userid"]
def test_logged_social_connect_used_account(social_config, facebook_user, db_session):
"""Try to connect facebook account to logged in user used by other user."""
# this user will be logged and trying to connect facebook's user account.
fresh_user = User(
email="new@user.pl",
password="somepassword",
address_ip="127.0.0.1",
)
db_session.add(fresh_user)
transaction.commit()
user = db_session.merge(facebook_user)
fresh_user = db_session.merge(fresh_user)
# mock request
profile = {
"accounts": [
{
"domain": "facebook.com",
"userid": user.provider_id("facebook"),
}
],
"displayName": "teddy",
"preferredUsername": "teddy",
"emails": [{"value": "aasd@basd.pl"}],
"name": "ted",
}
credentials = {"oauthAccessToken": "7897048593434"}
provider_name = "facebook"
provider_type = "facebook"
request = testing.DummyRequest()
request.user = fresh_user
request.registry = social_config.registry
request.remote_addr = "127.0.0.123"
request.context = AuthenticationComplete(profile, credentials, provider_name, provider_type)
request._ = mock_translate
request.login_perform = MagicMock(name="login_perform")
request.login_perform.return_value = {"status": True}
# call!
view = SocialLoginViews(request)
out = view()
# status should be false
assert out["status"] is False
assert out["msg"] == "This ${provider} account is already connected with other account."
transaction.begin()
fresh_user = db_session.merge(fresh_user)
assert fresh_user.provider_id("facebook") is None
| {
"content_hash": "53793264723ae8c8c1f3de87259c20c6",
"timestamp": "",
"source": "github",
"line_count": 277,
"max_line_length": 116,
"avg_line_length": 35.47653429602888,
"alnum_prop": 0.6487229062786202,
"repo_name": "fizyk/pyramid_fullauth",
"id": "ef12a6df2e37c25188bd52001e3a1798543a9856",
"size": "9827",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/views/test_login_social.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Mako",
"bytes": "13837"
},
{
"name": "Python",
"bytes": "146592"
}
],
"symlink_target": ""
} |
import os
import gzip
import logging
import logging.handlers
class CompressedRotatingFileHandler(logging.handlers.RotatingFileHandler):
""" compress old files
from http://roadtodistributed.blogspot.com/2011/04/compressed-rotatingfilehandler-for.html
"""
def __init__(self, filename, mode='a', maxBytes=0, backupCount=0, encoding=None, delay=0):
super(CompressedRotatingFileHandler, self).__init__(filename, mode, maxBytes, backupCount, encoding, delay)
def doRollover(self):
self.stream.close()
if self.backupCount > 0:
for i in range(self.backupCount - 1, 0, -1):
sfn = "%s.%d.gz" % (self.baseFilename, i)
dfn = "%s.%d.gz" % (self.baseFilename, i + 1)
if os.path.exists(sfn):
# print "%s -> %s" % (sfn, dfn)
if os.path.exists(dfn):
os.remove(dfn)
os.rename(sfn, dfn)
dfn = self.baseFilename + ".1.gz"
if os.path.exists(dfn):
os.remove(dfn)
f_in = f_out = None
try:
f_in = open(self.baseFilename, 'rb')
f_out = gzip.open(dfn, 'wb')
f_out.writelines(f_in)
except Exception:
if not os.path.exists(dfn):
if os.path.exists(self.baseFilename):
os.rename(self.baseFilename, dfn)
finally:
if f_out is not None:
f_out.close()
if f_in is not None:
f_in.close()
if os.path.exists(self.baseFilename):
os.remove(self.baseFilename)
# os.rename(self.baseFilename, dfn)
# print "%s -> %s" % (self.baseFilename, dfn)
self.mode = 'w'
self.stream = self._open()
logger = logging.getLogger('yas3fs')
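# --- Illustrative usage sketch (not part of the original module) ---
# Attaches the compressed rotating handler defined above to the module-level
# 'yas3fs' logger. The file name, size limit and backup count are arbitrary
# example values chosen to force a few rollovers.
if __name__ == '__main__':
    handler = CompressedRotatingFileHandler('yas3fs-demo.log', maxBytes=1024, backupCount=3)
    handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
    logger.setLevel(logging.INFO)
    logger.addHandler(handler)
    for i in range(200):
        logger.info('demo log line %d', i)  # rolls over to .1.gz, .2.gz, ...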
| {
"content_hash": "7bd4e2770b763657af1e511ae843ee7e",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 115,
"avg_line_length": 36.924528301886795,
"alnum_prop": 0.5074092999489014,
"repo_name": "Apkawa/yas3fs",
"id": "b3fb3baf2e11d8acfff569cfc13a87ecc6bb90f2",
"size": "1981",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "yas3fs/log.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "155147"
}
],
"symlink_target": ""
} |
from django.test import TestCase
from django.core.exceptions import ValidationError
from oscar.apps.catalogue.models import (Product, ProductClass,
ProductAttribute,
AttributeOptionGroup,
AttributeOption)
class ProductTests(TestCase):
def setUp(self):
self.product_class,_ = ProductClass.objects.get_or_create(name='Clothing')
class ProductCreationTests(ProductTests):
def setUp(self):
super(ProductCreationTests, self).setUp()
ProductAttribute.objects.create(product_class=self.product_class,
name='Number of pages',
code='num_pages',
type='integer')
Product.ENABLE_ATTRIBUTE_BINDING = True
def tearDown(self):
Product.ENABLE_ATTRIBUTE_BINDING = False
def test_create_products_with_attributes(self):
product = Product(upc='1234',
product_class=self.product_class,
title='testing')
product.attr.num_pages = 100
product.save()
class TopLevelProductTests(ProductTests):
def test_top_level_products_must_have_titles(self):
self.assertRaises(ValidationError, Product.objects.create, product_class=self.product_class)
class VariantProductTests(ProductTests):
def setUp(self):
super(VariantProductTests, self).setUp()
self.parent = Product.objects.create(title="Parent product", product_class=self.product_class)
def test_variant_products_dont_need_titles(self):
Product.objects.create(parent=self.parent, product_class=self.product_class)
def test_variant_products_dont_need_a_product_class(self):
Product.objects.create(parent=self.parent)
def test_variant_products_inherit_parent_titles(self):
p = Product.objects.create(parent=self.parent, product_class=self.product_class)
self.assertEquals("Parent product", p.get_title())
def test_variant_products_inherit_product_class(self):
p = Product.objects.create(parent=self.parent)
self.assertEquals("Clothing", p.get_product_class().name)
class ProductAttributeCreationTests(TestCase):
def setUp(self):
self.product_class,_ = ProductClass.objects.get_or_create(
name='Clothing'
)
self.option_group = AttributeOptionGroup.objects.create(name='group')
self.option_1 = AttributeOption.objects.create(group=self.option_group, option='first')
self.option_2 = AttributeOption.objects.create(group=self.option_group, option='second')
def test_validating_option_attribute(self):
pa = ProductAttribute.objects.create(product_class=self.product_class,
name='test group',
code='test_group',
type='option',
option_group=self.option_group)
self.assertRaises(ValidationError, pa.get_validator(), 'invalid')
try:
pa.get_validator()(self.option_1)
except ValidationError:
self.fail("valid option '%s' not validated" % self.option_1)
try:
pa.get_validator()(self.option_2)
except ValidationError:
self.fail("valid option '%s' not validated" % self.option_1)
invalid_option = AttributeOption()
invalid_option.option = 'invalid option'
self.assertRaises(ValidationError, pa.get_validator(),
invalid_option)
| {
"content_hash": "d087ce3938ad2d0df9d25dc74d3af8db",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 102,
"avg_line_length": 38.31958762886598,
"alnum_prop": 0.6088243206887275,
"repo_name": "Idematica/django-oscar",
"id": "453f21e50332594e65db8ebc82022d433f34c15a",
"size": "3717",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "tests/integration/catalogue/product_tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1099824"
},
{
"name": "JavaScript",
"bytes": "818932"
},
{
"name": "Puppet",
"bytes": "3507"
},
{
"name": "Python",
"bytes": "4079718"
},
{
"name": "Shell",
"bytes": "5760"
},
{
"name": "XSLT",
"bytes": "49764"
}
],
"symlink_target": ""
} |
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V2alpha1CronJobSpec(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'concurrency_policy': 'str',
'failed_jobs_history_limit': 'int',
'job_template': 'V2alpha1JobTemplateSpec',
'schedule': 'str',
'starting_deadline_seconds': 'int',
'successful_jobs_history_limit': 'int',
'suspend': 'bool'
}
attribute_map = {
'concurrency_policy': 'concurrencyPolicy',
'failed_jobs_history_limit': 'failedJobsHistoryLimit',
'job_template': 'jobTemplate',
'schedule': 'schedule',
'starting_deadline_seconds': 'startingDeadlineSeconds',
'successful_jobs_history_limit': 'successfulJobsHistoryLimit',
'suspend': 'suspend'
}
def __init__(self, concurrency_policy=None, failed_jobs_history_limit=None, job_template=None, schedule=None, starting_deadline_seconds=None, successful_jobs_history_limit=None, suspend=None):
"""
V2alpha1CronJobSpec - a model defined in Swagger
"""
self._concurrency_policy = None
self._failed_jobs_history_limit = None
self._job_template = None
self._schedule = None
self._starting_deadline_seconds = None
self._successful_jobs_history_limit = None
self._suspend = None
self.discriminator = None
if concurrency_policy is not None:
self.concurrency_policy = concurrency_policy
if failed_jobs_history_limit is not None:
self.failed_jobs_history_limit = failed_jobs_history_limit
self.job_template = job_template
self.schedule = schedule
if starting_deadline_seconds is not None:
self.starting_deadline_seconds = starting_deadline_seconds
if successful_jobs_history_limit is not None:
self.successful_jobs_history_limit = successful_jobs_history_limit
if suspend is not None:
self.suspend = suspend
@property
def concurrency_policy(self):
"""
Gets the concurrency_policy of this V2alpha1CronJobSpec.
Specifies how to treat concurrent executions of a Job. Defaults to Allow.
:return: The concurrency_policy of this V2alpha1CronJobSpec.
:rtype: str
"""
return self._concurrency_policy
@concurrency_policy.setter
def concurrency_policy(self, concurrency_policy):
"""
Sets the concurrency_policy of this V2alpha1CronJobSpec.
Specifies how to treat concurrent executions of a Job. Defaults to Allow.
:param concurrency_policy: The concurrency_policy of this V2alpha1CronJobSpec.
:type: str
"""
self._concurrency_policy = concurrency_policy
@property
def failed_jobs_history_limit(self):
"""
Gets the failed_jobs_history_limit of this V2alpha1CronJobSpec.
The number of failed finished jobs to retain. This is a pointer to distinguish between explicit zero and not specified.
:return: The failed_jobs_history_limit of this V2alpha1CronJobSpec.
:rtype: int
"""
return self._failed_jobs_history_limit
@failed_jobs_history_limit.setter
def failed_jobs_history_limit(self, failed_jobs_history_limit):
"""
Sets the failed_jobs_history_limit of this V2alpha1CronJobSpec.
The number of failed finished jobs to retain. This is a pointer to distinguish between explicit zero and not specified.
:param failed_jobs_history_limit: The failed_jobs_history_limit of this V2alpha1CronJobSpec.
:type: int
"""
self._failed_jobs_history_limit = failed_jobs_history_limit
@property
def job_template(self):
"""
Gets the job_template of this V2alpha1CronJobSpec.
Specifies the job that will be created when executing a CronJob.
:return: The job_template of this V2alpha1CronJobSpec.
:rtype: V2alpha1JobTemplateSpec
"""
return self._job_template
@job_template.setter
def job_template(self, job_template):
"""
Sets the job_template of this V2alpha1CronJobSpec.
Specifies the job that will be created when executing a CronJob.
:param job_template: The job_template of this V2alpha1CronJobSpec.
:type: V2alpha1JobTemplateSpec
"""
if job_template is None:
raise ValueError("Invalid value for `job_template`, must not be `None`")
self._job_template = job_template
@property
def schedule(self):
"""
Gets the schedule of this V2alpha1CronJobSpec.
The schedule in Cron format, see https://en.wikipedia.org/wiki/Cron.
:return: The schedule of this V2alpha1CronJobSpec.
:rtype: str
"""
return self._schedule
@schedule.setter
def schedule(self, schedule):
"""
Sets the schedule of this V2alpha1CronJobSpec.
The schedule in Cron format, see https://en.wikipedia.org/wiki/Cron.
:param schedule: The schedule of this V2alpha1CronJobSpec.
:type: str
"""
if schedule is None:
raise ValueError("Invalid value for `schedule`, must not be `None`")
self._schedule = schedule
@property
def starting_deadline_seconds(self):
"""
Gets the starting_deadline_seconds of this V2alpha1CronJobSpec.
Optional deadline in seconds for starting the job if it misses scheduled time for any reason. Missed jobs executions will be counted as failed ones.
:return: The starting_deadline_seconds of this V2alpha1CronJobSpec.
:rtype: int
"""
return self._starting_deadline_seconds
@starting_deadline_seconds.setter
def starting_deadline_seconds(self, starting_deadline_seconds):
"""
Sets the starting_deadline_seconds of this V2alpha1CronJobSpec.
Optional deadline in seconds for starting the job if it misses scheduled time for any reason. Missed jobs executions will be counted as failed ones.
:param starting_deadline_seconds: The starting_deadline_seconds of this V2alpha1CronJobSpec.
:type: int
"""
self._starting_deadline_seconds = starting_deadline_seconds
@property
def successful_jobs_history_limit(self):
"""
Gets the successful_jobs_history_limit of this V2alpha1CronJobSpec.
The number of successful finished jobs to retain. This is a pointer to distinguish between explicit zero and not specified.
:return: The successful_jobs_history_limit of this V2alpha1CronJobSpec.
:rtype: int
"""
return self._successful_jobs_history_limit
@successful_jobs_history_limit.setter
def successful_jobs_history_limit(self, successful_jobs_history_limit):
"""
Sets the successful_jobs_history_limit of this V2alpha1CronJobSpec.
The number of successful finished jobs to retain. This is a pointer to distinguish between explicit zero and not specified.
:param successful_jobs_history_limit: The successful_jobs_history_limit of this V2alpha1CronJobSpec.
:type: int
"""
self._successful_jobs_history_limit = successful_jobs_history_limit
@property
def suspend(self):
"""
Gets the suspend of this V2alpha1CronJobSpec.
This flag tells the controller to suspend subsequent executions, it does not apply to already started executions. Defaults to false.
:return: The suspend of this V2alpha1CronJobSpec.
:rtype: bool
"""
return self._suspend
@suspend.setter
def suspend(self, suspend):
"""
Sets the suspend of this V2alpha1CronJobSpec.
This flag tells the controller to suspend subsequent executions, it does not apply to already started executions. Defaults to false.
:param suspend: The suspend of this V2alpha1CronJobSpec.
:type: bool
"""
self._suspend = suspend
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V2alpha1CronJobSpec):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
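# --- Illustrative usage sketch (not part of the generated module) ---
# The generated setters above enforce the two required fields (`schedule` and
# `job_template`); a real spec needs a V2alpha1JobTemplateSpec instance for
# `job_template`, so the sketch below only demonstrates that validation.
if __name__ == '__main__':
    try:
        V2alpha1CronJobSpec(schedule='*/5 * * * *', job_template=None)
    except ValueError as exc:
        print('required-field validation: %s' % exc)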
| {
"content_hash": "34dad851b6400692690eb8f1bad9fb92",
"timestamp": "",
"source": "github",
"line_count": 294,
"max_line_length": 196,
"avg_line_length": 34.51700680272109,
"alnum_prop": 0.6310603074497438,
"repo_name": "mbohlool/client-python",
"id": "55652a4e4074a32d9db8fcb7f598afdd3efe514f",
"size": "10165",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kubernetes/client/models/v2alpha1_cron_job_spec.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "8417639"
},
{
"name": "Shell",
"bytes": "16830"
}
],
"symlink_target": ""
} |
"""Part of the Keras training engine related to plain array data.
"""
# pylint: disable=protected-access
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import numpy as np
from tensorflow.python.eager import context
from tensorflow.python.framework import errors
from tensorflow.python.keras import backend as K
from tensorflow.python.keras import callbacks as cbks
from tensorflow.python.keras.engine import training_distributed
from tensorflow.python.keras.engine import training_utils
from tensorflow.python.keras.utils.generic_utils import make_batches
from tensorflow.python.keras.utils.generic_utils import slice_arrays
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training.mode_keys import ModeKeys
try:
from scipy.sparse import issparse # pylint: disable=g-import-not-at-top
except ImportError:
issparse = None
def _get_model_feed(model, mode):
if mode == ModeKeys.PREDICT:
feed = model._feed_inputs
else:
feed = (
model._feed_inputs + model._feed_targets + model._feed_sample_weights)
return feed
def _validate_arguments(steps_per_epoch, validation_steps, kwargs):
for k in kwargs:
if k != 'steps':
raise ValueError('Invalid argument passed: {}'.format(k))
# Validate inputs when in training mode.
if validation_steps and steps_per_epoch is None:
raise ValueError('Can only use `validation_steps` '
'when doing step-wise '
'training, i.e. `steps_per_epoch` '
'must be set.')
def _print_train_info(inputs, val_inputs, steps_per_epoch, verbose):
if (val_inputs and steps_per_epoch is None and verbose and inputs and
hasattr(inputs[0], 'shape') and hasattr(val_inputs[0], 'shape')):
print('Train on %d samples, validate on %d samples' %
(inputs[0].shape[0], val_inputs[0].shape[0]))
def _get_num_samples_or_steps(ins, batch_size, steps_per_epoch):
"""Returns total number of samples (when training in batch mode) or steps."""
if steps_per_epoch:
return steps_per_epoch
return training_utils.check_num_samples(ins, batch_size, steps_per_epoch,
'steps_per_epoch')
def _prepare_feed_values(model, inputs, targets, sample_weights, mode):
"""Prepare feed values to the model execution function.
Arguments:
model: Model to prepare feed values for.
inputs: List or dict of model inputs.
targets: Optional list of model targets.
sample_weights: Optional list of sample weight arrays.
mode: One of ModeKeys.TRAIN/ModeKeys.TEST/ModeKeys.PREDICT.
Returns:
Feed values for the model in the given mode.
"""
if model._distribution_strategy:
def get_distributed_inputs():
return training_distributed._prepare_feed_values(
model, inputs, targets, sample_weights, mode)
# In the eager case, we want to call the input method per step, so return
# a lambda from here that can be called. Note that this is applicable only
# in Distribution Strategy case as it follows the same code path for both
# eager and graph modes.
# TODO(priyag,omalleyt): Either we should move the training DS with
# EagerIterator to use training_generator code path, or figure out how to
# set a symbolic Iterator out of a Dataset when in eager mode.
if context.executing_eagerly():
return get_distributed_inputs
else:
return get_distributed_inputs()
inputs = training_utils.ModelInputs(inputs).as_list()
targets = targets or []
sample_weights = sample_weights or []
ins = inputs + targets + sample_weights
if mode == ModeKeys.TRAIN and not isinstance(K.symbolic_learning_phase(),
int):
ins += [True]
return ins
def _make_execution_function(model, mode):
"""Makes function to run one step of model execution."""
if model._distribution_strategy:
return training_distributed._make_execution_function(model, mode)
return model._make_execution_function(mode)
def model_iteration(model,
inputs,
targets=None,
sample_weights=None,
batch_size=None,
epochs=1,
verbose=1,
callbacks=None,
val_inputs=None,
val_targets=None,
val_sample_weights=None,
shuffle=True,
initial_epoch=0,
steps_per_epoch=None,
validation_steps=None,
mode=ModeKeys.TRAIN,
validation_in_fit=False,
**kwargs):
"""Loop function for arrays of data with modes TRAIN/TEST/PREDICT.
Arguments:
model: Keras Model instance.
inputs: Either a list of arrays or a dictionary.
targets: List of target arrays.
sample_weights: Optional list of sample weight arrays.
batch_size: Integer batch size or None if unknown.
epochs: Number of times to iterate over the data
verbose: Verbosity mode, 0, 1 or 2
callbacks: List of callbacks to be called during training
val_inputs: List of input arrays.
val_targets: List of target arrays.
val_sample_weights: Optional list of sample weight arrays.
    shuffle: Whether to shuffle the data at the beginning of each epoch.
initial_epoch: Epoch at which to start training (useful for resuming a
previous training run)
steps_per_epoch: Total number of steps (batches of samples) before
declaring one epoch finished and starting the next epoch. Ignored with
the default value of `None`.
validation_steps: Number of steps to run validation for (only if doing
validation from data tensors). Ignored with the default value of `None`.
mode: One of ModeKeys.TRAIN/ModeKeys.TEST/ModeKeys.PREDICT.
validation_in_fit: DEPRECATED: if true, then this method is invoked from
within training iteration (for validation). In this case, do not copy
weights when using a tf.distribute.Strategy. The input is deprecated as
it is not required if the user creates a distributed model under the
distribution strategy scope rather than passing it to compile.
**kwargs: Additional arguments for backwards compatibility.
Returns:
- In TRAIN mode: `History` object.
- In TEST mode: Evaluation metrics.
- In PREDICT mode: Outputs of the Model called on inputs.
Raises:
ValueError: in case of invalid arguments.
"""
# Backwards compatibility.
if 'steps' in kwargs:
steps_per_epoch = kwargs['steps']
_validate_arguments(steps_per_epoch, validation_steps, kwargs)
if mode == ModeKeys.TRAIN:
_print_train_info(inputs, val_inputs, steps_per_epoch, verbose)
# Enter DistributionStrategy scope.
if model._distribution_strategy:
scope = model._distribution_strategy.scope()
scope.__enter__()
# Get step function and loop type.
f = _make_execution_function(model, mode)
use_steps = steps_per_epoch is not None
do_validation = val_inputs is not None
# Prepare input data.
ins = _prepare_feed_values(model, inputs, targets, sample_weights, mode)
num_samples_or_steps = _get_num_samples_or_steps(ins, batch_size,
steps_per_epoch)
# Configure callbacks.
count_mode = 'steps' if use_steps else 'samples'
callbacks = cbks.configure_callbacks(
callbacks,
model,
do_validation=do_validation,
batch_size=batch_size,
epochs=epochs,
steps_per_epoch=steps_per_epoch,
samples=num_samples_or_steps,
verbose=0, # Handle ProgBarLogger separately in this loop.
mode=mode)
# TODO(omalleyt): Handle ProgBar as part of Callbacks once hooks are ready.
progbar = training_utils.get_progbar(model, count_mode)
progbar.params = callbacks.params
progbar.params['verbose'] = verbose
# Find beforehand arrays that need sparse-to-dense conversion.
if issparse is not None and not use_steps:
indices_for_conversion_to_dense = []
feed = _get_model_feed(model, mode)
for i, (input_data, feed_tensor) in enumerate(zip(ins, feed)):
if issparse(input_data) and not K.is_sparse(feed_tensor):
indices_for_conversion_to_dense.append(i)
# Select aggregation method.
if mode == ModeKeys.PREDICT:
aggregator = training_utils.OutputsAggregator(use_steps,
num_samples_or_steps)
else:
aggregator = training_utils.MetricsAggregator(use_steps,
num_samples_or_steps)
if model._compile_distribution and not validation_in_fit:
training_distributed._copy_weights_to_distributed_model(
model, model._distributed_model)
callbacks.model.stop_training = False
callbacks._call_begin_hook(mode)
progbar.on_train_begin()
for epoch in range(initial_epoch, epochs):
if callbacks.model.stop_training:
break
# Setup work for each epoch
epoch_logs = {}
model.reset_metrics()
if mode == ModeKeys.TRAIN:
callbacks.on_epoch_begin(epoch, epoch_logs)
progbar.on_epoch_begin(epoch, epoch_logs)
if use_steps:
# Step-wise loop.
for step in range(steps_per_epoch):
batch_logs = {'batch': step, 'size': 1}
callbacks._call_batch_hook(mode, 'begin', step, batch_logs)
progbar.on_batch_begin(step, batch_logs)
# Get outputs.
try:
# `ins` can be callable in DistributionStrategy + eager case.
actual_inputs = ins() if callable(ins) else ins
batch_outs = f(actual_inputs)
except errors.OutOfRangeError:
logging.warning('Your dataset iterator ran out of data; '
'interrupting training. Make sure that your dataset '
'can generate at least `steps_per_epoch * epochs` '
                          'batches (in this case, %d batches). You may need to '
                          'use the repeat() function when building your '
                          'dataset.' % (steps_per_epoch * epochs))
break
if not isinstance(batch_outs, list):
batch_outs = [batch_outs]
if model._distribution_strategy:
batch_outs = training_distributed._per_device_aggregate_batch(
batch_outs, model, mode)
# Aggregate results.
if step == 0:
aggregator.create(batch_outs)
aggregator.aggregate(batch_outs)
# Callbacks batch end.
batch_logs = cbks.make_logs(model, batch_logs, batch_outs, mode)
callbacks._call_batch_hook(mode, 'end', step, batch_logs)
progbar.on_batch_end(step, batch_logs)
if callbacks.model.stop_training:
break
else:
# Sample-wise loop.
index_array = np.arange(num_samples_or_steps)
if shuffle == 'batch':
index_array = training_utils.batch_shuffle(index_array, batch_size)
elif shuffle:
np.random.shuffle(index_array)
batches = make_batches(num_samples_or_steps, batch_size)
for batch_index, (batch_start, batch_end) in enumerate(batches):
batch_ids = index_array[batch_start:batch_end]
# Slice into a batch.
try:
if ins and isinstance(ins[-1], int):
# Do not slice the training phase flag.
ins_batch = slice_arrays(ins[:-1], batch_ids) + [ins[-1]]
else:
ins_batch = slice_arrays(ins, batch_ids)
except TypeError:
raise TypeError('TypeError while preparing batch. '
'If using HDF5 input data, '
'pass shuffle="batch".')
# Sparse to dense conversion.
if issparse is not None:
for i in indices_for_conversion_to_dense:
ins_batch[i] = ins_batch[i].toarray()
# Callbacks batch_begin.
batch_logs = {'batch': batch_index, 'size': len(batch_ids)}
callbacks._call_batch_hook(mode, 'begin', batch_index, batch_logs)
progbar.on_batch_begin(batch_index, batch_logs)
# Get outputs.
batch_outs = f(ins_batch)
if not isinstance(batch_outs, list):
batch_outs = [batch_outs]
# Aggregate results.
if batch_index == 0:
aggregator.create(batch_outs)
aggregator.aggregate(batch_outs, batch_start, batch_end)
# Callbacks batch end.
batch_logs = cbks.make_logs(model, batch_logs, batch_outs, mode)
callbacks._call_batch_hook(mode, 'end', batch_index, batch_logs)
progbar.on_batch_end(batch_index, batch_logs)
if callbacks.model.stop_training:
break
aggregator.finalize()
results = aggregator.results
epoch_logs = cbks.make_logs(model, epoch_logs, results, mode)
if len(results) == 1:
results = results[0]
# Run the test loop every epoch during training.
if do_validation and not callbacks.model.stop_training:
val_results = model_iteration(
model,
val_inputs,
targets=val_targets,
sample_weights=val_sample_weights,
batch_size=batch_size,
steps_per_epoch=validation_steps,
callbacks=callbacks,
verbose=0,
mode=ModeKeys.TEST,
validation_in_fit=True)
if not isinstance(val_results, list):
val_results = [val_results]
epoch_logs = cbks.make_logs(
model, epoch_logs, val_results, mode, prefix='val_')
if mode == ModeKeys.TRAIN:
# Epochs only apply to `fit`.
callbacks.on_epoch_end(epoch, epoch_logs)
progbar.on_epoch_end(epoch, epoch_logs)
callbacks._call_end_hook(mode)
if model._distribution_strategy:
if model._compile_distribution and not validation_in_fit:
# TODO(priyag, psv): Copy back metrics to the original model as well?
training_distributed._copy_weights_to_original_model(
model, model._distributed_model, mode)
scope.__exit__(None, None, None)
if mode == ModeKeys.TRAIN:
return model.history
return results
# For backwards compatibility for internal users of these loops.
fit_loop = functools.partial(model_iteration, mode=ModeKeys.TRAIN)
test_loop = functools.partial(
model_iteration, mode=ModeKeys.TEST, shuffle=False)
predict_loop = functools.partial(
model_iteration, mode=ModeKeys.PREDICT, shuffle=False)
| {
"content_hash": "e80d0b27fedc046ffc938cc6855489ec",
"timestamp": "",
"source": "github",
"line_count": 385,
"max_line_length": 80,
"avg_line_length": 38.25974025974026,
"alnum_prop": 0.6470468431771894,
"repo_name": "Bismarrck/tensorflow",
"id": "e4f91fa18a75d0783d625f4ff572ef9922bcde61",
"size": "15419",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow/python/keras/engine/training_arrays.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "4882"
},
{
"name": "Batchfile",
"bytes": "10132"
},
{
"name": "C",
"bytes": "493885"
},
{
"name": "C#",
"bytes": "8446"
},
{
"name": "C++",
"bytes": "53117668"
},
{
"name": "CMake",
"bytes": "207176"
},
{
"name": "Dockerfile",
"bytes": "39024"
},
{
"name": "Go",
"bytes": "1303624"
},
{
"name": "HTML",
"bytes": "4680032"
},
{
"name": "Java",
"bytes": "893928"
},
{
"name": "Jupyter Notebook",
"bytes": "2657814"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "68402"
},
{
"name": "Objective-C",
"bytes": "16140"
},
{
"name": "Objective-C++",
"bytes": "102511"
},
{
"name": "PHP",
"bytes": "5172"
},
{
"name": "Pascal",
"bytes": "221"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "PureBasic",
"bytes": "25356"
},
{
"name": "Python",
"bytes": "43480067"
},
{
"name": "RobotFramework",
"bytes": "891"
},
{
"name": "Ruby",
"bytes": "838"
},
{
"name": "Shell",
"bytes": "497472"
},
{
"name": "Smarty",
"bytes": "6976"
}
],
"symlink_target": ""
} |
import collections
import logging
from typing import Optional
import tensorflow as tf
from official.modeling import tf_utils
from official.nlp.keras_nlp import layers
from etcmodel import feature_utils as etc_feature_utils
from etcmodel import layers as etc_layers
_NUM_OTHER_RELATIVE_IDS = 3
class MmtEncoder(tf.keras.Model):
"""Multimodal Transformer-based Encoder.
This network implements a multimodal transformer-based encoder for
image and language understanding. It includes the embedding lookups and
relative transformer layers, but not the masked language model (mlm),
masked patch prediction (mpp), or classification task networks.
  We follow the BERT implementation and use the approximated gelu function for
  faster TPU computation.
  (1) BERT implementation: https://github.com/tensorflow/models/blob/2de518be2d6a6e3670b223a4582b1353538d3489/official/nlp/keras_nlp/encoders/bert_encoder.py#L26
(2) Related issue: https://github.com/google/jax/issues/4428#issuecomment-701793190
Args: refer to `MmtEncoderConfig` for more details.
"""
def __init__(self,
vocab_size: int,
segment_vocab_size: int = 16,
embedding_size: int = None,
hidden_size: int = 768,
num_hidden_layers: int = 12,
num_attention_heads: int = 12,
intermediate_size: int = 3072,
inner_activation=lambda x: tf.keras.activations.gelu(
x, approximate=True),
hidden_dropout_prob: float = 0.1,
attention_probs_dropout_prob: float = 0.1,
max_absolute_position_embeddings: Optional[int] = None,
relative_vocab_size: int = 32,
relative_pos_max_distance: int = 12,
initializer_range: float = 0.02,
use_pre_activation_order: bool = False,
use_one_hot_lookup: bool = True,
use_pooler_layer: bool = False,
name: str = 'mmt_encoder',
**kwargs):
super(MmtEncoder, self).__init__(name=name, **kwargs)
if relative_vocab_size is None:
if relative_pos_max_distance != 0:
raise ValueError(
'`relative_pos_max_distance` must be 0 when `relative_vocab_size` '
'is None.')
elif relative_vocab_size < (
etc_feature_utils.RelativePositionGenerator(
relative_pos_max_distance).relative_vocab_size +
_NUM_OTHER_RELATIVE_IDS):
raise ValueError(
f'`relative_vocab_size` ({relative_vocab_size}) too small for '
f'`relative_pos_max_distance` ({relative_pos_max_distance}')
if embedding_size is None:
embedding_size = hidden_size
activation = tf.keras.activations.get(inner_activation)
initializer = tf.keras.initializers.TruncatedNormal(
stddev=initializer_range)
initializer = tf.keras.initializers.get(initializer)
self._word_embedding_layer = etc_layers.EmbeddingLookup(
vocab_size=vocab_size,
embedding_size=embedding_size,
projection_size=hidden_size,
initializer_range=initializer_range,
name='word_embeddings')
if max_absolute_position_embeddings is None:
self._position_embedding_layer = None
else:
self._position_embedding_layer = layers.PositionEmbedding(
initializer=initializer,
max_length=max_absolute_position_embeddings,
name='absolute_position_embeddings')
self._segment_embedding_layer = etc_layers.EmbeddingLookup(
vocab_size=segment_vocab_size,
embedding_size=embedding_size,
projection_size=hidden_size,
initializer_range=initializer_range,
use_one_hot_lookup=use_one_hot_lookup,
name='segment_embeddings')
self._patch_embedding_projection = tf.keras.layers.Dense(
units=hidden_size,
kernel_initializer=initializer,
name='patch_embedding_projection')
self._embedding_norm_layer = tf.keras.layers.LayerNormalization(
axis=-1, epsilon=1e-12, name='embeddings/layer_norm')
self._embedding_dropout_layer = tf.keras.layers.Dropout(
rate=hidden_dropout_prob, name='embeddings/dropout')
self._transformer_layers = etc_layers.RelativeTransformerLayers(
hidden_size=hidden_size,
num_hidden_layers=num_hidden_layers,
num_attention_heads=num_attention_heads,
intermediate_size=intermediate_size,
hidden_act=activation,
hidden_dropout_prob=hidden_dropout_prob,
attention_probs_dropout_prob=attention_probs_dropout_prob,
initializer_range=initializer_range,
relative_vocab_size=relative_vocab_size,
use_pre_activation_order=use_pre_activation_order,
use_one_hot_lookup=use_one_hot_lookup)
if use_pooler_layer:
self._pooler_layer = tf.keras.layers.Dense(
units=hidden_size,
activation='tanh',
kernel_initializer=initializer,
name='pooler_transform')
config_dict = {
'vocab_size': vocab_size,
'segment_vocab_size': segment_vocab_size,
'hidden_size': hidden_size,
'num_hidden_layers': num_hidden_layers,
'num_attention_heads': num_attention_heads,
'intermediate_size': intermediate_size,
'inner_activation': tf.keras.activations.serialize(activation),
'hidden_dropout_prob': hidden_dropout_prob,
'attention_probs_dropout_prob': attention_probs_dropout_prob,
'max_absolute_position_embeddings': max_absolute_position_embeddings,
'relative_vocab_size': relative_vocab_size,
'relative_pos_max_distance': relative_pos_max_distance,
'initializer_range': initializer_range,
'use_pre_activation_order': use_pre_activation_order,
'use_one_hot_lookup': use_one_hot_lookup,
'use_pooler_layer': use_pooler_layer
}
config_cls = collections.namedtuple('Config', config_dict.keys())
self._config = config_cls(**config_dict)
def call(self,
word_ids: tf.Tensor,
segment_ids: Optional[tf.Tensor] = None,
att_mask: Optional[tf.Tensor] = None,
relative_att_ids: Optional[tf.Tensor] = None,
patch_embeddings: Optional[tf.Tensor] = None,
training: Optional[bool] = None):
"""Calls MmtEncoder layer.
Args:
word_ids: <int32>[batch_size, seq_len] Tensor of word piece ids.
segment_ids: <int32>[batch_size, seq_len] Optional Tensor of segment
ids. By default we just fill all elements with segment id 1.
att_mask: <int32>[batch_size, seq_len, seq_len].
      relative_att_ids: <int32>[batch_size, seq_len, seq_len].
      patch_embeddings: <float32>[batch_size, patch_seq_len, patch_embedding_size]
        Optional Tensor of image patch embeddings; they are projected to
        hidden_size and added to the corresponding token positions.
      training: For Keras, optional boolean scalar tensor or Python boolean
        indicating whether the call is meant for training or inference.
Returns: A dictionary of encoder outputs.
sequence_output: <float32>[batch_size, seq_len, hidden_size].
"""
if segment_ids is None:
segment_ids = tf.ones_like(word_ids, dtype=tf.int32)
word_embeddings = self._word_embedding_layer(word_ids)
segment_embeddings = self._segment_embedding_layer(segment_ids)
word_embeddings = self._embedding_norm_layer(word_embeddings)
word_embeddings = self._embedding_dropout_layer(word_embeddings,
training=training)
embeddings = word_embeddings + segment_embeddings
if self._position_embedding_layer is not None:
position_embeddings = self._position_embedding_layer(word_embeddings)
embeddings += position_embeddings
if patch_embeddings is not None:
seq_len = tf_utils.get_shape_list(word_embeddings)[1]
patch_seq_len = tf_utils.get_shape_list(patch_embeddings)[1]
patch_embeddings = self._patch_embedding_projection(patch_embeddings)
# Make patch_embeddings and word_embeddings have the same shape.
# 2 is for CLS and [PATCH]
prefix_pad_len = 2
suffix_pad_len = seq_len - 2 - patch_seq_len
patch_embeddings = tf.pad(
patch_embeddings,
paddings=[[0, 0], [prefix_pad_len, suffix_pad_len], [0, 0]])
embeddings += patch_embeddings
encoder_output = self._transformer_layers(
inputs=embeddings,
att_mask=att_mask,
relative_att_ids=relative_att_ids,
training=training)
outputs = dict()
outputs['sequence_output'] = encoder_output
if hasattr(self, '_pooler_layer'):
batch_size, _, hidden_size = tf_utils.get_shape_list(encoder_output)
first_token_tensor = tf.slice(
encoder_output, [0, 0, 0], [batch_size, 1, hidden_size])
first_token_tensor = tf.squeeze(first_token_tensor, axis=1)
cls_output = self._pooler_layer(first_token_tensor)
      outputs['pooled_output'] = cls_output
return outputs
def get_word_embedding_table(self):
"""Returns the token embedding table, but only if the model is built."""
if not hasattr(self._word_embedding_layer, 'embedding_table'):
raise ValueError(
'Cannot call `get_token_embedding_table()` until the model has been '
'called so that all variables are built.')
return self._word_embedding_layer.embedding_table
def get_word_embedding_layer(self):
return self._word_embedding_layer
def get_config(self):
return dict(self._config._asdict())
@property
def transformer_layers(self):
"""List of Transformer layers in the encoder."""
return self._transformer_layers
@property
def pooler_layer(self):
"""The pooler dense layer after the transformer layers."""
if hasattr(self, '_pooler_layer'):
return self._pooler_layer
else:
raise ValueError('pooler layers is not initialized.')
@classmethod
def from_config(cls, config, custom_objects=None):
if 'embedding_layer' in config and config['embedding_layer'] is not None:
warn_string = (
'You are reloading a model that was saved with a '
          'potentially-shared embedding layer object. If you continue to '
'train this model, the embedding layer will no longer be shared. '
'To work around this, load the model outside of the Keras API.')
print('WARNING: ' + warn_string)
logging.warn(warn_string)
return cls(**config)
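# --- Illustrative usage sketch (not part of the original module) ---
# Builds a small encoder and runs a single forward pass on dummy ids. The
# vocabulary size, layer sizes, batch size and sequence length are arbitrary
# example values; in the real pipeline the ids come from the multimodal
# preprocessing code.
if __name__ == '__main__':
    encoder = MmtEncoder(vocab_size=100,
                         hidden_size=32,
                         num_hidden_layers=2,
                         num_attention_heads=4,
                         intermediate_size=64)
    word_ids = tf.ones((2, 16), dtype=tf.int32)  # [batch_size, seq_len]
    outputs = encoder(word_ids, training=False)
    print(outputs['sequence_output'].shape)  # (2, 16, 32)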
| {
"content_hash": "1922648f6ee25dbcb4e32d91a08e3a45",
"timestamp": "",
"source": "github",
"line_count": 263,
"max_line_length": 177,
"avg_line_length": 39.72623574144487,
"alnum_prop": 0.6521822358346095,
"repo_name": "googleinterns/multimodal-long-transformer-2021",
"id": "a195a394ac569a681457da9173a66d0dfb09ad51",
"size": "11027",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/modeling/models/mmt_encoder.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
"""add simple_identifier
Revision ID: 508864cbfc71
Revises: 35388de3b2c1
Create Date: 2017-04-21 08:16:09.841511
"""
# revision identifiers, used by Alembic.
revision = '508864cbfc71'
down_revision = '35388de3b2c1'
import sqlalchemy as sa
from alembic import op
def upgrade():
op.create_table('SimpleIdentifierTbl',
sa.Column('identifier', sa.Integer(), primary_key=True),
sa.Column('sampleID', sa.Integer(), sa.ForeignKey('SampleTbl.id')))
op.add_column('AnalysisTbl',
sa.Column('simple_identifier', sa.Integer(), sa.ForeignKey('SimpleIdentifierTbl.identifier')))
def downgrade():
op.drop_column('AnalysisTbl', 'simple_identifier')
op.drop_table('SimpleIdentifierTbl')
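# --- Illustrative note (not part of the original revision file) ---
# This revision is normally applied from the command line with
# `alembic upgrade 508864cbfc71` (or `alembic upgrade head`). The equivalent
# programmatic call, assuming an `alembic.ini` in the working directory:
#
#     from alembic import command
#     from alembic.config import Config
#
#     command.upgrade(Config('alembic.ini'), '508864cbfc71')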
| {
"content_hash": "b7cc213440b90175280fbd06148130de",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 112,
"avg_line_length": 26.892857142857142,
"alnum_prop": 0.6812749003984063,
"repo_name": "NMGRL/pychron",
"id": "51162530811fce9d447b33f8fcef68427466262a",
"size": "753",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "alembic_dvc/versions/508864cbfc71_add_simple_identifie.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "128"
},
{
"name": "C++",
"bytes": "3706"
},
{
"name": "CSS",
"bytes": "263"
},
{
"name": "Cython",
"bytes": "1692"
},
{
"name": "Fortran",
"bytes": "455875"
},
{
"name": "HTML",
"bytes": "46796"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Processing",
"bytes": "11421"
},
{
"name": "Python",
"bytes": "10773692"
},
{
"name": "Shell",
"bytes": "1003"
}
],
"symlink_target": ""
} |
from setuptools import setup, find_packages
version = '1.13.8'
package_name = "ropper"
package_dir = "ropper"
package_description = """Show information about files in different file formats and find gadgets to build rop chains for different architectures.
""".strip()
packages = find_packages()
valid_packages = []
for p in packages:
if p.startswith('ropper'):
valid_packages.append(p)
install_requires = ['filebytes>=0.10.0']
try:
import capstone
if capstone.cs_version()[0] < 3:
install_requires.append('capstone')
del capstone
except:
install_requires.append('capstone')
setup(
name=package_name,
version=version,
description=package_description,
packages=valid_packages,
license="BSD",
author="Sascha Schirra",
author_email="sashs@scoding.de",
install_requires=install_requires,
url="http://scoding.de/ropper/",
entry_points={'console_scripts': ['ropper = ropper.__main__:main']},
classifiers=[
'Topic :: Security',
'Environment :: Console',
'Operating System :: OS Independent',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Intended Audience :: Developers'
]
)
| {
"content_hash": "b9a8ee4026986bc4b837b7c9e9cc5d09",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 145,
"avg_line_length": 28.068181818181817,
"alnum_prop": 0.6615384615384615,
"repo_name": "sashs/Ropper",
"id": "6213f3c446016a8219be1f12aef70d807928bb03",
"size": "1235",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "704"
},
{
"name": "Python",
"bytes": "401764"
},
{
"name": "Shell",
"bytes": "46"
}
],
"symlink_target": ""
} |
import os
import os.path
import sys
import time
import urllib2
import urlparse
import lxml.etree
import lxml.html
debug = False
def main():
site_url = 'http://sites.google.com/site/bmaupinwiki'
#dest_path = '%s/Documents/misc/bmaupinwiki' % (home)
dest_path = '{0}/Desktop/bmaupinwiki'.format(os.getenv('HOME'))
ensure_folder_exists(dest_path)
link_paths = []
parsed_url = urlparse.urlsplit(site_url)
outfile_name = 'home'
write_url_to_file(
site_url,
'{0}.html'.format(
os.path.join(dest_path, outfile_name)
),
site_url,
dest_path,
check_timestamp=True,
insert_encoding=True
)
# attempt to alleviate encoding issues
parser = lxml.html.HTMLParser(encoding='utf-8')
try:
page = lxml.html.parse(site_url, parser).getroot()
# in case there's a bug in lxml (http://stackoverflow.com/q/3116269/399105)
except IOError:
page = lxml.html.parse(urllib2.urlopen(site_url), parser).getroot()
# iterate through all of the div elements in the main index page
for element in page.iter('div'):
# get the table of contents
if element.get('class') == 'nav-toc-content':
toc = element.find('ul')
break
# iterate through all of the links ("a" elements) in the table of contents
for element in toc.iter('a'):
link = element.get('href')
# if the path of the URL is in the link
if link.startswith(parsed_url.path):
# remove it
link = link.replace(parsed_url.path, '')
# remove a starting slash
if link.startswith('/'):
link = link[1:]
link_paths.append(link)
if debug:
link_paths.sort()
print link_paths
for link_path in link_paths:
# drop everything after the final /, and that's the path
path = link_path.rsplit('/', 1)[0]
full_path = os.path.join(dest_path, path)
ensure_folder_exists(full_path)
url = '%s/%s' % (site_url, link_path)
if debug:
print url
print '%s/%s.html' % (dest_path, link_path)
write_url_to_file(
url,
'{0}.html'.format(
os.path.join(dest_path, link_path)
),
site_url,
dest_path,
check_timestamp=True,
insert_encoding=True
)
def ensure_folder_exists(path):
# make sure the path isn't an existing file
if os.path.isfile(path):
sys.exit('ERROR: folder %s is an existing file' % (path))
# create the path if necessary
elif not os.path.isdir(path):
try:
os.mkdir(path)
except OSError, error:
sys.exit('OSError: %s' % (error))
def write_url_to_file(url, outfile_path, site_url, dest_path,
check_timestamp=False, insert_encoding=False):
try:
infile = urllib2.urlopen(url)
except urllib2.HTTPError, error:
sys.exit('HTTPError: %s' % (error))
except urllib2.URLError, error:
sys.exit('URLError: %s' % (error))
# only check the timestamp if the destination file already exists
if check_timestamp == True and os.path.isfile(outfile_path):
# if local file modification time is greater than URL mod time
if (os.path.getmtime(outfile_path) >
time.mktime(infile.info().getdate('last-modified'))):
infile.close()
# exit the function and don't overwrite the local file
return
parser = lxml.html.HTMLParser(encoding='utf-8')
page = lxml.html.parse(infile, parser)
if insert_encoding == True:
head = page.getroot().find('head')
meta = lxml.etree.SubElement(head, 'meta')
meta.set('charset', 'utf-8')
''' TODO: make the path relative
from this page:
/home/bryan/Desktop/pile/bmaupinwiki/home/operating-systems/gnu-linux/rhel.html
this link:
/site/bmaupinwiki/home/operating-systems/gnu-linux/rhel/rhel-init-script-template
converts to (absolute):
/home/bryan/Desktop/pile/bmaupinwiki/home/operating-systems/gnu-linux/rhel/rhel-init-script-template.html
relative:
rhel/rhel-init-script-template.html
'''
old_link_prefix = '{0}/'.format(urlparse.urlparse(site_url).path)
'''
The links normally look like this:
/site/bmaupinwiki/home/operating-systems/gnu-linux/rhel/rhel-init-script-template
so update them
'''
for element in page.iter('a'):
if 'href' not in element.attrib:
continue
link = element.get('href')
if link.startswith(old_link_prefix):
element.set(
'href',
'{0}.html'.format(
os.path.join(
dest_path,
link.replace(old_link_prefix, '')
)
)
)
outfile = open(outfile_path, 'w')
outfile.write(
lxml.etree.tostring(
page.getroot(),
pretty_print=True,
method='html',
doctype=page.docinfo.doctype
)
)
outfile.close()
infile.close()
if __name__ == '__main__':
main()
'''
TODO:
- Make links relative so we can move the wiki
- Update write_url_to_file and make it more modular
- Add way to delete old pages
- Download page css and images so they work too
'''
| {
"content_hash": "b2a23557ba16169ba47c5fdd25e885af",
"timestamp": "",
"source": "github",
"line_count": 188,
"max_line_length": 109,
"avg_line_length": 29.585106382978722,
"alnum_prop": 0.5719165767709457,
"repo_name": "bmaupin/junkpile",
"id": "2f9e2d8d41da1d0fe6261d2a7723cca6bd941f86",
"size": "5585",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "python/graveyard/misc/mywiki-backup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "6887"
},
{
"name": "CSS",
"bytes": "67395"
},
{
"name": "Go",
"bytes": "4476"
},
{
"name": "Groovy",
"bytes": "100702"
},
{
"name": "HTML",
"bytes": "533096"
},
{
"name": "Java",
"bytes": "190842"
},
{
"name": "JavaScript",
"bytes": "492078"
},
{
"name": "PHP",
"bytes": "26452"
},
{
"name": "PowerShell",
"bytes": "4124"
},
{
"name": "Python",
"bytes": "161364"
},
{
"name": "QML",
"bytes": "476"
},
{
"name": "Roff",
"bytes": "18224"
},
{
"name": "Shell",
"bytes": "17816"
},
{
"name": "Swift",
"bytes": "3202"
},
{
"name": "TypeScript",
"bytes": "15968"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import re
try:
from html.parser import HTMLParser # py3
except ImportError:
from HTMLParser import HTMLParser # py2
from django.core.exceptions import ImproperlyConfigured
from django.forms import widgets
from django.utils.html import format_html
from django.utils.translation import ungettext_lazy, ugettext_lazy as _
from django.forms.fields import IntegerField
from django.forms.models import ModelForm
from cms.plugin_pool import plugin_pool
from djangocms_text_ckeditor.widgets import TextEditorWidget
from djangocms_text_ckeditor.utils import plugin_tags_to_user_html
from cmsplugin_cascade.fields import PartialFormField
from cmsplugin_cascade.forms import ManageChildrenFormMixin
from cmsplugin_cascade.mixins import ImagePropertyMixin
from cmsplugin_cascade.widgets import NumberInputWidget, MultipleCascadingSizeWidget
from cmsplugin_cascade.link.cms_plugins import TextLinkPlugin
from . import settings, utils
from .plugin_base import BootstrapPluginBase
from .image import ImageForm
from .picture import BootstrapPicturePlugin
class CarouselSlidesForm(ManageChildrenFormMixin, ModelForm):
num_children = IntegerField(min_value=1, initial=1,
widget=NumberInputWidget(attrs={'size': '3', 'style': 'width: 5em !important;'}),
label=_('Slides'),
help_text=_('Number of slides for this carousel.'),
)
class CarouselPlugin(BootstrapPluginBase):
name = _("Carousel")
form = CarouselSlidesForm
default_css_class = 'carousel'
default_css_attributes = ('options',)
parent_classes = ['BootstrapColumnPlugin', 'SimpleWrapperPlugin']
render_template = 'cascade/bootstrap3/{}/carousel.html'
default_inline_styles = {'overflow': 'hidden'}
fields = ('num_children', 'glossary',)
DEFAULT_CAROUSEL_ATTRIBUTES = {'data-ride': 'carousel'}
OPTION_CHOICES = (('slide', _("Animate")), ('pause', _("Pause")), ('wrap', _("Wrap")),)
glossary_fields = (
PartialFormField('interval',
NumberInputWidget(attrs={'size': '2', 'style': 'width: 4em;', 'min': '1'}),
label=_("Interval"),
initial=5,
help_text=_("Change slide after this number of seconds."),
),
PartialFormField('options',
widgets.CheckboxSelectMultiple(choices=OPTION_CHOICES),
label=_('Options'),
initial=['slide', 'wrap', 'pause'],
help_text=_("Adjust interval for the carousel."),
),
PartialFormField('container_max_heights',
MultipleCascadingSizeWidget(list(tp[0] for tp in settings.CMSPLUGIN_CASCADE['bootstrap3']['breakpoints']),
allowed_units=['px']),
label=_("Carousel heights"),
initial=dict((bp[0], '{}px'.format(100 + 50 * i))
for i, bp in enumerate(settings.CMSPLUGIN_CASCADE['bootstrap3']['breakpoints'])),
help_text=_("Heights of Carousel in pixels for distinct Bootstrap's breakpoints."),
),
PartialFormField('resize-options',
widgets.CheckboxSelectMultiple(choices=BootstrapPicturePlugin.RESIZE_OPTIONS),
label=_("Resize Options"),
help_text=_("Options to use when resizing the image."),
initial=['upscale', 'crop', 'subject_location', 'high_resolution']
),
)
def get_form(self, request, obj=None, **kwargs):
utils.reduce_breakpoints(self, 'container_max_heights', request)
return super(CarouselPlugin, self).get_form(request, obj, **kwargs)
@classmethod
def get_identifier(cls, obj):
identifier = super(CarouselPlugin, cls).get_identifier(obj)
num_cols = obj.get_children().count()
content = ungettext_lazy('with {0} slide', 'with {0} slides', num_cols).format(num_cols)
return format_html('{0}{1}', identifier, content)
@classmethod
def get_css_classes(cls, obj):
css_classes = super(CarouselPlugin, cls).get_css_classes(obj)
if 'slide' in obj.glossary.get('options', []):
css_classes.append('slide')
return css_classes
@classmethod
def get_html_tag_attributes(cls, obj):
attributes = super(CarouselPlugin, cls).get_html_tag_attributes(obj)
attributes.update(cls.DEFAULT_CAROUSEL_ATTRIBUTES)
attributes['data-interval'] = 1000 * int(obj.glossary.get('interval', 5))
options = obj.glossary.get('options', [])
attributes['data-pause'] = 'pause' in options and 'hover' or 'false'
attributes['data-wrap'] = 'wrap' in options and 'true' or 'false'
return attributes
def save_model(self, request, obj, form, change):
wanted_children = int(form.cleaned_data.get('num_children'))
super(CarouselPlugin, self).save_model(request, obj, form, change)
self.extend_children(obj, wanted_children, CarouselSlidePlugin)
@classmethod
def sanitize_model(cls, obj):
sanitized = super(CarouselPlugin, cls).sanitize_model(obj)
complete_glossary = obj.get_complete_glossary()
# fill all invalid heights for this container to a meaningful value
max_height = max(obj.glossary['container_max_heights'].values())
pattern = re.compile(r'^(\d+)px$')
for bp in complete_glossary.get('breakpoints', ()):
if not pattern.match(obj.glossary['container_max_heights'].get(bp, '')):
obj.glossary['container_max_heights'][bp] = max_height
return sanitized
plugin_pool.register_plugin(CarouselPlugin)
class CarouselSlidePlugin(BootstrapPluginBase):
name = _("Slide")
model_mixins = (ImagePropertyMixin,)
form = ImageForm
default_css_class = 'img-responsive'
parent_classes = ['CarouselPlugin']
raw_id_fields = ('image_file',)
fields = ('image_file', 'glossary',)
render_template = 'cascade/bootstrap3/carousel-slide.html'
change_form_template = 'cascade/admin/text_plugin_change_form.html'
html_parser = HTMLParser()
def get_form(self, request, obj=None, **kwargs):
if obj:
caption = self.html_parser.unescape(obj.glossary.get('caption', ''))
obj.glossary.update(caption=caption)
parent_obj = self.get_parent_instance(request)
if not (parent_obj and issubclass(parent_obj.plugin_class, BootstrapPluginBase)):
raise ImproperlyConfigured("A CarouselSlidePlugin requires a valid parent")
# define glossary fields on the fly, because the TextEditorWidget requires the plugin_pk
text_editor_widget = TextEditorWidget(installed_plugins=[TextLinkPlugin], pk=parent_obj.pk,
placeholder=parent_obj.placeholder, plugin_language=parent_obj.language)
kwargs['glossary_fields'] = (
PartialFormField('caption', text_editor_widget, label=_("Slide Caption"),
help_text=_("Caption text to be laid over the backgroud image."),
),
)
return super(CarouselSlidePlugin, self).get_form(request, obj, **kwargs)
def render(self, context, instance, placeholder):
# image shall be rendered in a responsive context using the ``<picture>`` element
elements = utils.get_picture_elements(context, instance)
caption = self.html_parser.unescape(instance.glossary.get('caption', ''))
fluid = instance.get_complete_glossary().get('fluid') == 'on'
context.update({
'is_responsive': True,
'instance': instance,
'caption': plugin_tags_to_user_html(caption, context, placeholder),
'is_fluid': fluid,
'placeholder': placeholder,
'elements': elements,
})
return super(CarouselSlidePlugin, self).render(context, instance, placeholder)
@classmethod
def sanitize_model(cls, obj):
sanitized = super(CarouselSlidePlugin, cls).sanitize_model(obj)
complete_glossary = obj.get_complete_glossary()
obj.glossary.update({'resize-options': complete_glossary.get('resize-options', [])})
return sanitized
@classmethod
def get_identifier(cls, obj):
identifier = super(CarouselSlidePlugin, cls).get_identifier(obj)
try:
content = obj.image.name or obj.image.original_filename
except AttributeError:
content = _("Empty Slide")
return format_html('{0}{1}', identifier, content)
plugin_pool.register_plugin(CarouselSlidePlugin)
| {
"content_hash": "7971ce5a7d2bfe6e6aaadce231ee703b",
"timestamp": "",
"source": "github",
"line_count": 186,
"max_line_length": 118,
"avg_line_length": 45.40860215053763,
"alnum_prop": 0.6643381482358512,
"repo_name": "rfleschenberg/djangocms-cascade",
"id": "84b70421613018094433add65d670da98a1795b8",
"size": "8470",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cmsplugin_cascade/bootstrap3/carousel.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3158"
},
{
"name": "HTML",
"bytes": "15968"
},
{
"name": "JavaScript",
"bytes": "89011"
},
{
"name": "Python",
"bytes": "270069"
}
],
"symlink_target": ""
} |
"""
Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
See https://llvm.org/LICENSE.txt for license information.
SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
Prepares language bindings for LLDB build process. Run with --help
to see a description of the supported command line arguments.
"""
# Python modules:
import io
# Third party modules
import six
def _encoded_read(old_read, encoding):
def impl(size):
result = old_read(size)
# If this is Python 2 then we need to convert the resulting `unicode` back
# into a `str` before returning
if six.PY2:
result = result.encode(encoding)
return result
return impl
def _encoded_write(old_write, encoding):
def impl(s):
# If we were asked to write a `str` (in Py2) or a `bytes` (in Py3) decode it
# as unicode before attempting to write.
if isinstance(s, six.binary_type):
s = s.decode(encoding, "replace")
# Filter unreadable characters, Python 3 is stricter than python 2 about them.
import re
s = re.sub(r'[^\x00-\x7f]',r' ',s)
return old_write(s)
return impl
'''
Create a Text I/O file object that can be written to with either unicode strings or byte strings
under Python 2 and Python 3, and automatically encodes and decodes as necessary to return the
native string type for the current Python version
'''
def open(
file,
encoding,
mode='r',
buffering=-1,
errors=None,
newline=None,
closefd=True):
wrapped_file = io.open(
file,
mode=mode,
buffering=buffering,
encoding=encoding,
errors=errors,
newline=newline,
closefd=closefd)
new_read = _encoded_read(getattr(wrapped_file, 'read'), encoding)
new_write = _encoded_write(getattr(wrapped_file, 'write'), encoding)
setattr(wrapped_file, 'read', new_read)
setattr(wrapped_file, 'write', new_write)
return wrapped_file
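# --- Illustrative usage sketch (not part of the original module) ---
# Demonstrates the wrapper above: the patched `write` accepts both unicode and
# byte strings on Python 2 and Python 3. The file name is an arbitrary example.
if __name__ == '__main__':
    f = open('encoded_file_demo.txt', encoding='utf-8', mode='w')
    f.write(u'unicode text\n')   # passed through unchanged
    f.write(b'byte text\n')      # decoded to utf-8 before writing
    f.close()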
| {
"content_hash": "e24abafcc54de530dec2e460620ce863",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 96,
"avg_line_length": 30.37313432835821,
"alnum_prop": 0.6496314496314496,
"repo_name": "llvm-mirror/lldb",
"id": "c233e046ba757dc59e76e9e77860e51fc1a8b12d",
"size": "2035",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "packages/Python/lldbsuite/support/encoded_file.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "131618"
},
{
"name": "C",
"bytes": "195293"
},
{
"name": "C++",
"bytes": "23346708"
},
{
"name": "CMake",
"bytes": "167302"
},
{
"name": "DTrace",
"bytes": "334"
},
{
"name": "LLVM",
"bytes": "6106"
},
{
"name": "Makefile",
"bytes": "50396"
},
{
"name": "Objective-C",
"bytes": "106956"
},
{
"name": "Objective-C++",
"bytes": "24806"
},
{
"name": "Perl",
"bytes": "72175"
},
{
"name": "Python",
"bytes": "3669886"
},
{
"name": "Shell",
"bytes": "6573"
},
{
"name": "Vim script",
"bytes": "8434"
}
],
"symlink_target": ""
} |
import logging
import os
import subprocess
from ..config import as_dict, as_pargs, as_path, decode
from .call_decorator import CallDecorator
logger = logging.getLogger(__name__)
class SubprocessPropertyDecorator(CallDecorator):
"""
Decorator for SUT calls to extend issues with an arbitrary property where
the value is the output of a shell subprocess.
**Mandatory parameters of the decorator:**
- ``property``: name of the property to extend the issue with.
- ``command``: string to pass to the child shell as a command to run.
**Optional parameters of the decorator:**
- ``cwd``: if not ``None``, change working directory before the command
invocation.
- ``env``: if not ``None``, a dictionary of variable names-values to
update the environment with.
- ``timeout``: run subprocess with timeout.
- ``encoding``: stdout and stderr encoding (default: autodetect).
**Example configuration snippet:**
.. code-block:: ini
[sut.foo]
call=fuzzinator.call.StdinSubprocessCall
call.decorate(0)=fuzzinator.call.SubprocessPropertyDecorator
[sut.foo.call]
command=./bin/foo -
cwd=/home/alice/foo
[sut.foo.call.decorate(0)]
property=version
command=git rev-parse --short HEAD
cwd=${sut.foo.call:cwd}
env={"GIT_FLUSH": "1"}
"""
def __init__(self, *, property, command, cwd=None, env=None, timeout=None, encoding=None, **kwargs):
self.property = property
self.command = command
self.cwd = as_path(cwd) if cwd else os.getcwd()
self.env = dict(os.environ, **as_dict(env)) if env else None
self.timeout = int(timeout) if timeout else None
self.encoding = encoding
def call(self, cls, obj, *, test, **kwargs):
issue = super(cls, obj).__call__(test=test, **kwargs)
if not issue:
return issue
try:
result = subprocess.run(as_pargs(self.command),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=self.cwd,
env=self.env,
timeout=self.timeout,
check=True)
issue[self.property] = decode(result.stdout, self.encoding)
except subprocess.TimeoutExpired as e:
logger.warning('SubprocessPropertyDecorator execution timeout (%ds) expired while setting the \'%s\' property.\n%s\n%s',
e.timeout,
self.property,
decode(e.stdout or b'', self.encoding),
decode(e.stderr or b'', self.encoding))
except subprocess.CalledProcessError as e:
logger.warning('SubprocessPropertyDecorator exited with nonzero exit code (%d) while setting the \'%s\' property.\n%s\n%s',
e.returncode,
self.property,
decode(e.stdout, self.encoding),
decode(e.stderr, self.encoding))
return issue
| {
"content_hash": "ebe58c8ccd5866ca9b250f8cfc5387c9",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 135,
"avg_line_length": 39.3855421686747,
"alnum_prop": 0.5591924135821352,
"repo_name": "renatahodovan/fuzzinator",
"id": "4d4944fea7b854c3675fc673569f0e119ab16821",
"size": "3523",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "fuzzinator/call/subprocess_property_decorator.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ANTLR",
"bytes": "339"
},
{
"name": "C++",
"bytes": "3678"
},
{
"name": "CSS",
"bytes": "10728"
},
{
"name": "HTML",
"bytes": "44477"
},
{
"name": "JavaScript",
"bytes": "25491"
},
{
"name": "Makefile",
"bytes": "1755"
},
{
"name": "Python",
"bytes": "412762"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import os
import Pyro4
@Pyro4.expose
class Thingy(object):
def message(self, arg):
print("Message received:", arg)
return "Roger!"
if os.path.exists("example_unix.sock"):
os.remove("example_unix.sock")
with Pyro4.Daemon(unixsocket="example_unix.sock") as d:
uri = d.register(Thingy, "example.unixsock")
print("Server running, uri=", uri)
d.requestLoop()
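# A matching client (run in a separate process) could connect over the Unix domain
# socket roughly like this -- a sketch, assuming Pyro4's "./u:<socketfile>" URI
# location syntax for Unix domain sockets:
#
#   import Pyro4
#   thingy = Pyro4.Proxy("PYRO:example.unixsock@./u:example_unix.sock")
#   print(thingy.message("Hello over a Unix domain socket"))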
| {
"content_hash": "ca508ae4757fcfb544af96e329c6faed",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 55,
"avg_line_length": 21.7,
"alnum_prop": 0.6705069124423964,
"repo_name": "irmen/Pyro4",
"id": "cec7e952d3aa6202578c67767e645ee2394c5fa5",
"size": "434",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/unixdomainsock/server.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1283"
},
{
"name": "Python",
"bytes": "618799"
},
{
"name": "Shell",
"bytes": "2394"
}
],
"symlink_target": ""
} |
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from builtins import range
from future import standard_library
standard_library.install_aliases()
import sys
PYTHON_VERSION = sys.version_info[:3]
PY2 = (PYTHON_VERSION[0] == 2)
if PY2:
if PYTHON_VERSION < (2, 7, 9):
raise Exception('Must use Python 2.7.9 or later')
elif PYTHON_VERSION < (3, 4):
raise Exception('Must use Python 3.4 or later')
import hpOneView as hpov
from pprint import pprint
def acceptEULA(con):
# See if we need to accept the EULA before we try to log in
con.get_eula_status()
try:
if con.get_eula_status() is True:
print('EULA display needed')
con.set_eula('no')
except Exception as e:
print('EXCEPTION:')
print(e)
def login(con, credential):
    # Log in with the given credentials
try:
con.login(credential)
except:
print('Login failed')
def import_enclosure(srv, sts, eg, ip, usr, pas, lic, baseline, force, forcefw,
monitor):
if not monitor:
# Locate the enclosure group
egroup = None
egroups = srv.get_enclosure_groups()
for group in egroups:
if group['name'] == eg:
egroup = group
break
if not egroup:
print('ERROR: Importing Enclosure')
print('Enclosure Group: "%s" has not been defined' % eg)
print('')
sys.exit()
print('Adding Enclosure')
# Find the first Firmware Baseline
uri = ''
if baseline:
spps = sts.get_spps()
for spp in spps:
if spp['isoFileName'] == baseline:
uri = spp['uri']
if not uri:
            print('ERROR: Locating Firmware Baseline SPP')
print('Baseline: "%s" can not be located' % baseline)
print('')
sys.exit()
if not uri:
add_enclosure = hpov.common.make_enclosure_dict(ip, usr, pas,
egroup['uri'],
licenseIntent=lic,
force=force,
forcefw=forcefw)
else:
add_enclosure = hpov.common.make_enclosure_dict(ip, usr, pas,
egroup['uri'],
licenseIntent=lic,
firmwareBaseLineUri=uri,
force=force,
forcefw=forcefw)
else:
add_enclosure = hpov.common.make_monitored_enclosure_dict(ip, usr, pas)
enclosure = srv.add_enclosure(add_enclosure)
if 'enclosureType' in enclosure:
print('Type: ', enclosure['enclosureType'])
print('Name: ', enclosure['name'])
print('Rack: ', enclosure['rackName'])
print('Serial Number: ', enclosure['serialNumber'])
else:
pprint(enclosure)
def main():
parser = argparse.ArgumentParser(add_help=True,
formatter_class=argparse.RawTextHelpFormatter,
description='''
This example script will import an enclosure into HP OneView as a
managed device. The Onboard Administrator needs to have IP Address
configured for each module, and a valid Administrator account with a
password.
Usage: ''')
parser.add_argument('-a', dest='host', required=True,
help='''
HP OneView Appliance hostname or IP address''')
parser.add_argument('-u', dest='user', required=False,
default='Administrator',
help='''
HP OneView Username''')
parser.add_argument('-p', dest='passwd', required=True,
help='''
HP OneView Password''')
parser.add_argument('-c', dest='cert', required=False,
help='''
Trusted SSL Certificate Bundle in PEM (Base64 Encoded DER) Format''')
parser.add_argument('-y', dest='proxy', required=False,
help='''
    Proxy (host:port format)''')
parser.add_argument('-eu', dest='encusr', required=True,
help='''
Administrative username for the c7000 enclosure OA''')
parser.add_argument('-ep', dest='encpass', required=True,
help='''
Administrative password for the c7000 enclosure OA''')
parser.add_argument('-oa', dest='enc', required=True,
help='''
IP address of the c7000 to import into HP OneView''')
parser.add_argument('-s', dest='spp', required=False,
help='''
SPP Baseline file name. e.g. SPP2013090_2013_0830_30.iso''')
parser.add_argument('-l', dest='license', required=False,
choices=['OneView', 'OneViewNoiLO'],
default='OneView',
help='''
Specifies whether the intent is to apply either OneView or
OneView w/o iLO licenses to the servers in the enclosure
being imported.
Accepted values are:
- OneView
- OneViewNoiLO ''')
parser.add_argument('-f', dest='force', action='store_true',
required=False,
help='''
When attempting to add an Enclosure to the appliance, the appliance will
validate the target Enclosure is not already claimed. If it is, this
parameter is used when the Enclosure has been claimed by another appliance
to bypass the confirmation prompt, and force add the import of the
Enclosure ''')
parser.add_argument('-fw', dest='forcefw', action='store_true',
required=False,
help='''
Force the installation of the provided Firmware Baseline. ''')
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('-eg', dest='egroup',
help='''
Enclosure Group to add the enclosure to''')
group.add_argument('-m', dest='monitor', action='store_true',
help='''
Import the enclosure as a Monitored enclosure. ''')
args = parser.parse_args()
credential = {'userName': args.user, 'password': args.passwd}
con = hpov.connection(args.host)
srv = hpov.servers(con)
sts = hpov.settings(con)
if args.proxy:
con.set_proxy(args.proxy.split(':')[0], args.proxy.split(':')[1])
if args.cert:
con.set_trusted_ssl_bundle(args.cert)
login(con, credential)
acceptEULA(con)
import_enclosure(srv, sts, args.egroup, args.enc, args.encusr,
args.encpass, args.license, args.spp, args.force,
args.forcefw, args.monitor)
if __name__ == '__main__':
import sys
import argparse
sys.exit(main())
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
| {
"content_hash": "0eb9be2e38de67696b1fde3d9a940947",
"timestamp": "",
"source": "github",
"line_count": 191,
"max_line_length": 84,
"avg_line_length": 38.38743455497382,
"alnum_prop": 0.5366884888161484,
"repo_name": "miqui/python-hpOneView",
"id": "aedd950eb9b7ff3942bd733052f87f0eb2ba468e",
"size": "8489",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/scripts/add-enclosure.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "121791"
}
],
"symlink_target": ""
} |
import Numberjack
from origen import seven_segment
from origen import seven_segment_data
class AccessPuzzle(object):
def __init__(self, goal, num_boards):
self.model = Numberjack.Model()
self.goal = _make_access_matrix('goal')
self._set_goal(goal)
self.key = _make_access_matrix('key')
self.boards = []
for i in range(0, num_boards):
self.boards.append(_make_access_matrix('board%s' % i))
self._constrain_key()
def solve(self):
solver = self.model.load('Mistral')
solver.solve()
def get_key(self):
return _convert_matrix_to_glyphs('key', self.key)
def get_board(self, idx):
return _convert_matrix_to_glyphs('board%s' % idx, self.boards[idx])
def _set_goal(self, goal):
_constrain_matrix(self.model, self.goal, goal)
def _constrain_key(self):
for i, row in enumerate(self.key):
for j, value in enumerate(row):
target = self.goal[i][j]
for board in self.boards:
self.model.add((value | board[i][j]) == target)
def accept(glyphs):
return glyphs == seven_segment_data.ACCESS
def _make_access_matrix(name):
access_length = len(seven_segment_data.ACCESS)
return Numberjack.Matrix(access_length, 3, name)
def _constrain_matrix(model, matrix, glyphs):
padding = len(matrix) - len(glyphs)
for i, segment in enumerate(glyphs.segments + [0] * padding):
for bit in range(0, 3):
model.add(matrix[i][bit] == bool(segment & (1 << bit)))
def _convert_matrix_to_glyphs(name, matrix):
glyphs = seven_segment.Glyphs(name, [])
for i, row in enumerate(matrix):
bits = 0
for j, value in enumerate(row):
if value.get_value():
bits |= 1 << j
glyphs.segments.append(bits)
return glyphs
| {
"content_hash": "0a99f565ba3098d63e185364211533be",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 71,
"avg_line_length": 27.476190476190474,
"alnum_prop": 0.6504910456383594,
"repo_name": "PhilHarnish/forge",
"id": "13b4d2cec1a3e58a18b5f7a193214ffc0cc094e6",
"size": "1731",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/origen/puzzles/access.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "254508"
},
{
"name": "HTML",
"bytes": "12062"
},
{
"name": "JavaScript",
"bytes": "178383"
},
{
"name": "Jupyter Notebook",
"bytes": "1755506"
},
{
"name": "Python",
"bytes": "1033953"
},
{
"name": "Ruby",
"bytes": "800"
},
{
"name": "Shell",
"bytes": "3181"
}
],
"symlink_target": ""
} |
"""
A simple class to read a binary LAMMPS trajectory file.
"""
import struct
class lammps_trj(object):
def __init__(self, filename):
self.variables_order = ['timestep', 'ndump', 'triclinic',
'xlo-flag', 'xhi-flag', 'ylo-flag', 'yhi-flag',
'zlo-flag', 'zhi-flag', 'xlo', 'xhi',
'ylo', 'yhi','zlo','zhi','fields per atom',
'number of blocks']
self.head_fmt = '<qqiiiiiiiddddddii' # format for reading the header
self.head_buff = 100 # buffer length for the header
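        # Sanity check: struct.calcsize('<qqiiiiiiiddddddii') == 2*8 + 7*4 + 6*8 + 2*4 == 100,
        # i.e. 17 values matching variables_order, which is why head_buff is 100 bytes.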
self.filename = filename
self.int_size = 4
self.double_size = 8
def read_frame(self):
"""
Function to read a frame from a binary lammps file
"""
data = {}
this_chunk = 0
buff = self.f.read(self.head_buff)
if not buff: return data, this_chunk
x = struct.unpack(self.head_fmt, buff)
for bi,v in zip(x, self.variables_order):
data[v] = bi
this_chunk += self.head_buff
data['blockdata'] = []
for x in range(data['number of blocks']): # one block per cpu
buff_size = struct.unpack('i', self.f.read(self.int_size))[0] # total number of doubles to follow
this_chunk += self.int_size
fmt = 'd'*buff_size
thisblock = struct.unpack(fmt, self.f.read(buff_size*self.double_size))
this_chunk += (buff_size*self.double_size)
data['blockdata'].append(thisblock)
return data, this_chunk
def read_trajectory(self):
self.f = open(self.filename, 'rb')
while True:
            data, new_chunk = self.read_frame()
if not data:
break
yield data
self.f.close()
if __name__=='__main__':
lt = lammps_trj('track.lammpstrj.bin')
for frame in lt.read_trajectory():
        print('Step: {}'.format(frame['timestep']))
#for b in frame['blockdata']:
# m = np.reshape(b,(-1,5))
# x,y,z = m[:,2],m[:,3],m[:,4]
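        # The commented-out reshape above assumes 5 fields per atom; more generally
        # (and assuming numpy has been imported as np) each block can be unpacked as:
        #   ncols = frame['fields per atom']
        #   atoms = np.reshape(b, (-1, ncols))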
| {
"content_hash": "3584a757704f3be1b0c4bc228930e849",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 109,
"avg_line_length": 36.58620689655172,
"alnum_prop": 0.5169651272384543,
"repo_name": "andersle/suspicious-trick",
"id": "a0b4e36b74c5a05934e991923878c56035c0d1ce",
"size": "2122",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/readlammstrj.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "20016"
},
{
"name": "Python",
"bytes": "2122"
}
],
"symlink_target": ""
} |
from ironic.api.controllers.v1 import versions
from ironic.tests.unit.api import base
class TestRoot(base.BaseApiTest):
def test_get_root(self):
response = self.get_json('/', path_prefix='')
# Check fields are not empty
[self.assertNotIn(f, ['', []]) for f in response]
self.assertEqual('OpenStack Ironic API', response['name'])
self.assertTrue(response['description'])
self.assertEqual([response['default_version']], response['versions'])
version1 = response['default_version']
self.assertEqual('v1', version1['id'])
self.assertEqual('CURRENT', version1['status'])
self.assertEqual(versions.MIN_VERSION_STRING, version1['min_version'])
self.assertEqual(versions.MAX_VERSION_STRING, version1['version'])
class TestV1Root(base.BaseApiTest):
def test_get_v1_root(self):
data = self.get_json('/')
self.assertEqual('v1', data['id'])
# Check fields are not empty
for f in data.keys():
self.assertNotIn(f, ['', []])
# Check if all known resources are present and there are no extra ones.
not_resources = ('id', 'links', 'media_types')
actual_resources = tuple(set(data.keys()) - set(not_resources))
expected_resources = ('chassis', 'drivers', 'nodes', 'ports')
self.assertEqual(sorted(expected_resources), sorted(actual_resources))
self.assertIn({'type': 'application/vnd.openstack.ironic.v1+json',
'base': 'application/json'}, data['media_types'])
| {
"content_hash": "ef83d3ef674f501f3e7f84f9e24de868",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 79,
"avg_line_length": 41.13157894736842,
"alnum_prop": 0.6327575175943698,
"repo_name": "dims/ironic",
"id": "2aa4e25baadbcb8fb1d9bd5d6c20940c9fc13378",
"size": "2192",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "ironic/tests/unit/api/test_root.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "3893123"
},
{
"name": "Shell",
"bytes": "48638"
}
],
"symlink_target": ""
} |
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
try:
from django.contrib.auth import get_user_model
except ImportError: # django < 1.5
from django.contrib.auth.models import User
else:
User = get_user_model()
user_orm_label = '%s.%s' % (User._meta.app_label, User._meta.object_name)
user_model_label = '%s.%s' % (User._meta.app_label, User._meta.module_name)
user_ptr_name = '%s_ptr' % User._meta.object_name.lower()
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'ScriptDB.db_lock_storage'
db.alter_column('scripts_scriptdb', 'db_lock_storage', self.gf('django.db.models.fields.CharField')(max_length=512))
# Changing field 'ScriptDB.db_permissions'
db.alter_column('scripts_scriptdb', 'db_permissions', self.gf('django.db.models.fields.CharField')(max_length=255))
# Changing field 'ScriptAttribute.db_lock_storage'
db.alter_column('scripts_scriptattribute', 'db_lock_storage', self.gf('django.db.models.fields.CharField')(max_length=512))
def backwards(self, orm):
# Changing field 'ScriptDB.db_lock_storage'
db.alter_column('scripts_scriptdb', 'db_lock_storage', self.gf('django.db.models.fields.TextField')())
# Changing field 'ScriptDB.db_permissions'
db.alter_column('scripts_scriptdb', 'db_permissions', self.gf('django.db.models.fields.CharField')(max_length=512))
# Changing field 'ScriptAttribute.db_lock_storage'
db.alter_column('scripts_scriptattribute', 'db_lock_storage', self.gf('django.db.models.fields.TextField')())
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
user_model_label: {
'Meta': {'object_name': User.__name__, 'db_table': "'%s'" % User._meta.db_table},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'objects.objectdb': {
'Meta': {'object_name': 'ObjectDB'},
'db_cmdset_storage': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'db_date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'db_destination': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'destinations_set'", 'null': 'True', 'to': "orm['objects.ObjectDB']"}),
'db_home': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'homes_set'", 'null': 'True', 'to': "orm['objects.ObjectDB']"}),
'db_key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'db_location': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locations_set'", 'null': 'True', 'to': "orm['objects.ObjectDB']"}),
'db_lock_storage': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'db_permissions': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'db_player': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['players.PlayerDB']", 'null': 'True', 'blank': 'True'}),
'db_typeclass_path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'players.playerdb': {
'Meta': {'object_name': 'PlayerDB'},
'db_cmdset_storage': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'db_date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'db_key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'db_lock_storage': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'db_obj': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['objects.ObjectDB']", 'null': 'True'}),
'db_permissions': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'db_typeclass_path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % user_orm_label, 'unique': 'True'})
},
'scripts.scriptattribute': {
'Meta': {'object_name': 'ScriptAttribute'},
'db_date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'db_key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'db_lock_storage': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'db_obj': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['scripts.ScriptDB']"}),
'db_value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'scripts.scriptdb': {
'Meta': {'object_name': 'ScriptDB'},
'db_date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'db_desc': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'db_interval': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
'db_is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'db_key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'db_lock_storage': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'db_obj': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['objects.ObjectDB']", 'null': 'True', 'blank': 'True'}),
'db_permissions': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'db_persistent': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'db_repeats': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'db_start_delay': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'db_typeclass_path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['scripts']
| {
"content_hash": "ab0a6882a073fd5318c74b84d2d43008",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 187,
"avg_line_length": 72.62406015037594,
"alnum_prop": 0.5707630189460606,
"repo_name": "google-code-export/evennia",
"id": "e34c0f32233930aec4c61d1cc9b08e25323d8e61",
"size": "9677",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/scripts/migrations/0003_auto__chg_field_scriptdb_db_lock_storage__chg_field_scriptdb_db_permis.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "4450"
},
{
"name": "CSS",
"bytes": "19010"
},
{
"name": "Emacs Lisp",
"bytes": "2734"
},
{
"name": "Gettext Catalog",
"bytes": "12141"
},
{
"name": "HTML",
"bytes": "31276"
},
{
"name": "JavaScript",
"bytes": "10492"
},
{
"name": "Python",
"bytes": "2789785"
}
],
"symlink_target": ""
} |
"""
Integration test for Google Cloud Pub/Sub.
"""
from __future__ import absolute_import
import logging
import unittest
import uuid
from hamcrest.core.core.allof import all_of
from nose.plugins.attrib import attr
from apache_beam.io.gcp import pubsub_it_pipeline
from apache_beam.io.gcp.pubsub import PubsubMessage
from apache_beam.io.gcp.tests.pubsub_matcher import PubSubMessageMatcher
from apache_beam.runners.runner import PipelineState
from apache_beam.testing import test_utils
from apache_beam.testing.pipeline_verifiers import PipelineStateMatcher
from apache_beam.testing.test_pipeline import TestPipeline
INPUT_TOPIC = 'psit_topic_input'
OUTPUT_TOPIC = 'psit_topic_output'
INPUT_SUB = 'psit_subscription_input'
OUTPUT_SUB = 'psit_subscription_output'
# How long TestXXXRunner will wait for pubsub_it_pipeline to run before
# cancelling it.
TEST_PIPELINE_DURATION_MS = 3 * 60 * 1000
# How long PubSubMessageMatcher will wait for the correct set of messages to
# appear.
MESSAGE_MATCHER_TIMEOUT_S = 5 * 60
class PubSubIntegrationTest(unittest.TestCase):
ID_LABEL = 'id'
TIMESTAMP_ATTRIBUTE = 'timestamp'
INPUT_MESSAGES = {
# TODO(BEAM-4275): DirectRunner doesn't support reading or writing
# label_ids, nor writing timestamp attributes. Once these features exist,
# TestDirectRunner and TestDataflowRunner should behave identically.
'TestDirectRunner': [
PubsubMessage(b'data001', {}),
# For those elements that have the TIMESTAMP_ATTRIBUTE attribute, the
# IT pipeline writes back the timestamp of each element (as reported
# by Beam), as a TIMESTAMP_ATTRIBUTE + '_out' attribute.
PubsubMessage(b'data002', {
TIMESTAMP_ATTRIBUTE: '2018-07-11T02:02:50.149000Z',
}),
PubsubMessage(b'data003\xab\xac', {}),
PubsubMessage(b'data004\xab\xac', {
TIMESTAMP_ATTRIBUTE: '2018-07-11T02:02:50.149000Z',
})
],
'TestDataflowRunner': [
# Use ID_LABEL attribute to deduplicate messages with the same ID.
PubsubMessage(b'data001', {ID_LABEL: 'foo'}),
PubsubMessage(b'data001', {ID_LABEL: 'foo'}),
PubsubMessage(b'data001', {ID_LABEL: 'foo'}),
# For those elements that have the TIMESTAMP_ATTRIBUTE attribute, the
# IT pipeline writes back the timestamp of each element (as reported
# by Beam), as a TIMESTAMP_ATTRIBUTE + '_out' attribute.
PubsubMessage(b'data002', {
TIMESTAMP_ATTRIBUTE: '2018-07-11T02:02:50.149000Z',
}),
PubsubMessage(b'data003\xab\xac', {ID_LABEL: 'foo2'}),
PubsubMessage(b'data003\xab\xac', {ID_LABEL: 'foo2'}),
PubsubMessage(b'data003\xab\xac', {ID_LABEL: 'foo2'}),
PubsubMessage(b'data004\xab\xac', {
TIMESTAMP_ATTRIBUTE: '2018-07-11T02:02:50.149000Z',
})
],
}
EXPECTED_OUTPUT_MESSAGES = {
'TestDirectRunner': [
PubsubMessage(b'data001-seen', {'processed': 'IT'}),
PubsubMessage(b'data002-seen', {
TIMESTAMP_ATTRIBUTE: '2018-07-11T02:02:50.149000Z',
TIMESTAMP_ATTRIBUTE + '_out': '2018-07-11T02:02:50.149000Z',
'processed': 'IT',
}),
PubsubMessage(b'data003\xab\xac-seen', {'processed': 'IT'}),
PubsubMessage(b'data004\xab\xac-seen', {
TIMESTAMP_ATTRIBUTE: '2018-07-11T02:02:50.149000Z',
TIMESTAMP_ATTRIBUTE + '_out': '2018-07-11T02:02:50.149000Z',
'processed': 'IT',
})
],
'TestDataflowRunner': [
PubsubMessage(b'data001-seen', {'processed': 'IT'}),
PubsubMessage(b'data002-seen', {
TIMESTAMP_ATTRIBUTE + '_out': '2018-07-11T02:02:50.149000Z',
'processed': 'IT',
}),
PubsubMessage(b'data003\xab\xac-seen', {'processed': 'IT'}),
PubsubMessage(b'data004\xab\xac-seen', {
TIMESTAMP_ATTRIBUTE + '_out': '2018-07-11T02:02:50.149000Z',
'processed': 'IT',
})
],
}
def setUp(self):
self.test_pipeline = TestPipeline(is_integration_test=True)
self.runner_name = type(self.test_pipeline.runner).__name__
self.project = self.test_pipeline.get_option('project')
self.uuid = str(uuid.uuid4())
# Set up PubSub environment.
from google.cloud import pubsub
self.pub_client = pubsub.PublisherClient()
self.input_topic = self.pub_client.create_topic(
self.pub_client.topic_path(self.project, INPUT_TOPIC + self.uuid))
self.output_topic = self.pub_client.create_topic(
self.pub_client.topic_path(self.project, OUTPUT_TOPIC + self.uuid))
self.sub_client = pubsub.SubscriberClient()
self.input_sub = self.sub_client.create_subscription(
self.sub_client.subscription_path(self.project, INPUT_SUB + self.uuid),
self.input_topic.name)
self.output_sub = self.sub_client.create_subscription(
self.sub_client.subscription_path(self.project, OUTPUT_SUB + self.uuid),
self.output_topic.name)
def tearDown(self):
test_utils.cleanup_subscriptions(self.sub_client,
[self.input_sub, self.output_sub])
test_utils.cleanup_topics(self.pub_client,
[self.input_topic, self.output_topic])
def _test_streaming(self, with_attributes):
"""Runs IT pipeline with message verifier.
Args:
with_attributes: False - Reads and writes message data only.
True - Reads and writes message data and attributes. Also verifies
id_label and timestamp_attribute features.
"""
# Set on_success_matcher to verify pipeline state and pubsub output. These
# verifications run on a (remote) worker.
# Expect the state to be RUNNING since a streaming pipeline is usually
# never DONE. The test runner will cancel the pipeline after verification.
state_verifier = PipelineStateMatcher(PipelineState.RUNNING)
expected_messages = self.EXPECTED_OUTPUT_MESSAGES[self.runner_name]
if not with_attributes:
expected_messages = [pubsub_msg.data for pubsub_msg in expected_messages]
if self.runner_name == 'TestDirectRunner':
strip_attributes = None
else:
strip_attributes = [self.ID_LABEL, self.TIMESTAMP_ATTRIBUTE]
pubsub_msg_verifier = PubSubMessageMatcher(
self.project,
self.output_sub.name,
expected_messages,
timeout=MESSAGE_MATCHER_TIMEOUT_S,
with_attributes=with_attributes,
strip_attributes=strip_attributes)
extra_opts = {'input_subscription': self.input_sub.name,
'output_topic': self.output_topic.name,
'wait_until_finish_duration': TEST_PIPELINE_DURATION_MS,
'on_success_matcher': all_of(state_verifier,
pubsub_msg_verifier)}
# Generate input data and inject to PubSub.
for msg in self.INPUT_MESSAGES[self.runner_name]:
self.pub_client.publish(self.input_topic.name, msg.data, **msg.attributes)
# Get pipeline options from command argument: --test-pipeline-options,
# and start pipeline job by calling pipeline main function.
pubsub_it_pipeline.run_pipeline(
argv=self.test_pipeline.get_full_options_as_args(**extra_opts),
with_attributes=with_attributes,
id_label=self.ID_LABEL,
timestamp_attribute=self.TIMESTAMP_ATTRIBUTE)
@attr('IT')
def test_streaming_data_only(self):
self._test_streaming(with_attributes=False)
@attr('IT')
def test_streaming_with_attributes(self):
self._test_streaming(with_attributes=True)
if __name__ == '__main__':
logging.getLogger().setLevel(logging.DEBUG)
unittest.main()
| {
"content_hash": "04db1040ca2b68417c0b8862fc4c7927",
"timestamp": "",
"source": "github",
"line_count": 188,
"max_line_length": 80,
"avg_line_length": 41.51063829787234,
"alnum_prop": 0.6524859046642747,
"repo_name": "RyanSkraba/beam",
"id": "2c4378657c4a58527cde3cb6b7127d7a3547a4c3",
"size": "8588",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "sdks/python/apache_beam/io/gcp/pubsub_integration_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1597"
},
{
"name": "CSS",
"bytes": "40963"
},
{
"name": "Dockerfile",
"bytes": "16638"
},
{
"name": "FreeMarker",
"bytes": "7428"
},
{
"name": "Go",
"bytes": "2683402"
},
{
"name": "Groovy",
"bytes": "517560"
},
{
"name": "HTML",
"bytes": "183330"
},
{
"name": "Java",
"bytes": "28609011"
},
{
"name": "JavaScript",
"bytes": "16595"
},
{
"name": "Jupyter Notebook",
"bytes": "56365"
},
{
"name": "Python",
"bytes": "6191025"
},
{
"name": "Ruby",
"bytes": "4159"
},
{
"name": "Shell",
"bytes": "235061"
},
{
"name": "TSQL",
"bytes": "841"
}
],
"symlink_target": ""
} |
"""
.. module:: dj-stripe.contrib.rest_framework.urls.
:synopsis: URL routes for the dj-stripe REST API.
.. moduleauthor:: Philippe Luickx (@philippeluickx)
Wire this into the root URLConf this way::
url(
r'^api/v1/stripe/',
include('djstripe.contrib.rest_framework.urls', namespace="rest_djstripe")
),
# url can be changed
# Call to 'djstripe.contrib.rest_framework.urls' and 'namespace' must stay as is
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from django.conf.urls import url
from . import views
app_name = "djstripe_rest_framework"
urlpatterns = [
# REST api
url(
r"^subscription/$",
views.SubscriptionRestView.as_view(),
name="subscription"
),
]
| {
"content_hash": "8913a9cb18a21d4cb72a16d89a3938b5",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 84,
"avg_line_length": 21.694444444444443,
"alnum_prop": 0.6632522407170295,
"repo_name": "jleclanche/dj-stripe",
"id": "87acee17e79df9f60f8cf8f6affb70953d7e09e3",
"size": "805",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "djstripe/contrib/rest_framework/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "4213"
},
{
"name": "Python",
"bytes": "562724"
}
],
"symlink_target": ""
} |
import collections
import ctypes
import re
import numpy as np
from numba.core import errors, types
from numba.core.typing.templates import signature
from numba.np import npdatetime_helpers
from numba.core.errors import TypingError
# re-export
from numba.core.cgutils import is_nonelike # noqa: F401
numpy_version = tuple(map(int, np.__version__.split('.')[:2]))
FROM_DTYPE = {
np.dtype('bool'): types.boolean,
np.dtype('int8'): types.int8,
np.dtype('int16'): types.int16,
np.dtype('int32'): types.int32,
np.dtype('int64'): types.int64,
np.dtype('uint8'): types.uint8,
np.dtype('uint16'): types.uint16,
np.dtype('uint32'): types.uint32,
np.dtype('uint64'): types.uint64,
np.dtype('float32'): types.float32,
np.dtype('float64'): types.float64,
np.dtype('float16'): types.float16,
np.dtype('complex64'): types.complex64,
np.dtype('complex128'): types.complex128,
np.dtype(object): types.pyobject,
}
re_typestr = re.compile(r'[<>=\|]([a-z])(\d+)?$', re.I)
re_datetimestr = re.compile(r'[<>=\|]([mM])8?(\[([a-z]+)\])?$', re.I)
sizeof_unicode_char = np.dtype('U1').itemsize
def _from_str_dtype(dtype):
m = re_typestr.match(dtype.str)
if not m:
raise NotImplementedError(dtype)
groups = m.groups()
typecode = groups[0]
if typecode == 'U':
# unicode
if dtype.byteorder not in '=|':
raise NotImplementedError("Does not support non-native "
"byteorder")
count = dtype.itemsize // sizeof_unicode_char
assert count == int(groups[1]), "Unicode char size mismatch"
return types.UnicodeCharSeq(count)
elif typecode == 'S':
# char
count = dtype.itemsize
assert count == int(groups[1]), "Char size mismatch"
return types.CharSeq(count)
else:
raise NotImplementedError(dtype)
def _from_datetime_dtype(dtype):
m = re_datetimestr.match(dtype.str)
if not m:
raise NotImplementedError(dtype)
groups = m.groups()
typecode = groups[0]
unit = groups[2] or ''
if typecode == 'm':
return types.NPTimedelta(unit)
elif typecode == 'M':
return types.NPDatetime(unit)
else:
raise NotImplementedError(dtype)
def from_dtype(dtype):
"""
Return a Numba Type instance corresponding to the given Numpy *dtype*.
NotImplementedError is raised on unsupported Numpy dtypes.
"""
if type(dtype) == type and issubclass(dtype, np.generic):
dtype = np.dtype(dtype)
elif getattr(dtype, "fields", None) is not None:
return from_struct_dtype(dtype)
try:
return FROM_DTYPE[dtype]
except KeyError:
pass
try:
char = dtype.char
except AttributeError:
pass
else:
if char in 'SU':
return _from_str_dtype(dtype)
if char in 'mM':
return _from_datetime_dtype(dtype)
if char in 'V' and dtype.subdtype is not None:
subtype = from_dtype(dtype.subdtype[0])
return types.NestedArray(subtype, dtype.shape)
raise errors.NumbaNotImplementedError(dtype)
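# For example (illustrative):
#   from_dtype(np.dtype('int32')) -> types.int32
#   from_dtype(np.dtype('datetime64[ms]')) -> types.NPDatetime('ms')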
_as_dtype_letters = {
types.NPDatetime: 'M8',
types.NPTimedelta: 'm8',
types.CharSeq: 'S',
types.UnicodeCharSeq: 'U',
}
def as_dtype(nbtype):
"""
Return a numpy dtype instance corresponding to the given Numba type.
    NotImplementedError is raised if no correspondence is known.
"""
nbtype = types.unliteral(nbtype)
if isinstance(nbtype, (types.Complex, types.Integer, types.Float)):
return np.dtype(str(nbtype))
if nbtype is types.bool_:
return np.dtype('?')
if isinstance(nbtype, (types.NPDatetime, types.NPTimedelta)):
letter = _as_dtype_letters[type(nbtype)]
if nbtype.unit:
return np.dtype('%s[%s]' % (letter, nbtype.unit))
else:
return np.dtype(letter)
if isinstance(nbtype, (types.CharSeq, types.UnicodeCharSeq)):
letter = _as_dtype_letters[type(nbtype)]
return np.dtype('%s%d' % (letter, nbtype.count))
if isinstance(nbtype, types.Record):
return as_struct_dtype(nbtype)
if isinstance(nbtype, types.EnumMember):
return as_dtype(nbtype.dtype)
if isinstance(nbtype, types.npytypes.DType):
return as_dtype(nbtype.dtype)
if isinstance(nbtype, types.NumberClass):
return as_dtype(nbtype.dtype)
if isinstance(nbtype, types.NestedArray):
spec = (as_dtype(nbtype.dtype), tuple(nbtype.shape))
return np.dtype(spec)
if isinstance(nbtype, types.PyObject):
return np.dtype(object)
msg = f"{nbtype} cannot be represented as a NumPy dtype"
raise errors.NumbaNotImplementedError(msg)
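# For example (illustrative), as_dtype() is roughly the inverse of from_dtype():
#   as_dtype(types.float32) -> np.dtype('float32')
#   as_dtype(types.NPTimedelta('ms')) -> np.dtype('timedelta64[ms]')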
def as_struct_dtype(rec):
"""Convert Numba Record type to NumPy structured dtype
"""
assert isinstance(rec, types.Record)
names = []
formats = []
offsets = []
titles = []
# Fill the fields if they are not a title.
for k, t in rec.members:
if not rec.is_title(k):
names.append(k)
formats.append(as_dtype(t))
offsets.append(rec.offset(k))
titles.append(rec.fields[k].title)
fields = {
'names': names,
'formats': formats,
'offsets': offsets,
'itemsize': rec.size,
'titles': titles,
}
_check_struct_alignment(rec, fields)
return np.dtype(fields, align=rec.aligned)
def _check_struct_alignment(rec, fields):
"""Check alignment compatibility with Numpy"""
if rec.aligned:
for k, dt in zip(fields['names'], fields['formats']):
llvm_align = rec.alignof(k)
npy_align = dt.alignment
if llvm_align is not None and npy_align != llvm_align:
msg = (
'NumPy is using a different alignment ({}) '
'than Numba/LLVM ({}) for {}. '
'This is likely a NumPy bug.'
)
raise ValueError(msg.format(npy_align, llvm_align, dt))
def map_arrayscalar_type(val):
if isinstance(val, np.generic):
# We can't blindly call np.dtype() as it loses information
# on some types, e.g. datetime64 and timedelta64.
dtype = val.dtype
else:
try:
dtype = np.dtype(type(val))
except TypeError:
raise NotImplementedError("no corresponding numpy dtype "
"for %r" % type(val))
return from_dtype(dtype)
def is_array(val):
return isinstance(val, np.ndarray)
def map_layout(val):
if val.flags['C_CONTIGUOUS']:
layout = 'C'
elif val.flags['F_CONTIGUOUS']:
layout = 'F'
else:
layout = 'A'
return layout
def select_array_wrapper(inputs):
"""
Given the array-compatible input types to an operation (e.g. ufunc),
select the appropriate input for wrapping the operation output,
according to each input's __array_priority__.
An index into *inputs* is returned.
"""
max_prio = float('-inf')
selected_index = None
for index, ty in enumerate(inputs):
# Ties are broken by choosing the first winner, as in Numpy
if (isinstance(ty, types.ArrayCompatible) and
ty.array_priority > max_prio):
selected_index = index
max_prio = ty.array_priority
assert selected_index is not None
return selected_index
def resolve_output_type(context, inputs, formal_output):
"""
Given the array-compatible input types to an operation (e.g. ufunc),
and the operation's formal output type (a types.Array instance),
resolve the actual output type using the typing *context*.
This uses a mechanism compatible with Numpy's __array_priority__ /
__array_wrap__.
"""
selected_input = inputs[select_array_wrapper(inputs)]
args = selected_input, formal_output
sig = context.resolve_function_type('__array_wrap__', args, {})
if sig is None:
if selected_input.array_priority == types.Array.array_priority:
# If it's the same priority as a regular array, assume we
# should return the output unchanged.
# (we can't define __array_wrap__ explicitly for types.Buffer,
# as that would be inherited by most array-compatible objects)
return formal_output
raise errors.TypingError("__array_wrap__ failed for %s" % (args,))
return sig.return_type
def supported_ufunc_loop(ufunc, loop):
"""Return whether the *loop* for the *ufunc* is supported -in nopython-.
*loop* should be a UFuncLoopSpec instance, and *ufunc* a numpy ufunc.
For ufuncs implemented using the ufunc_db, it is supported if the ufunc_db
contains a lowering definition for 'loop' in the 'ufunc' entry.
For other ufuncs, it is type based. The loop will be considered valid if it
    only contains the following letter types: '?bBhHiIlLqQfd'. Note this is a
    legacy path; when implementing new ufuncs the ufunc_db should be preferred,
    as it allows for more fine-grained incremental support.
"""
# NOTE: Assuming ufunc for the CPUContext
from numba.np import ufunc_db
loop_sig = loop.ufunc_sig
try:
# check if the loop has a codegen description in the
# ufunc_db. If so, we can proceed.
# note that as of now not all ufuncs have an entry in the
# ufunc_db
supported_loop = loop_sig in ufunc_db.get_ufunc_info(ufunc)
except KeyError:
# for ufuncs not in ufunc_db, base the decision of whether the
# loop is supported on its types
loop_types = [x.char for x in loop.numpy_inputs + loop.numpy_outputs]
supported_types = '?bBhHiIlLqQfd'
# check if all the types involved in the ufunc loop are
# supported in this mode
supported_loop = all(t in supported_types for t in loop_types)
return supported_loop
class UFuncLoopSpec(collections.namedtuple('_UFuncLoopSpec',
('inputs', 'outputs', 'ufunc_sig'))):
"""
An object describing a ufunc loop's inner types. Properties:
- inputs: the inputs' Numba types
- outputs: the outputs' Numba types
- ufunc_sig: the string representing the ufunc's type signature, in
Numpy format (e.g. "ii->i")
"""
__slots__ = ()
@property
def numpy_inputs(self):
return [as_dtype(x) for x in self.inputs]
@property
def numpy_outputs(self):
return [as_dtype(x) for x in self.outputs]
def _ufunc_loop_sig(out_tys, in_tys):
if len(out_tys) == 1:
return signature(out_tys[0], *in_tys)
else:
return signature(types.Tuple(out_tys), *in_tys)
def ufunc_can_cast(from_, to, has_mixed_inputs, casting='safe'):
"""
A variant of np.can_cast() that can allow casting any integer to
any real or complex type, in case the operation has mixed-kind
inputs.
For example we want `np.power(float32, int32)` to be computed using
SP arithmetic and return `float32`.
However, `np.sqrt(int32)` should use DP arithmetic and return `float64`.
"""
from_ = np.dtype(from_)
to = np.dtype(to)
if has_mixed_inputs and from_.kind in 'iu' and to.kind in 'cf':
# Decide that all integers can cast to any real or complex type.
return True
return np.can_cast(from_, to, casting)
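# For example (illustrative): with mixed integer/floating inputs,
# ufunc_can_cast(np.int64, np.float32, True) is True, whereas the plain
# np.can_cast(np.int64, np.float32, 'safe') is False.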
def ufunc_find_matching_loop(ufunc, arg_types):
"""Find the appropriate loop to be used for a ufunc based on the types
of the operands
ufunc - The ufunc we want to check
arg_types - The tuple of arguments to the ufunc, including any
explicit output(s).
return value - A UFuncLoopSpec identifying the loop, or None
if no matching loop is found.
"""
# Separate logical input from explicit output arguments
input_types = arg_types[:ufunc.nin]
output_types = arg_types[ufunc.nin:]
assert(len(input_types) == ufunc.nin)
try:
np_input_types = [as_dtype(x) for x in input_types]
except errors.NumbaNotImplementedError:
return None
try:
np_output_types = [as_dtype(x) for x in output_types]
except errors.NumbaNotImplementedError:
return None
# Whether the inputs are mixed integer / floating-point
has_mixed_inputs = (
any(dt.kind in 'iu' for dt in np_input_types) and
any(dt.kind in 'cf' for dt in np_input_types))
def choose_types(numba_types, ufunc_letters):
"""
Return a list of Numba types representing *ufunc_letters*,
except when the letter designates a datetime64 or timedelta64,
in which case the type is taken from *numba_types*.
"""
assert len(ufunc_letters) >= len(numba_types)
types = [tp if letter in 'mM' else from_dtype(np.dtype(letter))
for tp, letter in zip(numba_types, ufunc_letters)]
# Add missing types (presumably implicit outputs)
types += [from_dtype(np.dtype(letter))
for letter in ufunc_letters[len(numba_types):]]
return types
def set_output_dt_units(inputs, outputs, ufunc_inputs, ufunc_name):
"""
Sets the output unit of a datetime type based on the input units
Timedelta is a special dtype that requires the time unit to be
specified (day, month, etc). Not every operation with timedelta inputs
    produces a timedelta output. However, for those that do,
the unit of output must be inferred based on the units of the inputs.
At the moment this function takes care of two cases:
a) where all inputs are timedelta with the same unit (mm), and
therefore the output has the same unit.
This case is used for arr.sum, and for arr1+arr2 where all arrays
are timedeltas.
If in the future this needs to be extended to a case with mixed units,
the rules should be implemented in `npdatetime_helpers` and called
from this function to set the correct output unit.
b) where left operand is a timedelta, i.e. the "m?" case. This case
    is used for division, e.g. timedelta / int.
At the time of writing, Numba does not support addition of timedelta
and other types, so this function does not consider the case "?m",
i.e. where timedelta is the right operand to a non-timedelta left
operand. To extend it in the future, just add another elif clause.
"""
def make_specific(outputs, unit):
new_outputs = []
for out in outputs:
if isinstance(out, types.NPTimedelta) and out.unit == "":
new_outputs.append(types.NPTimedelta(unit))
else:
new_outputs.append(out)
return new_outputs
def make_datetime_specific(outputs, dt_unit, td_unit):
new_outputs = []
for out in outputs:
if isinstance(out, types.NPDatetime) and out.unit == "":
unit = npdatetime_helpers.combine_datetime_timedelta_units(
dt_unit, td_unit)
if unit is None:
raise TypeError(f"ufunc '{ufunc_name}' is not " +
"supported between " +
f"datetime64[{dt_unit}] " +
f"and timedelta64[{td_unit}]"
)
new_outputs.append(types.NPDatetime(unit))
else:
new_outputs.append(out)
return new_outputs
if ufunc_inputs == 'mm':
if all(inp.unit == inputs[0].unit for inp in inputs):
# Case with operation on same units. Operations on different
# units not adjusted for now but might need to be
# added in the future
unit = inputs[0].unit
new_outputs = make_specific(outputs, unit)
else:
return outputs
return new_outputs
elif ufunc_inputs == 'mM':
# case where the left operand has timedelta type
# and the right operand has datetime
td_unit = inputs[0].unit
dt_unit = inputs[1].unit
return make_datetime_specific(outputs, dt_unit, td_unit)
elif ufunc_inputs == 'Mm':
# case where the right operand has timedelta type
# and the left operand has datetime
dt_unit = inputs[0].unit
td_unit = inputs[1].unit
return make_datetime_specific(outputs, dt_unit, td_unit)
elif ufunc_inputs[0] == 'm':
# case where the left operand has timedelta type
unit = inputs[0].unit
new_outputs = make_specific(outputs, unit)
return new_outputs
# In NumPy, the loops are evaluated from first to last. The first one
# that is viable is the one used. One loop is viable if it is possible
# to cast every input operand to the one expected by the ufunc.
# Also under NumPy 1.10+ the output must be able to be cast back
# to a close enough type ("same_kind").
for candidate in ufunc.types:
ufunc_inputs = candidate[:ufunc.nin]
ufunc_outputs = candidate[-ufunc.nout:] if ufunc.nout else []
if 'e' in ufunc_inputs:
            # Skip float16 arrays since we don't have an implementation for them
continue
if 'O' in ufunc_inputs:
# Skip object arrays
continue
found = True
# Skip if any input or output argument is mismatching
for outer, inner in zip(np_input_types, ufunc_inputs):
# (outer is a dtype instance, inner is a type char)
if outer.char in 'mM' or inner in 'mM':
# For datetime64 and timedelta64, we want to retain
# precise typing (i.e. the units); therefore we look for
# an exact match.
if outer.char != inner:
found = False
break
elif not ufunc_can_cast(outer.char, inner,
has_mixed_inputs, 'safe'):
found = False
break
if found:
# Can we cast the inner result to the outer result type?
for outer, inner in zip(np_output_types, ufunc_outputs):
if (outer.char not in 'mM' and not
ufunc_can_cast(inner, outer.char,
has_mixed_inputs, 'same_kind')):
found = False
break
if found:
# Found: determine the Numba types for the loop's inputs and
# outputs.
try:
inputs = choose_types(input_types, ufunc_inputs)
outputs = choose_types(output_types, ufunc_outputs)
# if the left operand or both are timedeltas, or the first
# argument is datetime and the second argument is timedelta,
# then the output units need to be determined.
if ufunc_inputs[0] == 'm' or ufunc_inputs == 'Mm':
outputs = set_output_dt_units(inputs, outputs,
ufunc_inputs, ufunc.__name__)
except errors.NumbaNotImplementedError:
# One of the selected dtypes isn't supported by Numba
# (e.g. float16), try other candidates
continue
else:
return UFuncLoopSpec(inputs, outputs, candidate)
return None
def _is_aligned_struct(struct):
return struct.isalignedstruct
def from_struct_dtype(dtype):
"""Convert a NumPy structured dtype to Numba Record type
"""
if dtype.hasobject:
raise TypeError("Do not support dtype containing object")
fields = []
for name, info in dtype.fields.items():
# *info* may have 3 element
[elemdtype, offset] = info[:2]
title = info[2] if len(info) == 3 else None
ty = from_dtype(elemdtype)
infos = {
'type': ty,
'offset': offset,
'title': title,
}
fields.append((name, infos))
# Note: dtype.alignment is not consistent.
# It is different after passing into a recarray.
# recarray(N, dtype=mydtype).dtype.alignment != mydtype.alignment
size = dtype.itemsize
aligned = _is_aligned_struct(dtype)
return types.Record(fields, size, aligned)
def _get_bytes_buffer(ptr, nbytes):
"""
Get a ctypes array of *nbytes* starting at *ptr*.
"""
if isinstance(ptr, ctypes.c_void_p):
ptr = ptr.value
arrty = ctypes.c_byte * nbytes
return arrty.from_address(ptr)
def _get_array_from_ptr(ptr, nbytes, dtype):
return np.frombuffer(_get_bytes_buffer(ptr, nbytes), dtype)
def carray(ptr, shape, dtype=None):
"""
Return a Numpy array view over the data pointed to by *ptr* with the
given *shape*, in C order. If *dtype* is given, it is used as the
array's dtype, otherwise the array's dtype is inferred from *ptr*'s type.
"""
from numba.core.typing.ctypes_utils import from_ctypes
try:
# Use ctypes parameter protocol if available
ptr = ptr._as_parameter_
except AttributeError:
pass
# Normalize dtype, to accept e.g. "int64" or np.int64
if dtype is not None:
dtype = np.dtype(dtype)
if isinstance(ptr, ctypes.c_void_p):
if dtype is None:
raise TypeError("explicit dtype required for void* argument")
p = ptr
elif isinstance(ptr, ctypes._Pointer):
ptrty = from_ctypes(ptr.__class__)
assert isinstance(ptrty, types.CPointer)
ptr_dtype = as_dtype(ptrty.dtype)
if dtype is not None and dtype != ptr_dtype:
raise TypeError("mismatching dtype '%s' for pointer %s"
% (dtype, ptr))
dtype = ptr_dtype
p = ctypes.cast(ptr, ctypes.c_void_p)
else:
raise TypeError("expected a ctypes pointer, got %r" % (ptr,))
nbytes = dtype.itemsize * np.product(shape, dtype=np.intp)
return _get_array_from_ptr(p, nbytes, dtype).reshape(shape)
def farray(ptr, shape, dtype=None):
"""
Return a Numpy array view over the data pointed to by *ptr* with the
given *shape*, in Fortran order. If *dtype* is given, it is used as the
array's dtype, otherwise the array's dtype is inferred from *ptr*'s type.
"""
if not isinstance(shape, int):
shape = shape[::-1]
return carray(ptr, shape, dtype).T
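# Minimal usage sketch with a ctypes buffer (values are illustrative):
#   buf = (ctypes.c_double * 6)(*range(6))
#   ptr = ctypes.cast(buf, ctypes.POINTER(ctypes.c_double))
#   c_view = carray(ptr, (2, 3))   # C-ordered 2x3 view, dtype inferred as float64
#   f_view = farray(ptr, (2, 3))   # Fortran-ordered view over the same memory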
def is_contiguous(dims, strides, itemsize):
"""Is the given shape, strides, and itemsize of C layout?
Note: The code is usable as a numba-compiled function
"""
nd = len(dims)
# Check and skip 1s or 0s in inner dims
innerax = nd - 1
while innerax > -1 and dims[innerax] <= 1:
innerax -= 1
# Early exit if all axis are 1s or 0s
if innerax < 0:
return True
# Check itemsize matches innermost stride
if itemsize != strides[innerax]:
return False
# Check and skip 1s or 0s in outer dims
outerax = 0
while outerax < innerax and dims[outerax] <= 1:
outerax += 1
# Check remaining strides to be contiguous
ax = innerax
while ax > outerax:
if strides[ax] * dims[ax] != strides[ax - 1]:
return False
ax -= 1
return True
def is_fortran(dims, strides, itemsize):
"""Is the given shape, strides, and itemsize of F layout?
Note: The code is usable as a numba-compiled function
"""
nd = len(dims)
# Check and skip 1s or 0s in inner dims
firstax = 0
while firstax < nd and dims[firstax] <= 1:
firstax += 1
# Early exit if all axis are 1s or 0s
if firstax >= nd:
return True
# Check itemsize matches innermost stride
if itemsize != strides[firstax]:
return False
# Check and skip 1s or 0s in outer dims
lastax = nd - 1
while lastax > firstax and dims[lastax] <= 1:
lastax -= 1
# Check remaining strides to be contiguous
ax = firstax
while ax < lastax:
if strides[ax] * dims[ax] != strides[ax + 1]:
return False
ax += 1
return True
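# Worked example (illustrative): a C-contiguous (2, 3) float64 array has
# strides (24, 8) and itemsize 8, so is_contiguous((2, 3), (24, 8), 8) is True
# while is_fortran((2, 3), (24, 8), 8) is False.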
def type_can_asarray(arr):
""" Returns True if the type of 'arr' is supported by the Numba `np.asarray`
implementation, False otherwise.
"""
ok = (types.Array, types.Sequence, types.Tuple, types.StringLiteral,
types.Number, types.Boolean, types.containers.ListType)
return isinstance(arr, ok)
def type_is_scalar(typ):
""" Returns True if the type of 'typ' is a scalar type, according to
NumPy rules. False otherwise.
https://numpy.org/doc/stable/reference/arrays.scalars.html#built-in-scalar-types
"""
ok = (types.Boolean, types.Number, types.UnicodeType, types.StringLiteral,
types.NPTimedelta, types.NPDatetime)
return isinstance(typ, ok)
def check_is_integer(v, name):
"""Raises TypingError if the value is not an integer."""
if not isinstance(v, (int, types.Integer)):
raise TypingError('{} must be an integer'.format(name))
| {
"content_hash": "119b228417c842f407f5637758101160",
"timestamp": "",
"source": "github",
"line_count": 731,
"max_line_length": 84,
"avg_line_length": 34.808481532147745,
"alnum_prop": 0.6048732560424445,
"repo_name": "numba/numba",
"id": "025f9a89e51a444779816354f092cbc86d7a0a01",
"size": "25445",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "numba/np/numpy_support.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "3522"
},
{
"name": "C",
"bytes": "574888"
},
{
"name": "C++",
"bytes": "166526"
},
{
"name": "Cuda",
"bytes": "2063"
},
{
"name": "GDB",
"bytes": "101"
},
{
"name": "HTML",
"bytes": "3464"
},
{
"name": "Python",
"bytes": "9400448"
},
{
"name": "Shell",
"bytes": "13621"
}
],
"symlink_target": ""
} |
"""Namespace for registering numpy_extension ops for imperative programming."""
from ..ndarray import numpy_extension as _mx_nd_npx
from ..util import set_module
__all__ = ['softmax', 'log_softmax', 'masked_softmax', 'masked_log_softmax',
'activation', 'batch_norm', 'fully_connected', 'pick', 'convolution',
'deconvolution', 'pooling', 'dropout', 'one_hot', 'rnn', 'embedding',
'topk', 'layer_norm', 'leaky_relu', 'batch_dot', 'broadcast_like',
'arange_like', 'group_norm']
# pylint: disable=too-many-arguments
@set_module('mxnet.numpy_extension')
def softmax(data, length=None, axis=-1, temperature=None, use_length=False, dtype=None):
r"""Applies the softmax function.
The resulting array contains elements in the range (0,1) and the elements along the given axis sum up to 1.
.. math::
softmax(\mathbf{z/t})_j = \frac{e^{z_j/t}}{\sum_{k=1}^K e^{z_k/t}}
for :math:`j = 1, ..., K`
    t is the temperature parameter in the softmax function. By default, t equals 1.0.
Parameters
----------
data : NDArray
The input array.
axis : int, optional, default='-1'
The axis along which to compute softmax.
length : NDArray
The length array.
temperature : double or None, optional, default=None
Temperature parameter in softmax
dtype : {None, 'float16', 'float32', 'float64'},optional, default='None'
DType of the output in case this can't be inferred. Defaults to
the same as input's dtype if not defined (dtype=None).
use_length : boolean or None, optional, default=0
Whether to use the length input as a mask over the data input.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
Example
-------
>>> data = np.ones((2, 3))
>>> npx.softmax(data, axis=0)
array([[0.5, 0.5, 0.5],
[0.5, 0.5, 0.5]])
>>> npx.softmax(data, axis=1)
array([[0.33333334, 0.33333334, 0.33333334],
[0.33333334, 0.33333334, 0.33333334]])
"""
return _mx_nd_npx.softmax(data, axis=axis, length=length, temperature=temperature,
use_length=use_length, dtype=dtype)
# pylint: disable=too-many-arguments
@set_module('mxnet.numpy_extension')
def log_softmax(data, axis=-1, length=None, temperature=None, use_length=False, dtype=None):
r"""Computes the log softmax of the input.
This is equivalent to computing softmax followed by log.
Parameters
----------
data : NDArray
The input array.
axis : int, optional, default='-1'
The axis along which to compute softmax.
length : NDArray
The length array.
temperature : double or None, optional, default=None
Temperature parameter in softmax
dtype : {None, 'float16', 'float32', 'float64'},optional, default='None'
DType of the output in case this can't be inferred. Defaults to
the same as input's dtype if not defined (dtype=None).
use_length : boolean or None, optional, default=0
Whether to use the length input as a mask over the data input.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
Examples
--------
>>> data = np.array([1, 2, .1])
>>> npx.log_softmax(data)
array([-1.4170278, -0.4170278, -2.3170278])
>>> data = np.array([[1, 2, .1],[.1, 2, 1]])
>>> npx.log_softmax(data, axis=0)
array([[-0.34115386, -0.6931472 , -1.2411538 ],
[-1.2411538 , -0.6931472 , -0.34115386]])
"""
return _mx_nd_npx.log_softmax(data, axis=axis, length=length, temperature=temperature,
use_length=use_length, dtype=dtype)
# pylint: disable=too-many-arguments
@set_module('mxnet.numpy_extension')
def masked_softmax(data, mask, axis=-1, temperature=1.0, normalize=True):
r"""Applies the softmax function masking elements according to the mask provided
Parameters
----------
data : NDArray
The input array.
mask : NDArray
Mask to apply.
axis : int, optional, default='-1'
The axis along which to compute softmax.
temperature : double or None, optional, default=None
Temperature parameter in softmax
normalize : boolean or None, optional, default=1
Whether to normalize input data x: x = x - max(x)
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
Examples
--------
>>> data = np.arange(5)
>>> mask = np.array([1, 0, 1, 0, 1])
>>> npx.masked_softmax(data, mask)
array([0.01587624, 0. , 0.11731042, 0. , 0.8668133 ])
>>> data = np.arange(10).reshape((2, 5))
>>> npx.masked_softmax(data, mask, axis=0)
array([[0.00669285, 0. , 0.00669285, 0. , 0.00669285],
[0.9933072 , 0. , 0.9933072 , 0. , 0.9933072 ]])
"""
return _mx_nd_npx.masked_softmax(data, mask, axis=axis, temperature=temperature,
normalize=normalize)
# pylint: disable=too-many-arguments
@set_module('mxnet.numpy_extension')
def masked_log_softmax(data, mask, axis=-1, temperature=1.0, normalize=True):
r"""Computes the masked log softmax of the input.
This is equivalent to computing masked softmax followed by log.
Parameters
----------
data : NDArray
The input array.
mask : NDArray
Mask to apply.
axis : int, optional, default='-1'
The axis along which to compute softmax.
temperature : double or None, optional, default=None
Temperature parameter in softmax
normalize : boolean or None, optional, default=1
Whether to normalize input data x: x = x - max(x)
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
Examples
--------
>>> data = np.arange(5)
>>> mask = np.array([1, 0, 1, 0, 1])
>>> npx.masked_log_softmax(data, mask)
array([-4.1429286 , -inf, -2.1429286 , -inf, -0.14292854])
>>> data = np.arange(10).reshape((2, 5))
>>> npx.masked_log_softmax(data, mask, axis=0)
array([[-5.0067153 , -inf, -5.0067153 , -inf, -5.0067153 ],
[-0.00671535, -inf, -0.00671535, -inf, -0.00671535]])
"""
return _mx_nd_npx.masked_log_softmax(data, mask, axis=axis, temperature=temperature,
normalize=normalize)
# pylint: disable=too-many-arguments, unused-argument
@set_module('mxnet.numpy_extension')
def activation(data, act_type='relu', **kwargs):
r"""Applies an activation function element-wise to the input.
The following activation functions are supported:
- `log_sigmoid`: :math:`y = log(\frac{1}{1 + exp(-x)})`
- `mish`: :math:`y = x * tanh(log(1 + exp(x)))`
- `relu`: Rectified Linear Unit, :math:`y = max(x, 0)`
- `sigmoid`: :math:`y = \frac{1}{1 + exp(-x)}`
- `tanh`: Hyperbolic tangent, :math:`y = \frac{exp(x) - exp(-x)}{exp(x) + exp(-x)}`
- `softrelu`: Soft ReLU, or SoftPlus, :math:`y = log(1 + exp(x))`
- `softsign`: :math:`y = \frac{x}{1 + abs(x)}`
Parameters
----------
data : NDArray
The input array.
act_type : {'log_sigmoid', 'mish', 'relu', 'sigmoid', 'softrelu', 'softsign', 'tanh'}, required
Activation function to be applied.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return _mx_nd_npx.activation(data, act_type=act_type)
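# Illustrative example (a sketch; assumes the usual `np`/`npx` aliases for
# mxnet.numpy and mxnet.numpy_extension used elsewhere in this module):
# >>> npx.activation(np.array([-1., 0., 1.]), act_type='relu')
# array([0., 0., 1.])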
# pylint: disable=too-many-arguments, unused-argument
@set_module('mxnet.numpy_extension')
def batch_norm(x, gamma, beta, running_mean, running_var, eps=1e-3, momentum=0.9,
fix_gamma=True, use_global_stats=False, output_mean_var=False, axis=1,
cudnn_off=False, min_calib_range=None, max_calib_range=None, **kwargs):
r"""Batch normalization.
Normalizes a data batch by mean and variance, and applies a scale ``gamma`` as
well as offset ``beta``.
Assume the input has more than one dimension and we normalize along axis 1.
We first compute the mean and variance along this axis:
.. math::
data\_mean[i] = mean(data[:,i,:,...]) \\
data\_var[i] = var(data[:,i,:,...])
Then compute the normalized output, which has the same shape as input, as following:
.. math::
out[:,i,:,...] = \frac{data[:,i,:,...] - data\_mean[i]}{\sqrt{data\_var[i]+\epsilon}} * gamma[i] + beta[i]
    Both *mean* and *var* return a scalar by treating the input as a vector.
Assume the input has size *k* on axis 1, then both ``gamma`` and ``beta``
have shape *(k,)*. If ``output_mean_var`` is set to be true, then outputs both ``data_mean`` and
    the inverse of ``data_var``, which are needed for the backward pass. Note that the gradients of these
    two outputs are blocked.
Besides the inputs and the outputs, this operator accepts two auxiliary
states, ``moving_mean`` and ``moving_var``, which are *k*-length
vectors. They are global statistics for the whole dataset, which are updated
by::
moving_mean = moving_mean * momentum + data_mean * (1 - momentum)
moving_var = moving_var * momentum + data_var * (1 - momentum)
If ``use_global_stats`` is set to be true, then ``moving_mean`` and
``moving_var`` are used instead of ``data_mean`` and ``data_var`` to compute
the output. It is often used during inference.
The parameter ``axis`` specifies which axis of the input shape denotes
the 'channel' (separately normalized groups). The default is 1. Specifying -1 sets the channel
axis to be the last item in the input shape.
Both ``gamma`` and ``beta`` are learnable parameters. But if ``fix_gamma`` is true,
then set ``gamma`` to 1 and its gradient to 0.
.. Note::
      When ``fix_gamma`` is set to True, no sparse support is provided. If ``fix_gamma`` is set to False,
      sparse tensors will fall back.
Parameters
----------
data : NDArray
Input data to batch normalization
gamma : NDArray
gamma array
beta : NDArray
beta array
moving_mean : NDArray
running mean of input
moving_var : NDArray
running variance of input
eps : double, optional, default=0.0010000000474974513
Epsilon to prevent div 0. Must be no less than CUDNN_BN_MIN_EPSILON
defined in cudnn.h when using cudnn (usually 1e-5)
momentum : float, optional, default=0.899999976
Momentum for moving average
fix_gamma : boolean, optional, default=1
Fix gamma while training
use_global_stats : boolean, optional, default=0
        Whether to use global moving statistics instead of local batch-norm statistics.
        This effectively turns batch-norm into a scale-shift operator.
output_mean_var : boolean, optional, default=0
Output the mean and inverse std
axis : int, optional, default='1'
Specify which shape axis the channel is specified
cudnn_off : boolean, optional, default=0
Do not select CUDNN operator, if available
min_calib_range : float or None, optional, default=None
The minimum scalar value in the form of float32 obtained through calibration.
        If present, it will be used by the quantized batch norm op to calculate the primitive scale.
        Note: this calib_range is used to calibrate the bn output.
max_calib_range : float or None, optional, default=None
The maximum scalar value in the form of float32 obtained through calibration.
        If present, it will be used by the quantized batch norm op to calculate the primitive scale.
        Note: this calib_range is used to calibrate the bn output.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return _mx_nd_npx.batch_norm(x, gamma, beta, running_mean, running_var, eps=eps,
momentum=momentum, fix_gamma=fix_gamma,
use_global_stats=use_global_stats,
output_mean_var=output_mean_var, axis=axis, cudnn_off=cudnn_off,
min_calib_range=min_calib_range, max_calib_range=max_calib_range)
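# Illustrative shape-only sketch: with a channel axis of size C, gamma/beta and the
# running statistics are 1-D arrays of length C and the output keeps the input shape.
# >>> x = np.ones((2, 3, 4, 4))                      # NCHW, C = 3
# >>> gamma, beta = np.ones(3), np.zeros(3)
# >>> mean, var = np.zeros(3), np.ones(3)
# >>> npx.batch_norm(x, gamma, beta, mean, var).shape
# (2, 3, 4, 4)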
# pylint: disable=too-many-arguments, unused-argument
@set_module('mxnet.numpy_extension')
def fully_connected(x, weight, bias=None, num_hidden=None,
no_bias=True, flatten=True, **kwargs):
r"""Applies a linear transformation: :math:`Y = XW^T + b`.
If ``flatten`` is set to be true, then the shapes are:
- **data**: `(batch_size, x1, x2, ..., xn)`
- **weight**: `(num_hidden, x1 * x2 * ... * xn)`
- **bias**: `(num_hidden,)`
- **out**: `(batch_size, num_hidden)`
If ``flatten`` is set to be false, then the shapes are:
- **data**: `(x1, x2, ..., xn, input_dim)`
- **weight**: `(num_hidden, input_dim)`
- **bias**: `(num_hidden,)`
- **out**: `(x1, x2, ..., xn, num_hidden)`
The learnable parameters include both ``weight`` and ``bias``.
If ``no_bias`` is set to be true, then the ``bias`` term is ignored.
.. Note::
The sparse support for FullyConnected is limited to forward evaluation with `row_sparse`
weight and bias, where the length of `weight.indices` and `bias.indices` must be equal
to `num_hidden`. This could be useful for model inference with `row_sparse` weights
trained with importance sampling or noise contrastive estimation.
To compute linear transformation with 'csr' sparse data, sparse.dot is recommended instead
of sparse.FullyConnected.
Parameters
----------
data : NDArray
Input data.
weight : NDArray
Weight matrix.
bias : NDArray
Bias parameter.
num_hidden : int, required
Number of hidden nodes of the output.
no_bias : boolean, optional, default=0
Whether to disable bias parameter.
flatten : boolean, optional, default=1
Whether to collapse all but the first axis of the input data tensor.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return _mx_nd_npx.fully_connected(x, weight, bias, num_hidden=num_hidden,
no_bias=no_bias, flatten=flatten)
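# Illustrative example (a sketch): an all-ones weight sums the 4 input features.
# >>> data = np.ones((2, 4))
# >>> weight = np.ones((3, 4))
# >>> npx.fully_connected(data, weight, num_hidden=3, no_bias=True)
# array([[4., 4., 4.],
#        [4., 4., 4.]])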
# pylint: disable=too-many-arguments
@set_module('mxnet.numpy_extension')
def pick(data, index, axis=-1, mode='clip', keepdims=False):
r"""Picks elements from an input array according to the input indices along the given axis.
Given an input array of shape ``(d0, d1)`` and indices of shape ``(i0,)``, the result will be
an output array of shape ``(i0,)`` with::
output[i] = input[i, indices[i]]
By default, if any index mentioned is too large, it is replaced by the index that addresses
the last element along an axis (the `clip` mode).
This function supports n-dimensional input and (n-1)-dimensional indices arrays.
Parameters
----------
data : NDArray
The input array
index : NDArray
The index array
axis : int or None, optional, default='-1'
        The axis along which to pick the elements. Negative values mean indexing from
        right to left. If `None`, elements are picked from the flattened input
        according to the index.
keepdims : boolean, optional, default=0
If true, the axis where we pick the elements is
left in the result as dimension with size one.
mode : {'clip', 'wrap'},optional, default='clip'
Specify how out-of-bound indices behave. Default is "clip".
"clip" means clip to the range. So, if all indices mentioned are too large,
they are replaced by the index that addresses the last element along an axis.
"wrap" means to wrap around.
out : NDArray, optional
The output NDArray to hold the result.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
Example
-------
>>> x = np.array([[1., 2.],[3., 4.],[5., 6.]])
picks elements with specified indices along axis 0
>>> npx.pick(x, np.array([0, 1]), 0)
array([1., 4.])
picks elements with specified indices along axis 1
>>> npx.pick(x, np.array([0, 1, 0]), 1)
array([1., 4., 5.])
picks elements with specified indices along axis 1 using 'wrap' mode
    to wrap indices that would otherwise be out of bounds
>>> npx.pick(x, np.array([2, -1, -2]), 1, mode='wrap')
array([1., 4., 5.])
picks elements with specified indices along axis 1 and dims are maintained
>>> npx.pick(x, np.array([[1.], [0.], [2.]]), 1, keepdims=True)
array([[2.],
[3.],
[6.]])
"""
return _mx_nd_npx.pick(data, index, axis, mode, keepdims)
# pylint: disable=too-many-arguments
@set_module('mxnet.numpy_extension')
def convolution(data=None, weight=None, bias=None, kernel=None, stride=None, dilate=None,
pad=None, num_filter=1, num_group=1, workspace=1024, no_bias=False,
cudnn_tune=None, cudnn_off=False, layout=None):
r"""Compute *N*-D convolution on *(N+2)*-D input.
In the 2-D convolution, given input data with shape *(batch_size,
channel, height, width)*, the output is computed by
.. math::
out[n,i,:,:] = bias[i] + \sum_{j=0}^{channel} data[n,j,:,:] \star
weight[i,j,:,:]
where :math:`\star` is the 2-D cross-correlation operator.
For general 2-D convolution, the shapes are
- **data**: *(batch_size, channel, height, width)*
- **weight**: *(num_filter, channel, kernel[0], kernel[1])*
- **bias**: *(num_filter,)*
- **out**: *(batch_size, num_filter, out_height, out_width)*.
Define::
f(x,k,p,s,d) = floor((x+2*p-d*(k-1)-1)/s)+1
then we have::
out_height=f(height, kernel[0], pad[0], stride[0], dilate[0])
out_width=f(width, kernel[1], pad[1], stride[1], dilate[1])
If ``no_bias`` is set to be true, then the ``bias`` term is ignored.
The default data ``layout`` is *NCHW*, namely *(batch_size, channel, height,
    width)*. We can choose other layouts such as *NHWC*.
If ``num_group`` is larger than 1, denoted by *g*, then split the input ``data``
evenly into *g* parts along the channel axis, and also evenly split ``weight``
along the first dimension. Next compute the convolution on the *i*-th part of
the data with the *i*-th weight part. The output is obtained by concatenating all
the *g* results.
1-D convolution does not have *height* dimension but only *width* in space.
- **data**: *(batch_size, channel, width)*
- **weight**: *(num_filter, channel, kernel[0])*
- **bias**: *(num_filter,)*
- **out**: *(batch_size, num_filter, out_width)*.
3-D convolution adds an additional *depth* dimension besides *height* and
*width*. The shapes are
- **data**: *(batch_size, channel, depth, height, width)*
- **weight**: *(num_filter, channel, kernel[0], kernel[1], kernel[2])*
- **bias**: *(num_filter,)*
- **out**: *(batch_size, num_filter, out_depth, out_height, out_width)*.
Both ``weight`` and ``bias`` are learnable parameters.
There are other options to tune the performance.
- **cudnn_tune**: enable this option leads to higher startup time but may give
faster speed. Options are
- **off**: no tuning
    - **limited_workspace**: run test and pick the fastest algorithm that doesn't
exceed workspace limit.
- **fastest**: pick the fastest algorithm and ignore workspace limit.
- **None** (default): the behavior is determined by environment variable
``MXNET_CUDNN_AUTOTUNE_DEFAULT``. 0 for off, 1 for limited workspace
(default), 2 for fastest.
- **workspace**: A large number leads to more (GPU) memory usage but may improve
the performance.
Parameters
----------
data : NDArray
Input data to the ConvolutionOp.
weight : NDArray
Weight matrix.
bias : NDArray
Bias parameter.
kernel : Shape(tuple), required
Convolution kernel size: (w,), (h, w) or (d, h, w)
stride : Shape(tuple), optional, default=[]
Convolution stride: (w,), (h, w) or (d, h, w). Defaults to 1 for each dimension.
dilate : Shape(tuple), optional, default=[]
Convolution dilate: (w,), (h, w) or (d, h, w). Defaults to 1 for each dimension.
pad : Shape(tuple), optional, default=[]
Zero pad for convolution: (w,), (h, w) or (d, h, w). Defaults to no padding.
num_filter : int (non-negative), required
Convolution filter(channel) number
num_group : int (non-negative), optional, default=1
Number of group partitions.
workspace : long (non-negative), optional, default=1024
        Maximum temporary workspace allowed (MB) in convolution. This parameter has two usages.
When CUDNN is not used, it determines the effective batch size of the convolution kernel.
When CUDNN is used, it controls the maximum temporary storage used for tuning the best
CUDNN kernel when `limited_workspace` strategy is used.
no_bias : boolean, optional, default=0
Whether to disable bias parameter.
cudnn_tune : {None, 'fastest', 'limited_workspace', 'off'},optional, default='None'
Whether to pick convolution algo by running performance test.
cudnn_off : boolean, optional, default=0
Turn off cudnn for this layer.
layout : {None, 'NCDHW', 'NCHW', 'NCW', 'NDHWC', 'NHWC'},optional, default='None'
Set layout for input, output and weight. Empty for
default layout: NCW for 1d, NCHW for 2d and NCDHW for 3d.
NHWC and NDHWC are only supported on GPU.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return _mx_nd_npx.convolution(data=data, weight=weight, bias=bias, kernel=kernel,
stride=stride, dilate=dilate, pad=pad, num_filter=num_filter,
num_group=num_group, workspace=workspace, no_bias=no_bias,
cudnn_tune=cudnn_tune, cudnn_off=cudnn_off, layout=layout)
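# Illustrative example (a sketch): a 2x2 all-ones kernel over an all-ones 3x3 input
# sums 4 elements per window.
# >>> data = np.ones((1, 1, 3, 3))                   # NCHW
# >>> weight = np.ones((1, 1, 2, 2))
# >>> npx.convolution(data=data, weight=weight, kernel=(2, 2), num_filter=1, no_bias=True)
# array([[[[4., 4.],
#          [4., 4.]]]])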
# pylint: disable=too-many-arguments
@set_module('mxnet.numpy_extension')
def deconvolution(data=None, weight=None, bias=None, kernel=None, stride=None, dilate=None,
pad=None, adj=None, target_shape=None, num_filter=1, num_group=1,
workspace=1024, no_bias=False, cudnn_tune=None,
cudnn_off=False, layout=None):
r"""Computes 1D, 2D or 3D transposed convolution (aka fractionally strided convolution) of
the input tensor. This operation can be seen as the gradient of Convolution operation
with respect to its input. Convolution usually reduces the size of the input.
Transposed convolution works the other way, going from a smaller input
to a larger output while preserving the connectivity pattern.
Parameters
----------
data : NDArray
Input tensor to the deconvolution operation.
weight : NDArray
Weights representing the kernel.
bias : NDArray
Bias added to the result after the deconvolution operation.
kernel : Shape(tuple), required
Deconvolution kernel size: (w,), (h, w) or (d, h, w).
This is same as the kernel size used for the corresponding convolution
stride : Shape(tuple), optional, default=[]
The stride used for the corresponding convolution: (w,), (h, w) or (d, h, w).
Defaults to 1 for each dimension.
dilate : Shape(tuple), optional, default=[]
Dilation factor for each dimension of the input: (w,), (h, w) or (d, h, w).
Defaults to 1 for each dimension.
pad : Shape(tuple), optional, default=[]
The amount of implicit zero padding added during convolution for each dimension of
the input: (w,), (h, w) or (d, h, w). ``(kernel-1)/2`` is usually a good choice.
If `target_shape` is set, `pad` will be ignored and a padding that will generate
the target shape will be used. Defaults to no padding.
adj : Shape(tuple), optional, default=[]
Adjustment for output shape: (w,), (h, w) or (d, h, w).
If `target_shape` is set, `adj` will be ignored and computed accordingly.
target_shape : Shape(tuple), optional, default=[]
Shape of the output tensor: (w,), (h, w) or (d, h, w).
num_filter : int (non-negative), required
Number of output filters.
num_group : int (non-negative), optional, default=1
Number of groups partition.
workspace : long (non-negative), optional, default=512
Maximum temporary workspace allowed (MB) in deconvolution. This parameter has two usages.
When CUDNN is not used, it determines the effective batch size of the deconvolution kernel.
When CUDNN is used, it controls the maximum temporary storage used for tuning
the best CUDNN kernel when `limited_workspace` strategy is used.
no_bias : boolean, optional, default=1
Whether to disable bias parameter.
cudnn_tune : {None, 'fastest', 'limited_workspace', 'off'},optional, default='None'
Whether to pick convolution algorithm by running performance test.
cudnn_off : boolean, optional, default=0
Turn off cudnn for this layer.
layout : {None, 'NCDHW', 'NCHW', 'NCW', 'NDHWC', 'NHWC'},optional, default='None'
Set layout for input, output and weight. Empty for
default layout, NCW for 1d, NCHW for 2d and NCDHW for 3d.
NHWC and NDHWC are only supported on GPU.
out : NDArray, optional
The output NDArray to hold the result.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return _mx_nd_npx.deconvolution(data=data, weight=weight, bias=bias, kernel=kernel,
stride=stride, dilate=dilate, pad=pad, adj=adj,
target_shape=target_shape, num_filter=num_filter,
num_group=num_group, workspace=workspace, no_bias=no_bias,
cudnn_tune=cudnn_tune, cudnn_off=cudnn_off, layout=layout)
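# Illustrative shape-only sketch: a 2x2 input upsampled with a 2x2 kernel grows to
# 3x3, following (in - 1) * stride + kernel with no padding.
# >>> data = np.ones((1, 1, 2, 2))
# >>> weight = np.ones((1, 1, 2, 2))
# >>> npx.deconvolution(data=data, weight=weight, kernel=(2, 2), num_filter=1,
# ...                   no_bias=True).shape
# (1, 1, 3, 3)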
# pylint: disable=too-many-arguments, unused-argument
@set_module('mxnet.numpy_extension')
def pooling(data=None, kernel=None, stride=None, pad=None, pool_type="max",
pooling_convention="valid", global_pool=False, cudnn_off=False,
p_value=None, count_include_pad=None, layout=None, **kwargs):
r"""Performs pooling on the input.
The shapes for 1-D pooling are
- **data** and **out**: *(batch_size, channel, width)* (NCW layout) or
*(batch_size, width, channel)* (NWC layout),
The shapes for 2-D pooling are
- **data** and **out**: *(batch_size, channel, height, width)* (NCHW layout) or
*(batch_size, height, width, channel)* (NHWC layout),
    with the output height and width computed as::
    out_height = f(height, kernel[0], pad[0], stride[0])
    out_width = f(width, kernel[1], pad[1], stride[1])
The definition of *f* depends on ``pooling_convention``, which has two options:
- **valid** (default)::
f(x, k, p, s) = floor((x+2*p-k)/s)+1
- **full**, which is compatible with Caffe::
f(x, k, p, s) = ceil((x+2*p-k)/s)+1
When ``global_pool`` is set to be true, then global pooling is performed. It will reset
    ``kernel=(height, width)`` and set the appropriate padding to 0.
    Four pooling options are supported by ``pool_type``:
- **avg**: average pooling
- **max**: max pooling
- **sum**: sum pooling
- **lp**: Lp pooling
For 3-D pooling, an additional *depth* dimension is added before
*height*. Namely the input data and output will have shape *(batch_size, channel, depth,
height, width)* (NCDHW layout) or *(batch_size, depth, height, width, channel)* (NDHWC layout).
Notes on Lp pooling:
Lp pooling was first introduced by this paper: https://arxiv.org/pdf/1204.3968.pdf.
L-1 pooling is simply sum pooling, while L-inf pooling is simply max pooling.
    We can see that Lp pooling stands between those two; in practice, the most common value for p is 2.
For each window ``X``, the mathematical expression for Lp pooling is:
:math:`f(X) = \sqrt[p]{\sum_{x}^{X} x^p}`
Parameters
----------
data : NDArray
Input data to the pooling operator.
kernel : Shape(tuple), optional, default=[]
Pooling kernel size: (y, x) or (d, y, x)
pool_type : {'avg', 'lp', 'max', 'sum'},optional, default='max'
Pooling type to be applied.
global_pool : boolean, optional, default=0
Ignore kernel size, do global pooling based on current input feature map.
cudnn_off : boolean, optional, default=0
Turn off cudnn pooling and use MXNet pooling operator.
pooling_convention : {'full', 'same', 'valid'},optional, default='valid'
Pooling convention to be applied.
stride : Shape(tuple), optional, default=[]
Stride: for pooling (y, x) or (d, y, x). Defaults to 1 for each dimension.
pad : Shape(tuple), optional, default=[]
Pad for pooling: (y, x) or (d, y, x). Defaults to no padding.
p_value : int or None, optional, default='None'
Value of p for Lp pooling, can be 1 or 2, required for Lp Pooling.
count_include_pad : boolean or None, optional, default=None
        Only used for AvgPool; specifies whether to count padding elements for the average calculation.
        For example, with a 5*5 kernel on a 3*3 corner of an image, the sum of the 9 valid elements will
        be divided by 25 if this is set to true, or it will be divided by 9 if this is set to false.
        Defaults to true.
layout : {None, 'NCDHW', 'NCHW', 'NCW', 'NDHWC', 'NHWC', 'NWC'},optional, default='None'
Set layout for input and output. Empty for
default layout: NCW for 1d, NCHW for 2d and NCDHW for 3d.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return _mx_nd_npx.pooling(data=data, kernel=kernel, stride=stride, pad=pad,
pool_type=pool_type, pooling_convention=pooling_convention,
global_pool=global_pool, cudnn_off=cudnn_off, p_value=p_value,
count_include_pad=count_include_pad, layout=layout)
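# Illustrative example (a sketch): 2x2 max pooling with stride 2 over a 4x4 input.
# >>> data = np.arange(16.0).reshape(1, 1, 4, 4)
# >>> npx.pooling(data=data, kernel=(2, 2), stride=(2, 2), pool_type='max')
# array([[[[ 5.,  7.],
#          [13., 15.]]]])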
# pylint: disable=too-many-arguments, unused-argument
@set_module('mxnet.numpy_extension')
def dropout(data, p=0.5, mode="training", axes=None, cudnn_off=False, **kwargs):
r"""Applies dropout operation to input array.
- During training, each element of the input is set to zero with probability p.
The whole array is rescaled by :math:`1/(1-p)` to keep the expected
sum of the input unchanged.
- During testing, this operator does not change the input if mode is 'training'.
      If mode is 'always', the same computation as during training will be applied.
Parameters
----------
data : NDArray
Input array to which dropout will be applied.
p : float, optional, default=0.5
Fraction of the input that gets dropped out during training time.
mode : {'always', 'training'},optional, default='training'
Whether to only turn on dropout during training or to also turn on for inference.
axes : Shape(tuple), optional, default=[]
Axes for variational dropout kernel.
cudnn_off : boolean or None, optional, default=0
Whether to turn off cudnn in dropout operator. This option is ignored if axes is specified.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return _mx_nd_npx.dropout(data=data, p=p, mode=mode, axes=axes, cudnn_off=cudnn_off)
# pylint: disable=too-many-arguments
@set_module('mxnet.numpy_extension')
def one_hot(data, depth=None, on_value=1.0, off_value=0.0, dtype="float32"):
r"""Returns a one-hot array.
The locations represented by `indices` take value `on_value`, while all
other locations take value `off_value`.
`one_hot` operation with `indices` of shape ``(i0, i1)`` and `depth` of ``d`` would result
in an output array of shape ``(i0, i1, d)`` with::
output[i,j,:] = off_value
output[i,j,indices[i,j]] = on_value
Parameters
----------
indices : NDArray
array of locations where to set on_value
depth : long, required
Depth of the one hot dimension.
on_value : double, optional, default=1
The value assigned to the locations represented by indices.
off_value : double, optional, default=0
The value assigned to the locations not represented by indices.
dtype : {'bfloat16', 'float16', 'float32', 'float64', 'int32', 'int64', 'int8', 'uint8'},
optional, default='float32'
DType of the output
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
Example
-------
>>> data = np.array([1,0,2,0])
>>> npx.one_hot(data, 3)
array([[0., 1., 0.],
[1., 0., 0.],
[0., 0., 1.],
[1., 0., 0.]], dtype=float64)
>>> npx.one_hot(data, 3, on_value=8, off_value=1, dtype='int32')
array([[1, 8, 1],
[8, 1, 1],
[1, 1, 8],
[8, 1, 1]], dtype=int32)
>>> data = np.array([[1,0],[1,0],[2,0]])
>>> npx.one_hot(data, 3)
array([[[0., 1., 0.],
[1., 0., 0.]],
[[0., 1., 0.],
[1., 0., 0.]],
[[0., 0., 1.],
[1., 0., 0.]]], dtype=float64)
"""
return _mx_nd_npx.one_hot(data=data, depth=depth, on_value=on_value, off_value=off_value,
dtype=dtype)
# pylint: disable=too-many-arguments, unused-argument
@set_module('mxnet.numpy_extension')
def rnn(data=None, parameters=None, state=None, state_cell=None, sequence_length=None,
mode=None, state_size=None, num_layers=None, bidirectional=False,
state_outputs=False, p=0.0, use_sequence_length=False, projection_size=None,
lstm_state_clip_min=None, lstm_state_clip_max=None, lstm_state_clip_nan=None):
r"""Applies recurrent layers to input data. Currently, vanilla RNN, LSTM and GRU are
implemented, with both multi-layer and bidirectional support.
When the input data is of type float32 and the environment variables MXNET_CUDA_ALLOW_TENSOR_CORE
and MXNET_CUDA_TENSOR_OP_MATH_ALLOW_CONVERSION are set to 1, this operator will try to use
pseudo-float16 precision (float32 math with float16 I/O) precision in order to use
Tensor Cores on suitable NVIDIA GPUs. This can sometimes give significant speedups.
**Vanilla RNN**
Applies a single-gate recurrent layer to input X. Two kinds of activation function are supported:
ReLU and Tanh.
With ReLU activation function:
.. math::
h_t = relu(W_{ih} * x_t + b_{ih} + W_{hh} * h_{(t-1)} + b_{hh})
    With Tanh activation function:
.. math::
h_t = \tanh(W_{ih} * x_t + b_{ih} + W_{hh} * h_{(t-1)} + b_{hh})
Reference paper: Finding structure in time - Elman, 1988.
https://axon.cs.byu.edu/~martinez/classes/678/Papers/Elman_time.pdf
**LSTM**
Long Short-Term Memory - Hochreiter, 1997. http://www.bioinf.jku.at/publications/older/2604.pdf
.. math::
\begin{array}{ll}
i_t = \mathrm{sigmoid}(W_{ii} x_t + b_{ii} + W_{hi} h_{(t-1)} + b_{hi}) \\
f_t = \mathrm{sigmoid}(W_{if} x_t + b_{if} + W_{hf} h_{(t-1)} + b_{hf}) \\
g_t = \tanh(W_{ig} x_t + b_{ig} + W_{hc} h_{(t-1)} + b_{hg}) \\
o_t = \mathrm{sigmoid}(W_{io} x_t + b_{io} + W_{ho} h_{(t-1)} + b_{ho}) \\
c_t = f_t * c_{(t-1)} + i_t * g_t \\
h_t = o_t * \tanh(c_t)
\end{array}
With the projection size being set, LSTM could use the projection feature to reduce the parameters
size and give some speedups without significant damage to the accuracy.
Long Short-Term Memory Based Recurrent Neural Network Architectures for Large Vocabulary Speech
Recognition - Sak et al. 2014. https://arxiv.org/abs/1402.1128
.. math::
\begin{array}{ll}
i_t = \mathrm{sigmoid}(W_{ii} x_t + b_{ii} + W_{ri} r_{(t-1)} + b_{ri}) \\
f_t = \mathrm{sigmoid}(W_{if} x_t + b_{if} + W_{rf} r_{(t-1)} + b_{rf}) \\
g_t = \tanh(W_{ig} x_t + b_{ig} + W_{rc} r_{(t-1)} + b_{rg}) \\
o_t = \mathrm{sigmoid}(W_{io} x_t + b_{o} + W_{ro} r_{(t-1)} + b_{ro}) \\
c_t = f_t * c_{(t-1)} + i_t * g_t \\
h_t = o_t * \tanh(c_t)
r_t = W_{hr} h_t
\end{array}
**GRU**
Gated Recurrent Unit - Cho et al. 2014. http://arxiv.org/abs/1406.1078
    The definition of GRU here is slightly different from the paper, but compatible with CUDNN.
.. math::
\begin{array}{ll}
r_t = \mathrm{sigmoid}(W_{ir} x_t + b_{ir} + W_{hr} h_{(t-1)} + b_{hr}) \\
z_t = \mathrm{sigmoid}(W_{iz} x_t + b_{iz} + W_{hz} h_{(t-1)} + b_{hz}) \\
n_t = \tanh(W_{in} x_t + b_{in} + r_t * (W_{hn} h_{(t-1)}+ b_{hn})) \\
h_t = (1 - z_t) * n_t + z_t * h_{(t-1)} \\
\end{array}
Parameters
----------
data : NDArray
Input data to RNN
parameters : NDArray
Vector of all RNN trainable parameters concatenated
state : NDArray
initial hidden state of the RNN
state_cell : NDArray
initial cell state for LSTM networks (only for LSTM)
sequence_length : NDArray
Vector of valid sequence lengths for each element in batch.
(Only used if use_sequence_length kwarg is True)
state_size : int (non-negative), required
size of the state for each layer
num_layers : int (non-negative), required
number of stacked layers
bidirectional : boolean, optional, default=0
whether to use bidirectional recurrent layers
mode : {'gru', 'lstm', 'rnn_relu', 'rnn_tanh'}, required
the type of RNN to compute
p : float, optional, default=0
drop rate of the dropout on the outputs of each RNN layer, except the last layer.
state_outputs : boolean, optional, default=0
Whether to have the states as symbol outputs.
projection_size : int or None, optional, default='None'
        size of the projection
lstm_state_clip_min : double or None, optional, default=None
Minimum clip value of LSTM states. This option must be used together with lstm_state_clip_max.
lstm_state_clip_max : double or None, optional, default=None
Maximum clip value of LSTM states. This option must be used together with lstm_state_clip_min.
lstm_state_clip_nan : boolean, optional, default=0
Whether to stop NaN from propagating in state by clipping it to min/max.
If clipping range is not specified, this option is ignored.
use_sequence_length : boolean, optional, default=0
If set to true, this layer takes in an extra input parameter `sequence_length`
to specify variable length sequence
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return _mx_nd_npx.rnn(data=data, parameters=parameters, state=state, state_cell=state_cell,
sequence_length=sequence_length, mode=mode, state_size=state_size,
num_layers=num_layers, bidirectional=bidirectional,
state_outputs=state_outputs, p=p, use_sequence_length=use_sequence_length,
projection_size=projection_size, lstm_state_clip_min=lstm_state_clip_min,
lstm_state_clip_max=lstm_state_clip_max,
lstm_state_clip_nan=lstm_state_clip_nan)
# pylint: disable=too-many-arguments, unused-argument
@set_module('mxnet.numpy_extension')
def embedding(data, weight, input_dim=None, output_dim=None, dtype="float32", sparse_grad=False,
**kwargs):
r"""Maps integer indices to vector representations (embeddings).
This operator maps words to real-valued vectors in a high-dimensional space,
called word embeddings. These embeddings can capture semantic and syntactic properties of the words.
For example, it has been noted that in the learned embedding spaces, similar words tend
to be close to each other and dissimilar words far apart.
For an input array of shape (d1, ..., dK),
the shape of an output array is (d1, ..., dK, output_dim).
All the input values should be integers in the range [0, input_dim).
    If the input_dim is ip0 and output_dim is op0, then the shape of the embedding weight matrix must be
(ip0, op0).
When "sparse_grad" is False, if any index mentioned is too large, it is replaced by the index that
addresses the last vector in an embedding matrix.
When "sparse_grad" is True, an error will be raised if invalid indices are found.
The storage type of weight can be either row_sparse or default.
.. Note::
If "sparse_grad" is set to True, the storage type of gradient w.r.t weights will be
"row_sparse". Only a subset of optimizers support sparse gradients, including SGD, AdaGrad
        and Adam. Note that by default lazy updates are turned on, which may perform differently
from standard updates. For more details, please check the Optimization API at:
https://mxnet.apache.org/versions/master/api/python/docs/api/optimizer/index.html
Parameters
----------
data : NDArray
The input array to the embedding operator.
weight : NDArray
The embedding weight matrix.
input_dim : long, required
Vocabulary size of the input indices.
output_dim : long, required
Dimension of the embedding vectors.
dtype : {'bfloat16', 'float16', 'float32', 'float64', 'int32', 'int64', 'int8', 'uint8'},
optional, default='float32'
Data type of weight.
sparse_grad : boolean, optional, default=0
Compute row sparse gradient in the backward calculation.
If set to True, the grad's storage type is row_sparse.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
Example
-------
>>> input_dim = 4
>>> output_dim = 5
Each row in weight matrix y represents a word. So, y = (w0,w1,w2,w3)
>>> y = np.arange(input_dim * output_dim).reshape(input_dim, output_dim)
>>> y
array([[ 0., 1., 2., 3., 4.],
[ 5., 6., 7., 8., 9.],
[10., 11., 12., 13., 14.],
[15., 16., 17., 18., 19.]])
Input array x represents n-grams(2-gram). So, x = [(w1,w3), (w0,w2)]
>>> x = np.array([[1., 3.], [0., 2.]])
>>> x
array([[1., 3.],
[0., 2.]])
Mapped input x to its vector representation y.
>>> npx.embedding(x, y, input_dim, output_dim)
array([[[ 5., 6., 7., 8., 9.],
[15., 16., 17., 18., 19.]],
[[ 0., 1., 2., 3., 4.],
[10., 11., 12., 13., 14.]]])
"""
return _mx_nd_npx.embedding(data=data, weight=weight, input_dim=input_dim, output_dim=output_dim,
dtype=dtype, sparse_grad=sparse_grad)
# pylint: disable=too-many-arguments
@set_module('mxnet.numpy_extension')
def topk(data, axis=-1, k=1, ret_typ="indices", is_ascend=False, dtype="float32"):
r"""Returns the indices of the top *k* elements in an input array along the given
axis (by default).
    If ret_typ is set to 'value', the values of the top *k* elements are returned (instead of indices).
    In case of ret_typ = 'both', both values and indices are returned.
The returned elements will be sorted.
Parameters
----------
data : NDArray
The input array
axis : int or None, optional, default='-1'
Axis along which to choose the top k indices.
If not given, the flattened array is used. Default is -1.
k : int, optional, default='1'
        Number of top elements to select; it should always be smaller than or equal to
        the number of elements along the given axis. A global sort is performed if k < 1.
ret_typ : {'both', 'indices', 'mask', 'value'},optional, default='indices'
The return type.
"value" means to return the top k values,
"indices" means to return the indices of the top k values,
"mask" means to return a mask array containing 0 and 1. 1 means the top k values.
"both" means to return a list of both values and indices of top k elements.
is_ascend : boolean, optional, default=0
Whether to choose k largest or k smallest elements.
Top K largest elements will be chosen if set to false.
dtype : {'float16', 'float32', 'float64', 'int32', 'int64', 'uint8'},
optional, default='float32'
DType of the output indices when ret_typ is "indices" or "both".
An error will be raised if the selected data type cannot precisely represent the indices.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
Example
-------
>>> x = np.array([[0.3, 0.2, 0.4], [0.1, 0.3, 0.2]])
returns an index of the largest element on last axis
>>> npx.topk(x)
array([[2.],
[1.]])
returns the value of top-2 largest elements on last axis
>>> npx.topk(x, ret_typ='value', k=2)
array([[0.4, 0.3],
[0.3, 0.2]])
returns the value of top-2 smallest elements on last axis
>>> npx.topk(x, ret_typ='value', k=2, is_ascend=1)
array([[0.2, 0.3],
[0.1, 0.2]])
returns the value of top-2 largest elements on axis 0
>>> npx.topk(x, axis=0, ret_typ='value', k=2)
array([[0.3, 0.3, 0.4],
[0.1, 0.2, 0.2]])
flattens and then returns list of both values and indices
>>> npx.topk(x, ret_typ='both', k=2)
[array([[0.4, 0.3], [0.3, 0.2]]),
array([[2., 0.], [1., 2.]])]
"""
return _mx_nd_npx.topk(data=data, axis=axis, k=k, ret_typ=ret_typ, is_ascend=is_ascend, dtype=dtype)
# pylint: disable=too-many-arguments
@set_module('mxnet.numpy_extension')
def layer_norm(data=None, gamma=None, beta=None, axis=None, eps=None, output_mean_var=None):
r"""Layer normalization.
Normalizes the channels of the input tensor by mean and variance, and applies a scale ``gamma`` as
well as offset ``beta``.
Assume the input has more than one dimension and we normalize along axis 1.
We first compute the mean and variance along this axis and then
compute the normalized output, which has the same shape as input, as following:
.. math::
out = \frac{data - mean(data, axis)}{\sqrt{var(data, axis) + \epsilon}} * gamma + beta
Both ``gamma`` and ``beta`` are learnable parameters.
Unlike BatchNorm and InstanceNorm, the *mean* and *var* are computed along the channel dimension.
Assume the input has size *k* on axis 1, then both ``gamma`` and ``beta``
have shape *(k,)*. If ``output_mean_var`` is set to be true, then outputs both ``data_mean`` and
``data_std``. Note that no gradient will be passed through these two outputs.
The parameter ``axis`` specifies which axis of the input shape denotes
the 'channel' (separately normalized groups). The default is -1, which sets the channel
axis to be the last item in the input shape.
Parameters
----------
data : NDArray
Input data to layer normalization
gamma : NDArray
gamma array
beta : NDArray
beta array
axis : int, optional, default='-1'
The axis to perform layer normalization.
        Usually, this should be the axis of the channel dimension.
Negative values means indexing from right to left.
eps : float, optional, default=9.99999975e-06
An `epsilon` parameter to prevent division by 0.
output_mean_var : boolean, optional, default=0
Output the mean and std calculated along the given axis.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return _mx_nd_npx.layer_norm(data=data, gamma=gamma, beta=beta, axis=axis, eps=eps,
output_mean_var=output_mean_var)
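# Illustrative example (a sketch; output values are approximate): normalize a single
# row along its last axis.
# >>> data = np.array([[1., 2., 3.]])
# >>> gamma, beta = np.ones(3), np.zeros(3)
# >>> npx.layer_norm(data, gamma, beta, axis=-1)
# array([[-1.2247, 0., 1.2247]])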
# pylint: disable=too-many-arguments
@set_module('mxnet.numpy_extension')
def leaky_relu(data=None, gamma=None, act_type="leaky", slope=0.25, lower_bound=0.125,
upper_bound=0.334, **kwargs):
r"""Applies Leaky rectified linear unit activation element-wise to the input.
    Leaky ReLUs attempt to fix the "dying ReLU" problem by allowing a small `slope`
    when the input is negative, while keeping a slope of one when the input is positive.
The following modified ReLU Activation functions are supported:
- *elu*: Exponential Linear Unit. `y = x > 0 ? x : slope * (exp(x)-1)`
- *gelu*: Gaussian Error Linear Unit. `y = 0.5 * x * (1 + erf(x / sqrt(2)))`
- *selu*: Scaled Exponential Linear Unit. `y = lambda * (x > 0 ? x : alpha * (exp(x) - 1))` where
*lambda = 1.0507009873554804934193349852946* and *alpha = 1.6732632423543772848170429916717*.
- *leaky*: Leaky ReLU. `y = x > 0 ? x : slope * x`
- *prelu*: Parametric ReLU. This is same as *leaky* except that `slope` is learnt during training.
- *rrelu*: Randomized ReLU. same as *leaky* but the `slope` is uniformly and randomly chosen from
*[lower_bound, upper_bound)* for training, while fixed to be
*(lower_bound+upper_bound)/2* for inference.
Parameters
----------
data : NDArray
Input data to activation function.
gamma : NDArray
        Learnable slope parameter (used when ``act_type`` is 'prelu').
act_type : {'elu', 'gelu', 'leaky', 'prelu', 'rrelu', 'selu'},optional, default='leaky'
Activation function to be applied.
slope : float, optional, default=0.25
Init slope for the activation. (For leaky and elu only)
lower_bound : float, optional, default=0.125
Lower bound of random slope. (For rrelu only)
upper_bound : float, optional, default=0.333999991
Upper bound of random slope. (For rrelu only)
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return _mx_nd_npx.leaky_relu(data=data, gamma=gamma, act_type=act_type, slope=slope,
lower_bound=lower_bound, upper_bound=upper_bound)
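# Illustrative example (a sketch): with the default 'leaky' activation the negative
# inputs are scaled by `slope`.
# >>> npx.leaky_relu(np.array([-2., 0., 2.]), act_type='leaky', slope=0.25)
# array([-0.5,  0. ,  2. ])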
# pylint: disable=too-many-arguments, unused-argument
@set_module('mxnet.numpy_extension')
def batch_dot(a, b, transpose_a=False, transpose_b=False, forward_stype="default"):
r"""Batchwise dot product.
``batch_dot`` is used to compute dot product of ``x`` and ``y`` when ``x`` and
``y`` are data in batch, namely N-D (N >= 3) arrays in shape of `(B0, ..., B_i, :, :)`.
For example, given ``x`` with shape `(B_0, ..., B_i, N, M)` and ``y`` with shape
`(B_0, ..., B_i, M, K)`, the result array will have shape `(B_0, ..., B_i, N, K)`,
which is computed by::
batch_dot(x,y)[b_0, ..., b_i, :, :] = dot(x[b_0, ..., b_i, :, :], y[b_0, ..., b_i, :, :])
Parameters
----------
lhs : NDArray
The first input
rhs : NDArray
The second input
transpose_a : boolean, optional, default=0
If true then transpose the first input before dot.
transpose_b : boolean, optional, default=0
If true then transpose the second input before dot.
forward_stype : {None, 'csr', 'default', 'row_sparse'},optional, default='None'
        The desired storage type of the forward output given by user.
        If the combination of input storage types and this hint does not match any implemented ones,
        the dot operator will perform a fallback operation and still produce
        an output of the desired storage type.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return _mx_nd_npx.batch_dot(a=a, b=b, transpose_a=transpose_a,
transpose_b=transpose_b, forward_stype=forward_stype)
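# Illustrative example (a sketch): a batch of two (1x3)·(3x1) products, each summing
# three ones.
# >>> a = np.ones((2, 1, 3))
# >>> b = np.ones((2, 3, 1))
# >>> npx.batch_dot(a, b)
# array([[[3.]],
#        [[3.]]])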
# pylint: disable=too-many-arguments, unused-argument
@set_module('mxnet.numpy_extension')
def broadcast_like(lhs, rhs, lhs_axes=None, rhs_axes=None):
r"""Broadcasts lhs to have the same shape as rhs.
Broadcasting is a mechanism that allows NDArrays to perform arithmetic operations
with arrays of different shapes efficiently without creating multiple copies of arrays.
Also see, `Broadcasting <https://docs.scipy.org/doc/numpy/user/basics.broadcasting.html>`_
for more explanation.
Broadcasting is allowed on axes with size 1, such as from `(2,1,3,1)` to
`(2,8,3,9)`. Elements will be duplicated on the broadcasted axes.
Parameters
----------
lhs : NDArray
First input.
rhs : NDArray
Second input.
lhs_axes : Shape or None, optional, default=None
Axes to perform broadcast on in the first input array
rhs_axes : Shape or None, optional, default=None
Axes to copy from the second input array
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
    Example
-------
>>> a = np.array([[1,2,3]])
>>> b = np.array([[5,6,7],[7,8,9]])
>>> npx.broadcast_like(a, b)
array([[1., 2., 3.],
[1., 2., 3.]])
>>> a = np.array([9])
>>> b = np.array([1,2,3,4,5])
>>> npx.broadcast_like(a, b, lhs_axes=(0,), rhs_axes=(-1,))
array([9., 9., 9., 9., 9.])
"""
return _mx_nd_npx.broadcast_like(lhs=lhs, rhs=rhs, lhs_axes=lhs_axes, rhs_axes=rhs_axes)
# pylint: disable=too-many-arguments, unused-argument
@set_module('mxnet.numpy_extension')
def arange_like(data, start=0.0, step=1.0, repeat=1, ctx=None, axis=None):
r"""Return an array with evenly spaced values. If axis is not given, the output will
have the same shape as the input array. Otherwise, the output will be a 1-D array with size of
the specified axis in input shape.
Parameters
----------
data : NDArray
The input
start : double, optional, default=0
Start of interval. The interval includes this value. The default start value is 0.
step : double, optional, default=1
Spacing between values.
repeat : int, optional, default='1'
The repeating time of all elements.
E.g repeat=3, the element a will be repeated three times --> a, a, a.
ctx : string, optional, default=''
        Context of output, in format [cpu|gpu|cpu_pinned](n). Only used for imperative calls.
axis : int or None, optional, default='None'
Arange elements according to the size of a certain axis of input array.
        Negative numbers are interpreted as counting from the end.
If not provided, will arange elements according to the input shape.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
Example
-------
>>> x = np.random.uniform(0, 1, size=(3,4))
>>> x
array([[0.5488135 , 0.5928446 , 0.71518934, 0.84426576],
[0.60276335, 0.8579456 , 0.5448832 , 0.8472517 ],
[0.4236548 , 0.6235637 , 0.6458941 , 0.3843817 ]])
>>> npx.arange_like(x, start=0)
array([[ 0., 1., 2., 3.],
[ 4., 5., 6., 7.],
[ 8., 9., 10., 11.]])
>>> npx.arange_like(x, start=0, axis=-1)
array([0., 1., 2., 3.])
"""
return _mx_nd_npx.arange_like(data=data, start=start, step=step, repeat=repeat,
ctx=ctx, axis=axis)
# pylint: disable=too-many-arguments
@set_module('mxnet.numpy_extension')
def group_norm(data, gamma, beta, num_groups=1, eps=1e-3, output_mean_var=False):
r"""Group normalization.
The input channels are separated into ``num_groups`` groups,
each containing ``num_channels / num_groups`` channels.
    The mean and standard-deviation are calculated separately over each group.
.. math::
data = data.reshape((N, num_groups, C // num_groups, ...))
out = \frac{data - mean(data, axis)}{\sqrt{var(data, axis) + \epsilon}} * gamma + beta
Both ``gamma`` and ``beta`` are learnable parameters.
Defined in ../src/operator/nn/group_norm.cc:L78
Parameters
----------
data : NDArray
Input data
gamma : NDArray
gamma array
beta : NDArray
beta array
num_groups : int, optional, default='1'
Total number of groups.
eps : float, optional, default=9.99999975e-06
An `epsilon` parameter to prevent division by 0.
output_mean_var : boolean, optional, default=0
Output the mean and std calculated along the given axis.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return _mx_nd_npx.group_norm(data=data, gamma=gamma, beta=beta, num_groups=num_groups,
eps=eps, output_mean_var=output_mean_var)
| {
"content_hash": "4cb20de89f4473468600ad3eb54c0348",
"timestamp": "",
"source": "github",
"line_count": 1391,
"max_line_length": 112,
"avg_line_length": 41.123652048885695,
"alnum_prop": 0.6195129626068563,
"repo_name": "DickJC123/mxnet",
"id": "d2818304ca4755d97bf784e1a6d85b744cef8b5e",
"size": "57989",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "python/mxnet/numpy_extension/_op.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "151356"
},
{
"name": "C++",
"bytes": "12029257"
},
{
"name": "CMake",
"bytes": "213440"
},
{
"name": "Cuda",
"bytes": "1528224"
},
{
"name": "Cython",
"bytes": "26285"
},
{
"name": "Dockerfile",
"bytes": "54893"
},
{
"name": "Groovy",
"bytes": "132682"
},
{
"name": "Jupyter Notebook",
"bytes": "1889643"
},
{
"name": "Makefile",
"bytes": "8991"
},
{
"name": "PowerShell",
"bytes": "6699"
},
{
"name": "Python",
"bytes": "8615578"
},
{
"name": "Shell",
"bytes": "172547"
}
],
"symlink_target": ""
} |
import zstackwoodpecker.test_state as ts_header
import os
TestAction = ts_header.TestAction
def path():
return dict(initial_formation="template5", checking_point=1, faild_point=100000, path_list=[
[TestAction.create_mini_vm, 'vm1', 'cluster=cluster1'],
[TestAction.destroy_vm, 'vm1'],
[TestAction.recover_vm, 'vm1'],
[TestAction.create_mini_vm, 'vm2', 'cluster=cluster2'],
[TestAction.create_volume, 'volume1', 'cluster=cluster2', 'flag=scsi'],
[TestAction.attach_volume, 'vm2', 'volume1'],
[TestAction.create_volume_backup, 'volume1', 'volume1-backup1'],
[TestAction.create_mini_vm, 'vm3', 'network=random', 'cluster=cluster2'],
[TestAction.resize_volume, 'vm3', 5*1024*1024],
[TestAction.poweroff_only, 'cluster=cluster1'],
[TestAction.create_volume, 'volume2', 'size=random', 'cluster=cluster1', 'flag=scsi'],
[TestAction.delete_volume, 'volume2'],
[TestAction.recover_volume, 'volume2'],
[TestAction.add_image, 'image1', 'root', 'http://172.20.1.28/mirror/diskimages/centos_vdbench.qcow2'],
[TestAction.create_vm_backup, 'vm2', 'vm2-backup2'],
[TestAction.delete_vm_backup, 'vm2-backup2'],
[TestAction.delete_image, 'image1'],
[TestAction.expunge_image, 'image1'],
[TestAction.create_volume, 'volume3', 'cluster=cluster2', 'flag=scsi'],
[TestAction.attach_volume, 'vm3', 'volume3'],
[TestAction.create_volume_backup, 'volume3', 'volume3-backup4'],
[TestAction.destroy_vm, 'vm3'],
[TestAction.recover_vm, 'vm3'],
[TestAction.poweroff_only, 'cluster=cluster2'],
[TestAction.start_vm, 'vm2'],
[TestAction.migrate_vm, 'vm2'],
[TestAction.attach_volume, 'vm3', 'volume3'],
[TestAction.create_volume, 'volume4', 'cluster=cluster1', 'flag=thick,scsi'],
[TestAction.stop_vm, 'vm2'],
[TestAction.use_volume_backup, 'volume1-backup1'],
[TestAction.start_vm, 'vm2'],
[TestAction.create_mini_vm, 'vm4', 'network=random', 'cluster=cluster1'],
[TestAction.delete_volume, 'volume1'],
[TestAction.expunge_volume, 'volume1'],
[TestAction.create_mini_vm, 'vm5', 'data_volume=true', 'cluster=cluster1'],
[TestAction.start_vm, 'vm3'],
[TestAction.create_vm_backup, 'vm3', 'vm3-backup5'],
[TestAction.stop_vm, 'vm3'],
[TestAction.resize_data_volume, 'volume4', 5*1024*1024],
[TestAction.poweroff_only, 'cluster=cluster1'],
[TestAction.use_volume_backup, 'volume3-backup5'],
])
'''
The final status:
Running:['vm2']
Stopped:['vm1', 'vm3', 'vm4', 'vm5']
Enabled:['volume1-backup1', 'volume3-backup4', 'vm3-backup5', 'volume3-backup5']
attached:['volume3', 'auto-volume5']
Detached:['volume2', 'volume4']
Deleted:['vm2-backup2', 'volume1-backup2']
Expunged:['volume1', 'image1']
Ha:[]
Group:
vm_backup1:['vm3-backup5', 'volume3-backup5']---vm3@volume3
''' | {
"content_hash": "c8f26e1b88b3d72a19ad5341137d16d1",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 104,
"avg_line_length": 41.96923076923077,
"alnum_prop": 0.6946480938416423,
"repo_name": "zstackio/zstack-woodpecker",
"id": "9901d36e02eb60b6c8af9c961fc00b38aca4c96a",
"size": "2728",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "integrationtest/vm/mini/multiclusters/paths/multi_path274.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2356"
},
{
"name": "Go",
"bytes": "49822"
},
{
"name": "Makefile",
"bytes": "687"
},
{
"name": "Puppet",
"bytes": "875"
},
{
"name": "Python",
"bytes": "13070596"
},
{
"name": "Shell",
"bytes": "177861"
}
],
"symlink_target": ""
} |
__author__ = 'Daan Wierstra and Tom Schaul'
from scipy import size, zeros, ndarray, array
from numpy.random import randn
from pybrain.structure.evolvables.evolvable import Evolvable
class ParameterContainer(Evolvable):
""" A common interface implemented by all classes which
    contain data that can change during execution (i.e. trainable parameters)
and should be losslessly storable and retrievable to files. """
# standard deviation for random values, and for mutation
stdParams = 1.
mutationStd = 0.1
# if this variable is set, then only the owner can set the params or the derivs of the container
owner = None
# a flag that enables storage of derivatives
hasDerivatives = False
def __init__(self, paramdim = 0, **args):
""" initialize all parameters with random values, normally distributed around 0
:key stdParams: standard deviation of the values (default: 1).
"""
self.setArgs(**args)
self.paramdim = paramdim
if paramdim > 0:
self._params = zeros(self.paramdim)
            # enable derivatives if it is an instance of Module or Connection
# CHECKME: the import can not be global?
from pybrain.structure.modules.module import Module
from pybrain.structure.connections.connection import Connection
if isinstance(self, Module) or isinstance(self, Connection):
self.hasDerivatives = True
if self.hasDerivatives:
self._derivs = zeros(self.paramdim)
self.randomize()
@property
def params(self):
""" @rtype: an array of numbers. """
return self._params
def __len__(self):
return self.paramdim
def _setParameters(self, p, owner = None):
""" :key p: an array of numbers """
if isinstance(p, list):
p = array(p)
assert isinstance(p, ndarray)
if self.owner == self:
            # the object owns its parameter array, which means it cannot be set,
# only updated with new values.
self._params[:] = p
elif self.owner != owner:
raise Exception("Parameter ownership mismatch: cannot set to new array.")
else:
self._params = p
self.paramdim = size(self.params)
@property
def derivs(self):
""" :rtype: an array of numbers. """
return self._derivs
def _setDerivatives(self, d, owner = None):
""" :key d: an array of numbers of self.paramdim """
assert self.owner == owner
assert size(d) == self.paramdim
self._derivs = d
def resetDerivatives(self):
""" :note: this method only sets the values to zero, it does not initialize the array. """
assert self.hasDerivatives
self._derivs *= 0
def randomize(self):
self._params[:] = randn(self.paramdim)*self.stdParams
if self.hasDerivatives:
self.resetDerivatives()
def mutate(self):
self._params += randn(self.paramdim)*self.mutationStd
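# Minimal usage sketch (illustrative, based only on the behaviour defined above):
#     pc = ParameterContainer(3)   # three parameters drawn from N(0, stdParams^2)
#     len(pc)                      # -> 3
#     pc.params.shape              # -> (3,)
#     pc.mutate()                  # adds N(0, mutationStd^2) noise in place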
| {
"content_hash": "7a69ec645dda9c077af3122e8b4c9804",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 100,
"avg_line_length": 35.72222222222222,
"alnum_prop": 0.5968895800933126,
"repo_name": "rbalda/neural_ocr",
"id": "13538c1e5a4a93564bd5d09e2eea5170a3abd8ca",
"size": "3215",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "env/lib/python2.7/site-packages/pybrain/structure/parametercontainer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "497604"
},
{
"name": "C++",
"bytes": "3309990"
},
{
"name": "CSS",
"bytes": "135235"
},
{
"name": "FORTRAN",
"bytes": "10375"
},
{
"name": "HTML",
"bytes": "215390"
},
{
"name": "JavaScript",
"bytes": "206780"
},
{
"name": "Jupyter Notebook",
"bytes": "16254"
},
{
"name": "Makefile",
"bytes": "214"
},
{
"name": "Matlab",
"bytes": "4346"
},
{
"name": "Python",
"bytes": "26980034"
},
{
"name": "Shell",
"bytes": "3895"
}
],
"symlink_target": ""
} |
"""
Main module from which the application is started and the web interface mounted
To start the application directly using the python web server, you can just do
::
python web.py
Refer to server installation documentation for more details how to deploy in production.
"""
from octopus.core import app, initialise, add_configuration
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--debug", action="store_true", help="pycharm debug support enable")
parser.add_argument("-c", "--config", help="additional configuration to load (e.g. for testing)")
args = parser.parse_args()
if args.config:
add_configuration(app, args.config)
pycharm_debug = app.config.get('DEBUG_PYCHARM', False)
if args.debug:
pycharm_debug = True
if pycharm_debug:
app.config['DEBUG'] = False
import pydevd
pydevd.settrace(app.config.get('DEBUG_SERVER_HOST', 'localhost'), port=app.config.get('DEBUG_SERVER_PORT', 51234), stdoutToServer=True, stderrToServer=True)
print "STARTED IN REMOTE DEBUG MODE"
initialise()
# most of the imports should be done here, after initialise()
from service.view.oaipmh import blueprint as oai
app.register_blueprint(oai)
if __name__ == "__main__":
app.run(host='0.0.0.0', debug=app.config['DEBUG'], port=app.config['PORT'], threaded=False)
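# Usage sketch (assumed invocations, based on the argparse options defined above;
# 'local.cfg' is a hypothetical file name):
#
#   python web.py                  # start the OAI-PMH endpoint with default config
#   python web.py -c local.cfg     # layer an additional configuration file on top
#   python web.py -d               # attach to a PyCharm debug server via pydevd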
| {
"content_hash": "ec9b3274be214d1351ecbd82c289a88b",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 164,
"avg_line_length": 32.74418604651163,
"alnum_prop": 0.6910511363636364,
"repo_name": "JiscPER/jper-oaipmh",
"id": "fe3a319fb8b805172465d6fa5c0b1525d9ff1f01",
"size": "1408",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "service/web.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "232"
},
{
"name": "JavaScript",
"bytes": "1199"
},
{
"name": "Python",
"bytes": "62060"
},
{
"name": "Shell",
"bytes": "765"
}
],
"symlink_target": ""
} |
import sqlalchemy as sa
from sqlalchemy import orm
from neutron.api.v2 import attributes
from neutron.db import db_base_plugin_v2
from neutron.db import model_base
from neutron.db import models_v2
from neutron.db import portbindings_base
from neutron.extensions import portbindings
class PortBindingPort(model_base.BASEV2):
port_id = sa.Column(sa.String(36),
sa.ForeignKey('ports.id', ondelete="CASCADE"),
primary_key=True)
host = sa.Column(sa.String(255), nullable=False)
port = orm.relationship(
models_v2.Port,
backref=orm.backref("portbinding",
lazy='joined', uselist=False,
cascade='delete'))
class PortBindingMixin(portbindings_base.PortBindingBaseMixin):
extra_binding_dict = None
def _port_model_hook(self, context, original_model, query):
query = query.outerjoin(PortBindingPort,
(original_model.id ==
PortBindingPort.port_id))
return query
def _port_result_filter_hook(self, query, filters):
values = filters and filters.get(portbindings.HOST_ID, [])
if not values:
return query
if len(values) == 1:
query = query.filter(PortBindingPort.host == values[0])
else:
query = query.filter(PortBindingPort.host.in_(values))
return query
db_base_plugin_v2.NeutronDbPluginV2.register_model_query_hook(
models_v2.Port,
"portbindings_port",
'_port_model_hook',
None,
'_port_result_filter_hook')
def _process_portbindings_create_and_update(self, context, port_data,
port):
binding_profile = port.get(portbindings.PROFILE)
binding_profile_set = attributes.is_attr_set(binding_profile)
if not binding_profile_set and binding_profile is not None:
del port[portbindings.PROFILE]
binding_vnic = port.get(portbindings.VNIC_TYPE)
binding_vnic_set = attributes.is_attr_set(binding_vnic)
if not binding_vnic_set and binding_vnic is not None:
del port[portbindings.VNIC_TYPE]
# REVISIT(irenab) Add support for vnic_type for plugins that
# can handle more than one type.
# Currently implemented for ML2 plugin that does not use
# PortBindingMixin.
host = port_data.get(portbindings.HOST_ID)
host_set = attributes.is_attr_set(host)
with context.session.begin(subtransactions=True):
bind_port = context.session.query(
PortBindingPort).filter_by(port_id=port['id']).first()
if host_set:
if not bind_port:
context.session.add(PortBindingPort(port_id=port['id'],
host=host))
else:
bind_port.host = host
else:
host = (bind_port and bind_port.host or None)
self._extend_port_dict_binding_host(port, host)
def get_port_host(self, context, port_id):
with context.session.begin(subtransactions=True):
bind_port = context.session.query(
PortBindingPort).filter_by(port_id=port_id).first()
return bind_port and bind_port.host or None
def _extend_port_dict_binding_host(self, port_res, host):
super(PortBindingMixin, self).extend_port_dict_binding(
port_res, None)
port_res[portbindings.HOST_ID] = host
def extend_port_dict_binding(self, port_res, port_db):
host = (port_db.portbinding and port_db.portbinding.host or None)
self._extend_port_dict_binding_host(port_res, host)
def _extend_port_dict_binding(plugin, port_res, port_db):
if not isinstance(plugin, PortBindingMixin):
return
plugin.extend_port_dict_binding(port_res, port_db)
# Register dict extend functions for ports
db_base_plugin_v2.NeutronDbPluginV2.register_dict_extend_funcs(
attributes.PORTS, [_extend_port_dict_binding])
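# A minimal sketch (hypothetical plugin, not part of this module) of how the mixin
# above is typically consumed: a core plugin inherits PortBindingMixin and calls
# _process_portbindings_create_and_update() from its port create/update path so the
# binding host is persisted alongside the port.
class _ExamplePortBindingPlugin(db_base_plugin_v2.NeutronDbPluginV2,
                                PortBindingMixin):
    def create_port(self, context, port):
        port_data = port['port']
        with context.session.begin(subtransactions=True):
            new_port = super(_ExamplePortBindingPlugin, self).create_port(
                context, port)
            # persist the binding:host_id (and related) attributes for the port
            self._process_portbindings_create_and_update(
                context, port_data, new_port)
        return new_port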
| {
"content_hash": "4a45ad0e9aff8cc97ab9de0610da2d56",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 75,
"avg_line_length": 39.39047619047619,
"alnum_prop": 0.6160541586073501,
"repo_name": "CingHu/neutron-ustack",
"id": "1f94f83974b796d216ffabec91c91c14466a353d",
"size": "4800",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "neutron/db/portbindings_db.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1109"
},
{
"name": "Python",
"bytes": "11544804"
},
{
"name": "Shell",
"bytes": "29485"
}
],
"symlink_target": ""
} |
import datetime
import re
import time
import urllib
from typing import Any, Dict, List, Optional, Sequence
from unittest.mock import MagicMock, patch
from urllib.parse import urlencode
import orjson
from django.conf import settings
from django.contrib.auth.views import PasswordResetConfirmView
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.http import HttpResponse
from django.test import Client, override_settings
from django.urls import reverse
from django.utils.timezone import now as timezone_now
from confirmation import settings as confirmation_settings
from confirmation.models import (
Confirmation,
ConfirmationKeyException,
MultiuseInvite,
confirmation_url,
create_confirmation_link,
generate_key,
get_object_from_key,
one_click_unsubscribe_link,
)
from corporate.lib.stripe import get_latest_seat_count
from zerver.context_processors import common_context
from zerver.decorator import do_two_factor_login
from zerver.forms import HomepageForm, check_subdomain_available
from zerver.lib.actions import (
add_new_user_history,
change_user_is_active,
do_add_default_stream,
do_change_full_name,
do_change_realm_subdomain,
do_change_user_role,
do_create_default_stream_group,
do_create_realm,
do_create_user,
do_deactivate_realm,
do_deactivate_user,
do_get_user_invites,
do_invite_users,
do_set_realm_property,
get_default_streams_for_realm,
get_stream,
)
from zerver.lib.email_notifications import enqueue_welcome_emails, followup_day2_email_delay
from zerver.lib.initial_password import initial_password
from zerver.lib.mobile_auth_otp import (
ascii_to_hex,
hex_to_ascii,
is_valid_otp,
otp_decrypt_api_key,
otp_encrypt_api_key,
xor_hex_strings,
)
from zerver.lib.name_restrictions import is_disposable_domain
from zerver.lib.rate_limiter import add_ratelimit_rule, remove_ratelimit_rule
from zerver.lib.send_email import (
EmailNotDeliveredException,
FromAddress,
deliver_scheduled_emails,
send_future_email,
)
from zerver.lib.stream_subscription import get_stream_subscriptions_for_user
from zerver.lib.streams import create_stream_if_needed
from zerver.lib.subdomains import is_root_domain_available
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import (
avatar_disk_path,
cache_tries_captured,
find_key_by_email,
get_test_image_file,
load_subdomain_token,
message_stream_count,
most_recent_message,
most_recent_usermessage,
queries_captured,
reset_emails_in_zulip_realm,
)
from zerver.models import (
CustomProfileField,
CustomProfileFieldValue,
DefaultStream,
Message,
PreregistrationUser,
Realm,
RealmAuditLog,
Recipient,
ScheduledEmail,
Stream,
Subscription,
UserMessage,
UserProfile,
flush_per_request_caches,
get_realm,
get_system_bot,
get_user,
get_user_by_delivery_email,
)
from zerver.views.auth import redirect_and_log_into_subdomain, start_two_factor_auth
from zerver.views.development.registration import confirmation_key
from zerver.views.invite import get_invitee_emails_set
from zproject.backends import ExternalAuthDataDict, ExternalAuthResult
class RedirectAndLogIntoSubdomainTestCase(ZulipTestCase):
def test_data(self) -> None:
realm = get_realm("zulip")
user_profile = self.example_user("hamlet")
name = user_profile.full_name
email = user_profile.delivery_email
response = redirect_and_log_into_subdomain(ExternalAuthResult(user_profile=user_profile))
data = load_subdomain_token(response)
self.assertDictEqual(
data,
{"full_name": name, "email": email, "subdomain": realm.subdomain, "is_signup": False},
)
data_dict = ExternalAuthDataDict(is_signup=True, multiuse_object_key="key")
response = redirect_and_log_into_subdomain(
ExternalAuthResult(user_profile=user_profile, data_dict=data_dict)
)
data = load_subdomain_token(response)
self.assertDictEqual(
data,
{
"full_name": name,
"email": email,
"subdomain": realm.subdomain,
# the email has an account at the subdomain,
                # so is_signup gets overridden to False:
"is_signup": False,
"multiuse_object_key": "key",
},
)
data_dict = ExternalAuthDataDict(
email=self.nonreg_email("alice"),
full_name="Alice",
subdomain=realm.subdomain,
is_signup=True,
full_name_validated=True,
multiuse_object_key="key",
)
response = redirect_and_log_into_subdomain(ExternalAuthResult(data_dict=data_dict))
data = load_subdomain_token(response)
self.assertDictEqual(
data,
{
"full_name": "Alice",
"email": self.nonreg_email("alice"),
"full_name_validated": True,
"subdomain": realm.subdomain,
"is_signup": True,
"multiuse_object_key": "key",
},
)
class DeactivationNoticeTestCase(ZulipTestCase):
def test_redirection_for_deactivated_realm(self) -> None:
realm = get_realm("zulip")
realm.deactivated = True
realm.save(update_fields=["deactivated"])
for url in ("/register/", "/login/"):
result = self.client_get(url)
self.assertEqual(result.status_code, 302)
self.assertIn("deactivated", result.url)
def test_redirection_for_active_realm(self) -> None:
for url in ("/register/", "/login/"):
result = self.client_get(url)
self.assertEqual(result.status_code, 200)
def test_deactivation_notice_when_realm_is_active(self) -> None:
result = self.client_get("/accounts/deactivated/")
self.assertEqual(result.status_code, 302)
self.assertIn("login", result.url)
def test_deactivation_notice_when_deactivated(self) -> None:
realm = get_realm("zulip")
realm.deactivated = True
realm.save(update_fields=["deactivated"])
result = self.client_get("/accounts/deactivated/")
self.assertIn("Zulip Dev, has been deactivated.", result.content.decode())
self.assertNotIn("It has moved to", result.content.decode())
def test_deactivation_notice_when_deactivated_and_deactivated_redirect_is_set(self) -> None:
realm = get_realm("zulip")
realm.deactivated = True
realm.deactivated_redirect = "http://example.zulipchat.com"
realm.save(update_fields=["deactivated", "deactivated_redirect"])
result = self.client_get("/accounts/deactivated/")
self.assertIn(
'It has moved to <a href="http://example.zulipchat.com">http://example.zulipchat.com</a>.',
result.content.decode(),
)
def test_deactivation_notice_when_realm_subdomain_is_changed(self) -> None:
realm = get_realm("zulip")
do_change_realm_subdomain(realm, "new-subdomain-name", acting_user=None)
result = self.client_get("/accounts/deactivated/")
self.assertIn(
'It has moved to <a href="http://new-subdomain-name.testserver">http://new-subdomain-name.testserver</a>.',
result.content.decode(),
)
def test_deactivated_redirect_field_of_placeholder_realms_are_modified_on_changing_subdomain_multiple_times(
self,
) -> None:
realm = get_realm("zulip")
do_change_realm_subdomain(realm, "new-name-1", acting_user=None)
result = self.client_get("/accounts/deactivated/")
self.assertIn(
'It has moved to <a href="http://new-name-1.testserver">http://new-name-1.testserver</a>.',
result.content.decode(),
)
realm = get_realm("new-name-1")
do_change_realm_subdomain(realm, "new-name-2", acting_user=None)
result = self.client_get("/accounts/deactivated/")
self.assertIn(
'It has moved to <a href="http://new-name-2.testserver">http://new-name-2.testserver</a>.',
result.content.decode(),
)
class AddNewUserHistoryTest(ZulipTestCase):
def test_add_new_user_history_race(self) -> None:
"""Sends a message during user creation"""
# Create a user who hasn't had historical messages added
realm = get_realm("zulip")
stream = Stream.objects.get(realm=realm, name="Denmark")
DefaultStream.objects.create(stream=stream, realm=realm)
# Make sure at least 3 messages are sent to Denmark and it's a default stream.
message_id = self.send_stream_message(self.example_user("hamlet"), stream.name, "test 1")
self.send_stream_message(self.example_user("hamlet"), stream.name, "test 2")
self.send_stream_message(self.example_user("hamlet"), stream.name, "test 3")
with patch("zerver.lib.actions.add_new_user_history"):
self.register(self.nonreg_email("test"), "test")
user_profile = self.nonreg_user("test")
subs = Subscription.objects.select_related("recipient").filter(
user_profile=user_profile, recipient__type=Recipient.STREAM
)
streams = Stream.objects.filter(id__in=[sub.recipient.type_id for sub in subs])
        # Send a message afterwards to trigger a race between message
# sending and `add_new_user_history`.
race_message_id = self.send_stream_message(
self.example_user("hamlet"), streams[0].name, "test"
)
# Overwrite ONBOARDING_UNREAD_MESSAGES to 2
ONBOARDING_UNREAD_MESSAGES = 2
with patch("zerver.lib.actions.ONBOARDING_UNREAD_MESSAGES", ONBOARDING_UNREAD_MESSAGES):
add_new_user_history(user_profile, streams)
# Our first message is in the user's history
self.assertTrue(
UserMessage.objects.filter(user_profile=user_profile, message_id=message_id).exists()
)
# The race message is in the user's history and marked unread.
self.assertTrue(
UserMessage.objects.filter(
user_profile=user_profile, message_id=race_message_id
).exists()
)
self.assertFalse(
UserMessage.objects.get(
user_profile=user_profile, message_id=race_message_id
).flags.read.is_set
)
# Verify that the ONBOARDING_UNREAD_MESSAGES latest messages
# that weren't the race message are marked as unread.
latest_messages = (
UserMessage.objects.filter(
user_profile=user_profile,
message__recipient__type=Recipient.STREAM,
)
.exclude(message_id=race_message_id)
.order_by("-message_id")[0:ONBOARDING_UNREAD_MESSAGES]
)
self.assert_length(latest_messages, 2)
for msg in latest_messages:
self.assertFalse(msg.flags.read.is_set)
# Verify that older messages are correctly marked as read.
older_messages = (
UserMessage.objects.filter(
user_profile=user_profile,
message__recipient__type=Recipient.STREAM,
)
.exclude(message_id=race_message_id)
.order_by("-message_id")[ONBOARDING_UNREAD_MESSAGES : ONBOARDING_UNREAD_MESSAGES + 1]
)
self.assertGreater(len(older_messages), 0)
for msg in older_messages:
self.assertTrue(msg.flags.read.is_set)
def test_auto_subbed_to_personals(self) -> None:
"""
        Newly created users are automatically set up to receive
        personal messages.
"""
test_email = self.nonreg_email("test")
self.register(test_email, "test")
user_profile = self.nonreg_user("test")
old_messages_count = message_stream_count(user_profile)
self.send_personal_message(user_profile, user_profile)
new_messages_count = message_stream_count(user_profile)
self.assertEqual(new_messages_count, old_messages_count + 1)
recipient = Recipient.objects.get(type_id=user_profile.id, type=Recipient.PERSONAL)
message = most_recent_message(user_profile)
self.assertEqual(message.recipient, recipient)
with patch("zerver.models.get_display_recipient", return_value="recip"):
self.assertEqual(
str(message),
"<Message: recip / / "
"<UserProfile: {} {}>>".format(user_profile.email, user_profile.realm),
)
user_message = most_recent_usermessage(user_profile)
self.assertEqual(
str(user_message),
f"<UserMessage: recip / {user_profile.email} ([])>",
)
class InitialPasswordTest(ZulipTestCase):
def test_none_initial_password_salt(self) -> None:
with self.settings(INITIAL_PASSWORD_SALT=None):
self.assertIsNone(initial_password("test@test.com"))
class PasswordResetTest(ZulipTestCase):
"""
Log in, reset password, log out, log in with new password.
"""
def get_reset_mail_body(self, subdomain: str = "zulip") -> str:
from django.core.mail import outbox
[message] = outbox
self.assertEqual(self.email_envelope_from(message), settings.NOREPLY_EMAIL_ADDRESS)
self.assertRegex(
self.email_display_from(message),
fr"^Zulip Account Security <{self.TOKENIZED_NOREPLY_REGEX}>\Z",
)
self.assertIn(f"{subdomain}.testserver", message.extra_headers["List-Id"])
return message.body
def test_password_reset(self) -> None:
user = self.example_user("hamlet")
email = user.delivery_email
old_password = initial_password(email)
assert old_password is not None
self.login_user(user)
# test password reset template
result = self.client_get("/accounts/password/reset/")
self.assert_in_response("Reset your password", result)
# start the password reset process by supplying an email address
result = self.client_post("/accounts/password/reset/", {"email": email})
# check the redirect link telling you to check mail for password reset link
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith("/accounts/password/reset/done/"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email in a few minutes to finish the process.", result)
# Check that the password reset email is from a noreply address.
body = self.get_reset_mail_body()
self.assertIn("reset your password", body)
# Visit the password reset link.
password_reset_url = self.get_confirmation_url_from_outbox(
email, url_pattern=settings.EXTERNAL_HOST + r"(\S\S+)"
)
result = self.client_get(password_reset_url)
self.assertEqual(result.status_code, 302)
self.assertTrue(result.url.endswith(f"/{PasswordResetConfirmView.reset_url_token}/"))
final_reset_url = result.url
result = self.client_get(final_reset_url)
self.assertEqual(result.status_code, 200)
# Reset your password
with self.settings(PASSWORD_MIN_LENGTH=3, PASSWORD_MIN_GUESSES=1000):
# Verify weak passwords don't work.
result = self.client_post(
final_reset_url, {"new_password1": "easy", "new_password2": "easy"}
)
self.assert_in_response("The password is too weak.", result)
result = self.client_post(
final_reset_url, {"new_password1": "f657gdGGk9", "new_password2": "f657gdGGk9"}
)
# password reset succeeded
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith("/password/done/"))
# log back in with new password
self.login_by_email(email, password="f657gdGGk9")
user_profile = self.example_user("hamlet")
self.assert_logged_in_user_id(user_profile.id)
# make sure old password no longer works
self.assert_login_failure(email, password=old_password)
def test_password_reset_for_non_existent_user(self) -> None:
email = "nonexisting@mars.com"
# start the password reset process by supplying an email address
result = self.client_post("/accounts/password/reset/", {"email": email})
# check the redirect link telling you to check mail for password reset link
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith("/accounts/password/reset/done/"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email in a few minutes to finish the process.", result)
# Check that the password reset email is from a noreply address.
body = self.get_reset_mail_body()
self.assertIn("Somebody (possibly you) requested a new password", body)
self.assertIn("You do not have an account", body)
self.assertIn("safely ignore", body)
self.assertNotIn("reset your password", body)
self.assertNotIn("deactivated", body)
def test_password_reset_for_deactivated_user(self) -> None:
user_profile = self.example_user("hamlet")
email = user_profile.delivery_email
do_deactivate_user(user_profile, acting_user=None)
# start the password reset process by supplying an email address
result = self.client_post("/accounts/password/reset/", {"email": email})
# check the redirect link telling you to check mail for password reset link
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith("/accounts/password/reset/done/"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email in a few minutes to finish the process.", result)
# Check that the password reset email is from a noreply address.
body = self.get_reset_mail_body()
self.assertIn("Somebody (possibly you) requested a new password", body)
self.assertIn("has been deactivated", body)
self.assertIn("safely ignore", body)
self.assertNotIn("reset your password", body)
self.assertNotIn("not have an account", body)
def test_password_reset_with_deactivated_realm(self) -> None:
user_profile = self.example_user("hamlet")
email = user_profile.delivery_email
do_deactivate_realm(user_profile.realm, acting_user=None)
# start the password reset process by supplying an email address
with self.assertLogs(level="INFO") as m:
result = self.client_post("/accounts/password/reset/", {"email": email})
self.assertEqual(m.output, ["INFO:root:Realm is deactivated"])
# check the redirect link telling you to check mail for password reset link
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith("/accounts/password/reset/done/"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email in a few minutes to finish the process.", result)
# Check that the password reset email is from a noreply address.
from django.core.mail import outbox
self.assert_length(outbox, 0)
@override_settings(RATE_LIMITING=True)
def test_rate_limiting(self) -> None:
user_profile = self.example_user("hamlet")
email = user_profile.delivery_email
from django.core.mail import outbox
add_ratelimit_rule(10, 2, domain="password_reset_form_by_email")
start_time = time.time()
with patch("time.time", return_value=start_time):
self.client_post("/accounts/password/reset/", {"email": email})
self.client_post("/accounts/password/reset/", {"email": email})
self.assert_length(outbox, 2)
# Too many password reset emails sent to the address, we won't send more.
with self.assertLogs(level="INFO") as info_logs:
self.client_post("/accounts/password/reset/", {"email": email})
self.assertEqual(
info_logs.output,
["INFO:root:Too many password reset attempts for email hamlet@zulip.com"],
)
self.assert_length(outbox, 2)
# Resetting for a different address works though.
self.client_post("/accounts/password/reset/", {"email": self.example_email("othello")})
self.assert_length(outbox, 3)
self.client_post("/accounts/password/reset/", {"email": self.example_email("othello")})
self.assert_length(outbox, 4)
# After time, password reset emails can be sent again.
with patch("time.time", return_value=start_time + 11):
self.client_post("/accounts/password/reset/", {"email": email})
self.client_post("/accounts/password/reset/", {"email": email})
self.assert_length(outbox, 6)
remove_ratelimit_rule(10, 2, domain="password_reset_form_by_email")
def test_wrong_subdomain(self) -> None:
email = self.example_email("hamlet")
# start the password reset process by supplying an email address
result = self.client_post("/accounts/password/reset/", {"email": email}, subdomain="zephyr")
# check the redirect link telling you to check mail for password reset link
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith("/accounts/password/reset/done/"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email in a few minutes to finish the process.", result)
body = self.get_reset_mail_body("zephyr")
self.assertIn("Somebody (possibly you) requested a new password", body)
self.assertIn("You do not have an account", body)
self.assertIn(
"active accounts in the following organization(s).\nhttp://zulip.testserver", body
)
self.assertIn("safely ignore", body)
self.assertNotIn("reset your password", body)
self.assertNotIn("deactivated", body)
def test_invalid_subdomain(self) -> None:
email = self.example_email("hamlet")
# start the password reset process by supplying an email address
result = self.client_post(
"/accounts/password/reset/", {"email": email}, subdomain="invalid"
)
# check the redirect link telling you to check mail for password reset link
self.assertEqual(result.status_code, 404)
self.assert_in_response("There is no Zulip organization hosted at this subdomain.", result)
from django.core.mail import outbox
self.assert_length(outbox, 0)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.ZulipDummyBackend",
)
)
def test_ldap_auth_only(self) -> None:
"""If the email auth backend is not enabled, password reset should do nothing"""
email = self.example_email("hamlet")
with self.assertLogs(level="INFO") as m:
result = self.client_post("/accounts/password/reset/", {"email": email})
self.assertEqual(
m.output,
[
"INFO:root:Password reset attempted for hamlet@zulip.com even though password auth is disabled."
],
)
# check the redirect link telling you to check mail for password reset link
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith("/accounts/password/reset/done/"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email in a few minutes to finish the process.", result)
from django.core.mail import outbox
self.assert_length(outbox, 0)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.EmailAuthBackend",
"zproject.backends.ZulipDummyBackend",
)
)
def test_ldap_and_email_auth(self) -> None:
"""If both email and LDAP auth backends are enabled, limit password
reset to users outside the LDAP domain"""
# If the domain matches, we don't generate an email
with self.settings(LDAP_APPEND_DOMAIN="zulip.com"):
email = self.example_email("hamlet")
with self.assertLogs(level="INFO") as m:
result = self.client_post("/accounts/password/reset/", {"email": email})
self.assertEqual(
m.output, ["INFO:root:Password reset not allowed for user in LDAP domain"]
)
from django.core.mail import outbox
self.assert_length(outbox, 0)
# If the domain doesn't match, we do generate an email
with self.settings(LDAP_APPEND_DOMAIN="example.com"):
email = self.example_email("hamlet")
result = self.client_post("/accounts/password/reset/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith("/accounts/password/reset/done/"))
result = self.client_get(result["Location"])
body = self.get_reset_mail_body()
self.assertIn("reset your password", body)
def test_redirect_endpoints(self) -> None:
"""
These tests are mostly designed to give us 100% URL coverage
in our URL coverage reports. Our mechanism for finding URL
coverage doesn't handle redirects, so we just have a few quick
tests here.
"""
result = self.client_get("/accounts/password/reset/done/")
self.assert_in_success_response(["Check your email"], result)
result = self.client_get("/accounts/password/done/")
self.assert_in_success_response(["We've reset your password!"], result)
result = self.client_get("/accounts/send_confirm/alice@example.com")
self.assert_in_success_response(["/accounts/home/"], result)
result = self.client_get("/accounts/new/send_confirm/alice@example.com")
self.assert_in_success_response(["/new/"], result)
class LoginTest(ZulipTestCase):
"""
Logging in, registration, and logging out.
"""
def test_login(self) -> None:
self.login("hamlet")
user_profile = self.example_user("hamlet")
self.assert_logged_in_user_id(user_profile.id)
def test_login_deactivated_user(self) -> None:
user_profile = self.example_user("hamlet")
do_deactivate_user(user_profile, acting_user=None)
result = self.login_with_return(self.example_email("hamlet"), "xxx")
self.assertEqual(result.status_code, 200)
self.assert_in_response("Your account is no longer active.", result)
self.assert_logged_in_user_id(None)
def test_login_bad_password(self) -> None:
user = self.example_user("hamlet")
password: Optional[str] = "wrongpassword"
result = self.login_with_return(user.delivery_email, password=password)
self.assert_in_success_response([user.delivery_email], result)
self.assert_logged_in_user_id(None)
# Parallel test to confirm that the right password works using the
# same login code, which verifies our failing test isn't broken
# for some other reason.
password = initial_password(user.delivery_email)
result = self.login_with_return(user.delivery_email, password=password)
self.assertEqual(result.status_code, 302)
self.assert_logged_in_user_id(user.id)
@override_settings(RATE_LIMITING_AUTHENTICATE=True)
def test_login_bad_password_rate_limiter(self) -> None:
user_profile = self.example_user("hamlet")
email = user_profile.delivery_email
add_ratelimit_rule(10, 2, domain="authenticate_by_username")
start_time = time.time()
with patch("time.time", return_value=start_time):
self.login_with_return(email, password="wrongpassword")
self.assert_logged_in_user_id(None)
self.login_with_return(email, password="wrongpassword")
self.assert_logged_in_user_id(None)
# We're over the allowed limit, so the next attempt, even with the correct
# password, will get blocked.
result = self.login_with_return(email)
self.assert_in_success_response(["Try again in 10 seconds"], result)
# After time passes, we should be able to log in.
with patch("time.time", return_value=start_time + 11):
self.login_with_return(email)
self.assert_logged_in_user_id(user_profile.id)
remove_ratelimit_rule(10, 2, domain="authenticate_by_username")
def test_login_with_old_weak_password_after_hasher_change(self) -> None:
user_profile = self.example_user("hamlet")
password = "a_password_of_22_chars"
with self.settings(PASSWORD_HASHERS=("django.contrib.auth.hashers.SHA1PasswordHasher",)):
user_profile.set_password(password)
user_profile.save()
with self.settings(
PASSWORD_HASHERS=(
"django.contrib.auth.hashers.MD5PasswordHasher",
"django.contrib.auth.hashers.SHA1PasswordHasher",
),
PASSWORD_MIN_LENGTH=30,
), self.assertLogs("zulip.auth.email", level="INFO"):
result = self.login_with_return(self.example_email("hamlet"), password)
self.assertEqual(result.status_code, 200)
self.assert_in_response(
"Your password has been disabled because it is too weak.", result
)
self.assert_logged_in_user_id(None)
def test_login_nonexist_user(self) -> None:
result = self.login_with_return("xxx@zulip.com", "xxx")
self.assertEqual(result.status_code, 200)
self.assert_in_response("Please enter a correct email and password", result)
self.assert_logged_in_user_id(None)
def test_login_wrong_subdomain(self) -> None:
with self.assertLogs(level="WARNING") as m:
result = self.login_with_return(self.mit_email("sipbtest"), "xxx")
self.assertEqual(
m.output,
[
"WARNING:root:User sipbtest@mit.edu attempted password login to wrong subdomain zulip"
],
)
self.assertEqual(result.status_code, 200)
self.assert_in_response(
"Your Zulip account is not a member of the "
"organization associated with this subdomain.",
result,
)
self.assert_logged_in_user_id(None)
def test_login_invalid_subdomain(self) -> None:
result = self.login_with_return(self.example_email("hamlet"), "xxx", subdomain="invalid")
self.assertEqual(result.status_code, 404)
self.assert_in_response("There is no Zulip organization hosted at this subdomain.", result)
self.assert_logged_in_user_id(None)
def test_register(self) -> None:
reset_emails_in_zulip_realm()
realm = get_realm("zulip")
stream_names = [f"stream_{i}" for i in range(40)]
for stream_name in stream_names:
stream = self.make_stream(stream_name, realm=realm)
DefaultStream.objects.create(stream=stream, realm=realm)
# Clear all the caches.
flush_per_request_caches()
ContentType.objects.clear_cache()
with queries_captured() as queries, cache_tries_captured() as cache_tries:
self.register(self.nonreg_email("test"), "test")
# Ensure the number of queries we make is not O(streams)
self.assert_length(queries, 88)
# We can probably avoid a couple cache hits here, but there doesn't
# seem to be any O(N) behavior. Some of the cache hits are related
# to sending messages, such as getting the welcome bot, looking up
# the alert words for a realm, etc.
self.assert_length(cache_tries, 20)
user_profile = self.nonreg_user("test")
self.assert_logged_in_user_id(user_profile.id)
self.assertFalse(user_profile.enable_stream_desktop_notifications)
def test_register_deactivated(self) -> None:
"""
If you try to register for a deactivated realm, you get a clear error
page.
"""
realm = get_realm("zulip")
realm.deactivated = True
realm.save(update_fields=["deactivated"])
result = self.client_post(
"/accounts/home/", {"email": self.nonreg_email("test")}, subdomain="zulip"
)
self.assertEqual(result.status_code, 302)
self.assertEqual("/accounts/deactivated/", result.url)
with self.assertRaises(UserProfile.DoesNotExist):
self.nonreg_user("test")
def test_register_with_invalid_email(self) -> None:
"""
If you try to register with invalid email, you get an invalid email
page
"""
invalid_email = "foo\x00bar"
result = self.client_post("/accounts/home/", {"email": invalid_email}, subdomain="zulip")
self.assertEqual(result.status_code, 200)
self.assertContains(result, "Enter a valid email address")
def test_register_deactivated_partway_through(self) -> None:
"""
If you try to register for a deactivated realm, you get a clear error
page.
"""
email = self.nonreg_email("test")
result = self.client_post("/accounts/home/", {"email": email}, subdomain="zulip")
self.assertEqual(result.status_code, 302)
self.assertNotIn("deactivated", result.url)
realm = get_realm("zulip")
realm.deactivated = True
realm.save(update_fields=["deactivated"])
result = self.submit_reg_form_for_user(email, "abcd1234", subdomain="zulip")
self.assertEqual(result.status_code, 302)
self.assertEqual("/accounts/deactivated/", result.url)
with self.assertRaises(UserProfile.DoesNotExist):
self.nonreg_user("test")
def test_login_deactivated_realm(self) -> None:
"""
If you try to log in to a deactivated realm, you get a clear error page.
"""
realm = get_realm("zulip")
realm.deactivated = True
realm.save(update_fields=["deactivated"])
result = self.login_with_return(self.example_email("hamlet"), subdomain="zulip")
self.assertEqual(result.status_code, 302)
self.assertEqual("/accounts/deactivated/", result.url)
def test_logout(self) -> None:
self.login("hamlet")
# We use the logout API, not self.logout, to make sure we test
# the actual logout code path.
self.client_post("/accounts/logout/")
self.assert_logged_in_user_id(None)
def test_non_ascii_login(self) -> None:
"""
You can log in even if your password contain non-ASCII characters.
"""
email = self.nonreg_email("test")
password = "hümbüǵ"
# Registering succeeds.
self.register(email, password)
user_profile = self.nonreg_user("test")
self.assert_logged_in_user_id(user_profile.id)
self.logout()
self.assert_logged_in_user_id(None)
# Logging in succeeds.
self.logout()
self.login_by_email(email, password)
self.assert_logged_in_user_id(user_profile.id)
@override_settings(TWO_FACTOR_AUTHENTICATION_ENABLED=False)
def test_login_page_redirects_logged_in_user(self) -> None:
"""You will be redirected to the app's main page if you land on the
login page when already logged in.
"""
self.login("cordelia")
response = self.client_get("/login/")
self.assertEqual(response["Location"], "http://zulip.testserver")
def test_options_request_to_login_page(self) -> None:
response = self.client_options("/login/")
self.assertEqual(response.status_code, 200)
@override_settings(TWO_FACTOR_AUTHENTICATION_ENABLED=True)
def test_login_page_redirects_logged_in_user_under_2fa(self) -> None:
"""You will be redirected to the app's main page if you land on the
login page when already logged in.
"""
user_profile = self.example_user("cordelia")
self.create_default_device(user_profile)
self.login("cordelia")
self.login_2fa(user_profile)
response = self.client_get("/login/")
self.assertEqual(response["Location"], "http://zulip.testserver")
def test_start_two_factor_auth(self) -> None:
request = MagicMock(POST={})
with patch("zerver.views.auth.TwoFactorLoginView") as mock_view:
mock_view.as_view.return_value = lambda *a, **k: HttpResponse()
response = start_two_factor_auth(request)
self.assertTrue(isinstance(response, HttpResponse))
def test_do_two_factor_login(self) -> None:
user_profile = self.example_user("hamlet")
self.create_default_device(user_profile)
request = MagicMock()
with patch("zerver.decorator.django_otp.login") as mock_login:
do_two_factor_login(request, user_profile)
mock_login.assert_called_once()
def test_zulip_default_context_does_not_load_inline_previews(self) -> None:
realm = get_realm("zulip")
description = "https://www.google.com/images/srpr/logo4w.png"
realm.description = description
realm.save(update_fields=["description"])
response = self.client_get("/login/")
expected_response = """<p><a href="https://www.google.com/images/srpr/logo4w.png">\
https://www.google.com/images/srpr/logo4w.png</a></p>"""
self.assertEqual(response.context_data["realm_description"], expected_response)
self.assertEqual(response.status_code, 200)
class InviteUserBase(ZulipTestCase):
def check_sent_emails(self, correct_recipients: List[str]) -> None:
from django.core.mail import outbox
self.assert_length(outbox, len(correct_recipients))
email_recipients = [email.recipients()[0] for email in outbox]
self.assertEqual(sorted(email_recipients), sorted(correct_recipients))
if len(outbox) == 0:
return
self.assertIn("Zulip", self.email_display_from(outbox[0]))
self.assertEqual(self.email_envelope_from(outbox[0]), settings.NOREPLY_EMAIL_ADDRESS)
self.assertRegex(
self.email_display_from(outbox[0]), fr" <{self.TOKENIZED_NOREPLY_REGEX}>\Z"
)
self.assertEqual(outbox[0].extra_headers["List-Id"], "Zulip Dev <zulip.testserver>")
def invite(
self,
invitee_emails: str,
stream_names: Sequence[str],
body: str = "",
invite_as: int = PreregistrationUser.INVITE_AS["MEMBER"],
) -> HttpResponse:
"""
Invites the specified users to Zulip with the specified streams.
        invitee_emails should be a string containing the addresses to invite,
        comma or newline separated.
        stream_names should be a list of stream name strings.
"""
stream_ids = []
for stream_name in stream_names:
stream_ids.append(self.get_stream_id(stream_name))
return self.client_post(
"/json/invites",
{
"invitee_emails": invitee_emails,
"stream_ids": orjson.dumps(stream_ids).decode(),
"invite_as": invite_as,
},
)
class InviteUserTest(InviteUserBase):
def test_successful_invite_user(self) -> None:
"""
A call to /json/invites with valid parameters causes an invitation
email to be sent.
"""
self.login("hamlet")
invitee = "alice-test@zulip.com"
self.assert_json_success(self.invite(invitee, ["Denmark"]))
self.assertTrue(find_key_by_email(invitee))
self.check_sent_emails([invitee])
def test_newbie_restrictions(self) -> None:
user_profile = self.example_user("hamlet")
invitee = "alice-test@zulip.com"
stream_name = "Denmark"
self.login_user(user_profile)
result = self.invite(invitee, [stream_name])
self.assert_json_success(result)
user_profile.date_joined = timezone_now() - datetime.timedelta(days=10)
user_profile.save()
with self.settings(INVITES_MIN_USER_AGE_DAYS=5):
result = self.invite(invitee, [stream_name])
self.assert_json_success(result)
with self.settings(INVITES_MIN_USER_AGE_DAYS=15):
result = self.invite(invitee, [stream_name])
self.assert_json_error_contains(result, "Your account is too new")
def test_invite_limits(self) -> None:
user_profile = self.example_user("hamlet")
realm = user_profile.realm
stream_name = "Denmark"
# These constants only need to be in descending order
# for this test to trigger an InvitationError based
# on max daily counts.
site_max = 50
realm_max = 40
num_invitees = 30
max_daily_count = 20
daily_counts = [(1, max_daily_count)]
invite_emails = [f"foo-{i:02}@zulip.com" for i in range(num_invitees)]
invitees = ",".join(invite_emails)
self.login_user(user_profile)
realm.max_invites = realm_max
realm.date_created = timezone_now()
realm.save()
def try_invite() -> HttpResponse:
with self.settings(
OPEN_REALM_CREATION=True,
INVITES_DEFAULT_REALM_DAILY_MAX=site_max,
INVITES_NEW_REALM_LIMIT_DAYS=daily_counts,
):
result = self.invite(invitees, [stream_name])
return result
result = try_invite()
self.assert_json_error_contains(result, "enough remaining invites")
# Next show that aggregate limits expire once the realm is old
# enough.
realm.date_created = timezone_now() - datetime.timedelta(days=8)
realm.save()
with queries_captured() as queries:
with cache_tries_captured() as cache_tries:
result = try_invite()
self.assert_json_success(result)
# TODO: Fix large query count here.
#
# TODO: There is some test OTHER than this one
# that is leaking some kind of state change
# that throws off the query count here. It
# is hard to investigate currently (due to
# the large number of queries), so I just
# use an approximate equality check.
actual_count = len(queries)
expected_count = 251
if abs(actual_count - expected_count) > 1:
raise AssertionError(
f"""
Unexpected number of queries:
expected query count: {expected_count}
actual: {actual_count}
"""
)
# Almost all of these cache hits are to re-fetch each one of the
# invitees. These happen inside our queue processor for sending
# confirmation emails, so they are somewhat difficult to avoid.
#
# TODO: Mock the call to queue_json_publish, so we can measure the
# queue impact separately from the user-perceived impact.
self.assert_length(cache_tries, 32)
# Next get line coverage on bumping a realm's max_invites.
realm.date_created = timezone_now()
realm.max_invites = site_max + 10
realm.save()
result = try_invite()
self.assert_json_success(result)
# Finally get coverage on the case that OPEN_REALM_CREATION is False.
with self.settings(OPEN_REALM_CREATION=False):
result = self.invite(invitees, [stream_name])
self.assert_json_success(result)
def test_invite_user_to_realm_on_manual_license_plan(self) -> None:
user = self.example_user("hamlet")
self.login_user(user)
_, ledger = self.subscribe_realm_to_monthly_plan_on_manual_license_management(
user.realm, 50, 50
)
with self.settings(BILLING_ENABLED=True):
result = self.invite(self.nonreg_email("alice"), ["Denmark"])
self.assert_json_success(result)
ledger.licenses_at_next_renewal = 5
ledger.save(update_fields=["licenses_at_next_renewal"])
with self.settings(BILLING_ENABLED=True):
result = self.invite(self.nonreg_email("bob"), ["Denmark"])
self.assert_json_success(result)
ledger.licenses = get_latest_seat_count(user.realm) + 1
ledger.save(update_fields=["licenses"])
with self.settings(BILLING_ENABLED=True):
invitee_emails = self.nonreg_email("bob") + "," + self.nonreg_email("alice")
result = self.invite(invitee_emails, ["Denmark"])
self.assert_json_error_contains(
result, "Your organization does not have enough unused Zulip licenses to invite 2 users"
)
ledger.licenses = get_latest_seat_count(user.realm)
ledger.save(update_fields=["licenses"])
with self.settings(BILLING_ENABLED=True):
result = self.invite(self.nonreg_email("bob"), ["Denmark"])
self.assert_json_error_contains(
result, "All Zulip licenses for this organization are currently in use"
)
def test_cross_realm_bot(self) -> None:
inviter = self.example_user("hamlet")
self.login_user(inviter)
cross_realm_bot_email = "emailgateway@zulip.com"
legit_new_email = "fred@zulip.com"
invitee_emails = ",".join([cross_realm_bot_email, legit_new_email])
result = self.invite(invitee_emails, ["Denmark"])
self.assert_json_error(
result,
"Some of those addresses are already using Zulip,"
+ " so we didn't send them an invitation."
+ " We did send invitations to everyone else!",
)
def test_invite_mirror_dummy_user(self) -> None:
"""
A mirror dummy account is a temporary account
that we keep in our system if we are mirroring
data from something like Zephyr or IRC.
We want users to eventually just sign up or
register for Zulip, in which case we will just
fully "activate" the account.
Here we test that you can invite a person who
has a mirror dummy account.
"""
inviter = self.example_user("hamlet")
self.login_user(inviter)
mirror_user = self.example_user("cordelia")
mirror_user.is_mirror_dummy = True
mirror_user.save()
change_user_is_active(mirror_user, False)
self.assertEqual(
PreregistrationUser.objects.filter(email=mirror_user.email).count(),
0,
)
result = self.invite(mirror_user.email, ["Denmark"])
self.assert_json_success(result)
prereg_user = PreregistrationUser.objects.get(email=mirror_user.email)
self.assertEqual(
prereg_user.referred_by.email,
inviter.email,
)
def test_successful_invite_user_as_owner_from_owner_account(self) -> None:
self.login("desdemona")
invitee = self.nonreg_email("alice")
result = self.invite(
invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["REALM_OWNER"]
)
self.assert_json_success(result)
self.assertTrue(find_key_by_email(invitee))
self.submit_reg_form_for_user(invitee, "password")
invitee_profile = self.nonreg_user("alice")
self.assertTrue(invitee_profile.is_realm_owner)
self.assertFalse(invitee_profile.is_guest)
def test_invite_user_as_owner_from_admin_account(self) -> None:
self.login("iago")
invitee = self.nonreg_email("alice")
response = self.invite(
invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["REALM_OWNER"]
)
self.assert_json_error(response, "Must be an organization owner")
def test_successful_invite_user_as_admin_from_admin_account(self) -> None:
self.login("iago")
invitee = self.nonreg_email("alice")
result = self.invite(
invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["REALM_ADMIN"]
)
self.assert_json_success(result)
self.assertTrue(find_key_by_email(invitee))
self.submit_reg_form_for_user(invitee, "password")
invitee_profile = self.nonreg_user("alice")
self.assertTrue(invitee_profile.is_realm_admin)
self.assertFalse(invitee_profile.is_realm_owner)
self.assertFalse(invitee_profile.is_guest)
def test_invite_user_as_admin_from_normal_account(self) -> None:
self.login("hamlet")
invitee = self.nonreg_email("alice")
response = self.invite(
invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["REALM_ADMIN"]
)
self.assert_json_error(response, "Must be an organization administrator")
def test_successful_invite_user_as_moderator_from_admin_account(self) -> None:
self.login("iago")
invitee = self.nonreg_email("alice")
result = self.invite(
invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["MODERATOR"]
)
self.assert_json_success(result)
self.assertTrue(find_key_by_email(invitee))
self.submit_reg_form_for_user(invitee, "password")
invitee_profile = self.nonreg_user("alice")
self.assertFalse(invitee_profile.is_realm_admin)
self.assertTrue(invitee_profile.is_moderator)
self.assertFalse(invitee_profile.is_guest)
def test_invite_user_as_moderator_from_normal_account(self) -> None:
self.login("hamlet")
invitee = self.nonreg_email("alice")
response = self.invite(
invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["MODERATOR"]
)
self.assert_json_error(response, "Must be an organization administrator")
def test_invite_user_as_moderator_from_moderator_account(self) -> None:
self.login("shiva")
invitee = self.nonreg_email("alice")
response = self.invite(
invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["MODERATOR"]
)
self.assert_json_error(response, "Must be an organization administrator")
def test_invite_user_as_invalid_type(self) -> None:
"""
        Test inviting a user as an invalid type of user, i.e. the value of
        invite_as is not in PreregistrationUser.INVITE_AS.
"""
self.login("iago")
invitee = self.nonreg_email("alice")
response = self.invite(invitee, ["Denmark"], invite_as=10)
self.assert_json_error(response, "Must be invited as an valid type of user")
def test_successful_invite_user_as_guest_from_normal_account(self) -> None:
self.login("hamlet")
invitee = self.nonreg_email("alice")
self.assert_json_success(
self.invite(invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["GUEST_USER"])
)
self.assertTrue(find_key_by_email(invitee))
self.submit_reg_form_for_user(invitee, "password")
invitee_profile = self.nonreg_user("alice")
self.assertFalse(invitee_profile.is_realm_admin)
self.assertTrue(invitee_profile.is_guest)
def test_successful_invite_user_as_guest_from_admin_account(self) -> None:
self.login("iago")
invitee = self.nonreg_email("alice")
self.assert_json_success(
self.invite(invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["GUEST_USER"])
)
self.assertTrue(find_key_by_email(invitee))
self.submit_reg_form_for_user(invitee, "password")
invitee_profile = self.nonreg_user("alice")
self.assertFalse(invitee_profile.is_realm_admin)
self.assertTrue(invitee_profile.is_guest)
def test_successful_invite_user_with_name(self) -> None:
"""
A call to /json/invites with valid parameters causes an invitation
email to be sent.
"""
self.login("hamlet")
email = "alice-test@zulip.com"
invitee = f"Alice Test <{email}>"
self.assert_json_success(self.invite(invitee, ["Denmark"]))
self.assertTrue(find_key_by_email(email))
self.check_sent_emails([email])
def test_successful_invite_user_with_name_and_normal_one(self) -> None:
"""
A call to /json/invites with valid parameters causes an invitation
email to be sent.
"""
self.login("hamlet")
email = "alice-test@zulip.com"
email2 = "bob-test@zulip.com"
invitee = f"Alice Test <{email}>, {email2}"
self.assert_json_success(self.invite(invitee, ["Denmark"]))
self.assertTrue(find_key_by_email(email))
self.assertTrue(find_key_by_email(email2))
self.check_sent_emails([email, email2])
def test_can_invite_others_to_realm(self) -> None:
def validation_func(user_profile: UserProfile) -> bool:
user_profile.refresh_from_db()
return user_profile.can_invite_others_to_realm()
self.check_has_permission_policies("invite_to_realm_policy", validation_func)
def test_invite_others_to_realm_setting(self) -> None:
"""
The invite_to_realm_policy realm setting works properly.
"""
realm = get_realm("zulip")
do_set_realm_property(
realm, "invite_to_realm_policy", Realm.POLICY_ADMINS_ONLY, acting_user=None
)
self.login("shiva")
email = "alice-test@zulip.com"
email2 = "bob-test@zulip.com"
invitee = f"Alice Test <{email}>, {email2}"
self.assert_json_error(
self.invite(invitee, ["Denmark"]),
"Insufficient permission",
)
# Now verify an administrator can do it
self.login("iago")
self.assert_json_success(self.invite(invitee, ["Denmark"]))
self.assertTrue(find_key_by_email(email))
self.assertTrue(find_key_by_email(email2))
self.check_sent_emails([email, email2])
from django.core import mail
mail.outbox = []
do_set_realm_property(
realm, "invite_to_realm_policy", Realm.POLICY_MODERATORS_ONLY, acting_user=None
)
self.login("hamlet")
email = "carol-test@zulip.com"
email2 = "earl-test@zulip.com"
invitee = f"Carol Test <{email}>, {email2}"
self.assert_json_error(
self.invite(invitee, ["Denmark"]),
"Insufficient permission",
)
self.login("shiva")
self.assert_json_success(self.invite(invitee, ["Denmark"]))
self.assertTrue(find_key_by_email(email))
self.assertTrue(find_key_by_email(email2))
self.check_sent_emails([email, email2])
mail.outbox = []
do_set_realm_property(
realm, "invite_to_realm_policy", Realm.POLICY_MEMBERS_ONLY, acting_user=None
)
self.login("polonius")
email = "dave-test@zulip.com"
email2 = "mark-test@zulip.com"
invitee = f"Dave Test <{email}>, {email2}"
self.assert_json_error(self.invite(invitee, ["Denmark"]), "Not allowed for guest users")
self.login("hamlet")
self.assert_json_success(self.invite(invitee, ["Denmark"]))
self.assertTrue(find_key_by_email(email))
self.assertTrue(find_key_by_email(email2))
self.check_sent_emails([email, email2])
mail.outbox = []
do_set_realm_property(
realm, "invite_to_realm_policy", Realm.POLICY_FULL_MEMBERS_ONLY, acting_user=None
)
do_set_realm_property(realm, "waiting_period_threshold", 1000, acting_user=None)
hamlet = self.example_user("hamlet")
hamlet.date_joined = timezone_now() - datetime.timedelta(
days=(realm.waiting_period_threshold - 1)
)
email = "issac-test@zulip.com"
email2 = "steven-test@zulip.com"
invitee = f"Issac Test <{email}>, {email2}"
self.assert_json_error(
self.invite(invitee, ["Denmark"]),
"Insufficient permission",
)
do_set_realm_property(realm, "waiting_period_threshold", 0, acting_user=None)
self.assert_json_success(self.invite(invitee, ["Denmark"]))
self.assertTrue(find_key_by_email(email))
self.assertTrue(find_key_by_email(email2))
self.check_sent_emails([email, email2])
def test_invite_user_signup_initial_history(self) -> None:
"""
Test that a new user invited to a stream receives some initial
history but only from public streams.
"""
self.login("hamlet")
user_profile = self.example_user("hamlet")
private_stream_name = "Secret"
self.make_stream(private_stream_name, invite_only=True)
self.subscribe(user_profile, private_stream_name)
public_msg_id = self.send_stream_message(
self.example_user("hamlet"),
"Denmark",
topic_name="Public topic",
content="Public message",
)
secret_msg_id = self.send_stream_message(
self.example_user("hamlet"),
private_stream_name,
topic_name="Secret topic",
content="Secret message",
)
invitee = self.nonreg_email("alice")
self.assert_json_success(self.invite(invitee, [private_stream_name, "Denmark"]))
self.assertTrue(find_key_by_email(invitee))
self.submit_reg_form_for_user(invitee, "password")
invitee_profile = self.nonreg_user("alice")
invitee_msg_ids = [
um.message_id for um in UserMessage.objects.filter(user_profile=invitee_profile)
]
self.assertTrue(public_msg_id in invitee_msg_ids)
self.assertFalse(secret_msg_id in invitee_msg_ids)
self.assertFalse(invitee_profile.is_realm_admin)
invitee_msg, signups_stream_msg, inviter_msg, secret_msg = Message.objects.all().order_by(
"-id"
)[0:4]
self.assertEqual(secret_msg.id, secret_msg_id)
self.assertEqual(inviter_msg.sender.email, "notification-bot@zulip.com")
self.assertTrue(
inviter_msg.content.startswith(
f"alice_zulip.com <`{invitee_profile.email}`> accepted your",
)
)
self.assertEqual(signups_stream_msg.sender.email, "notification-bot@zulip.com")
self.assertTrue(
signups_stream_msg.content.startswith(
f"@_**alice_zulip.com|{invitee_profile.id}** just signed up",
)
)
self.assertEqual(invitee_msg.sender.email, "welcome-bot@zulip.com")
self.assertTrue(invitee_msg.content.startswith("Hello, and welcome to Zulip!"))
def test_multi_user_invite(self) -> None:
"""
Invites multiple users with a variety of delimiters.
"""
self.login("hamlet")
# Intentionally use a weird string.
self.assert_json_success(
self.invite(
"""bob-test@zulip.com, carol-test@zulip.com,
dave-test@zulip.com
earl-test@zulip.com""",
["Denmark"],
)
)
for user in ("bob", "carol", "dave", "earl"):
self.assertTrue(find_key_by_email(f"{user}-test@zulip.com"))
self.check_sent_emails(
[
"bob-test@zulip.com",
"carol-test@zulip.com",
"dave-test@zulip.com",
"earl-test@zulip.com",
]
)
def test_max_invites_model(self) -> None:
realm = get_realm("zulip")
self.assertEqual(realm.max_invites, settings.INVITES_DEFAULT_REALM_DAILY_MAX)
realm.max_invites = 3
realm.save()
self.assertEqual(get_realm("zulip").max_invites, 3)
realm.max_invites = settings.INVITES_DEFAULT_REALM_DAILY_MAX
realm.save()
def test_invite_too_many_users(self) -> None:
# Only a light test of this pathway; e.g. doesn't test that
# the limit gets reset after 24 hours
self.login("iago")
invitee_emails = "1@zulip.com, 2@zulip.com"
self.invite(invitee_emails, ["Denmark"])
invitee_emails = ", ".join(str(i) for i in range(get_realm("zulip").max_invites - 1))
self.assert_json_error(
self.invite(invitee_emails, ["Denmark"]),
"You do not have enough remaining invites for today. "
"Please contact desdemona+admin@zulip.com to have your limit raised. "
"No invitations were sent.",
)
def test_missing_or_invalid_params(self) -> None:
"""
Tests inviting with various missing or invalid parameters.
"""
realm = get_realm("zulip")
do_set_realm_property(realm, "emails_restricted_to_domains", True, acting_user=None)
self.login("hamlet")
invitee_emails = "foo@zulip.com"
self.assert_json_error(
self.invite(invitee_emails, []),
"You must specify at least one stream for invitees to join.",
)
for address in ("noatsign.com", "outsideyourdomain@example.net"):
self.assert_json_error(
self.invite(address, ["Denmark"]),
"Some emails did not validate, so we didn't send any invitations.",
)
self.check_sent_emails([])
self.assert_json_error(
self.invite("", ["Denmark"]), "You must specify at least one email address."
)
self.check_sent_emails([])
def test_guest_user_invitation(self) -> None:
"""
Guest user can't invite new users
"""
self.login("polonius")
invitee = "alice-test@zulip.com"
self.assert_json_error(self.invite(invitee, ["Denmark"]), "Not allowed for guest users")
self.assertEqual(find_key_by_email(invitee), None)
self.check_sent_emails([])
def test_invalid_stream(self) -> None:
"""
Tests inviting to a non-existent stream.
"""
self.login("hamlet")
self.assert_json_error(
self.invite("iago-test@zulip.com", ["NotARealStream"]),
f"Stream does not exist with id: {self.INVALID_STREAM_ID}. No invites were sent.",
)
self.check_sent_emails([])
def test_invite_existing_user(self) -> None:
"""
If you invite an address already using Zulip, no invitation is sent.
"""
self.login("hamlet")
hamlet_email = "hAmLeT@zUlIp.com"
result = self.invite(hamlet_email, ["Denmark"])
self.assert_json_error(result, "We weren't able to invite anyone.")
self.assertFalse(
PreregistrationUser.objects.filter(email__iexact=hamlet_email).exists(),
)
self.check_sent_emails([])
def normalize_string(self, s: str) -> str:
s = s.strip()
return re.sub(r"\s+", " ", s)
def test_invite_links_in_name(self) -> None:
"""
If you invite an address already using Zulip, no invitation is sent.
"""
hamlet = self.example_user("hamlet")
self.login_user(hamlet)
# Test we properly handle links in user full names
do_change_full_name(hamlet, "</a> https://www.google.com", hamlet)
result = self.invite("newuser@zulip.com", ["Denmark"])
self.assert_json_success(result)
self.check_sent_emails(["newuser@zulip.com"])
from django.core.mail import outbox
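        # alternatives[0][0] is the HTML body of the invitation email.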
body = self.normalize_string(outbox[0].alternatives[0][0])
# Verify that one can't get Zulip to send invitation emails
# that third-party products will linkify using the full_name
# field, because we've included that field inside the mailto:
# link for the sender.
self.assertIn(
'<a href="mailto:hamlet@zulip.com" style="color:#46aa8f; text-decoration:underline"></a> https://www.google.com (hamlet@zulip.com)</a> wants',
body,
)
# TODO: Ideally, this test would also test the Invitation
# Reminder email generated, but the test setup for that is
# annoying.
def test_invite_some_existing_some_new(self) -> None:
"""
If you invite a mix of already existing and new users, invitations are
only sent to the new users.
"""
self.login("hamlet")
existing = [self.example_email("hamlet"), "othello@zulip.com"]
new = ["foo-test@zulip.com", "bar-test@zulip.com"]
invitee_emails = "\n".join(existing + new)
self.assert_json_error(
self.invite(invitee_emails, ["Denmark"]),
"Some of those addresses are already using Zulip, \
so we didn't send them an invitation. We did send invitations to everyone else!",
)
# We only created accounts for the new users.
for email in existing:
self.assertRaises(
PreregistrationUser.DoesNotExist,
lambda: PreregistrationUser.objects.get(email=email),
)
for email in new:
self.assertTrue(PreregistrationUser.objects.get(email=email))
# We only sent emails to the new users.
self.check_sent_emails(new)
prereg_user = PreregistrationUser.objects.get(email="foo-test@zulip.com")
self.assertEqual(prereg_user.email, "foo-test@zulip.com")
def test_invite_outside_domain_in_closed_realm(self) -> None:
"""
In a realm with `emails_restricted_to_domains = True`, you can't invite people
with a different domain from that of the realm or your e-mail address.
"""
zulip_realm = get_realm("zulip")
zulip_realm.emails_restricted_to_domains = True
zulip_realm.save()
self.login("hamlet")
external_address = "foo@example.com"
self.assert_json_error(
self.invite(external_address, ["Denmark"]),
"Some emails did not validate, so we didn't send any invitations.",
)
def test_invite_using_disposable_email(self) -> None:
"""
In a realm with `disallow_disposable_email_addresses = True`, you can't invite
        people with a disposable email address.
"""
zulip_realm = get_realm("zulip")
zulip_realm.emails_restricted_to_domains = False
zulip_realm.disallow_disposable_email_addresses = True
zulip_realm.save()
self.login("hamlet")
external_address = "foo@mailnator.com"
self.assert_json_error(
self.invite(external_address, ["Denmark"]),
"Some emails did not validate, so we didn't send any invitations.",
)
def test_invite_outside_domain_in_open_realm(self) -> None:
"""
In a realm with `emails_restricted_to_domains = False`, you can invite people
with a different domain from that of the realm or your e-mail address.
"""
zulip_realm = get_realm("zulip")
zulip_realm.emails_restricted_to_domains = False
zulip_realm.save()
self.login("hamlet")
external_address = "foo@example.com"
self.assert_json_success(self.invite(external_address, ["Denmark"]))
self.check_sent_emails([external_address])
def test_invite_outside_domain_before_closing(self) -> None:
"""
If you invite someone with a different domain from that of the realm
when `emails_restricted_to_domains = False`, but `emails_restricted_to_domains` later
changes to true, the invitation should succeed but the invitee's signup
attempt should fail.
"""
zulip_realm = get_realm("zulip")
zulip_realm.emails_restricted_to_domains = False
zulip_realm.save()
self.login("hamlet")
external_address = "foo@example.com"
self.assert_json_success(self.invite(external_address, ["Denmark"]))
self.check_sent_emails([external_address])
zulip_realm.emails_restricted_to_domains = True
zulip_realm.save()
result = self.submit_reg_form_for_user("foo@example.com", "password")
self.assertEqual(result.status_code, 200)
self.assert_in_response("only allows users with email addresses", result)
def test_disposable_emails_before_closing(self) -> None:
"""
If you invite someone with a disposable email when
`disallow_disposable_email_addresses = False`, but
later changes to true, the invitation should succeed
but the invitee's signup attempt should fail.
"""
zulip_realm = get_realm("zulip")
zulip_realm.emails_restricted_to_domains = False
zulip_realm.disallow_disposable_email_addresses = False
zulip_realm.save()
self.login("hamlet")
external_address = "foo@mailnator.com"
self.assert_json_success(self.invite(external_address, ["Denmark"]))
self.check_sent_emails([external_address])
zulip_realm.disallow_disposable_email_addresses = True
zulip_realm.save()
result = self.submit_reg_form_for_user("foo@mailnator.com", "password")
self.assertEqual(result.status_code, 200)
self.assert_in_response("Please sign up using a real email address.", result)
def test_invite_with_email_containing_plus_before_closing(self) -> None:
"""
If you invite someone with an email containing plus when
`emails_restricted_to_domains = False`, but later change
`emails_restricted_to_domains = True`, the invitation should
succeed but the invitee's signup attempt should fail as
users are not allowed to sign up using email containing +
when the realm is restricted to domain.
"""
zulip_realm = get_realm("zulip")
zulip_realm.emails_restricted_to_domains = False
zulip_realm.save()
self.login("hamlet")
external_address = "foo+label@zulip.com"
self.assert_json_success(self.invite(external_address, ["Denmark"]))
self.check_sent_emails([external_address])
zulip_realm.emails_restricted_to_domains = True
zulip_realm.save()
result = self.submit_reg_form_for_user(external_address, "password")
self.assertEqual(result.status_code, 200)
self.assert_in_response(
"Zulip Dev, does not allow signups using emails\n that contains +", result
)
def test_invalid_email_check_after_confirming_email(self) -> None:
self.login("hamlet")
email = "test@zulip.com"
self.assert_json_success(self.invite(email, ["Denmark"]))
obj = Confirmation.objects.get(confirmation_key=find_key_by_email(email))
prereg_user = obj.content_object
prereg_user.email = "invalid.email"
prereg_user.save()
result = self.submit_reg_form_for_user(email, "password")
self.assertEqual(result.status_code, 200)
self.assert_in_response(
"The email address you are trying to sign up with is not valid", result
)
def test_invite_with_non_ascii_streams(self) -> None:
"""
Inviting someone to streams with non-ASCII characters succeeds.
"""
self.login("hamlet")
invitee = "alice-test@zulip.com"
stream_name = "hümbüǵ"
# Make sure we're subscribed before inviting someone.
self.subscribe(self.example_user("hamlet"), stream_name)
self.assert_json_success(self.invite(invitee, [stream_name]))
def test_invitation_reminder_email(self) -> None:
from django.core.mail import outbox
# All users belong to zulip realm
referrer_name = "hamlet"
current_user = self.example_user(referrer_name)
self.login_user(current_user)
invitee_email = self.nonreg_email("alice")
self.assert_json_success(self.invite(invitee_email, ["Denmark"]))
self.assertTrue(find_key_by_email(invitee_email))
self.check_sent_emails([invitee_email])
data = {"email": invitee_email, "referrer_email": current_user.email}
invitee = PreregistrationUser.objects.get(email=data["email"])
referrer = self.example_user(referrer_name)
link = create_confirmation_link(invitee, Confirmation.INVITATION)
context = common_context(referrer)
context.update(
activate_url=link,
referrer_name=referrer.full_name,
referrer_email=referrer.email,
referrer_realm_name=referrer.realm.name,
)
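        # Manually schedule a reminder email (as the invite pathway would)
        # so that deliver_scheduled_emails can be exercised below.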
with self.settings(EMAIL_BACKEND="django.core.mail.backends.console.EmailBackend"):
email = data["email"]
send_future_email(
"zerver/emails/invitation_reminder",
referrer.realm,
to_emails=[email],
from_address=FromAddress.no_reply_placeholder,
context=context,
)
email_jobs_to_deliver = ScheduledEmail.objects.filter(
scheduled_timestamp__lte=timezone_now()
)
self.assert_length(email_jobs_to_deliver, 1)
email_count = len(outbox)
for job in email_jobs_to_deliver:
deliver_scheduled_emails(job)
self.assert_length(outbox, email_count + 1)
self.assertEqual(self.email_envelope_from(outbox[-1]), settings.NOREPLY_EMAIL_ADDRESS)
self.assertIn(FromAddress.NOREPLY, self.email_display_from(outbox[-1]))
        # Now verify that signing up clears any scheduled invitation reminder emails
with self.settings(EMAIL_BACKEND="django.core.mail.backends.console.EmailBackend"):
email = data["email"]
send_future_email(
"zerver/emails/invitation_reminder",
referrer.realm,
to_emails=[email],
from_address=FromAddress.no_reply_placeholder,
context=context,
)
email_jobs_to_deliver = ScheduledEmail.objects.filter(
scheduled_timestamp__lte=timezone_now(), type=ScheduledEmail.INVITATION_REMINDER
)
self.assert_length(email_jobs_to_deliver, 1)
self.register(invitee_email, "test")
email_jobs_to_deliver = ScheduledEmail.objects.filter(
scheduled_timestamp__lte=timezone_now(), type=ScheduledEmail.INVITATION_REMINDER
)
self.assert_length(email_jobs_to_deliver, 0)
def test_no_invitation_reminder_when_link_expires_quickly(self) -> None:
self.login("hamlet")
# Check invitation reminder email is scheduled with 4 day link expiry
with self.settings(INVITATION_LINK_VALIDITY_DAYS=4):
self.invite("alice@zulip.com", ["Denmark"])
self.assertEqual(
ScheduledEmail.objects.filter(type=ScheduledEmail.INVITATION_REMINDER).count(), 1
)
# Check invitation reminder email is not scheduled with 3 day link expiry
with self.settings(INVITATION_LINK_VALIDITY_DAYS=3):
self.invite("bob@zulip.com", ["Denmark"])
self.assertEqual(
ScheduledEmail.objects.filter(type=ScheduledEmail.INVITATION_REMINDER).count(), 1
)
# make sure users can't take a valid confirmation key from another
# pathway and use it with the invitation URL route
def test_confirmation_key_of_wrong_type(self) -> None:
email = self.nonreg_email("alice")
realm = get_realm("zulip")
inviter = self.example_user("iago")
prereg_user = PreregistrationUser.objects.create(
email=email, referred_by=inviter, realm=realm
)
url = create_confirmation_link(prereg_user, Confirmation.USER_REGISTRATION)
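        # The confirmation key is the last path component of the generated confirmation URL.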
registration_key = url.split("/")[-1]
# Mainly a test of get_object_from_key, rather than of the invitation pathway
with self.assertRaises(ConfirmationKeyException) as cm:
get_object_from_key(registration_key, Confirmation.INVITATION)
self.assertEqual(cm.exception.error_type, ConfirmationKeyException.DOES_NOT_EXIST)
# Verify that using the wrong type doesn't work in the main confirm code path
email_change_url = create_confirmation_link(prereg_user, Confirmation.EMAIL_CHANGE)
email_change_key = email_change_url.split("/")[-1]
url = "/accounts/do_confirm/" + email_change_key
result = self.client_get(url)
self.assert_in_success_response(
["Whoops. We couldn't find your confirmation link in the system."], result
)
def test_confirmation_expired(self) -> None:
email = self.nonreg_email("alice")
realm = get_realm("zulip")
inviter = self.example_user("iago")
prereg_user = PreregistrationUser.objects.create(
email=email, referred_by=inviter, realm=realm
)
url = create_confirmation_link(prereg_user, Confirmation.USER_REGISTRATION)
registration_key = url.split("/")[-1]
conf = Confirmation.objects.filter(confirmation_key=registration_key).first()
conf.date_sent -= datetime.timedelta(weeks=3)
conf.save()
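        # Drop the scheme and host so we request the site-relative confirmation path.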
target_url = "/" + url.split("/", 3)[3]
result = self.client_get(target_url)
self.assert_in_success_response(
["Whoops. The confirmation link has expired or been deactivated."], result
)
def test_send_more_than_one_invite_to_same_user(self) -> None:
self.user_profile = self.example_user("iago")
streams = []
for stream_name in ["Denmark", "Scotland"]:
streams.append(get_stream(stream_name, self.user_profile.realm))
do_invite_users(self.user_profile, ["foo@zulip.com"], streams, False)
prereg_user = PreregistrationUser.objects.get(email="foo@zulip.com")
do_invite_users(self.user_profile, ["foo@zulip.com"], streams, False)
do_invite_users(self.user_profile, ["foo@zulip.com"], streams, False)
# Also send an invite from a different realm.
lear = get_realm("lear")
lear_user = self.lear_user("cordelia")
do_invite_users(lear_user, ["foo@zulip.com"], [], False)
invites = PreregistrationUser.objects.filter(email__iexact="foo@zulip.com")
self.assert_length(invites, 4)
do_create_user(
"foo@zulip.com",
"password",
self.user_profile.realm,
"full name",
prereg_user=prereg_user,
acting_user=None,
)
accepted_invite = PreregistrationUser.objects.filter(
email__iexact="foo@zulip.com", status=confirmation_settings.STATUS_ACTIVE
)
revoked_invites = PreregistrationUser.objects.filter(
email__iexact="foo@zulip.com", status=confirmation_settings.STATUS_REVOKED
)
        # If a user was invited more than once, then once they accept one invite
        # and register, the other invites in that realm must be revoked.
self.assert_length(accepted_invite, 1)
self.assertEqual(accepted_invite[0].id, prereg_user.id)
expected_revoked_invites = set(invites.exclude(id=prereg_user.id).exclude(realm=lear))
self.assertEqual(set(revoked_invites), expected_revoked_invites)
self.assertEqual(
PreregistrationUser.objects.get(email__iexact="foo@zulip.com", realm=lear).status, 0
)
def test_confirmation_obj_not_exist_error(self) -> None:
"""Since the key is a param input by the user to the registration endpoint,
if it inserts an invalid value, the confirmation object won't be found. This
tests if, in that scenario, we handle the exception by redirecting the user to
the confirmation_link_expired_error page.
"""
email = self.nonreg_email("alice")
password = "password"
realm = get_realm("zulip")
inviter = self.example_user("iago")
prereg_user = PreregistrationUser.objects.create(
email=email, referred_by=inviter, realm=realm
)
confirmation_link = create_confirmation_link(prereg_user, Confirmation.USER_REGISTRATION)
registration_key = "invalid_confirmation_key"
url = "/accounts/register/"
response = self.client_post(
url, {"key": registration_key, "from_confirmation": 1, "full_nme": "alice"}
)
self.assertEqual(response.status_code, 404)
self.assert_in_response("The registration link has expired or is not valid.", response)
registration_key = confirmation_link.split("/")[-1]
response = self.client_post(
url, {"key": registration_key, "from_confirmation": 1, "full_nme": "alice"}
)
self.assert_in_success_response(["We just need you to do one last thing."], response)
response = self.submit_reg_form_for_user(email, password, key=registration_key)
self.assertEqual(response.status_code, 302)
def test_validate_email_not_already_in_realm(self) -> None:
email = self.nonreg_email("alice")
password = "password"
realm = get_realm("zulip")
inviter = self.example_user("iago")
prereg_user = PreregistrationUser.objects.create(
email=email, referred_by=inviter, realm=realm
)
confirmation_link = create_confirmation_link(prereg_user, Confirmation.USER_REGISTRATION)
registration_key = confirmation_link.split("/")[-1]
url = "/accounts/register/"
self.client_post(
url, {"key": registration_key, "from_confirmation": 1, "full_name": "alice"}
)
self.submit_reg_form_for_user(email, password, key=registration_key)
url = "/accounts/register/"
response = self.client_post(
url, {"key": registration_key, "from_confirmation": 1, "full_name": "alice"}
)
self.assertEqual(response.status_code, 302)
self.assertEqual(
response.url,
reverse("login") + "?" + urlencode({"email": email, "already_registered": 1}),
)
def test_confirmation_link_in_manual_license_plan(self) -> None:
inviter = self.example_user("iago")
realm = get_realm("zulip")
email = self.nonreg_email("alice")
realm = get_realm("zulip")
prereg_user = PreregistrationUser.objects.create(
email=email, referred_by=inviter, realm=realm
)
confirmation_link = create_confirmation_link(prereg_user, Confirmation.USER_REGISTRATION)
registration_key = confirmation_link.split("/")[-1]
url = "/accounts/register/"
self.client_post(
url, {"key": registration_key, "from_confirmation": 1, "full_name": "alice"}
)
response = self.submit_reg_form_for_user(email, "password", key=registration_key)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "http://zulip.testserver/")
self.subscribe_realm_to_monthly_plan_on_manual_license_management(realm, 5, 5)
email = self.nonreg_email("bob")
prereg_user = PreregistrationUser.objects.create(
email=email, referred_by=inviter, realm=realm
)
confirmation_link = create_confirmation_link(prereg_user, Confirmation.USER_REGISTRATION)
registration_key = confirmation_link.split("/")[-1]
url = "/accounts/register/"
self.client_post(url, {"key": registration_key, "from_confirmation": 1, "full_name": "bob"})
response = self.submit_reg_form_for_user(email, "password", key=registration_key)
self.assert_in_success_response(
["New members cannot join this organization because all Zulip licenses are"], response
)
class InvitationsTestCase(InviteUserBase):
def test_do_get_user_invites(self) -> None:
self.login("iago")
user_profile = self.example_user("iago")
hamlet = self.example_user("hamlet")
othello = self.example_user("othello")
prereg_user_one = PreregistrationUser(email="TestOne@zulip.com", referred_by=user_profile)
prereg_user_one.save()
prereg_user_two = PreregistrationUser(email="TestTwo@zulip.com", referred_by=user_profile)
prereg_user_two.save()
prereg_user_three = PreregistrationUser(email="TestThree@zulip.com", referred_by=hamlet)
prereg_user_three.save()
prereg_user_four = PreregistrationUser(email="TestFour@zulip.com", referred_by=othello)
prereg_user_four.save()
prereg_user_other_realm = PreregistrationUser(
email="TestOne@zulip.com", referred_by=self.mit_user("sipbtest")
)
prereg_user_other_realm.save()
multiuse_invite = MultiuseInvite.objects.create(
referred_by=user_profile, realm=user_profile.realm
)
create_confirmation_link(multiuse_invite, Confirmation.MULTIUSE_INVITE)
self.assert_length(do_get_user_invites(user_profile), 5)
self.assert_length(do_get_user_invites(hamlet), 1)
self.assert_length(do_get_user_invites(othello), 1)
def test_successful_get_open_invitations(self) -> None:
"""
A GET call to /json/invites returns all unexpired invitations.
"""
realm = get_realm("zulip")
days_to_activate = getattr(settings, "INVITATION_LINK_VALIDITY_DAYS", "Wrong")
active_value = getattr(confirmation_settings, "STATUS_ACTIVE", "Wrong")
self.assertNotEqual(days_to_activate, "Wrong")
self.assertNotEqual(active_value, "Wrong")
self.login("iago")
user_profile = self.example_user("iago")
prereg_user_one = PreregistrationUser(email="TestOne@zulip.com", referred_by=user_profile)
prereg_user_one.save()
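        # Invitations older than INVITATION_LINK_VALIDITY_DAYS should be treated
        # as expired and excluded from the /json/invites response.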
expired_datetime = timezone_now() - datetime.timedelta(days=(days_to_activate + 1))
prereg_user_two = PreregistrationUser(email="TestTwo@zulip.com", referred_by=user_profile)
prereg_user_two.save()
PreregistrationUser.objects.filter(id=prereg_user_two.id).update(
invited_at=expired_datetime
)
prereg_user_three = PreregistrationUser(
email="TestThree@zulip.com", referred_by=user_profile, status=active_value
)
prereg_user_three.save()
hamlet = self.example_user("hamlet")
othello = self.example_user("othello")
multiuse_invite_one = MultiuseInvite.objects.create(referred_by=hamlet, realm=realm)
create_confirmation_link(multiuse_invite_one, Confirmation.MULTIUSE_INVITE)
multiuse_invite_two = MultiuseInvite.objects.create(referred_by=othello, realm=realm)
create_confirmation_link(multiuse_invite_two, Confirmation.MULTIUSE_INVITE)
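        # Expire the most recently created confirmation (othello's multiuse invite)
        # so that only hamlet's multiuse invite remains active.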
confirmation = Confirmation.objects.last()
confirmation.date_sent = expired_datetime
confirmation.save()
result = self.client_get("/json/invites")
self.assertEqual(result.status_code, 200)
invites = orjson.loads(result.content)["invites"]
self.assert_length(invites, 2)
self.assertFalse(invites[0]["is_multiuse"])
self.assertEqual(invites[0]["email"], "TestOne@zulip.com")
self.assertTrue(invites[1]["is_multiuse"])
self.assertEqual(invites[1]["invited_by_user_id"], hamlet.id)
def test_successful_delete_invitation(self) -> None:
"""
A DELETE call to /json/invites/<ID> should delete the invite and
any scheduled invitation reminder emails.
"""
self.login("iago")
invitee = "DeleteMe@zulip.com"
self.assert_json_success(self.invite(invitee, ["Denmark"]))
prereg_user = PreregistrationUser.objects.get(email=invitee)
# Verify that the scheduled email exists.
ScheduledEmail.objects.get(address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER)
result = self.client_delete("/json/invites/" + str(prereg_user.id))
self.assertEqual(result.status_code, 200)
error_result = self.client_delete("/json/invites/" + str(prereg_user.id))
self.assert_json_error(error_result, "No such invitation")
self.assertRaises(
ScheduledEmail.DoesNotExist,
lambda: ScheduledEmail.objects.get(
address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER
),
)
def test_successful_member_delete_invitation(self) -> None:
"""
        A DELETE call from a member account to /json/invites/<ID> should delete the invite and
any scheduled invitation reminder emails.
"""
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
invitee = "DeleteMe@zulip.com"
self.assert_json_success(self.invite(invitee, ["Denmark"]))
# Verify that the scheduled email exists.
prereg_user = PreregistrationUser.objects.get(email=invitee, referred_by=user_profile)
ScheduledEmail.objects.get(address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER)
# Verify another non-admin can't delete
result = self.api_delete(
self.example_user("othello"), "/api/v1/invites/" + str(prereg_user.id)
)
self.assert_json_error(result, "Must be an organization administrator")
# Verify that the scheduled email still exists.
prereg_user = PreregistrationUser.objects.get(email=invitee, referred_by=user_profile)
ScheduledEmail.objects.get(address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER)
# Verify deletion works.
result = self.api_delete(user_profile, "/api/v1/invites/" + str(prereg_user.id))
self.assertEqual(result.status_code, 200)
result = self.api_delete(user_profile, "/api/v1/invites/" + str(prereg_user.id))
self.assert_json_error(result, "No such invitation")
self.assertRaises(
ScheduledEmail.DoesNotExist,
lambda: ScheduledEmail.objects.get(
address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER
),
)
def test_delete_owner_invitation(self) -> None:
self.login("desdemona")
owner = self.example_user("desdemona")
invitee = "DeleteMe@zulip.com"
self.assert_json_success(
self.invite(
invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["REALM_OWNER"]
)
)
prereg_user = PreregistrationUser.objects.get(email=invitee)
result = self.api_delete(
self.example_user("iago"), "/api/v1/invites/" + str(prereg_user.id)
)
self.assert_json_error(result, "Must be an organization owner")
result = self.api_delete(owner, "/api/v1/invites/" + str(prereg_user.id))
self.assert_json_success(result)
result = self.api_delete(owner, "/api/v1/invites/" + str(prereg_user.id))
self.assert_json_error(result, "No such invitation")
self.assertRaises(
ScheduledEmail.DoesNotExist,
lambda: ScheduledEmail.objects.get(
address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER
),
)
def test_delete_multiuse_invite(self) -> None:
"""
        A DELETE call to /json/invites/multiuse/<ID> should delete the
multiuse_invite.
"""
self.login("iago")
zulip_realm = get_realm("zulip")
multiuse_invite = MultiuseInvite.objects.create(
referred_by=self.example_user("hamlet"), realm=zulip_realm
)
create_confirmation_link(multiuse_invite, Confirmation.MULTIUSE_INVITE)
result = self.client_delete("/json/invites/multiuse/" + str(multiuse_invite.id))
self.assertEqual(result.status_code, 200)
self.assertIsNone(MultiuseInvite.objects.filter(id=multiuse_invite.id).first())
# Test that trying to double-delete fails
error_result = self.client_delete("/json/invites/multiuse/" + str(multiuse_invite.id))
self.assert_json_error(error_result, "No such invitation")
        # Test deleting an owner multiuse_invite.
multiuse_invite = MultiuseInvite.objects.create(
referred_by=self.example_user("desdemona"),
realm=zulip_realm,
invited_as=PreregistrationUser.INVITE_AS["REALM_OWNER"],
)
create_confirmation_link(multiuse_invite, Confirmation.MULTIUSE_INVITE)
error_result = self.client_delete("/json/invites/multiuse/" + str(multiuse_invite.id))
self.assert_json_error(error_result, "Must be an organization owner")
self.login("desdemona")
result = self.client_delete("/json/invites/multiuse/" + str(multiuse_invite.id))
self.assert_json_success(result)
self.assertIsNone(MultiuseInvite.objects.filter(id=multiuse_invite.id).first())
# Test deleting multiuse invite from another realm
mit_realm = get_realm("zephyr")
multiuse_invite_in_mit = MultiuseInvite.objects.create(
referred_by=self.mit_user("sipbtest"), realm=mit_realm
)
create_confirmation_link(multiuse_invite_in_mit, Confirmation.MULTIUSE_INVITE)
error_result = self.client_delete(
"/json/invites/multiuse/" + str(multiuse_invite_in_mit.id)
)
self.assert_json_error(error_result, "No such invitation")
def test_successful_resend_invitation(self) -> None:
"""
A POST call to /json/invites/<ID>/resend should send an invitation reminder email
and delete any scheduled invitation reminder email.
"""
self.login("iago")
invitee = "resend_me@zulip.com"
self.assert_json_success(self.invite(invitee, ["Denmark"]))
prereg_user = PreregistrationUser.objects.get(email=invitee)
        # Verify that the original invite email was sent, then clear it from the outbox
self.check_sent_emails([invitee])
from django.core.mail import outbox
outbox.pop()
# Verify that the scheduled email exists.
scheduledemail_filter = ScheduledEmail.objects.filter(
address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER
)
self.assertEqual(scheduledemail_filter.count(), 1)
original_timestamp = scheduledemail_filter.values_list("scheduled_timestamp", flat=True)
# Resend invite
result = self.client_post("/json/invites/" + str(prereg_user.id) + "/resend")
self.assertEqual(
ScheduledEmail.objects.filter(
address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER
).count(),
1,
)
# Check that we have exactly one scheduled email, and that it is different
self.assertEqual(scheduledemail_filter.count(), 1)
self.assertNotEqual(
original_timestamp, scheduledemail_filter.values_list("scheduled_timestamp", flat=True)
)
self.assertEqual(result.status_code, 200)
error_result = self.client_post("/json/invites/" + str(9999) + "/resend")
self.assert_json_error(error_result, "No such invitation")
self.check_sent_emails([invitee])
def test_successful_member_resend_invitation(self) -> None:
"""A POST call from member a account to /json/invites/<ID>/resend
should send an invitation reminder email and delete any
scheduled invitation reminder email if they send the invite.
"""
self.login("hamlet")
user_profile = self.example_user("hamlet")
invitee = "resend_me@zulip.com"
self.assert_json_success(self.invite(invitee, ["Denmark"]))
        # Verify hamlet has only one invitation (members can only resend invitations they sent themselves).
invitation = PreregistrationUser.objects.filter(referred_by=user_profile)
self.assert_length(invitation, 1)
prereg_user = PreregistrationUser.objects.get(email=invitee)
        # Verify that the original invite email was sent, then clear it from the outbox
self.check_sent_emails([invitee])
from django.core.mail import outbox
outbox.pop()
# Verify that the scheduled email exists.
scheduledemail_filter = ScheduledEmail.objects.filter(
address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER
)
self.assertEqual(scheduledemail_filter.count(), 1)
original_timestamp = scheduledemail_filter.values_list("scheduled_timestamp", flat=True)
# Resend invite
result = self.client_post("/json/invites/" + str(prereg_user.id) + "/resend")
self.assertEqual(
ScheduledEmail.objects.filter(
address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER
).count(),
1,
)
# Check that we have exactly one scheduled email, and that it is different
self.assertEqual(scheduledemail_filter.count(), 1)
self.assertNotEqual(
original_timestamp, scheduledemail_filter.values_list("scheduled_timestamp", flat=True)
)
self.assertEqual(result.status_code, 200)
error_result = self.client_post("/json/invites/" + str(9999) + "/resend")
self.assert_json_error(error_result, "No such invitation")
self.check_sent_emails([invitee])
self.logout()
self.login("othello")
invitee = "TestOne@zulip.com"
prereg_user_one = PreregistrationUser(email=invitee, referred_by=user_profile)
prereg_user_one.save()
prereg_user = PreregistrationUser.objects.get(email=invitee)
error_result = self.client_post("/json/invites/" + str(prereg_user.id) + "/resend")
self.assert_json_error(error_result, "Must be an organization administrator")
def test_resend_owner_invitation(self) -> None:
self.login("desdemona")
invitee = "resend_owner@zulip.com"
self.assert_json_success(
self.invite(
invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["REALM_OWNER"]
)
)
self.check_sent_emails([invitee])
scheduledemail_filter = ScheduledEmail.objects.filter(
address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER
)
self.assertEqual(scheduledemail_filter.count(), 1)
original_timestamp = scheduledemail_filter.values_list("scheduled_timestamp", flat=True)
        # Test that only organization owners can resend an owner invitation.
self.login("iago")
prereg_user = PreregistrationUser.objects.get(email=invitee)
error_result = self.client_post("/json/invites/" + str(prereg_user.id) + "/resend")
self.assert_json_error(error_result, "Must be an organization owner")
self.login("desdemona")
result = self.client_post("/json/invites/" + str(prereg_user.id) + "/resend")
self.assert_json_success(result)
self.assertEqual(
ScheduledEmail.objects.filter(
address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER
).count(),
1,
)
# Check that we have exactly one scheduled email, and that it is different
self.assertEqual(scheduledemail_filter.count(), 1)
self.assertNotEqual(
original_timestamp, scheduledemail_filter.values_list("scheduled_timestamp", flat=True)
)
def test_accessing_invites_in_another_realm(self) -> None:
inviter = UserProfile.objects.exclude(realm=get_realm("zulip")).first()
prereg_user = PreregistrationUser.objects.create(
email="email", referred_by=inviter, realm=inviter.realm
)
self.login("iago")
error_result = self.client_post("/json/invites/" + str(prereg_user.id) + "/resend")
self.assert_json_error(error_result, "No such invitation")
error_result = self.client_delete("/json/invites/" + str(prereg_user.id))
self.assert_json_error(error_result, "No such invitation")
def test_prereg_user_status(self) -> None:
email = self.nonreg_email("alice")
password = "password"
realm = get_realm("zulip")
inviter = UserProfile.objects.filter(realm=realm).first()
prereg_user = PreregistrationUser.objects.create(
email=email, referred_by=inviter, realm=realm
)
confirmation_link = create_confirmation_link(prereg_user, Confirmation.USER_REGISTRATION)
registration_key = confirmation_link.split("/")[-1]
result = self.client_post(
"/accounts/register/",
{"key": registration_key, "from_confirmation": "1", "full_name": "alice"},
)
self.assertEqual(result.status_code, 200)
confirmation = Confirmation.objects.get(confirmation_key=registration_key)
prereg_user = confirmation.content_object
self.assertEqual(prereg_user.status, 0)
result = self.submit_reg_form_for_user(email, password, key=registration_key)
self.assertEqual(result.status_code, 302)
prereg_user = PreregistrationUser.objects.get(email=email, referred_by=inviter, realm=realm)
self.assertEqual(prereg_user.status, confirmation_settings.STATUS_ACTIVE)
user = get_user_by_delivery_email(email, realm)
self.assertIsNotNone(user)
self.assertEqual(user.delivery_email, email)
class InviteeEmailsParserTests(ZulipTestCase):
def setUp(self) -> None:
super().setUp()
self.email1 = "email1@zulip.com"
self.email2 = "email2@zulip.com"
self.email3 = "email3@zulip.com"
def test_if_emails_separated_by_commas_are_parsed_and_striped_correctly(self) -> None:
emails_raw = f"{self.email1} ,{self.email2}, {self.email3}"
expected_set = {self.email1, self.email2, self.email3}
self.assertEqual(get_invitee_emails_set(emails_raw), expected_set)
def test_if_emails_separated_by_newlines_are_parsed_and_striped_correctly(self) -> None:
emails_raw = f"{self.email1}\n {self.email2}\n {self.email3} "
expected_set = {self.email1, self.email2, self.email3}
self.assertEqual(get_invitee_emails_set(emails_raw), expected_set)
def test_if_emails_from_email_client_separated_by_newlines_are_parsed_correctly(self) -> None:
emails_raw = (
f"Email One <{self.email1}>\nEmailTwo<{self.email2}>\nEmail Three<{self.email3}>"
)
expected_set = {self.email1, self.email2, self.email3}
self.assertEqual(get_invitee_emails_set(emails_raw), expected_set)
def test_if_emails_in_mixed_style_are_parsed_correctly(self) -> None:
emails_raw = f"Email One <{self.email1}>,EmailTwo<{self.email2}>\n{self.email3}"
expected_set = {self.email1, self.email2, self.email3}
self.assertEqual(get_invitee_emails_set(emails_raw), expected_set)
class MultiuseInviteTest(ZulipTestCase):
def setUp(self) -> None:
super().setUp()
self.realm = get_realm("zulip")
self.realm.invite_required = True
self.realm.save()
def generate_multiuse_invite_link(
self, streams: Optional[List[Stream]] = None, date_sent: Optional[datetime.datetime] = None
) -> str:
invite = MultiuseInvite(realm=self.realm, referred_by=self.example_user("iago"))
invite.save()
if streams is not None:
invite.streams.set(streams)
if date_sent is None:
date_sent = timezone_now()
key = generate_key()
Confirmation.objects.create(
content_object=invite,
date_sent=date_sent,
confirmation_key=key,
type=Confirmation.MULTIUSE_INVITE,
)
return confirmation_url(key, self.realm, Confirmation.MULTIUSE_INVITE)
def check_user_able_to_register(self, email: str, invite_link: str) -> None:
password = "password"
result = self.client_post(invite_link, {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(email, password)
self.assertEqual(result.status_code, 302)
from django.core.mail import outbox
outbox.pop()
def test_valid_multiuse_link(self) -> None:
email1 = self.nonreg_email("test")
email2 = self.nonreg_email("test1")
email3 = self.nonreg_email("alice")
date_sent = timezone_now() - datetime.timedelta(
days=settings.INVITATION_LINK_VALIDITY_DAYS - 1
)
invite_link = self.generate_multiuse_invite_link(date_sent=date_sent)
self.check_user_able_to_register(email1, invite_link)
self.check_user_able_to_register(email2, invite_link)
self.check_user_able_to_register(email3, invite_link)
def test_expired_multiuse_link(self) -> None:
email = self.nonreg_email("newuser")
date_sent = timezone_now() - datetime.timedelta(days=settings.INVITATION_LINK_VALIDITY_DAYS)
invite_link = self.generate_multiuse_invite_link(date_sent=date_sent)
result = self.client_post(invite_link, {"email": email})
self.assertEqual(result.status_code, 200)
self.assert_in_response("The confirmation link has expired or been deactivated.", result)
def test_invalid_multiuse_link(self) -> None:
email = self.nonreg_email("newuser")
invite_link = "/join/invalid_key/"
result = self.client_post(invite_link, {"email": email})
self.assertEqual(result.status_code, 200)
self.assert_in_response("Whoops. The confirmation link is malformed.", result)
def test_invalid_multiuse_link_in_open_realm(self) -> None:
self.realm.invite_required = False
self.realm.save()
email = self.nonreg_email("newuser")
invite_link = "/join/invalid_key/"
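        # With invite_required disabled, registration should still work even though
        # the multiuse key is invalid; patch realm lookups so the flow targets this realm.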
with patch("zerver.views.registration.get_realm_from_request", return_value=self.realm):
with patch("zerver.views.registration.get_realm", return_value=self.realm):
self.check_user_able_to_register(email, invite_link)
def test_multiuse_link_with_specified_streams(self) -> None:
name1 = "newuser"
name2 = "bob"
email1 = self.nonreg_email(name1)
email2 = self.nonreg_email(name2)
stream_names = ["Rome", "Scotland", "Venice"]
streams = [get_stream(stream_name, self.realm) for stream_name in stream_names]
invite_link = self.generate_multiuse_invite_link(streams=streams)
self.check_user_able_to_register(email1, invite_link)
self.check_user_subscribed_only_to_streams(name1, streams)
stream_names = ["Rome", "Verona"]
streams = [get_stream(stream_name, self.realm) for stream_name in stream_names]
invite_link = self.generate_multiuse_invite_link(streams=streams)
self.check_user_able_to_register(email2, invite_link)
self.check_user_subscribed_only_to_streams(name2, streams)
def test_create_multiuse_link_api_call(self) -> None:
self.login("iago")
result = self.client_post("/json/invites/multiuse")
self.assert_json_success(result)
invite_link = result.json()["invite_link"]
self.check_user_able_to_register(self.nonreg_email("test"), invite_link)
def test_create_multiuse_link_with_specified_streams_api_call(self) -> None:
self.login("iago")
stream_names = ["Rome", "Scotland", "Venice"]
streams = [get_stream(stream_name, self.realm) for stream_name in stream_names]
stream_ids = [stream.id for stream in streams]
result = self.client_post(
"/json/invites/multiuse", {"stream_ids": orjson.dumps(stream_ids).decode()}
)
self.assert_json_success(result)
invite_link = result.json()["invite_link"]
self.check_user_able_to_register(self.nonreg_email("test"), invite_link)
self.check_user_subscribed_only_to_streams("test", streams)
def test_only_admin_can_create_multiuse_link_api_call(self) -> None:
self.login("iago")
# Only admins should be able to create multiuse invites even if
# invite_to_realm_policy is set to Realm.POLICY_MEMBERS_ONLY.
self.realm.invite_to_realm_policy = Realm.POLICY_MEMBERS_ONLY
self.realm.save()
result = self.client_post("/json/invites/multiuse")
self.assert_json_success(result)
invite_link = result.json()["invite_link"]
self.check_user_able_to_register(self.nonreg_email("test"), invite_link)
self.login("hamlet")
result = self.client_post("/json/invites/multiuse")
self.assert_json_error(result, "Must be an organization administrator")
def test_multiuse_link_for_inviting_as_owner(self) -> None:
self.login("iago")
result = self.client_post(
"/json/invites/multiuse",
{"invite_as": orjson.dumps(PreregistrationUser.INVITE_AS["REALM_OWNER"]).decode()},
)
self.assert_json_error(result, "Must be an organization owner")
self.login("desdemona")
result = self.client_post(
"/json/invites/multiuse",
{"invite_as": orjson.dumps(PreregistrationUser.INVITE_AS["REALM_OWNER"]).decode()},
)
self.assert_json_success(result)
invite_link = result.json()["invite_link"]
self.check_user_able_to_register(self.nonreg_email("test"), invite_link)
def test_create_multiuse_link_invalid_stream_api_call(self) -> None:
self.login("iago")
result = self.client_post(
"/json/invites/multiuse", {"stream_ids": orjson.dumps([54321]).decode()}
)
self.assert_json_error(result, "Invalid stream id 54321. No invites were sent.")
class EmailUnsubscribeTests(ZulipTestCase):
def test_error_unsubscribe(self) -> None:
# An invalid unsubscribe token "test123" produces an error.
result = self.client_get("/accounts/unsubscribe/missed_messages/test123")
self.assert_in_response("Unknown email unsubscribe request", result)
# An unknown message type "fake" produces an error.
user_profile = self.example_user("hamlet")
unsubscribe_link = one_click_unsubscribe_link(user_profile, "fake")
result = self.client_get(urllib.parse.urlparse(unsubscribe_link).path)
self.assert_in_response("Unknown email unsubscribe request", result)
def test_message_notification_emails_unsubscribe(self) -> None:
"""
We provide one-click unsubscribe links in message notification emails
that you can click even when logged out to update your
email notification settings.
"""
user_profile = self.example_user("hamlet")
user_profile.enable_offline_email_notifications = True
user_profile.save()
unsubscribe_link = one_click_unsubscribe_link(user_profile, "missed_messages")
result = self.client_get(urllib.parse.urlparse(unsubscribe_link).path)
self.assertEqual(result.status_code, 200)
user_profile.refresh_from_db()
self.assertFalse(user_profile.enable_offline_email_notifications)
def test_welcome_unsubscribe(self) -> None:
"""
We provide one-click unsubscribe links in welcome e-mails that you can
click even when logged out to stop receiving them.
"""
user_profile = self.example_user("hamlet")
# Simulate a new user signing up, which enqueues 2 welcome e-mails.
enqueue_welcome_emails(user_profile)
self.assertEqual(2, ScheduledEmail.objects.filter(users=user_profile).count())
# Simulate unsubscribing from the welcome e-mails.
unsubscribe_link = one_click_unsubscribe_link(user_profile, "welcome")
result = self.client_get(urllib.parse.urlparse(unsubscribe_link).path)
# The welcome email jobs are no longer scheduled.
self.assertEqual(result.status_code, 200)
self.assertEqual(0, ScheduledEmail.objects.filter(users=user_profile).count())
def test_digest_unsubscribe(self) -> None:
"""
We provide one-click unsubscribe links in digest e-mails that you can
click even when logged out to stop receiving them.
Unsubscribing from these emails also dequeues any digest email jobs that
have been queued.
"""
user_profile = self.example_user("hamlet")
self.assertTrue(user_profile.enable_digest_emails)
# Enqueue a fake digest email.
context = {
"name": "",
"realm_uri": "",
"unread_pms": [],
"hot_conversations": [],
"new_users": [],
"new_streams": {"plain": []},
"unsubscribe_link": "",
}
send_future_email(
"zerver/emails/digest",
user_profile.realm,
to_user_ids=[user_profile.id],
context=context,
)
self.assertEqual(1, ScheduledEmail.objects.filter(users=user_profile).count())
# Simulate unsubscribing from digest e-mails.
unsubscribe_link = one_click_unsubscribe_link(user_profile, "digest")
result = self.client_get(urllib.parse.urlparse(unsubscribe_link).path)
# The setting is toggled off, and scheduled jobs have been removed.
self.assertEqual(result.status_code, 200)
# Circumvent user_profile caching.
user_profile.refresh_from_db()
self.assertFalse(user_profile.enable_digest_emails)
self.assertEqual(0, ScheduledEmail.objects.filter(users=user_profile).count())
def test_login_unsubscribe(self) -> None:
"""
We provide one-click unsubscribe links in login
e-mails that you can click even when logged out to update your
email notification settings.
"""
user_profile = self.example_user("hamlet")
user_profile.enable_login_emails = True
user_profile.save()
unsubscribe_link = one_click_unsubscribe_link(user_profile, "login")
result = self.client_get(urllib.parse.urlparse(unsubscribe_link).path)
self.assertEqual(result.status_code, 200)
user_profile.refresh_from_db()
self.assertFalse(user_profile.enable_login_emails)
def test_marketing_unsubscribe(self) -> None:
"""
We provide one-click unsubscribe links in marketing e-mails that you can
click even when logged out to stop receiving them.
"""
user_profile = self.example_user("hamlet")
self.assertTrue(user_profile.enable_marketing_emails)
# Simulate unsubscribing from marketing e-mails.
unsubscribe_link = one_click_unsubscribe_link(user_profile, "marketing")
result = self.client_get(urllib.parse.urlparse(unsubscribe_link).path)
self.assertEqual(result.status_code, 200)
# Circumvent user_profile caching.
user_profile.refresh_from_db()
self.assertFalse(user_profile.enable_marketing_emails)
def test_marketing_unsubscribe_post(self) -> None:
"""
The List-Unsubscribe-Post header lets email clients trigger an
automatic unsubscription request via POST (see RFC 8058), so
test that too.
"""
user_profile = self.example_user("hamlet")
self.assertTrue(user_profile.enable_marketing_emails)
# Simulate unsubscribing from marketing e-mails.
unsubscribe_link = one_click_unsubscribe_link(user_profile, "marketing")
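        # Per RFC 8058, mail providers POST to this URL without any session or
        # CSRF token, so use a client with CSRF checks enforced to verify the
        # endpoint still accepts the request.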
client = Client(enforce_csrf_checks=True)
result = client.post(
urllib.parse.urlparse(unsubscribe_link).path, {"List-Unsubscribe": "One-Click"}
)
self.assertEqual(result.status_code, 200)
# Circumvent user_profile caching.
user_profile.refresh_from_db()
self.assertFalse(user_profile.enable_marketing_emails)
class RealmCreationTest(ZulipTestCase):
@override_settings(OPEN_REALM_CREATION=True)
def check_able_to_create_realm(self, email: str, password: str = "test") -> None:
notification_bot = get_system_bot(settings.NOTIFICATION_BOT)
signups_stream, _ = create_stream_if_needed(notification_bot.realm, "signups")
string_id = "zuliptest"
# Make sure the realm does not exist
with self.assertRaises(Realm.DoesNotExist):
get_realm(string_id)
# Create new realm with the email
result = self.client_post("/new/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/new/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Check confirmation email has the correct subject and body, extract
# confirmation link and visit it
confirmation_url = self.get_confirmation_url_from_outbox(
email,
email_subject_contains="Create your Zulip organization",
email_body_contains="You have requested a new Zulip organization",
)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(email, password, realm_subdomain=string_id)
self.assertEqual(result.status_code, 302)
self.assertTrue(
result["Location"].startswith("http://zuliptest.testserver/accounts/login/subdomain/")
)
# Make sure the realm is created
realm = get_realm(string_id)
self.assertEqual(realm.string_id, string_id)
user = get_user(email, realm)
self.assertEqual(user.realm, realm)
# Check that user is the owner.
self.assertEqual(user.role, UserProfile.ROLE_REALM_OWNER)
# Check defaults
self.assertEqual(realm.org_type, Realm.CORPORATE)
self.assertEqual(realm.emails_restricted_to_domains, False)
self.assertEqual(realm.invite_required, True)
# Check welcome messages
for stream_name, text, message_count in [
(Realm.DEFAULT_NOTIFICATION_STREAM_NAME, "with the topic", 3),
(Realm.INITIAL_PRIVATE_STREAM_NAME, "private stream", 1),
]:
stream = get_stream(stream_name, realm)
recipient = stream.recipient
messages = Message.objects.filter(recipient=recipient).order_by("date_sent")
self.assert_length(messages, message_count)
self.assertIn(text, messages[0].content)
# Check signup messages
recipient = signups_stream.recipient
messages = Message.objects.filter(recipient=recipient).order_by("id")
self.assert_length(messages, 2)
self.assertIn("Signups enabled", messages[0].content)
self.assertIn("signed up", messages[1].content)
self.assertEqual("zuliptest", messages[1].topic_name())
realm_creation_audit_log = RealmAuditLog.objects.get(
realm=realm, event_type=RealmAuditLog.REALM_CREATED
)
self.assertEqual(realm_creation_audit_log.acting_user, user)
self.assertEqual(realm_creation_audit_log.event_time, realm.date_created)
# Piggyback a little check for how we handle
# empty string_ids.
realm.string_id = ""
self.assertEqual(realm.display_subdomain, ".")
def test_create_realm_non_existing_email(self) -> None:
self.check_able_to_create_realm("user1@test.com")
def test_create_realm_existing_email(self) -> None:
self.check_able_to_create_realm("hamlet@zulip.com")
@override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
def test_create_realm_ldap_email(self) -> None:
self.init_default_ldap_database()
with self.settings(LDAP_EMAIL_ATTR="mail"):
self.check_able_to_create_realm(
"newuser_email@zulip.com", self.ldap_password("newuser_with_email")
)
def test_create_realm_as_system_bot(self) -> None:
result = self.client_post("/new/", {"email": "notification-bot@zulip.com"})
self.assertEqual(result.status_code, 200)
self.assert_in_response("notification-bot@zulip.com is reserved for system bots", result)
def test_create_realm_no_creation_key(self) -> None:
"""
Trying to create a realm without a creation_key should fail when
OPEN_REALM_CREATION is false.
"""
email = "user1@test.com"
with self.settings(OPEN_REALM_CREATION=False):
# Create new realm with the email, but no creation key.
result = self.client_post("/new/", {"email": email})
self.assertEqual(result.status_code, 200)
self.assert_in_response("New organization creation disabled", result)
@override_settings(OPEN_REALM_CREATION=True)
def test_create_realm_with_subdomain(self) -> None:
password = "test"
string_id = "zuliptest"
email = "user1@test.com"
realm_name = "Test"
# Make sure the realm does not exist
with self.assertRaises(Realm.DoesNotExist):
get_realm(string_id)
# Create new realm with the email
result = self.client_post("/new/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/new/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(
email, password, realm_subdomain=string_id, realm_name=realm_name
)
self.assertEqual(result.status_code, 302)
result = self.client_get(result.url, subdomain=string_id)
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "http://zuliptest.testserver")
# Make sure the realm is created
realm = get_realm(string_id)
self.assertEqual(realm.string_id, string_id)
self.assertEqual(get_user(email, realm).realm, realm)
self.assertEqual(realm.name, realm_name)
self.assertEqual(realm.subdomain, string_id)
@override_settings(OPEN_REALM_CREATION=True, FREE_TRIAL_DAYS=30)
def test_create_realm_during_free_trial(self) -> None:
password = "test"
string_id = "zuliptest"
email = "user1@test.com"
realm_name = "Test"
with self.assertRaises(Realm.DoesNotExist):
get_realm(string_id)
result = self.client_post("/new/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/new/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(
email, password, realm_subdomain=string_id, realm_name=realm_name
)
self.assertEqual(result.status_code, 302)
result = self.client_get(result.url, subdomain=string_id)
self.assertEqual(result.url, "http://zuliptest.testserver/upgrade/?onboarding=true")
result = self.client_get(result.url, subdomain=string_id)
self.assert_in_success_response(["Not ready to start your trial?"], result)
realm = get_realm(string_id)
self.assertEqual(realm.string_id, string_id)
self.assertEqual(get_user(email, realm).realm, realm)
self.assertEqual(realm.name, realm_name)
self.assertEqual(realm.subdomain, string_id)
@override_settings(OPEN_REALM_CREATION=True)
def test_create_two_realms(self) -> None:
"""
Verify correct behavior and PreregistrationUser handling when using
two pre-generated realm creation links to create two different realms.
"""
password = "test"
first_string_id = "zuliptest"
second_string_id = "zuliptest2"
email = "user1@test.com"
first_realm_name = "Test"
second_realm_name = "Test"
# Make sure the realms do not exist
with self.assertRaises(Realm.DoesNotExist):
get_realm(first_string_id)
with self.assertRaises(Realm.DoesNotExist):
get_realm(second_string_id)
# Now we pre-generate two realm creation links
result = self.client_post("/new/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/new/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
first_confirmation_url = self.get_confirmation_url_from_outbox(email)
self.assertEqual(PreregistrationUser.objects.filter(email=email, status=0).count(), 1)
# Get a second realm creation link.
result = self.client_post("/new/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/new/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
second_confirmation_url = self.get_confirmation_url_from_outbox(email)
self.assertNotEqual(first_confirmation_url, second_confirmation_url)
self.assertEqual(PreregistrationUser.objects.filter(email=email, status=0).count(), 2)
# Create and verify the first realm
result = self.client_get(first_confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(
email,
password,
realm_subdomain=first_string_id,
realm_name=first_realm_name,
key=first_confirmation_url.split("/")[-1],
)
self.assertEqual(result.status_code, 302)
# Make sure the realm is created
realm = get_realm(first_string_id)
self.assertEqual(realm.string_id, first_string_id)
self.assertEqual(realm.name, first_realm_name)
# One of the PreregistrationUsers should have been used up:
self.assertEqual(PreregistrationUser.objects.filter(email=email, status=0).count(), 1)
# Create and verify the second realm
result = self.client_get(second_confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(
email,
password,
realm_subdomain=second_string_id,
realm_name=second_realm_name,
key=second_confirmation_url.split("/")[-1],
)
self.assertEqual(result.status_code, 302)
# Make sure the realm is created
realm = get_realm(second_string_id)
self.assertEqual(realm.string_id, second_string_id)
self.assertEqual(realm.name, second_realm_name)
# The remaining PreregistrationUser should have been used up:
self.assertEqual(PreregistrationUser.objects.filter(email=email, status=0).count(), 0)
@override_settings(OPEN_REALM_CREATION=True)
def test_mailinator_signup(self) -> None:
result = self.client_post("/new/", {"email": "hi@mailinator.com"})
self.assert_in_response("Please use your real email address.", result)
@override_settings(OPEN_REALM_CREATION=True)
def test_subdomain_restrictions(self) -> None:
password = "test"
email = "user1@test.com"
realm_name = "Test"
result = self.client_post("/new/", {"email": email})
self.client_get(result["Location"])
confirmation_url = self.get_confirmation_url_from_outbox(email)
self.client_get(confirmation_url)
errors = {
"id": "length 3 or greater",
"-id": "cannot start or end with a",
"string-ID": "lowercase letters",
"string_id": "lowercase letters",
"stream": "unavailable",
"streams": "unavailable",
"about": "unavailable",
"abouts": "unavailable",
"zephyr": "unavailable",
}
for string_id, error_msg in errors.items():
result = self.submit_reg_form_for_user(
email, password, realm_subdomain=string_id, realm_name=realm_name
)
self.assert_in_response(error_msg, result)
# test valid subdomain
result = self.submit_reg_form_for_user(
email, password, realm_subdomain="a-0", realm_name=realm_name
)
self.assertEqual(result.status_code, 302)
self.assertTrue(result.url.startswith("http://a-0.testserver/accounts/login/subdomain/"))
@override_settings(OPEN_REALM_CREATION=True)
def test_create_realm_using_old_subdomain_of_a_realm(self) -> None:
realm = get_realm("zulip")
do_change_realm_subdomain(realm, "new-name", acting_user=None)
password = "test"
email = "user1@test.com"
realm_name = "Test"
result = self.client_post("/new/", {"email": email})
self.client_get(result["Location"])
confirmation_url = self.get_confirmation_url_from_outbox(email)
self.client_get(confirmation_url)
result = self.submit_reg_form_for_user(
email, password, realm_subdomain="zulip", realm_name=realm_name
)
self.assert_in_response("Subdomain unavailable. Please choose a different one.", result)
@override_settings(OPEN_REALM_CREATION=True)
def test_subdomain_restrictions_root_domain(self) -> None:
password = "test"
email = "user1@test.com"
realm_name = "Test"
result = self.client_post("/new/", {"email": email})
self.client_get(result["Location"])
confirmation_url = self.get_confirmation_url_from_outbox(email)
self.client_get(confirmation_url)
# test root domain will fail with ROOT_DOMAIN_LANDING_PAGE
with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
result = self.submit_reg_form_for_user(
email, password, realm_subdomain="", realm_name=realm_name
)
self.assert_in_response("unavailable", result)
# test valid use of root domain
result = self.submit_reg_form_for_user(
email, password, realm_subdomain="", realm_name=realm_name
)
self.assertEqual(result.status_code, 302)
self.assertTrue(result.url.startswith("http://testserver/accounts/login/subdomain/"))
@override_settings(OPEN_REALM_CREATION=True)
def test_subdomain_restrictions_root_domain_option(self) -> None:
password = "test"
email = "user1@test.com"
realm_name = "Test"
result = self.client_post("/new/", {"email": email})
self.client_get(result["Location"])
confirmation_url = self.get_confirmation_url_from_outbox(email)
self.client_get(confirmation_url)
# test root domain will fail with ROOT_DOMAIN_LANDING_PAGE
with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
result = self.submit_reg_form_for_user(
email,
password,
realm_subdomain="abcdef",
realm_in_root_domain="true",
realm_name=realm_name,
)
self.assert_in_response("unavailable", result)
# test valid use of root domain
result = self.submit_reg_form_for_user(
email,
password,
realm_subdomain="abcdef",
realm_in_root_domain="true",
realm_name=realm_name,
)
self.assertEqual(result.status_code, 302)
self.assertTrue(result.url.startswith("http://testserver/accounts/login/subdomain/"))
def test_is_root_domain_available(self) -> None:
self.assertTrue(is_root_domain_available())
with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
self.assertFalse(is_root_domain_available())
realm = get_realm("zulip")
realm.string_id = Realm.SUBDOMAIN_FOR_ROOT_DOMAIN
realm.save()
self.assertFalse(is_root_domain_available())
def test_subdomain_check_api(self) -> None:
result = self.client_get("/json/realm/subdomain/zulip")
self.assert_in_success_response(
["Subdomain unavailable. Please choose a different one."], result
)
result = self.client_get("/json/realm/subdomain/zu_lip")
self.assert_in_success_response(
["Subdomain can only have lowercase letters, numbers, and '-'s."], result
)
result = self.client_get("/json/realm/subdomain/hufflepuff")
self.assert_in_success_response(["available"], result)
self.assert_not_in_success_response(["unavailable"], result)
def test_subdomain_check_management_command(self) -> None:
# Short names should not work, even with the flag
with self.assertRaises(ValidationError):
check_subdomain_available("aa")
with self.assertRaises(ValidationError):
check_subdomain_available("aa", allow_reserved_subdomain=True)
# Malformed names should never work
with self.assertRaises(ValidationError):
check_subdomain_available("-ba_d-")
with self.assertRaises(ValidationError):
check_subdomain_available("-ba_d-", allow_reserved_subdomain=True)
with patch("zerver.lib.name_restrictions.is_reserved_subdomain", return_value=False):
# Existing realms should never work even if they are not reserved keywords
with self.assertRaises(ValidationError):
check_subdomain_available("zulip")
with self.assertRaises(ValidationError):
check_subdomain_available("zulip", allow_reserved_subdomain=True)
# Reserved ones should only work with the flag
with self.assertRaises(ValidationError):
check_subdomain_available("stream")
check_subdomain_available("stream", allow_reserved_subdomain=True)
class UserSignUpTest(InviteUserBase):
def _assert_redirected_to(self, result: HttpResponse, url: str) -> None:
self.assertEqual(result.status_code, 302)
self.assertEqual(result["LOCATION"], url)
def verify_signup(
self,
*,
email: str = "newguy@zulip.com",
password: Optional[str] = "newpassword",
full_name: str = "New user's name",
realm: Optional[Realm] = None,
subdomain: Optional[str] = None,
) -> UserProfile:
"""Common test function for signup tests. It is a goal to use this
common function for all signup tests to avoid code duplication; doing
so will likely require adding new parameters."""
if realm is None: # nocoverage
realm = get_realm("zulip")
client_kwargs: Dict[str, Any] = {}
if subdomain:
client_kwargs["subdomain"] = subdomain
result = self.client_post("/accounts/home/", {"email": email}, **client_kwargs)
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"], **client_kwargs)
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url, **client_kwargs)
self.assertEqual(result.status_code, 200)
# Pick a password and agree to the ToS. This should create our
# account, log us in, and redirect to the app.
result = self.submit_reg_form_for_user(
email, password, full_name=full_name, **client_kwargs
)
# Verify that we were served a redirect to the app.
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "http://lear.testserver/")
# Verify that we successfully logged in.
user_profile = get_user(email, realm)
self.assert_logged_in_user_id(user_profile.id)
return user_profile
def test_bad_email_configuration_for_accounts_home(self) -> None:
"""
Make sure we redirect for EmailNotDeliveredException.
"""
email = self.nonreg_email("newguy")
smtp_mock = patch(
"zerver.views.registration.send_confirm_registration_email",
side_effect=EmailNotDeliveredException,
)
with smtp_mock, self.assertLogs(level="ERROR") as m:
result = self.client_post("/accounts/home/", {"email": email})
self._assert_redirected_to(result, "/config-error/smtp")
self.assertEqual(m.output, ["ERROR:root:Error in accounts_home"])
def test_bad_email_configuration_for_create_realm(self) -> None:
"""
Make sure we redirect for EmailNotDeliveredException.
"""
email = self.nonreg_email("newguy")
smtp_mock = patch(
"zerver.views.registration.send_confirm_registration_email",
side_effect=EmailNotDeliveredException,
)
with smtp_mock, self.assertLogs(level="ERROR") as m:
result = self.client_post("/new/", {"email": email})
self._assert_redirected_to(result, "/config-error/smtp")
self.assertEqual(m.output, ["ERROR:root:Error in create_realm"])
def test_user_default_language_and_timezone(self) -> None:
"""
Check if the default language of a new user is the default language
of the realm.
"""
email = self.nonreg_email("newguy")
password = "newpassword"
timezone = "US/Mountain"
realm = get_realm("zulip")
do_set_realm_property(realm, "default_language", "de", acting_user=None)
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
# Pick a password and agree to the ToS.
result = self.submit_reg_form_for_user(email, password, timezone=timezone)
self.assertEqual(result.status_code, 302)
user_profile = self.nonreg_user("newguy")
self.assertEqual(user_profile.default_language, realm.default_language)
self.assertEqual(user_profile.timezone, timezone)
from django.core.mail import outbox
outbox.pop()
def test_default_twenty_four_hour_time(self) -> None:
"""
Check if the default twenty_four_hour_time setting of a new user
is the default twenty_four_hour_time of the realm.
"""
email = self.nonreg_email("newguy")
password = "newpassword"
realm = get_realm("zulip")
do_set_realm_property(realm, "default_twenty_four_hour_time", True, acting_user=None)
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(email, password)
self.assertEqual(result.status_code, 302)
user_profile = self.nonreg_user("newguy")
self.assertEqual(user_profile.twenty_four_hour_time, realm.default_twenty_four_hour_time)
def test_signup_already_active(self) -> None:
"""
Check if signing up with an active email redirects to a login page.
"""
email = self.example_email("hamlet")
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertIn("login", result["Location"])
result = self.client_get(result.url)
self.assert_in_response("You've already registered", result)
def test_signup_system_bot(self) -> None:
email = "notification-bot@zulip.com"
result = self.client_post("/accounts/home/", {"email": email}, subdomain="lear")
self.assertEqual(result.status_code, 302)
self.assertIn("login", result["Location"])
result = self.client_get(result.url)
# This is not really the right error message, but at least it's an error.
self.assert_in_response("You've already registered", result)
def test_signup_existing_email(self) -> None:
"""
Check if signing up with an email used in another realm succeeds.
"""
email = self.example_email("hamlet")
self.verify_signup(email=email, realm=get_realm("lear"), subdomain="lear")
self.assertEqual(UserProfile.objects.filter(delivery_email=email).count(), 2)
def test_signup_invalid_name(self) -> None:
"""
Check if an invalid name during signup is handled properly.
"""
email = "newguy@zulip.com"
password = "newpassword"
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
# Pick a password and agree to the ToS.
result = self.submit_reg_form_for_user(email, password, full_name="<invalid>")
self.assert_in_success_response(["Invalid characters in name!"], result)
# Verify that the user is asked for name and password
self.assert_in_success_response(["id_password", "id_full_name"], result)
def test_signup_without_password(self) -> None:
"""
Check if signing up without a password works properly when
password_auth_enabled is False.
"""
email = self.nonreg_email("newuser")
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
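# With password authentication disabled, the registration form can be submitted without a password.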
with patch("zerver.views.registration.password_auth_enabled", return_value=False):
result = self.client_post(
"/accounts/register/",
{"full_name": "New User", "key": find_key_by_email(email), "terms": True},
)
# User should now be logged in.
self.assertEqual(result.status_code, 302)
user_profile = self.nonreg_user("newuser")
self.assert_logged_in_user_id(user_profile.id)
def test_signup_without_full_name(self) -> None:
"""
Check if signing up without a full name redirects to a registration
form.
"""
email = "newguy@zulip.com"
password = "newpassword"
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.client_post(
"/accounts/register/",
{
"password": password,
"key": find_key_by_email(email),
"terms": True,
"from_confirmation": "1",
},
)
self.assert_in_success_response(["We just need you to do one last thing."], result)
# Verify that the user is asked for name and password
self.assert_in_success_response(["id_password", "id_full_name"], result)
def test_signup_email_message_contains_org_header(self) -> None:
email = "newguy@zulip.com"
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
from django.core.mail import outbox
self.assertEqual(outbox[0].extra_headers["List-Id"], "Zulip Dev <zulip.testserver>")
def test_signup_with_full_name(self) -> None:
"""
Check if signing up with a full name works properly.
"""
email = "newguy@zulip.com"
password = "newpassword"
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.client_post(
"/accounts/register/",
{
"password": password,
"key": find_key_by_email(email),
"terms": True,
"full_name": "New Guy",
"from_confirmation": "1",
},
)
self.assert_in_success_response(["We just need you to do one last thing."], result)
def test_signup_with_weak_password(self) -> None:
"""
Check that signing up with a weak password is rejected and no account
is created.
"""
email = "newguy@zulip.com"
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
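# Tighten the password policy so that the short password "easy" is rejected as too weak.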
with self.settings(PASSWORD_MIN_LENGTH=6, PASSWORD_MIN_GUESSES=1000):
result = self.client_post(
"/accounts/register/",
{
"password": "easy",
"key": find_key_by_email(email),
"terms": True,
"full_name": "New Guy",
"from_confirmation": "1",
},
)
self.assert_in_success_response(["We just need you to do one last thing."], result)
result = self.submit_reg_form_for_user(email, "easy", full_name="New Guy")
self.assert_in_success_response(["The password is too weak."], result)
with self.assertRaises(UserProfile.DoesNotExist):
# Account wasn't created.
get_user(email, get_realm("zulip"))
def test_signup_with_default_stream_group(self) -> None:
# Check if the user is subscribed to the streams of the default
# stream group as well as the default streams.
email = self.nonreg_email("newguy")
password = "newpassword"
realm = get_realm("zulip")
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
result = self.client_get(result["Location"])
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
default_streams = []
existing_default_streams = DefaultStream.objects.filter(realm=realm)
self.assert_length(existing_default_streams, 1)
self.assertEqual(existing_default_streams[0].stream.name, "Verona")
default_streams.append(existing_default_streams[0].stream)
for stream_name in ["venice", "rome"]:
stream = get_stream(stream_name, realm)
do_add_default_stream(stream)
default_streams.append(stream)
group1_streams = []
for stream_name in ["scotland", "denmark"]:
stream = get_stream(stream_name, realm)
group1_streams.append(stream)
do_create_default_stream_group(realm, "group 1", "group 1 description", group1_streams)
result = self.submit_reg_form_for_user(email, password, default_stream_groups=["group 1"])
self.check_user_subscribed_only_to_streams("newguy", default_streams + group1_streams)
def test_signup_two_confirmation_links(self) -> None:
email = self.nonreg_email("newguy")
password = "newpassword"
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
result = self.client_get(result["Location"])
first_confirmation_url = self.get_confirmation_url_from_outbox(email)
first_confirmation_key = find_key_by_email(email)
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
result = self.client_get(result["Location"])
second_confirmation_url = self.get_confirmation_url_from_outbox(email)
# Sanity check:
self.assertNotEqual(first_confirmation_url, second_confirmation_url)
# Register the account (this will use the second confirmation url):
result = self.submit_reg_form_for_user(
email, password, full_name="New Guy", from_confirmation="1"
)
self.assert_in_success_response(
["We just need you to do one last thing.", "New Guy", email], result
)
result = self.submit_reg_form_for_user(email, password, full_name="New Guy")
user_profile = UserProfile.objects.get(delivery_email=email)
self.assertEqual(user_profile.delivery_email, email)
# Now try to register using the first confirmation URL:
result = self.client_get(first_confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.client_post(
"/accounts/register/",
{
"password": password,
"key": first_confirmation_key,
"terms": True,
"full_name": "New Guy",
"from_confirmation": "1",
},
)
# Error page should be displayed
self.assertEqual(result.status_code, 404)
self.assert_in_response("The registration link has expired or is not valid.", result)
def test_signup_with_multiple_default_stream_groups(self) -> None:
# Check if the user is subscribed to the streams of the default
# stream groups as well as the default streams.
email = self.nonreg_email("newguy")
password = "newpassword"
realm = get_realm("zulip")
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
result = self.client_get(result["Location"])
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
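# Start from a clean slate of default streams so the expected subscriptions are predictable.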
DefaultStream.objects.filter(realm=realm).delete()
default_streams = []
for stream_name in ["venice", "verona"]:
stream = get_stream(stream_name, realm)
do_add_default_stream(stream)
default_streams.append(stream)
group1_streams = []
for stream_name in ["scotland", "denmark"]:
stream = get_stream(stream_name, realm)
group1_streams.append(stream)
do_create_default_stream_group(realm, "group 1", "group 1 description", group1_streams)
group2_streams = []
for stream_name in ["scotland", "rome"]:
stream = get_stream(stream_name, realm)
group2_streams.append(stream)
do_create_default_stream_group(realm, "group 2", "group 2 description", group2_streams)
result = self.submit_reg_form_for_user(
email, password, default_stream_groups=["group 1", "group 2"]
)
self.check_user_subscribed_only_to_streams(
"newguy", list(set(default_streams + group1_streams + group2_streams))
)
def test_signup_without_user_settings_from_another_realm(self) -> None:
hamlet_in_zulip = self.example_user("hamlet")
email = hamlet_in_zulip.delivery_email
password = "newpassword"
subdomain = "lear"
realm = get_realm("lear")
# Make an account in the Zulip realm, but we're not copying from there.
hamlet_in_zulip.left_side_userlist = True
hamlet_in_zulip.default_language = "de"
hamlet_in_zulip.emojiset = "twitter"
hamlet_in_zulip.high_contrast_mode = True
hamlet_in_zulip.enter_sends = True
hamlet_in_zulip.tutorial_status = UserProfile.TUTORIAL_FINISHED
hamlet_in_zulip.save()
result = self.client_post("/accounts/home/", {"email": email}, subdomain=subdomain)
self.assertEqual(result.status_code, 302)
result = self.client_get(result["Location"], subdomain=subdomain)
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url, subdomain=subdomain)
self.assertEqual(result.status_code, 200)
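# An empty source_realm_id means the user declines to import settings from an existing account.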
result = self.submit_reg_form_for_user(
email, password, source_realm_id="", HTTP_HOST=subdomain + ".testserver"
)
hamlet = get_user(self.example_email("hamlet"), realm)
self.assertEqual(hamlet.left_side_userlist, False)
self.assertEqual(hamlet.default_language, "en")
self.assertEqual(hamlet.emojiset, "google-blob")
self.assertEqual(hamlet.high_contrast_mode, False)
self.assertEqual(hamlet.enable_stream_audible_notifications, False)
self.assertEqual(hamlet.enter_sends, False)
self.assertEqual(hamlet.tutorial_status, UserProfile.TUTORIAL_WAITING)
def test_signup_with_user_settings_from_another_realm(self) -> None:
hamlet_in_zulip = self.example_user("hamlet")
email = hamlet_in_zulip.delivery_email
password = "newpassword"
subdomain = "lear"
lear_realm = get_realm("lear")
self.login("hamlet")
with get_test_image_file("img.png") as image_file:
self.client_post("/json/users/me/avatar", {"file": image_file})
hamlet_in_zulip.refresh_from_db()
hamlet_in_zulip.left_side_userlist = True
hamlet_in_zulip.default_language = "de"
hamlet_in_zulip.emojiset = "twitter"
hamlet_in_zulip.high_contrast_mode = True
hamlet_in_zulip.enter_sends = True
hamlet_in_zulip.tutorial_status = UserProfile.TUTORIAL_FINISHED
hamlet_in_zulip.save()
result = self.client_post("/accounts/home/", {"email": email}, subdomain=subdomain)
self.assertEqual(result.status_code, 302)
result = self.client_get(result["Location"], subdomain=subdomain)
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url, subdomain=subdomain)
self.assertEqual(result.status_code, 200)
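# Submitting with from_confirmation=1 renders the registration form, which should offer importing settings from the existing Zulip Dev account.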
result = self.client_post(
"/accounts/register/",
{"password": password, "key": find_key_by_email(email), "from_confirmation": "1"},
subdomain=subdomain,
)
self.assert_in_success_response(
[
"Import settings from existing Zulip account",
"selected >\n Zulip Dev",
"We just need you to do one last thing.",
],
result,
)
result = self.submit_reg_form_for_user(
email,
password,
source_realm_id=str(hamlet_in_zulip.realm.id),
HTTP_HOST=subdomain + ".testserver",
)
hamlet_in_lear = get_user(email, lear_realm)
self.assertEqual(hamlet_in_lear.left_side_userlist, True)
self.assertEqual(hamlet_in_lear.default_language, "de")
self.assertEqual(hamlet_in_lear.emojiset, "twitter")
self.assertEqual(hamlet_in_lear.high_contrast_mode, True)
self.assertEqual(hamlet_in_lear.enter_sends, True)
self.assertEqual(hamlet_in_lear.enable_stream_audible_notifications, False)
self.assertEqual(hamlet_in_lear.tutorial_status, UserProfile.TUTORIAL_FINISHED)
zulip_path_id = avatar_disk_path(hamlet_in_zulip)
lear_path_id = avatar_disk_path(hamlet_in_lear)
with open(zulip_path_id, "rb") as f:
zulip_avatar_bits = f.read()
with open(lear_path_id, "rb") as f:
lear_avatar_bits = f.read()
self.assertGreater(len(zulip_avatar_bits), 500)
self.assertEqual(zulip_avatar_bits, lear_avatar_bits)
def test_signup_invalid_subdomain(self) -> None:
"""
Check if attempting to authenticate to the wrong subdomain logs an
error and redirects.
"""
email = "newuser@zulip.com"
password = "newpassword"
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
def invalid_subdomain(**kwargs: Any) -> Any:
return_data = kwargs.get("return_data", {})
return_data["invalid_subdomain"] = True
with patch("zerver.views.registration.authenticate", side_effect=invalid_subdomain):
with self.assertLogs(level="ERROR") as m:
result = self.client_post(
"/accounts/register/",
{
"password": password,
"full_name": "New User",
"key": find_key_by_email(email),
"terms": True,
},
)
self.assertEqual(
m.output,
["ERROR:root:Subdomain mismatch in registration zulip: newuser@zulip.com"],
)
self.assertEqual(result.status_code, 302)
def test_signup_using_invalid_subdomain_preserves_state_of_form(self) -> None:
"""
Check that when we give an invalid subdomain and submit the registration form,
all the values in the form are preserved.
"""
realm = get_realm("zulip")
password = "test"
email = self.example_email("iago")
realm_name = "Test"
result = self.client_post("/new/", {"email": email})
self.client_get(result["Location"])
confirmation_url = self.get_confirmation_url_from_outbox(email)
self.client_get(confirmation_url)
result = self.submit_reg_form_for_user(
email,
password,
# Subdomain is already used, by construction.
realm_subdomain=realm.string_id,
realm_name=realm_name,
source_realm_id=str(realm.id),
)
self.assert_in_success_response(
[
"Subdomain unavailable. Please choose a different one.",
"Zulip Dev\n",
'value="test"',
'name="realm_name"',
],
result,
)
def test_replace_subdomain_in_confirmation_link(self) -> None:
"""
Check that manually changing the subdomain in a registration
confirmation link doesn't allow you to register to a different realm.
"""
email = "newuser@zulip.com"
self.client_post("/accounts/home/", {"email": email})
result = self.client_post(
"/accounts/register/",
{
"password": "password",
"key": find_key_by_email(email),
"terms": True,
"full_name": "New User",
"from_confirmation": "1",
},
subdomain="zephyr",
)
self.assert_in_success_response(["We couldn't find your confirmation link"], result)
def test_signup_to_realm_on_manual_license_plan(self) -> None:
realm = get_realm("zulip")
denmark_stream = get_stream("Denmark", realm)
realm.signup_notifications_stream = denmark_stream
realm.save(update_fields=["signup_notifications_stream"])
_, ledger = self.subscribe_realm_to_monthly_plan_on_manual_license_management(realm, 5, 5)
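# Put the realm on a manual-license plan with only 5 licenses, so there is no room for new members to join.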
with self.settings(BILLING_ENABLED=True):
form = HomepageForm({"email": self.nonreg_email("test")}, realm=realm)
self.assertIn(
"New members cannot join this organization because all Zulip licenses",
form.errors["email"][0],
)
last_message = Message.objects.last()
self.assertIn(
f"A new member ({self.nonreg_email('test')}) was unable to join your organization because all Zulip",
last_message.content,
)
self.assertEqual(last_message.recipient.type_id, denmark_stream.id)
ledger.licenses_at_next_renewal = 50
ledger.save(update_fields=["licenses_at_next_renewal"])
with self.settings(BILLING_ENABLED=True):
form = HomepageForm({"email": self.nonreg_email("test")}, realm=realm)
self.assertIn(
"New members cannot join this organization because all Zulip licenses",
form.errors["email"][0],
)
ledger.licenses = 50
ledger.save(update_fields=["licenses"])
with self.settings(BILLING_ENABLED=True):
form = HomepageForm({"email": self.nonreg_email("test")}, realm=realm)
self.assertEqual(form.errors, {})
def test_failed_signup_due_to_restricted_domain(self) -> None:
realm = get_realm("zulip")
do_set_realm_property(realm, "invite_required", False, acting_user=None)
do_set_realm_property(realm, "emails_restricted_to_domains", True, acting_user=None)
email = "user@acme.com"
form = HomepageForm({"email": email}, realm=realm)
self.assertIn(
f"Your email address, {email}, is not in one of the domains", form.errors["email"][0]
)
def test_failed_signup_due_to_disposable_email(self) -> None:
realm = get_realm("zulip")
realm.emails_restricted_to_domains = False
realm.disallow_disposable_email_addresses = True
realm.save()
email = "abc@mailnator.com"
form = HomepageForm({"email": email}, realm=realm)
self.assertIn("Please use your real email address", form.errors["email"][0])
def test_failed_signup_due_to_email_containing_plus(self) -> None:
realm = get_realm("zulip")
realm.emails_restricted_to_domains = True
realm.save()
email = "iago+label@zulip.com"
form = HomepageForm({"email": email}, realm=realm)
self.assertIn(
"Email addresses containing + are not allowed in this organization.",
form.errors["email"][0],
)
def test_failed_signup_due_to_invite_required(self) -> None:
realm = get_realm("zulip")
realm.invite_required = True
realm.save()
email = "user@zulip.com"
form = HomepageForm({"email": email}, realm=realm)
self.assertIn(f"Please request an invite for {email} from", form.errors["email"][0])
def test_failed_signup_due_to_nonexistent_realm(self) -> None:
email = "user@acme.com"
form = HomepageForm({"email": email}, realm=None)
self.assertIn(
f"organization you are trying to join using {email} does not exist",
form.errors["email"][0],
)
def test_access_signup_page_in_root_domain_without_realm(self) -> None:
result = self.client_get("/register", subdomain="", follow=True)
self.assert_in_success_response(["Find your Zulip accounts"], result)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.ZulipDummyBackend",
)
)
def test_ldap_registration_from_confirmation(self) -> None:
password = self.ldap_password("newuser")
email = "newuser@zulip.com"
subdomain = "zulip"
self.init_default_ldap_database()
ldap_user_attr_map = {"full_name": "cn"}
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
from django.core.mail import outbox
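# Scan the outbox (newest first) for the confirmation email sent to this address.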
for message in reversed(outbox):
if email in message.to:
match = re.search(settings.EXTERNAL_HOST + r"(\S+)>", message.body)
assert match is not None
[confirmation_url] = match.groups()
break
else:
raise AssertionError("Couldn't find a confirmation email.")
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
# Full name should be set from LDAP
result = self.submit_reg_form_for_user(
email,
password,
full_name="Ignore",
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assert_in_success_response(
[
"We just need you to do one last thing.",
"New LDAP fullname",
"newuser@zulip.com",
],
result,
)
# Verify that the user is asked for name
self.assert_in_success_response(["id_full_name"], result)
# Verify that the user is asked for their LDAP/Active Directory password.
self.assert_in_success_response(
["Enter your LDAP/Active Directory password.", "ldap-password"], result
)
self.assert_not_in_success_response(["id_password"], result)
# Test the TypeError exception handler
with patch(
"zproject.backends.ZulipLDAPAuthBackendBase.get_mapped_name", side_effect=TypeError
):
result = self.submit_reg_form_for_user(
email,
password,
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assert_in_success_response(
["We just need you to do one last thing.", "newuser@zulip.com"], result
)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.EmailAuthBackend",
"zproject.backends.ZulipLDAPUserPopulator",
"zproject.backends.ZulipDummyBackend",
)
)
def test_ldap_populate_only_registration_from_confirmation(self) -> None:
password = self.ldap_password("newuser")
email = "newuser@zulip.com"
subdomain = "zulip"
self.init_default_ldap_database()
ldap_user_attr_map = {"full_name": "cn"}
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
from django.core.mail import outbox
for message in reversed(outbox):
if email in message.to:
match = re.search(settings.EXTERNAL_HOST + r"(\S+)>", message.body)
assert match is not None
[confirmation_url] = match.groups()
break
else:
raise AssertionError("Couldn't find a confirmation email.")
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_BIND_PASSWORD="",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
AUTH_LDAP_USER_DN_TEMPLATE="uid=%(user)s,ou=users,dc=zulip,dc=com",
):
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
# Full name should be set from LDAP
result = self.submit_reg_form_for_user(
email,
password,
full_name="Ignore",
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assert_in_success_response(
[
"We just need you to do one last thing.",
"New LDAP fullname",
"newuser@zulip.com",
],
result,
)
# Verify that the user is asked for name
self.assert_in_success_response(["id_full_name"], result)
# Verify that the user is NOT asked for their LDAP/Active Directory password.
# LDAP is not configured for authentication in this test.
self.assert_not_in_success_response(
["Enter your LDAP/Active Directory password.", "ldap-password"], result
)
# If we were using e.g. the SAML auth backend, there
# shouldn't be a password prompt, but since it uses the
# EmailAuthBackend, there should be a password field here.
self.assert_in_success_response(["id_password"], result)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.ZulipDummyBackend",
)
)
def test_ldap_registration_end_to_end(self) -> None:
password = self.ldap_password("newuser")
email = "newuser@zulip.com"
subdomain = "zulip"
self.init_default_ldap_database()
ldap_user_attr_map = {"full_name": "cn"}
full_name = "New LDAP fullname"
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
# Click confirmation link
result = self.submit_reg_form_for_user(
email,
password,
full_name="Ignore",
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
# Full name should be set from LDAP
self.assert_in_success_response(
["We just need you to do one last thing.", full_name, "newuser@zulip.com"], result
)
# Submit the final form with the wrong password.
result = self.submit_reg_form_for_user(
email,
"wrongpassword",
full_name=full_name,
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
# Didn't create an account
with self.assertRaises(UserProfile.DoesNotExist):
user_profile = UserProfile.objects.get(delivery_email=email)
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/login/?email=newuser%40zulip.com")
# Submit the final form with the correct password.
result = self.submit_reg_form_for_user(
email,
password,
full_name=full_name,
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
user_profile = UserProfile.objects.get(delivery_email=email)
# Name comes from form which was set by LDAP.
self.assertEqual(user_profile.full_name, full_name)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.ZulipDummyBackend",
)
)
def test_ldap_split_full_name_mapping(self) -> None:
self.init_default_ldap_database()
ldap_user_attr_map = {"first_name": "sn", "last_name": "cn"}
subdomain = "zulip"
email = "newuser_splitname@zulip.com"
password = self.ldap_password("newuser_splitname")
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
# Click confirmation link
result = self.submit_reg_form_for_user(
email,
password,
full_name="Ignore",
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
# Test split name mapping.
result = self.submit_reg_form_for_user(
email,
password,
full_name="Ignore",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
user_profile = UserProfile.objects.get(delivery_email=email)
# Name is built from the LDAP first/last name attributes, not the form.
self.assertEqual(user_profile.full_name, "First Last")
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.ZulipDummyBackend",
)
)
def test_ldap_auto_registration_on_login(self) -> None:
"""The most common way for LDAP authentication to be used is with a
server that doesn't have a terms-of-service required, in which
case we offer a complete single-sign-on experience (where the
user just enters their LDAP username and password, and their
account is created if it doesn't already exist).
This test verifies that flow.
"""
password = self.ldap_password("newuser")
email = "newuser@zulip.com"
subdomain = "zulip"
self.init_default_ldap_database()
ldap_user_attr_map = {
"full_name": "cn",
"custom_profile_field__phone_number": "homePhone",
}
full_name = "New LDAP fullname"
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
self.login_with_return(email, password, HTTP_HOST=subdomain + ".testserver")
user_profile = UserProfile.objects.get(delivery_email=email)
# Name comes from the LDAP directory.
self.assertEqual(user_profile.full_name, full_name)
# Test custom profile fields are properly synced.
phone_number_field = CustomProfileField.objects.get(
realm=user_profile.realm, name="Phone number"
)
phone_number_field_value = CustomProfileFieldValue.objects.get(
user_profile=user_profile, field=phone_number_field
)
self.assertEqual(phone_number_field_value.value, "a-new-number")
@override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
def test_ldap_auto_registration_on_login_invalid_email_in_directory(self) -> None:
password = self.ldap_password("newuser_with_email")
username = "newuser_with_email"
subdomain = "zulip"
self.init_default_ldap_database()
self.change_ldap_user_attr("newuser_with_email", "mail", "thisisnotavalidemail")
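# Break the user's mail attribute so the LDAP directory returns an invalid email address.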
with self.settings(
LDAP_EMAIL_ATTR="mail",
), self.assertLogs("zulip.auth.ldap", "WARNING") as mock_log:
original_user_count = UserProfile.objects.count()
self.login_with_return(username, password, HTTP_HOST=subdomain + ".testserver")
# Verify that the process failed as intended - no UserProfile is created.
self.assertEqual(UserProfile.objects.count(), original_user_count)
self.assertEqual(
mock_log.output,
["WARNING:zulip.auth.ldap:thisisnotavalidemail is not a valid email address."],
)
@override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
def test_ldap_registration_multiple_realms(self) -> None:
password = self.ldap_password("newuser")
email = "newuser@zulip.com"
self.init_default_ldap_database()
ldap_user_attr_map = {
"full_name": "cn",
}
do_create_realm("test", "test", emails_restricted_to_domains=False)
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
subdomain = "zulip"
self.login_with_return(email, password, HTTP_HOST=subdomain + ".testserver")
user_profile = UserProfile.objects.get(delivery_email=email, realm=get_realm("zulip"))
self.logout()
# Test registration in another realm works.
subdomain = "test"
self.login_with_return(email, password, HTTP_HOST=subdomain + ".testserver")
user_profile = UserProfile.objects.get(delivery_email=email, realm=get_realm("test"))
self.assertEqual(user_profile.delivery_email, email)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.ZulipDummyBackend",
)
)
def test_ldap_registration_when_names_changes_are_disabled(self) -> None:
password = self.ldap_password("newuser")
email = "newuser@zulip.com"
subdomain = "zulip"
self.init_default_ldap_database()
ldap_user_attr_map = {"full_name": "cn"}
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
# Click confirmation link. This will set the 'authenticated_full_name'
# session variable, which will be used to set the full name of
# the user.
result = self.submit_reg_form_for_user(
email,
password,
full_name="Ignore",
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
with patch("zerver.views.registration.name_changes_disabled", return_value=True):
result = self.submit_reg_form_for_user(
email,
password,
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
user_profile = UserProfile.objects.get(delivery_email=email)
# Name comes from LDAP session.
self.assertEqual(user_profile.full_name, "New LDAP fullname")
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.EmailAuthBackend",
"zproject.backends.ZulipDummyBackend",
)
)
def test_signup_with_ldap_and_email_enabled_using_email_with_ldap_append_domain(self) -> None:
password = "nonldappassword"
email = "newuser@zulip.com"
subdomain = "zulip"
self.init_default_ldap_database()
ldap_user_attr_map = {"full_name": "cn"}
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# If the user's email is inside the LDAP directory and we just
# have a wrong password, then we refuse to create an account
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
result = self.submit_reg_form_for_user(
email,
password,
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(
email,
password,
full_name="Non-LDAP Full Name",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 302)
# We get redirected back to the login page because password was wrong
self.assertEqual(result.url, "/accounts/login/?email=newuser%40zulip.com")
self.assertFalse(UserProfile.objects.filter(delivery_email=email).exists())
# For the rest of the test we delete the user from LDAP.
del self.mock_ldap.directory["uid=newuser,ou=users,dc=zulip,dc=com"]
# If the user's email is not in the LDAP directory, but fits LDAP_APPEND_DOMAIN,
# we refuse to create the account.
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
), self.assertLogs("zulip.ldap", "DEBUG") as debug_log:
result = self.submit_reg_form_for_user(
email,
password,
full_name="Non-LDAP Full Name",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 302)
# We get redirected back to the login page because emails matching LDAP_APPEND_DOMAIN
# aren't allowed to create non-LDAP accounts.
self.assertEqual(result.url, "/accounts/login/?email=newuser%40zulip.com")
self.assertFalse(UserProfile.objects.filter(delivery_email=email).exists())
self.assertEqual(
debug_log.output,
[
"DEBUG:zulip.ldap:ZulipLDAPAuthBackend: No LDAP user matching django_to_ldap_username result: newuser. Input username: newuser@zulip.com"
],
)
# If the email is outside of LDAP_APPEND_DOMAIN, we successfully create a non-LDAP account,
# with the password managed in the Zulip database.
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="example.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
with self.assertLogs(level="WARNING") as m:
result = self.submit_reg_form_for_user(
email,
password,
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 200)
self.assertEqual(
m.output,
["WARNING:root:New account email newuser@zulip.com could not be found in LDAP"],
)
with self.assertLogs("zulip.ldap", "DEBUG") as debug_log:
result = self.submit_reg_form_for_user(
email,
password,
full_name="Non-LDAP Full Name",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(
debug_log.output,
[
"DEBUG:zulip.ldap:ZulipLDAPAuthBackend: Email newuser@zulip.com does not match LDAP domain example.com."
],
)
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "http://zulip.testserver/")
user_profile = UserProfile.objects.get(delivery_email=email)
# Name comes from the POST request, not LDAP
self.assertEqual(user_profile.full_name, "Non-LDAP Full Name")
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.EmailAuthBackend",
"zproject.backends.ZulipDummyBackend",
)
)
def test_signup_with_ldap_and_email_enabled_using_email_with_ldap_email_search(self) -> None:
# If the user's email is inside the LDAP directory and we just
# have a wrong password, then we refuse to create an account
password = "nonldappassword"
email = "newuser_email@zulip.com" # belongs to user uid=newuser_with_email in the test directory
subdomain = "zulip"
self.init_default_ldap_database()
ldap_user_attr_map = {"full_name": "cn"}
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_EMAIL_ATTR="mail",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
result = self.submit_reg_form_for_user(
email,
password,
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(
email,
password,
full_name="Non-LDAP Full Name",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 302)
# We get redirected back to the login page because password was wrong
self.assertEqual(result.url, "/accounts/login/?email=newuser_email%40zulip.com")
self.assertFalse(UserProfile.objects.filter(delivery_email=email).exists())
# If the user's email is not in the LDAP directory, though, we
# successfully create an account with a password in the Zulip
# database.
password = "nonldappassword"
email = "nonexistent@zulip.com"
subdomain = "zulip"
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_EMAIL_ATTR="mail",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
with self.assertLogs(level="WARNING") as m:
result = self.submit_reg_form_for_user(
email,
password,
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 200)
self.assertEqual(
m.output,
[
"WARNING:root:New account email nonexistent@zulip.com could not be found in LDAP"
],
)
with self.assertLogs("zulip.ldap", "DEBUG") as debug_log:
result = self.submit_reg_form_for_user(
email,
password,
full_name="Non-LDAP Full Name",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(
debug_log.output,
[
"DEBUG:zulip.ldap:ZulipLDAPAuthBackend: No LDAP user matching django_to_ldap_username result: nonexistent@zulip.com. Input username: nonexistent@zulip.com"
],
)
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "http://zulip.testserver/")
user_profile = UserProfile.objects.get(delivery_email=email)
# Name comes from the POST request, not LDAP
self.assertEqual(user_profile.full_name, "Non-LDAP Full Name")
def ldap_invite_and_signup_as(
self, invite_as: int, streams: Sequence[str] = ["Denmark"]
) -> None:
self.init_default_ldap_database()
ldap_user_attr_map = {"full_name": "cn"}
subdomain = "zulip"
email = "newuser@zulip.com"
password = self.ldap_password("newuser")
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
with self.assertLogs("zulip.ldap", "DEBUG") as debug_log:
# Invite user.
self.login("iago")
self.assertEqual(
debug_log.output,
[
"DEBUG:zulip.ldap:ZulipLDAPAuthBackend: No LDAP user matching django_to_ldap_username result: iago. Input username: iago@zulip.com"
],
)
response = self.invite(
invitee_emails="newuser@zulip.com", stream_names=streams, invite_as=invite_as
)
self.assert_json_success(response)
self.logout()
result = self.submit_reg_form_for_user(
email,
password,
full_name="Ignore",
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(
email,
password,
full_name="Ignore",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 302)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.EmailAuthBackend",
)
)
def test_ldap_invite_user_as_admin(self) -> None:
self.ldap_invite_and_signup_as(PreregistrationUser.INVITE_AS["REALM_ADMIN"])
user_profile = UserProfile.objects.get(delivery_email=self.nonreg_email("newuser"))
self.assertTrue(user_profile.is_realm_admin)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.EmailAuthBackend",
)
)
def test_ldap_invite_user_as_guest(self) -> None:
self.ldap_invite_and_signup_as(PreregistrationUser.INVITE_AS["GUEST_USER"])
user_profile = UserProfile.objects.get(delivery_email=self.nonreg_email("newuser"))
self.assertTrue(user_profile.is_guest)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.EmailAuthBackend",
)
)
def test_ldap_invite_streams(self) -> None:
stream_name = "Rome"
realm = get_realm("zulip")
stream = get_stream(stream_name, realm)
default_streams = get_default_streams_for_realm(realm)
default_streams_name = [stream.name for stream in default_streams]
self.assertNotIn(stream_name, default_streams_name)
# Invite user.
self.ldap_invite_and_signup_as(
PreregistrationUser.INVITE_AS["REALM_ADMIN"], streams=[stream_name]
)
user_profile = UserProfile.objects.get(delivery_email=self.nonreg_email("newuser"))
self.assertTrue(user_profile.is_realm_admin)
sub = get_stream_subscriptions_for_user(user_profile).filter(recipient__type_id=stream.id)
self.assert_length(sub, 1)
def test_registration_when_name_changes_are_disabled(self) -> None:
"""
Test `name_changes_disabled` when we are not running under LDAP.
"""
password = self.ldap_password("newuser")
email = "newuser@zulip.com"
subdomain = "zulip"
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
with patch("zerver.views.registration.name_changes_disabled", return_value=True):
result = self.submit_reg_form_for_user(
email,
password,
full_name="New Name",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
user_profile = UserProfile.objects.get(delivery_email=email)
# 'New Name' comes from the POST data, not from an LDAP session.
self.assertEqual(user_profile.full_name, "New Name")
def test_realm_creation_through_ldap(self) -> None:
password = self.ldap_password("newuser")
email = "newuser@zulip.com"
subdomain = "zulip"
realm_name = "Zulip"
self.init_default_ldap_database()
ldap_user_attr_map = {"full_name": "cn"}
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
from django.core.mail import outbox
for message in reversed(outbox):
if email in message.to:
match = re.search(settings.EXTERNAL_HOST + r"(\S+)>", message.body)
assert match is not None
[confirmation_url] = match.groups()
break
else:
raise AssertionError("Couldn't find a confirmation email.")
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",),
TERMS_OF_SERVICE=False,
):
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
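# Convert the PreregistrationUser into a realm-creation registration so the LDAP signup below creates a new realm.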
key = find_key_by_email(email)
confirmation = Confirmation.objects.get(confirmation_key=key)
prereg_user = confirmation.content_object
prereg_user.realm_creation = True
prereg_user.save()
result = self.submit_reg_form_for_user(
email,
password,
realm_name=realm_name,
realm_subdomain=subdomain,
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assert_in_success_response(
["We just need you to do one last thing.", "newuser@zulip.com"], result
)
@patch(
"DNS.dnslookup",
return_value=[["sipbtest:*:20922:101:Fred Sipb,,,:/mit/sipbtest:/bin/athena/tcsh"]],
)
def test_registration_of_mirror_dummy_user(self, ignored: Any) -> None:
password = "test"
subdomain = "zephyr"
user_profile = self.mit_user("sipbtest")
email = user_profile.delivery_email
user_profile.is_mirror_dummy = True
user_profile.save()
change_user_is_active(user_profile, False)
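# Deactivate the mirror dummy user so it can go through the normal registration flow.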
result = self.client_post("/register/", {"email": email}, subdomain="zephyr")
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"], subdomain="zephyr")
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
from django.core.mail import outbox
for message in reversed(outbox):
if email in message.to:
match = re.search(settings.EXTERNAL_HOST + r"(\S+)>", message.body)
assert match is not None
[confirmation_url] = match.groups()
break
else:
raise AssertionError("Couldn't find a confirmation email.")
result = self.client_get(confirmation_url, subdomain="zephyr")
self.assertEqual(result.status_code, 200)
# If the mirror dummy user is already active, attempting to
# submit the registration form should raise an AssertionError
        # (this is an invalid state, so it's a bug if we got here):
change_user_is_active(user_profile, True)
with self.assertRaisesRegex(
AssertionError, "Mirror dummy user is already active!"
), self.assertLogs("django.request", "ERROR") as error_log:
result = self.submit_reg_form_for_user(
email,
password,
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertTrue(
"ERROR:django.request:Internal Server Error: /accounts/register/" in error_log.output[0]
)
self.assertTrue(
'raise AssertionError("Mirror dummy user is already active!' in error_log.output[0]
)
self.assertTrue(
"AssertionError: Mirror dummy user is already active!" in error_log.output[0]
)
change_user_is_active(user_profile, False)
result = self.submit_reg_form_for_user(
email,
password,
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(
email,
password,
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 302)
self.assert_logged_in_user_id(user_profile.id)
@patch(
"DNS.dnslookup",
return_value=[["sipbtest:*:20922:101:Fred Sipb,,,:/mit/sipbtest:/bin/athena/tcsh"]],
)
def test_registration_of_active_mirror_dummy_user(self, ignored: Any) -> None:
"""
Trying to activate an already-active mirror dummy user should
raise an AssertionError.
"""
user_profile = self.mit_user("sipbtest")
email = user_profile.delivery_email
user_profile.is_mirror_dummy = True
user_profile.save()
change_user_is_active(user_profile, True)
with self.assertRaisesRegex(
AssertionError, "Mirror dummy user is already active!"
), self.assertLogs("django.request", "ERROR") as error_log:
self.client_post("/register/", {"email": email}, subdomain="zephyr")
self.assertTrue(
"ERROR:django.request:Internal Server Error: /register/" in error_log.output[0]
)
self.assertTrue(
'raise AssertionError("Mirror dummy user is already active!' in error_log.output[0]
)
self.assertTrue(
"AssertionError: Mirror dummy user is already active!" in error_log.output[0]
)
@override_settings(TERMS_OF_SERVICE=False)
def test_dev_user_registration(self) -> None:
"""Verify that /devtools/register_user creates a new user, logs them
in, and redirects to the logged-in app."""
count = UserProfile.objects.count()
email = f"user-{count}@zulip.com"
result = self.client_post("/devtools/register_user/")
user_profile = UserProfile.objects.all().order_by("id").last()
self.assertEqual(result.status_code, 302)
self.assertEqual(user_profile.delivery_email, email)
self.assertEqual(result["Location"], "http://zulip.testserver/")
self.assert_logged_in_user_id(user_profile.id)
@override_settings(TERMS_OF_SERVICE=False)
def test_dev_user_registration_create_realm(self) -> None:
count = UserProfile.objects.count()
string_id = f"realm-{count}"
result = self.client_post("/devtools/register_realm/")
self.assertEqual(result.status_code, 302)
self.assertTrue(
result["Location"].startswith(f"http://{string_id}.testserver/accounts/login/subdomain")
)
result = self.client_get(result["Location"], subdomain=string_id)
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], f"http://{string_id}.testserver")
user_profile = UserProfile.objects.all().order_by("id").last()
self.assert_logged_in_user_id(user_profile.id)
class DeactivateUserTest(ZulipTestCase):
def test_deactivate_user(self) -> None:
user = self.example_user("hamlet")
email = user.email
self.login_user(user)
self.assertTrue(user.is_active)
result = self.client_delete("/json/users/me")
self.assert_json_success(result)
user = self.example_user("hamlet")
self.assertFalse(user.is_active)
password = initial_password(email)
assert password is not None
self.assert_login_failure(email, password=password)
def test_do_not_deactivate_final_owner(self) -> None:
user = self.example_user("desdemona")
user_2 = self.example_user("iago")
self.login_user(user)
self.assertTrue(user.is_active)
result = self.client_delete("/json/users/me")
self.assert_json_error(result, "Cannot deactivate the only organization owner.")
user = self.example_user("desdemona")
self.assertTrue(user.is_active)
self.assertTrue(user.is_realm_owner)
do_change_user_role(user_2, UserProfile.ROLE_REALM_OWNER, acting_user=None)
self.assertTrue(user_2.is_realm_owner)
result = self.client_delete("/json/users/me")
self.assert_json_success(result)
do_change_user_role(user, UserProfile.ROLE_REALM_OWNER, acting_user=None)
def test_do_not_deactivate_final_user(self) -> None:
realm = get_realm("zulip")
for user_profile in UserProfile.objects.filter(realm=realm).exclude(
role=UserProfile.ROLE_REALM_OWNER
):
do_deactivate_user(user_profile, acting_user=None)
user = self.example_user("desdemona")
self.login_user(user)
result = self.client_delete("/json/users/me")
self.assert_json_error(result, "Cannot deactivate the only user.")
class TestLoginPage(ZulipTestCase):
@patch("django.http.HttpRequest.get_host")
def test_login_page_redirects_for_root_alias(self, mock_get_host: MagicMock) -> None:
mock_get_host.return_value = "www.testserver"
with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
result = self.client_get("/en/login/")
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/go/")
result = self.client_get("/en/login/", {"next": "/upgrade/"})
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/go/?next=%2Fupgrade%2F")
@patch("django.http.HttpRequest.get_host")
def test_login_page_redirects_for_root_domain(self, mock_get_host: MagicMock) -> None:
mock_get_host.return_value = "testserver"
with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
result = self.client_get("/en/login/")
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/go/")
result = self.client_get("/en/login/", {"next": "/upgrade/"})
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/go/?next=%2Fupgrade%2F")
mock_get_host.return_value = "www.testserver.com"
with self.settings(
ROOT_DOMAIN_LANDING_PAGE=True,
EXTERNAL_HOST="www.testserver.com",
ROOT_SUBDOMAIN_ALIASES=["test"],
):
result = self.client_get("/en/login/")
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/go/")
result = self.client_get("/en/login/", {"next": "/upgrade/"})
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/go/?next=%2Fupgrade%2F")
@patch("django.http.HttpRequest.get_host")
def test_login_page_works_without_subdomains(self, mock_get_host: MagicMock) -> None:
mock_get_host.return_value = "www.testserver"
with self.settings(ROOT_SUBDOMAIN_ALIASES=["www"]):
result = self.client_get("/en/login/")
self.assertEqual(result.status_code, 200)
mock_get_host.return_value = "testserver"
with self.settings(ROOT_SUBDOMAIN_ALIASES=["www"]):
result = self.client_get("/en/login/")
self.assertEqual(result.status_code, 200)
def test_login_page_registration_hint(self) -> None:
response = self.client_get("/login/")
self.assert_not_in_success_response(
["Don't have an account yet? You need to be invited to join this organization."],
response,
)
realm = get_realm("zulip")
realm.invite_required = True
realm.save(update_fields=["invite_required"])
response = self.client_get("/login/")
self.assert_in_success_response(
["Don't have an account yet? You need to be invited to join this organization."],
response,
)
class TestFindMyTeam(ZulipTestCase):
def test_template(self) -> None:
result = self.client_get("/accounts/find/")
self.assertIn("Find your Zulip accounts", result.content.decode("utf8"))
def test_result(self) -> None:
# We capitalize a letter in cordelia's email to test that the search is case-insensitive.
result = self.client_post(
"/accounts/find/", dict(emails="iago@zulip.com,cordeliA@zulip.com")
)
self.assertEqual(result.status_code, 302)
self.assertEqual(
result.url, "/accounts/find/?emails=iago%40zulip.com%2CcordeliA%40zulip.com"
)
result = self.client_get(result.url)
content = result.content.decode("utf8")
self.assertIn("Emails sent! You will only receive emails", content)
self.assertIn("iago@zulip.com", content)
self.assertIn("cordeliA@zulip.com", content)
from django.core.mail import outbox
# 3 = 1 + 2 -- Cordelia gets an email each for the "zulip" and "lear" realms.
self.assert_length(outbox, 3)
def test_find_team_ignore_invalid_email(self) -> None:
result = self.client_post(
"/accounts/find/", dict(emails="iago@zulip.com,invalid_email@zulip.com")
)
self.assertEqual(result.status_code, 302)
self.assertEqual(
result.url, "/accounts/find/?emails=iago%40zulip.com%2Cinvalid_email%40zulip.com"
)
result = self.client_get(result.url)
content = result.content.decode("utf8")
self.assertIn("Emails sent! You will only receive emails", content)
self.assertIn(self.example_email("iago"), content)
self.assertIn("invalid_email@", content)
from django.core.mail import outbox
self.assert_length(outbox, 1)
def test_find_team_reject_invalid_email(self) -> None:
result = self.client_post("/accounts/find/", dict(emails="invalid_string"))
self.assertEqual(result.status_code, 200)
self.assertIn(b"Enter a valid email", result.content)
from django.core.mail import outbox
self.assert_length(outbox, 0)
# Just for coverage on perhaps-unnecessary validation code.
result = self.client_get("/accounts/find/", {"emails": "invalid"})
self.assertEqual(result.status_code, 200)
def test_find_team_zero_emails(self) -> None:
data = {"emails": ""}
result = self.client_post("/accounts/find/", data)
self.assertIn("This field is required", result.content.decode("utf8"))
self.assertEqual(result.status_code, 200)
from django.core.mail import outbox
self.assert_length(outbox, 0)
def test_find_team_one_email(self) -> None:
data = {"emails": self.example_email("hamlet")}
result = self.client_post("/accounts/find/", data)
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/find/?emails=hamlet%40zulip.com")
from django.core.mail import outbox
self.assert_length(outbox, 1)
def test_find_team_deactivated_user(self) -> None:
do_deactivate_user(self.example_user("hamlet"), acting_user=None)
data = {"emails": self.example_email("hamlet")}
result = self.client_post("/accounts/find/", data)
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/find/?emails=hamlet%40zulip.com")
from django.core.mail import outbox
self.assert_length(outbox, 0)
def test_find_team_deactivated_realm(self) -> None:
do_deactivate_realm(get_realm("zulip"), acting_user=None)
data = {"emails": self.example_email("hamlet")}
result = self.client_post("/accounts/find/", data)
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/find/?emails=hamlet%40zulip.com")
from django.core.mail import outbox
self.assert_length(outbox, 0)
def test_find_team_bot_email(self) -> None:
data = {"emails": self.example_email("webhook_bot")}
result = self.client_post("/accounts/find/", data)
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/find/?emails=webhook-bot%40zulip.com")
from django.core.mail import outbox
self.assert_length(outbox, 0)
def test_find_team_more_than_ten_emails(self) -> None:
data = {"emails": ",".join(f"hamlet-{i}@zulip.com" for i in range(11))}
result = self.client_post("/accounts/find/", data)
self.assertEqual(result.status_code, 200)
self.assertIn("Please enter at most 10", result.content.decode("utf8"))
from django.core.mail import outbox
self.assert_length(outbox, 0)
class ConfirmationKeyTest(ZulipTestCase):
def test_confirmation_key(self) -> None:
request = MagicMock()
request.session = {
"confirmation_key": {"confirmation_key": "xyzzy"},
}
result = confirmation_key(request)
self.assert_json_success(result)
self.assert_in_response("xyzzy", result)
class MobileAuthOTPTest(ZulipTestCase):
def test_xor_hex_strings(self) -> None:
self.assertEqual(xor_hex_strings("1237c81ab", "18989fd12"), "0aaf57cb9")
with self.assertRaises(AssertionError):
xor_hex_strings("1", "31")
def test_is_valid_otp(self) -> None:
self.assertEqual(is_valid_otp("1234"), False)
self.assertEqual(is_valid_otp("1234abcd" * 8), True)
self.assertEqual(is_valid_otp("1234abcZ" * 8), False)
def test_ascii_to_hex(self) -> None:
self.assertEqual(ascii_to_hex("ZcdR1234"), "5a63645231323334")
self.assertEqual(hex_to_ascii("5a63645231323334"), "ZcdR1234")
def test_otp_encrypt_api_key(self) -> None:
api_key = "12ac" * 8
otp = "7be38894" * 8
result = otp_encrypt_api_key(api_key, otp)
self.assertEqual(result, "4ad1e9f7" * 8)
        decrypted = otp_decrypt_api_key(result, otp)
        self.assertEqual(decrypted, api_key)
class FollowupEmailTest(ZulipTestCase):
def test_followup_day2_email(self) -> None:
user_profile = self.example_user("hamlet")
# Test date_joined == Sunday
user_profile.date_joined = datetime.datetime(
2018, 1, 7, 1, 0, 0, 0, tzinfo=datetime.timezone.utc
)
self.assertEqual(
followup_day2_email_delay(user_profile), datetime.timedelta(days=2, hours=-1)
)
# Test date_joined == Tuesday
user_profile.date_joined = datetime.datetime(
2018, 1, 2, 1, 0, 0, 0, tzinfo=datetime.timezone.utc
)
self.assertEqual(
followup_day2_email_delay(user_profile), datetime.timedelta(days=2, hours=-1)
)
# Test date_joined == Thursday
user_profile.date_joined = datetime.datetime(
2018, 1, 4, 1, 0, 0, 0, tzinfo=datetime.timezone.utc
)
self.assertEqual(
followup_day2_email_delay(user_profile), datetime.timedelta(days=1, hours=-1)
)
# Test date_joined == Friday
user_profile.date_joined = datetime.datetime(
2018, 1, 5, 1, 0, 0, 0, tzinfo=datetime.timezone.utc
)
self.assertEqual(
followup_day2_email_delay(user_profile), datetime.timedelta(days=3, hours=-1)
)
# Time offset of America/Phoenix is -07:00
user_profile.timezone = "America/Phoenix"
# Test date_joined == Friday in UTC, but Thursday in the user's timezone
user_profile.date_joined = datetime.datetime(
2018, 1, 5, 1, 0, 0, 0, tzinfo=datetime.timezone.utc
)
self.assertEqual(
followup_day2_email_delay(user_profile), datetime.timedelta(days=1, hours=-1)
)
class NoReplyEmailTest(ZulipTestCase):
def test_noreply_email_address(self) -> None:
self.assertTrue(
re.search(self.TOKENIZED_NOREPLY_REGEX, FromAddress.tokenized_no_reply_address())
)
with self.settings(ADD_TOKENS_TO_NOREPLY_ADDRESS=False):
self.assertEqual(FromAddress.tokenized_no_reply_address(), "noreply@testserver")
class TwoFactorAuthTest(ZulipTestCase):
@patch("two_factor.models.totp")
def test_two_factor_login(self, mock_totp: MagicMock) -> None:
token = 123456
email = self.example_email("hamlet")
password = self.ldap_password("hamlet")
user_profile = self.example_user("hamlet")
user_profile.set_password(password)
user_profile.save()
self.create_default_device(user_profile)
def totp(*args: Any, **kwargs: Any) -> int:
return token
mock_totp.side_effect = totp
with self.settings(
AUTHENTICATION_BACKENDS=("zproject.backends.EmailAuthBackend",),
TWO_FACTOR_CALL_GATEWAY="two_factor.gateways.fake.Fake",
TWO_FACTOR_SMS_GATEWAY="two_factor.gateways.fake.Fake",
TWO_FACTOR_AUTHENTICATION_ENABLED=True,
):
first_step_data = {
"username": email,
"password": password,
"two_factor_login_view-current_step": "auth",
}
with self.assertLogs("two_factor.gateways.fake", "INFO") as info_logs:
result = self.client_post("/accounts/login/", first_step_data)
self.assertEqual(
info_logs.output,
['INFO:two_factor.gateways.fake:Fake SMS to +12125550100: "Your token is: 123456"'],
)
self.assertEqual(result.status_code, 200)
second_step_data = {
"token-otp_token": str(token),
"two_factor_login_view-current_step": "token",
}
result = self.client_post("/accounts/login/", second_step_data)
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "http://zulip.testserver")
# Going to login page should redirect to '/' if user is already
# logged in.
result = self.client_get("/accounts/login/")
self.assertEqual(result["Location"], "http://zulip.testserver")
class NameRestrictionsTest(ZulipTestCase):
def test_whitelisted_disposable_domains(self) -> None:
self.assertFalse(is_disposable_domain("OPayQ.com"))
class RealmRedirectTest(ZulipTestCase):
def test_realm_redirect_without_next_param(self) -> None:
result = self.client_get("/accounts/go/")
self.assert_in_success_response(["Enter your organization's Zulip URL"], result)
result = self.client_post("/accounts/go/", {"subdomain": "zephyr"})
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "http://zephyr.testserver")
result = self.client_post("/accounts/go/", {"subdomain": "invalid"})
self.assert_in_success_response(["We couldn't find that Zulip organization."], result)
def test_realm_redirect_with_next_param(self) -> None:
result = self.client_get("/accounts/go/", {"next": "billing"})
self.assert_in_success_response(
["Enter your organization's Zulip URL", 'action="/accounts/go/?next=billing"'], result
)
result = self.client_post("/accounts/go/?next=billing", {"subdomain": "lear"})
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "http://lear.testserver/billing")
| {
"content_hash": "e1c730c6f191d590de7f1fe5d4f16559",
"timestamp": "",
"source": "github",
"line_count": 5362,
"max_line_length": 175,
"avg_line_length": 41.98508019395748,
"alnum_prop": 0.6206312965299124,
"repo_name": "punchagan/zulip",
"id": "08f5c2f3e901b179a61bda19b895967fd777526b",
"size": "225136",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "zerver/tests/test_signup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "453615"
},
{
"name": "Dockerfile",
"bytes": "4898"
},
{
"name": "Emacs Lisp",
"bytes": "157"
},
{
"name": "HTML",
"bytes": "607321"
},
{
"name": "Handlebars",
"bytes": "315160"
},
{
"name": "JavaScript",
"bytes": "3572990"
},
{
"name": "Perl",
"bytes": "9884"
},
{
"name": "Puppet",
"bytes": "94991"
},
{
"name": "Python",
"bytes": "8750579"
},
{
"name": "Ruby",
"bytes": "3875"
},
{
"name": "Shell",
"bytes": "134468"
},
{
"name": "TypeScript",
"bytes": "223296"
}
],
"symlink_target": ""
} |
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "Lag1Trend", cycle_length = 7, transform = "None", sigma = 0.0, exog_count = 100, ar_order = 12); | {
"content_hash": "fd298600bd02bfe749c00a9fa187afb6",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 160,
"avg_line_length": 37.142857142857146,
"alnum_prop": 0.7,
"repo_name": "antoinecarme/pyaf",
"id": "34ad00a7d19ed321c5c50eae52396a2fc3a64895",
"size": "260",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/artificial/transf_None/trend_Lag1Trend/cycle_7/ar_12/test_artificial_32_None_Lag1Trend_7_12_100.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "6773299"
},
{
"name": "Procfile",
"bytes": "24"
},
{
"name": "Python",
"bytes": "54209093"
},
{
"name": "R",
"bytes": "807"
},
{
"name": "Shell",
"bytes": "3619"
}
],
"symlink_target": ""
} |
import os, base64, getpass
from suds.client import Client
defaultModelOptions = {"title":"Default Title", "desc":"Default Description",
"view_state":2, "tags":"model,auto-upload",
"has_color":0, "scale":1, "markup":0}
class Shapeways(object):
def __init__(self, username, password, appID = "ShapeWays Python API"):
self.appID = appID
# Retrieve the WSDL schema
self.client = Client("http://api.shapeways.com/v1/wsdl.php")
# Login and store the sessionID
self.sessionid = self.client.service.login(username, password, self.appID)
def uploadModel(self, filepath, **options):
'''This function uploads a model file. The first argument, the path to the file, is required.
Other optional keyword arguments are:
'title': The title the model will be given in the Shapeways website
'desc': A description of the model
'view_state': 0, 1, or 2 for view-only, for sale, or hidden respectively
        'tags': a comma separated string of tags
'scale': floating point value where upload-scale * scale = meters
        'file_uri': ???
'markup': a floating point markup in $'s'''
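        # Illustrative call (a sketch only; the filename and option values are
        # hypothetical, but the option names follow the docstring above):
        #   sw.uploadModel("widget.stl", title="Widget", desc="Test part",
        #                  tags="model,test", scale=0.001)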
# Extract the filename from the path
filename = os.path.split(filepath)[-1]
# Extract the extension from the path
modeltype = os.path.splitext(filename)[-1][1:].upper()
# Read in the file and base64 encode it
encodedModel = base64.b64encode( open(filepath).read() )
# Make a current "SWModel" object
current_model = defaultModelOptions.copy()
current_model.update(options)
current_model.update({"file":encodedModel,"filename":filename, "file_uri":filename, "modeltype":modeltype})
print "Uploading model", filepath
print self.client.service.submitModel(session_id=self.sessionid, application_id = self.appID, model=current_model)
print "Upload complete."
def getPrinters(self):
return self.client.service.getPrinters(session_id=self.sessionid, application_id = self.appID)
if __name__ == "__main__":
un, pw = raw_input("Shapeways Username:"), getpass.getpass("Shapeways Password:")
sw = Shapeways(un, pw)
sw.uploadModel("sphere.stl")
print sw.getPrinters()
| {
"content_hash": "3374c684710bac13d838fe2f8b78d7ac",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 122,
"avg_line_length": 42.872727272727275,
"alnum_prop": 0.6361323155216285,
"repo_name": "meawoppl/shapewaysAPI",
"id": "60e4a3f8b8c1bf606036f279deca2d50f3ad8de4",
"size": "2358",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Shapeways.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "2358"
}
],
"symlink_target": ""
} |
"""Simple parsers for registry keys and values."""
import re
import logging
from grr.lib import artifact_lib
from grr.lib import parsers
from grr.lib import rdfvalue
from grr.lib import type_info
from grr.lib import utils
SID_RE = re.compile(r"^S-\d-\d+-(\d+-){1,14}\d+$")
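# For illustration (example value only): a machine-local account SID such as
# "S-1-5-21-1180699209-877415012-3182924384-1004" matches this pattern, while a
# short well-known SID like "S-1-5-18" does not, because it lacks the 1-14
# intermediate subauthority components the pattern expects.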
class CurrentControlSetKBParser(parsers.RegistryValueParser):
"""Parser for CurrentControlSet value."""
output_types = ["RDFString"]
supported_artifacts = ["CurrentControlSet"]
def Parse(self, stat, unused_knowledge_base):
"""Parse the key currentcontrolset output."""
value = stat.registry_data.GetValue()
if not str(value).isdigit() or int(value) > 999 or int(value) < 0:
raise parsers.ParseError("Invalid value for CurrentControlSet key %s" %
value)
yield rdfvalue.RDFString("HKEY_LOCAL_MACHINE\\SYSTEM\\ControlSet%03d" %
int(value))
class WinEnvironmentParser(parsers.RegistryValueParser):
"""Parser for registry retrieved environment variables."""
output_types = ["RDFString"]
supported_artifacts = ["WinPathEnvironmentVariable",
"WinDirEnvironmentVariable", "TempEnvironmentVariable",
"AllUsersAppDataEnvironmentVariable"]
# Required for environment variable expansion
knowledgebase_dependencies = ["environ_systemdrive", "environ_systemroot"]
  def Parse(self, stat, knowledge_base):
    """Parse the registry value of an environment variable."""
value = stat.registry_data.GetValue()
if not value:
raise parsers.ParseError("Invalid value for key %s" % stat.pathspec.path)
value = artifact_lib.ExpandWindowsEnvironmentVariables(value,
knowledge_base)
if value:
yield rdfvalue.RDFString(value)
class WinSystemDriveParser(parsers.RegistryValueParser):
"""Parser for SystemDrive environment variable."""
output_types = ["RDFString"]
supported_artifacts = ["SystemDriveEnvironmentVariable"]
  def Parse(self, stat, _):
    """Parse the SystemDrive environment variable value."""
value = stat.registry_data.GetValue()
if not value:
raise parsers.ParseError("Invalid value for key %s" % stat.pathspec.path)
systemdrive = value[0:2]
if re.match(r"^[A-Za-z]:$", systemdrive):
yield rdfvalue.RDFString(systemdrive)
else:
raise parsers.ParseError(
"Bad drive letter for key %s" % stat.pathspec.path)
class WinSystemRootParser(parsers.RegistryValueParser):
"""Parser for SystemRoot environment variables."""
output_types = ["RDFString"]
supported_artifacts = ["SystemRoot"]
def Parse(self, stat, _):
value = stat.registry_data.GetValue()
if value:
yield rdfvalue.RDFString(value)
else:
raise parsers.ParseError("Invalid value for key %s" % stat.pathspec.path)
class CodepageParser(parsers.RegistryValueParser):
"""Parser for Codepage values."""
output_types = ["RDFString"]
supported_artifacts = ["WinCodePage"]
def Parse(self, stat, knowledge_base):
_ = knowledge_base
value = stat.registry_data.GetValue()
yield rdfvalue.RDFString("cp_%s" % value)
class AllUsersProfileEnvironmentVariable(parsers.RegistryParser):
"""Parser for AllUsersProfile variable.
This requires combining two registry values together and applying a default
  if one of the registry values doesn't exist.
"""
output_types = ["RDFString"]
supported_artifacts = ["AllUsersProfileEnvironmentVariable"]
# Required for environment variable expansion
knowledgebase_dependencies = ["environ_systemdrive", "environ_systemroot"]
process_together = True
def ParseMultiple(self, stats, knowledge_base):
"""Parse each returned registry variable."""
prof_directory = r"%SystemDrive%\Documents and Settings"
all_users = "All Users" # Default value.
for stat in stats:
value = stat.registry_data.GetValue()
if stat.pathspec.Basename() == "ProfilesDirectory" and value:
prof_directory = value
elif stat.pathspec.Basename() == "AllUsersProfile" and value:
all_users = value
all_users_dir = r"%s\%s" % (prof_directory, all_users)
all_users_dir = artifact_lib.ExpandWindowsEnvironmentVariables(
all_users_dir, knowledge_base)
yield rdfvalue.RDFString(all_users_dir)
class WinUserSids(parsers.RegistryParser):
"""Parser for extracting SID for multiple users.
This reads a listing of the profile paths to extract a list of SIDS for
users with profiles on a system.
"""
output_types = ["KnowledgeBaseUser"]
supported_artifacts = ["WindowsRegistryProfiles"]
def Parse(self, stat, knowledge_base):
"""Parse each returned registry value."""
_ = knowledge_base # Unused.
sid_str = stat.pathspec.Dirname().Basename()
if SID_RE.match(sid_str):
kb_user = rdfvalue.KnowledgeBaseUser()
kb_user.sid = sid_str
if stat.pathspec.Basename() == "ProfileImagePath":
if stat.resident:
# Support old clients.
kb_user.homedir = utils.SmartUnicode(stat.resident)
else:
kb_user.homedir = stat.registry_data.GetValue()
kb_user.userprofile = kb_user.homedir
try:
# Assume username is the last component of the path. This is not
# robust, but other user artifacts will override it if there is a
# better match.
kb_user.username = kb_user.homedir.rsplit("\\", 1)[-1]
except IndexError:
pass
yield kb_user
class WinUserSpecialDirs(parsers.RegistryParser):
r"""Parser for extracting special folders from registry.
Keys will come from HKEY_USERS and will list the Shell Folders and user's
Environment key. We extract each subkey that matches on of our knowledge base
attributes.
Known folder GUIDs:
http://msdn.microsoft.com/en-us/library/windows/desktop/dd378457(v=vs.85).aspx
"""
output_types = ["KnowledgeBaseUser"]
supported_artifacts = ["UserShellFolders"]
process_together = True
# Required for environment variable expansion
knowledgebase_dependencies = ["environ_systemdrive", "environ_systemroot",
"users.userprofile"]
key_var_mapping = {
"Shell Folders": {
"{A520A1A4-1780-4FF6-BD18-167343C5AF16}": "localappdata_low",
"Desktop": "desktop",
"AppData": "appdata",
"Local AppData": "localappdata",
"Cookies": "cookies",
"Cache": "internet_cache",
"Recent": "recent",
"Startup": "startup",
"Personal": "personal",
},
"Environment": {
"TEMP": "temp",
},
"Volatile Environment": {
"USERDOMAIN": "userdomain",
},
}
def ParseMultiple(self, stats, knowledge_base):
"""Parse each returned registry value."""
user_dict = {}
for stat in stats:
sid_str = stat.pathspec.path.split("/", 3)[2]
if SID_RE.match(sid_str):
if sid_str not in user_dict:
user_dict[sid_str] = rdfvalue.KnowledgeBaseUser(sid=sid_str)
if stat.registry_data.GetValue():
# Look up in the mapping if we can use this entry to populate a user
# attribute, and if so, set it.
reg_key_name = stat.pathspec.Dirname().Basename()
if reg_key_name in self.key_var_mapping:
map_dict = self.key_var_mapping[reg_key_name]
reg_key = stat.pathspec.Basename()
kb_attr = map_dict.get(reg_key)
if kb_attr:
value = artifact_lib.ExpandWindowsEnvironmentVariables(
stat.registry_data.GetValue(), knowledge_base)
value = artifact_lib.ExpandWindowsUserEnvironmentVariables(
value, knowledge_base, sid=sid_str)
user_dict[sid_str].Set(kb_attr, value)
# Now yield each user we found.
return user_dict.itervalues()
class WinServicesParser(parsers.RegistryValueParser):
"""Parser for Windows services values from the registry.
See service key doco:
http://support.microsoft.com/kb/103000
"""
output_types = ["WindowsServiceInformation"]
supported_artifacts = ["WindowsServices"]
process_together = True
def __init__(self):
# The key can be "services" or "Services" on different versions of windows.
self.service_re = re.compile(
r".*HKEY_LOCAL_MACHINE/SYSTEM/[^/]+/services/([^/]+)(/(.*))?$",
re.IGNORECASE)
super(WinServicesParser, self).__init__()
def _GetServiceName(self, path):
return self.service_re.match(path).group(1)
def _GetKeyName(self, path):
return self.service_re.match(path).group(3)
def ParseMultiple(self, stats, knowledge_base):
"""Parse Service registry keys and return WindowsServiceInformation."""
_ = knowledge_base
services = {}
field_map = {"Description": "description",
"DisplayName": "display_name",
"Group": "group_name",
"DriverPackageId": "driver_package_id",
"ErrorControl": "error_control",
"ImagePath": "image_path",
"ObjectName": "object_name",
"Start": "startup_type",
"Type": "service_type",
"Parameters/ServiceDLL": "service_dll"}
for stat in stats:
# Ignore subkeys
if not stat.HasField("registry_data"):
continue
service_name = self._GetServiceName(stat.pathspec.path)
reg_key = stat.aff4path.Dirname()
service_info = rdfvalue.WindowsServiceInformation(name=service_name,
registry_key=reg_key)
services.setdefault(service_name, service_info)
key = self._GetKeyName(stat.pathspec.path)
if key in field_map:
try:
services[service_name].Set(field_map[key],
stat.registry_data.GetValue())
except type_info.TypeValueError:
# Flatten multi strings into a simple string
if stat.registry_type == rdfvalue.StatEntry.RegistryType.REG_MULTI_SZ:
services[service_name].Set(field_map[key],
utils.SmartUnicode(
stat.registry_data.GetValue()))
else:
# Log failures for everything else
# TODO(user): change this to yield a ParserAnomaly object.
dest_type = type(services[service_name].Get(field_map[key]))
logging.debug("Wrong type set for %s:%s, expected %s, got %s",
stat.pathspec.path, stat.registry_data.GetValue(),
dest_type, type(stat.registry_data.GetValue()))
return services.itervalues()
class WinTimezoneParser(parsers.RegistryValueParser):
"""Parser for TimeZoneKeyName value."""
output_types = ["RDFString"]
supported_artifacts = ["WinTimeZone"]
def Parse(self, stat, knowledge_base):
"""Convert the timezone to Olson format."""
_ = knowledge_base
value = stat.registry_data.GetValue()
result = ZONE_LIST.get(value.strip())
if not result:
      yield rdfvalue.RDFString("Unknown (%s)" % value.strip())
    else:
      yield rdfvalue.RDFString(result)
# Prebuilt from HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows NT"
# \CurrentVersion\Time Zones\
# Note that these may not be consistent across Windows versions so may need
# adjustment in the future.
ZONE_LIST = {
"IndiaStandardTime": "Asia/Kolkata",
"EasternStandardTime": "EST5EDT",
"EasternDaylightTime": "EST5EDT",
"MountainStandardTime": "MST7MDT",
"MountainDaylightTime": "MST7MDT",
"PacificStandardTime": "PST8PDT",
"PacificDaylightTime": "PST8PDT",
"CentralStandardTime": "CST6CDT",
"CentralDaylightTime": "CST6CDT",
"SamoaStandardTime": "US/Samoa",
"HawaiianStandardTime": "US/Hawaii",
"AlaskanStandardTime": "US/Alaska",
"MexicoStandardTime2": "MST7MDT",
"USMountainStandardTime": "MST7MDT",
"CanadaCentralStandardTime": "CST6CDT",
"MexicoStandardTime": "CST6CDT",
"CentralAmericaStandardTime": "CST6CDT",
"USEasternStandardTime": "EST5EDT",
"SAPacificStandardTime": "EST5EDT",
"MalayPeninsulaStandardTime": "Asia/Kuching",
"PacificSAStandardTime": "Canada/Atlantic",
"AtlanticStandardTime": "Canada/Atlantic",
"SAWesternStandardTime": "Canada/Atlantic",
"NewfoundlandStandardTime": "Canada/Newfoundland",
"AzoresStandardTime": "Atlantic/Azores",
"CapeVerdeStandardTime": "Atlantic/Azores",
"GMTStandardTime": "GMT",
"GreenwichStandardTime": "GMT",
"W.CentralAfricaStandardTime": "Europe/Belgrade",
"W.EuropeStandardTime": "Europe/Belgrade",
"CentralEuropeStandardTime": "Europe/Belgrade",
"RomanceStandardTime": "Europe/Belgrade",
"CentralEuropeanStandardTime": "Europe/Belgrade",
"E.EuropeStandardTime": "Egypt",
"SouthAfricaStandardTime": "Egypt",
"IsraelStandardTime": "Egypt",
"EgyptStandardTime": "Egypt",
"NorthAsiaEastStandardTime": "Asia/Bangkok",
"SingaporeStandardTime": "Asia/Bangkok",
"ChinaStandardTime": "Asia/Bangkok",
"W.AustraliaStandardTime": "Australia/Perth",
"TaipeiStandardTime": "Asia/Bangkok",
"TokyoStandardTime": "Asia/Tokyo",
"KoreaStandardTime": "Asia/Seoul",
"@tzres.dll,-10": "Atlantic/Azores",
"@tzres.dll,-11": "Atlantic/Azores",
"@tzres.dll,-12": "Atlantic/Azores",
"@tzres.dll,-20": "Atlantic/Cape_Verde",
"@tzres.dll,-21": "Atlantic/Cape_Verde",
"@tzres.dll,-22": "Atlantic/Cape_Verde",
"@tzres.dll,-40": "Brazil/East",
"@tzres.dll,-41": "Brazil/East",
"@tzres.dll,-42": "Brazil/East",
"@tzres.dll,-70": "Canada/Newfoundland",
"@tzres.dll,-71": "Canada/Newfoundland",
"@tzres.dll,-72": "Canada/Newfoundland",
"@tzres.dll,-80": "Canada/Atlantic",
"@tzres.dll,-81": "Canada/Atlantic",
"@tzres.dll,-82": "Canada/Atlantic",
"@tzres.dll,-104": "America/Cuiaba",
"@tzres.dll,-105": "America/Cuiaba",
"@tzres.dll,-110": "EST5EDT",
"@tzres.dll,-111": "EST5EDT",
"@tzres.dll,-112": "EST5EDT",
"@tzres.dll,-120": "EST5EDT",
"@tzres.dll,-121": "EST5EDT",
"@tzres.dll,-122": "EST5EDT",
"@tzres.dll,-130": "EST5EDT",
"@tzres.dll,-131": "EST5EDT",
"@tzres.dll,-132": "EST5EDT",
"@tzres.dll,-140": "CST6CDT",
"@tzres.dll,-141": "CST6CDT",
"@tzres.dll,-142": "CST6CDT",
"@tzres.dll,-150": "America/Guatemala",
"@tzres.dll,-151": "America/Guatemala",
"@tzres.dll,-152": "America/Guatemala",
"@tzres.dll,-160": "CST6CDT",
"@tzres.dll,-161": "CST6CDT",
"@tzres.dll,-162": "CST6CDT",
"@tzres.dll,-170": "America/Mexico_City",
"@tzres.dll,-171": "America/Mexico_City",
"@tzres.dll,-172": "America/Mexico_City",
"@tzres.dll,-180": "MST7MDT",
"@tzres.dll,-181": "MST7MDT",
"@tzres.dll,-182": "MST7MDT",
"@tzres.dll,-190": "MST7MDT",
"@tzres.dll,-191": "MST7MDT",
"@tzres.dll,-192": "MST7MDT",
"@tzres.dll,-200": "MST7MDT",
"@tzres.dll,-201": "MST7MDT",
"@tzres.dll,-202": "MST7MDT",
"@tzres.dll,-210": "PST8PDT",
"@tzres.dll,-211": "PST8PDT",
"@tzres.dll,-212": "PST8PDT",
"@tzres.dll,-220": "US/Alaska",
"@tzres.dll,-221": "US/Alaska",
"@tzres.dll,-222": "US/Alaska",
"@tzres.dll,-230": "US/Hawaii",
"@tzres.dll,-231": "US/Hawaii",
"@tzres.dll,-232": "US/Hawaii",
"@tzres.dll,-260": "GMT",
"@tzres.dll,-261": "GMT",
"@tzres.dll,-262": "GMT",
"@tzres.dll,-271": "UTC",
"@tzres.dll,-272": "UTC",
"@tzres.dll,-280": "Europe/Budapest",
"@tzres.dll,-281": "Europe/Budapest",
"@tzres.dll,-282": "Europe/Budapest",
"@tzres.dll,-290": "Europe/Warsaw",
"@tzres.dll,-291": "Europe/Warsaw",
"@tzres.dll,-292": "Europe/Warsaw",
"@tzres.dll,-331": "Europe/Nicosia",
"@tzres.dll,-332": "Europe/Nicosia",
"@tzres.dll,-340": "Africa/Cairo",
"@tzres.dll,-341": "Africa/Cairo",
"@tzres.dll,-342": "Africa/Cairo",
"@tzres.dll,-350": "Europe/Sofia",
"@tzres.dll,-351": "Europe/Sofia",
"@tzres.dll,-352": "Europe/Sofia",
"@tzres.dll,-365": "Egypt",
"@tzres.dll,-390": "Asia/Kuwait",
"@tzres.dll,-391": "Asia/Kuwait",
"@tzres.dll,-392": "Asia/Kuwait",
"@tzres.dll,-400": "Asia/Baghdad",
"@tzres.dll,-401": "Asia/Baghdad",
"@tzres.dll,-402": "Asia/Baghdad",
"@tzres.dll,-410": "Africa/Nairobi",
"@tzres.dll,-411": "Africa/Nairobi",
"@tzres.dll,-412": "Africa/Nairobi",
"@tzres.dll,-434": "Asia/Tbilisi",
"@tzres.dll,-435": "Asia/Tbilisi",
"@tzres.dll,-440": "Asia/Muscat",
"@tzres.dll,-441": "Asia/Muscat",
"@tzres.dll,-442": "Asia/Muscat",
"@tzres.dll,-447": "Asia/Baku",
"@tzres.dll,-448": "Asia/Baku",
"@tzres.dll,-449": "Asia/Baku",
"@tzres.dll,-450": "Asia/Yerevan",
"@tzres.dll,-451": "Asia/Yerevan",
"@tzres.dll,-452": "Asia/Yerevan",
"@tzres.dll,-460": "Asia/Kabul",
"@tzres.dll,-461": "Asia/Kabul",
"@tzres.dll,-462": "Asia/Kabul",
"@tzres.dll,-471": "Asia/Yekaterinburg",
"@tzres.dll,-472": "Asia/Yekaterinburg",
"@tzres.dll,-511": "Asia/Aqtau",
"@tzres.dll,-512": "Asia/Aqtau",
"@tzres.dll,-570": "Asia/Chongqing",
"@tzres.dll,-571": "Asia/Chongqing",
"@tzres.dll,-572": "Asia/Chongqing",
"@tzres.dll,-650": "Australia/Darwin",
"@tzres.dll,-651": "Australia/Darwin",
"@tzres.dll,-652": "Australia/Darwin",
"@tzres.dll,-660": "Australia/Adelaide",
"@tzres.dll,-661": "Australia/Adelaide",
"@tzres.dll,-662": "Australia/Adelaide",
"@tzres.dll,-670": "Australia/Sydney",
"@tzres.dll,-671": "Australia/Sydney",
"@tzres.dll,-672": "Australia/Sydney",
"@tzres.dll,-680": "Australia/Brisbane",
"@tzres.dll,-681": "Australia/Brisbane",
"@tzres.dll,-682": "Australia/Brisbane",
"@tzres.dll,-721": "Pacific/Port_Moresby",
"@tzres.dll,-722": "Pacific/Port_Moresby",
"@tzres.dll,-731": "Pacific/Fiji",
"@tzres.dll,-732": "Pacific/Fiji",
"@tzres.dll,-840": "America/Argentina/Buenos_Aires",
"@tzres.dll,-841": "America/Argentina/Buenos_Aires",
"@tzres.dll,-842": "America/Argentina/Buenos_Aires",
"@tzres.dll,-880": "UTC",
"@tzres.dll,-930": "UTC",
"@tzres.dll,-931": "UTC",
"@tzres.dll,-932": "UTC",
"@tzres.dll,-1010": "Asia/Aqtau",
"@tzres.dll,-1020": "Asia/Dhaka",
"@tzres.dll,-1021": "Asia/Dhaka",
"@tzres.dll,-1022": "Asia/Dhaka",
"@tzres.dll,-1070": "Asia/Tbilisi",
"@tzres.dll,-1120": "America/Cuiaba",
"@tzres.dll,-1140": "Pacific/Fiji",
"@tzres.dll,-1460": "Pacific/Port_Moresby",
"@tzres.dll,-1530": "Asia/Yekaterinburg",
"@tzres.dll,-1630": "Europe/Nicosia",
"@tzres.dll,-1660": "America/Bahia",
"@tzres.dll,-1661": "America/Bahia",
"@tzres.dll,-1662": "America/Bahia",
"Central Standard Time": "CST6CDT",
"Pacific Standard Time": "PST8PDT",
}
| {
"content_hash": "134a44582d76aab3dee83baaa1fba0b7",
"timestamp": "",
"source": "github",
"line_count": 517,
"max_line_length": 80,
"avg_line_length": 36.684719535783366,
"alnum_prop": 0.6269640409153222,
"repo_name": "ksmaheshkumar/grr",
"id": "31643a8c4e82da621e86c69e342dd55e6a1192bd",
"size": "18988",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "parsers/windows_registry_parser.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "227"
},
{
"name": "Batchfile",
"bytes": "14993"
},
{
"name": "C",
"bytes": "9062"
},
{
"name": "C++",
"bytes": "55149"
},
{
"name": "CSS",
"bytes": "12047"
},
{
"name": "Groff",
"bytes": "444"
},
{
"name": "HTML",
"bytes": "48624"
},
{
"name": "JavaScript",
"bytes": "230351"
},
{
"name": "Makefile",
"bytes": "5863"
},
{
"name": "Protocol Buffer",
"bytes": "181723"
},
{
"name": "Python",
"bytes": "4855590"
},
{
"name": "Ruby",
"bytes": "4931"
},
{
"name": "Shell",
"bytes": "45459"
}
],
"symlink_target": ""
} |
import sys
import os
import time
#variables for alphabetic characters
a_ = 65
z_ = 90
A_ = 97
Z_ = 122
#variables for accessing word_analysis list
WORDCOUNT = 0
LINENUMBERS = 1
IWORDINTEXT = 2
ICHARONLINE = 3
#variables for user input options in text_step and word_step functions
ENTER = ''
W_NEXT_INST = '>'
W_PREV_INST = '<'
INSTRUCTIONS = 'qa'
YES = NEXT_LINE = 1
NO = QUIT = FIRST = 0
NOMOVE = -1
"""
Karl Toby Rosenberg
Text Analyzer (word counts, number of words between instances of a given word)
and Basic Text Viewer
ver 3.0
(Currently version 3 works with texts that have only ASCII characters
as a result of the way it tracks positions in the file.
I believe that I have correctly implemented the way to track unicode character positions as well, but
this needs more testing.
-Dictionary and Word Frequency
-Step-through-text-viewer:
Steps through text x lines at a time (1 by default), displays line number
jumps to specific lines,
skips to specific instances of a chosen word
at/after or at/before current line position (forwards and backwards)
displays the word instance's position/index in the text,
displays number of words skipped (forwards or backwards)
If end of file reached
(either through a line skip or through an attempt to step forward after
the last instance of a word has already been reached),
exits (prompts to enter a new word to "track"),
If no instance of the chosen word found before the current line,
moves directly to the first instance
possible not to remove extra punctuation (less accurate)
possible to remove specified (trivial words)
mood/gender word counts possible
to-do: implement way for the user to specify the
mood/gender/trivial words and replace the default placeholder lists
current version June 29, 2016
"""
#############################
"""
binary_min_line_above_search
given a starting line number and a list of valid line numbers,
finds and returns the index of the nearest line number greater than or equal to the starting line
returns -1 if there is no such valid line in the correct range
"""
def binary_min_line_above_search(line_numbers, low, high, starting_line):
mid = 0
index_first_valid_line = high
if line_numbers[index_first_valid_line] == starting_line:
return index_first_valid_line
while low <= high:
mid = (low + high)//2
test_line = line_numbers[mid]
if test_line == starting_line:
return mid
elif test_line < starting_line:
low = mid + 1
else: #if test_line > starting_line
if line_numbers[index_first_valid_line] >= test_line and mid < index_first_valid_line:
index_first_valid_line = mid
high = mid - 1
if low == high:
return index_first_valid_line
if line_numbers[index_first_valid_line] < starting_line:
return -1
return index_first_valid_line
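#quick illustration (hypothetical values):
#binary_min_line_above_search([2, 5, 9], 0, 2, 6) returns 2, since line 9 is the
#nearest valid line at or above line 6; with starting_line = 10 it returns -1
#because no valid line remains at or above the starting line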
"""
binary_max_line_below_search
given a starting line number and a list of valid line numbers,
finds and returns the index of the nearest line number less than or equal to the starting line
returns -1 if there is no such valid line in the correct range
"""
def binary_max_line_below_search(line_numbers, low, high, starting_line):
mid = 0
index_first_valid_line = low
if line_numbers[index_first_valid_line] == starting_line:
return index_first_valid_line
while low <= high:
mid = (low + high)//2
test_line = line_numbers[mid]
if test_line == starting_line:
return mid
elif test_line > starting_line:
high = mid - 1
else: #if test_line < starting_line
if line_numbers[index_first_valid_line] <= test_line and mid > index_first_valid_line:
index_first_valid_line = mid
low = mid + 1
if low == high:
return index_first_valid_line
if line_numbers[index_first_valid_line] > starting_line:
return -1
return index_first_valid_line
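#likewise (hypothetical values): binary_max_line_below_search([2, 5, 9], 0, 2, 6)
#returns 1 (line 5 is the nearest valid line at or below line 6), and
#starting_line = 1 gives -1 because no valid line exists at or below it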
"""
clean_word
returns string with
all non-alphabetical characters from given string (word) omitted
params: string word
return: string cleaned
"""
def clean_word(word):
cleaned = []
cmp = 0
for char in word:
cmp = ord(char)
if (cmp >= a_ and cmp <= z_) or (cmp >= A_ and cmp <= Z_):
cleaned.append(char)
return ''.join(cleaned)
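#for example, clean_word("good-natured!") returns "goodnatured";
#digits are dropped too, so clean_word("3rd") returns "rd"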
"""
is_valid_char
checks whether a given character is alphabetical or a valid non-alphabetical character,
returns True if valid, else returns False
"""
def is_valid_char(char, in_word_punct):
val = ord(char)
if (val >= a_ and val <= z_) or (val >= A_ and val <= Z_) or char in in_word_punct:
return True
return False
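#for example, with in_word_punct = ["'", "-"], is_valid_char("-", ["'", "-"])
#returns True, while is_valid_char("3", ["'", "-"]) returns False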
"""
print_instructions
displays the commands for text_step and word_step functions
"""
def print_instructions():
print("TEXT STEP COMMANDS:\n\
-enter a number n to step by n lines\n\
-a number -n to skip to a specific line number\n\
-the < and > character keys to skip to\n\
the previous and next instance of a word\n\
-qa to display the commands again\n\
-0 to quit\n")
"""
word_step
skips to instances of the chosen word within the text,
displays number of words skipped with each movement,
displays position of each word instance with respect to the "list" of all
words in the text
enter '<' or '>' to skip to the previous or next instance of the chosen word
param:
file,
the file object representing the text to read
list (int) line_start_pos
contains the index of the starting character of each
line with respect to the entire text
list, word_analysis:
information pertaining to a specific word in the text:
word_analysis[0]:
int (number of instances of the given word in the text)
word_analysis[1]:
list of int (for each instance of the given word,
stores--in order--the line numbers on which the word occurred)
word_analysis[2]:
list of int (understand the entire text as a list of words, where word i is the ith word in the text,
this list stores the word index i for each instance of the given word
word_analysis[3]:
list of int (understand the entire text as a list of strings where each string is a line in the text with indices 0-length_of_line-1,
this list stores the index of the first character of the given word for each instance of the word, with respect to its line. Use this
list with word_analysis[1])
word_analysis = [
1,
[line_count-1],
[word_i],
[pos_on_line]
]
optional param:
string, choice:
for now word_step is entered only from text_step when the '<' or '>' command
(to step to the previous or the next instance) is entered, but the default choice is now '>'
"""
def word_step(file, line_start_pos, word_analysis, starting_line, choice='>'):
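    #concrete (hypothetical) example of the word_analysis layout: a word occurring
    #twice, on lines 3 and 7, with word indices 10 and 42 in the text, starting at
    #character positions 0 and 15 on those lines, is described by
    #[2, [3, 7], [10, 42], [0, 15]]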
line_nums = word_analysis[LINENUMBERS]
word_i = word_analysis[IWORDINTEXT]
pos_on_line = word_analysis[ICHARONLINE]
#track current line
current_line = starting_line
#track ith instance of word
w_inst_index = 0
#number of word instances
num_word_inst = len(word_i)
#total lines in text
total_lines = len(line_start_pos) - 1
"""
find first instance of word at/after or at/before starting line
"""
#store result of searches (index of a desired word instance)
found = -1
#if the starting line is not the first line and the command is to find the next word instance
if choice == W_NEXT_INST:
if starting_line > 1:
#binary search for the index of the first valid line at or after starting_line
found = binary_min_line_above_search(line_nums, 0, len(line_nums) - 1, starting_line)
#return (0, 0) if the end of the file has been reached (no more instances later in the text) to exit
if found == -1:
print("End of file reached at L" + str(total_lines) + '\n')
return 0, 0
else:
current_line = line_nums[0]
#if the command is to find the previous word instance
elif choice == W_PREV_INST:
if starting_line > 1:
#binary search for the index of the first valid line at or below starting_line
found = binary_max_line_below_search(line_nums, 0, len(line_nums) - 1, starting_line)
#if no earlier word instance is found, move to the first one in the text
if found == -1:
print("no instance earlier, starting at first instance\n")
current_line = line_nums[0]
else:
current_line = line_nums[0]
#set the current word instance index and set the current line to be the instance's line
if found >= 0:
#set the word and line start positions to the beginning of the line holding the word instance
w_inst_index = found
current_line = line_nums[w_inst_index]
################
#True if the latest command is valid
legal_command = True
#command
choice = ''
#exit from the loop when an attempt is made
    #to move beyond the final instance of the word
#(considered the end of the text in word_step)
while w_inst_index < num_word_inst:
#move to correct position in file
file.seek(line_start_pos[current_line - 1], 0)
#print the current line
if current_line + 1 < total_lines:
print(file.readline(), end='')
else:
line = file.readline()
print(line, end='')
if line[len(line) - 1] != '\n':
print('\n', end='')
#display the marker for the current instance of the word, display number of words between current
#and previous instance of the word
if legal_command:
#display the word marker (preceded by proper number of spaces) under the current text line
print(' '*(pos_on_line[w_inst_index]) + '^- w ' + str(word_i[w_inst_index]))
#display the number of words between the current word instance and the previous word instance reached
if choice == W_NEXT_INST:
print("words skipped forwards: " + str(word_i[w_inst_index] - word_i[w_inst_index - 1] - 1))
elif choice == W_PREV_INST:
print("words skipped backwards: " + str(word_i[w_inst_index + 1] - word_i[w_inst_index] - 1))
elif choice == NOMOVE:
print("First instance reached")
legal_command = True
#display current line number
choice = input("L" + str(current_line) + ">> ")
print()
"""
CHECK COMMANDS
"""
#move to next word instance
if choice == W_NEXT_INST:
#if the next word instance index equals
#the number of word instances in the text,
#then the end of the text has been reached, break from loop
if w_inst_index + 1 == num_word_inst:
break
else:
#increment the word instance index
w_inst_index += 1
#move to the next line
current_line = line_nums[w_inst_index]
#move to previous word instance
elif choice == W_PREV_INST:
#if not at the first instance of the word,
#decrement the word instance index
if w_inst_index > 0:
w_inst_index -= 1
#move to the next line
current_line = line_nums[w_inst_index]
#otherwise if the first instance of the word has already been reached,
#reset word index and line start positions to beginning of current line
else:
#dummy command
choice = NOMOVE
#enter, exit word_step and proceed to the next line
elif choice == ENTER:
#return a step of 1 (move to next line) and the current line number
return 1, current_line
#display instructions
elif choice == INSTRUCTIONS:
print_instructions()
else:
#if the command is a valid integer,
#return a step of int(choice), print (choice) lines
try:
return int(choice), current_line
#if exception, the command is illegal,
#continue and prompt for input again
except:
legal_command = False
continue
    #if the end of the file has been reached, return (0, 0) to text_step (0 is the command to exit text_step)
print("End of file reached at L" + str(total_lines) + '\n')
return 0, 0
"""
text_step
step-through lines in the text,
enter a positive number n to display and step forward by n lines
enter a negative number -n to skip to line number |-n|
enter '<' or '>' to skip to the previous or next instance of the chosen word (see word_step() )
(whose word_analysis list is passed to text_step() )
enter "qa" to display the instructions
enter 0 to exit
param:
file,
the file object representing the file to read
list (int) line_start_pos
contains the index of the starting character of each
line with respect to the entire text
list, word_analysis:
information pertaining to a specific word in the text:
word_analysis[0]:
int (number of instances of the given word in the text)
word_analysis[1]:
list of int (for each instance of the given word,
stores--in order--the line numbers on which the word occurred)
word_analysis[2]:
list of int (understand the entire text as a list of words, where word i is the ith word in the text,
this list stores the word index i for each instance of the given word
word_analysis[3]:
list of int (understand the entire text as a list of strings where each string is a line in the text with indices 0-length_of_line-1,
this list stores the index of the first character of the given word for each instance of the word, with respect to its line. Use this
list with word_analysis[1])
word_analysis = [
1,
[line_count-1],
[word_i],
[pos_on_line]
]
"""
def text_step(file, line_start_pos, word_analysis):
#lines displayed in a row
cur_step = 0
#maximum number of steps in a row / alternate command option
step = 1
#line position in text file
line_pos = 0
line_nums = word_analysis[1]
w_inst = word_analysis[2]
pos_on_line = word_analysis[3]
#current line number (displayed)
current_line_index = 0
#display the instructions upon first call of function
if text_step.first_time:
print_instructions()
text_step.first_time = False
total_lines = len(line_start_pos) - 1
#accept commands until the end of the text has been reached
while current_line_index < total_lines:
#move to correct position in file
file.seek(line_start_pos[current_line_index], 0)
"""
testi = 0
while True:
file.seek(line_start_pos[testi], 0)
print(file.read(line_start_pos[testi + 1] - line_start_pos[testi]), end='')
testi += 1
time.sleep(1)
"""
#print the current line
if current_line_index + 1 < total_lines:
print(file.readline(), end='')
else:
line = file.readline()
print(line, end='')
if line[len(line) - 1] != '\n':
print('\n', end='')
#increment the number of lines that have been displayed in a row
cur_step += 1
#increment the line number
current_line_index +=1
#continue to print the next line if there are more lines to display in a row
if cur_step < step:
continue
#otherwise CHECK COMMANDS
else:
#wrap the command prompt and associated checks with a try/except loop to handle illegal commands
while True:
try:
#display the current line number, prompt for the next command
step = input("L" + str(current_line_index) + ">> ")
#reset the lines-displayed-in-a-row counter
cur_step = 0
#move to the next or previous instance of a word
if step == W_NEXT_INST or step == W_PREV_INST:
##########TRY/EXCEPT, enter and exit with return value printouts
try:
#print("ENTERING WORD_STEP")
#call word_step to handle movement to specific instances of words,
#returns a tuple (command, line_number) so text_step can update the current line
#and try the next command
step = word_step(file, line_start_pos, word_analysis, current_line_index, step)
current_line_index = step[1]
#print("EXITING WORD_STEP with current_line = ", current_line_index, "return value = ", step)
except:
print("WORD STEP FAILED")
##########
step = step[0]
#enter, move to the next line and print it
elif step == ENTER:
step = 1
break
#display the instructions
elif step == INSTRUCTIONS:
print_instructions()
continue
#otherwise interpret the command as an integer
else:
step = int(step)
#if the command is a positive number,
#interpret it as the number of lines to print in succession
if step > 0:
break
#if the command is a negative number,
#interpret it as a command to jump to a specific line number |step|
elif step < 0:
current_line_index = -1*(step)-1
step = 1
break
#if the command is 0, quit with a return value of 0
else:
return QUIT
#upon an exception, continue the loop and prompt for a new command
except:
print("ERROR")
continue
    #before returning from the function, display the final line number if the end of the file has been reached
print("End of file reached at L" + str(total_lines) + '\n')
#function attribute,
#True if function call is the first one of the current session
text_step.first_time = True
"""
calc_w_analysis
calculates word frequencies given a text string,
can find additional (optional) information, ignore trivial words, ignore words above a certain length,
other possibilities are a work-in-progress
param: file text_file (text)
    other param: pass 1 or 0 to specify an option, or specify a list of words for a list parameter
clean: clean text/words
max_len: omit words of length greater than max_len
trivial: omit trivial words
trivial_list: specifies a list of trivial words
gender: count words with male or female qualities,
        stores another dictionary in the analysis dictionary returned from the function,
contains counts and percentages for male and female,
gender_terms: specifies a list of gender words
mood: count words with happy or sad qualities
        stores another dictionary in the analysis dictionary returned from the function,
contains counts and percentages for happy and sad
mood_terms: specifies a list of happy or sad words
in_word_punct: list of acceptable in-word punctuation
return:
dictionary analysis_dict of word_analysis dictionary and optional dictionaries
        (access with analysis_dict["word analysis"])
list, word_analysis: (access with analysis_dict["word analysis"])
information pertaining to a specific word in the text:
word_analysis[0]:
int (number of instances of the given word in the text)
word_analysis[1]:
list of int (for each instance of the given word,
stores--in order--the line numbers on which the word occurred)
word_analysis[2]:
list of int (understand the entire text as a list of words, where word i is the ith word in the text,
this list stores the word index i for each instance of the given word
word_analysis[3]:
list of int (understand the entire text as a list of strings where each string is a line in the text with indices 0-length_of_line-1,
this list stores the index of the first character of the given word for each instance of the word, with respect to its line. Use this
list with word_analysis[1])
word_analysis : [
1,
[line_count-1],
[word_i],
[pos_on_line]
]
UNUSED/UNCALCULATED:
word_analysis[4]:
list of int (understand the entire text as a single string with indices 0-length_of_text-1,
this list stores the index of the first character of the given word for each instance of the word)
list, text_as_lines: (access with analysis_dict["text as lines"])
the entire input text divided into lines,
where line i is stored in text_as_lines[i-1]
    word list: (access with analysis_dict["word list"])
        list of all unique words found in the text
other work-in-progress options:
    gender: (access with analysis_dict["gender stat"])
access counts with [m] and [f]
access percentages with [%_m] and [%_f]
access percent of words identifiable as masculine or feminine with [%_indentifiable]
mood: (access with analysis_dict["mood stat"])
access counts with [:D] and [D:]
access percentages with [%_:D] and [%_D:]
access percent of words identifiable as happy or sad with [%_indentifiable]
encoding: the character encoding to use
"""
def calc_w_analysis(
text_file,
clean=0,
max_len=0,
trivial=1, trivial_list=[],
gender=0, gender_terms=[],
mood=0, mood_terms=[],
in_word_punct=["'", '-', u"’"],
eq_words={"can't":["can", "not"], "cannot":["can", "not"], "won't":["will", "not"], "shouldn't":["should", "not"]},
encoding='utf-8'
):
#dictionary of lists and dictionaries to return
analysis_dict = {}
#word analysis dictionary of word count and lists (variables declared at top of file simplify access for user)
word_analysis = {}
#word list
word_list = []
#dictionary of gender word counts (-1 counts if unused)
gender_stat = {'m':-1, 'f':-1}
#dictionary of mood stat counts (-1 counts if unused)
mood_stat = {':D':-1, 'D:':-1}
#save reference to word_list.append
word_list_append = word_list.append
#save reference to str.lower()
lower_ = str.lower
#save reference to str.isalpha()
isalpha_ = str.isalpha
#create a new list to store each character to be combined into a word
new_word = []
#save reference to new_word.append
new_word_append_ = new_word.append
#create a new list to store each character to be combined into a line in the text
chars_in_this_line = []
chars_in_this_line_append_ = chars_in_this_line.append
#store the index of the start of each line in the text (0 for the beginning of the file)
line_start_pos = [0]
line_start_pos_ = line_start_pos.append
#given text, create a word frequency dictionary of words in all_text stripped of invalid punctuation,
#records word positions, line positions, number of words between instances of a given word
#for use with text_step and word_step
if clean:
#track the number of characters reached so far with respect to the current line
char_count_line = -1
#counter tracks whether multiple punctuation marks appear in a row,
#used to allow words with interior punctuation (e.g. hyphen: good-natured)
#but does not allow words with multiple punctuation or non-alphabetical characters in a row
double_punct = 0
#marks a word as alphabetical
has_alpha = False
        #save a buffer of punctuation marks to allow for in-word punctuation
#without adding punctuation immediately after the word
punct_buffer = []
#save reference to punct_buffer.append
punct_buffer_append_ = punct_buffer.append
#count the line number according to '\n' characters in text
line_count = 1
#count the number of words found
word_i = 0
#word start index with respect to lines
pos_on_line = 0
#index with respect to text
pos_in_text = -1
ch_encode_ = str.encode
in_bytes = 1
#read the first line
line = text_file.readline()
#while the file has a line to read, character-by-character
while line:
#iterate through each character in the line
for c in range(0, len(line)):
char = line[c]
pos_in_text += in_bytes
char_count_line += in_bytes
in_bytes = len(ch_encode_(char, encoding))
if char == '\r':
continue
if char == '\n':
#reset the number of characters reached with respect to the line
char_count_line = -1
#increment the line count
line_count += 1
#record the starting position of the next line
line_start_pos_(pos_in_text + 1)
#if the char is not alphabetic,
#continue to the next character if the current word under construction
#has no alphabetic characters (words must begin with an alphabetic character)
if has_alpha == False and isalpha_(char) == False:
continue
#treat alphabetic characters
if isalpha_(char):
#if the current word under construction
#has no alphabetical characters so far (is empty),
#mark the starting position of the word, mark the word as alphabetic
if has_alpha == False:
pos_on_line = char_count_line
has_alpha = True
#if characters are waiting in the punctuation buffer,
#first append them to the word under construction, then clear the buffer
if len(punct_buffer) > 0:
new_word_append_(''.join(punct_buffer))
del punct_buffer[:]
#append the current alphabetic character to the word under construction
new_word_append_(lower_(char))
#reset the punctuation-in-a-row counter to 0 since the alphabetic character ends the streak
double_punct = 0
#treat valid punctuation/characters
elif char in in_word_punct:
#if the punctuation-in-a-row counter is 0,
#append the current punctuation/valid non-alphabetic mark to the punctuation buffer
#and increment the punctuation-in-a-row counter
#-punctuation is not added immediately in case, for example,
#the current character is a hyphen, which can be safely added in the middle of a word,
#but cannot be added at the end of one.
#The hyphen is not added to the end of a word, as the word is considered complete before it can be (incorrectly) added.
if double_punct == 0:
punct_buffer_append_(char)
double_punct += 1
#the current word has been completed if:
#the punctuation-in-a-row counter is set to 2 (words cannot have multiple punctuation marks in a row)
#or the character is not alphabetic or an otherwise valid punctuation mark or character
if double_punct == 2 or is_valid_char(char, in_word_punct) == False:
#clear the punctuation buffer
del punct_buffer[:]
#reset the punctuation-in-a-row count
double_punct = 0
#reset has_alpha to prepare for the next round of valid word-checking
has_alpha = False
#(an additional check) make sure that the new word has a valid length
if len(new_word) > 0:
#a new word has been completed, increment the word counter
word_i += 1
#saved the characters in new_word as a joined_word
joined_word = ''.join(new_word)
#if the new word has not been added to the dictionary and the word is alphabetical,
#add an entry for the word in the word list and in the dictionary with a count of 1
if joined_word not in word_analysis:
#integer representing the total word count for the given word,
#list of line numbers on which the word appears,
#list of the positions of each instance of the word with respect to the list of words in the entire text
#list of the positions of the first char for each instance of the word with respect to the entire text,
#list of the positions of the first char for each instance of the word with respect to the current line in the text,
#add an entry for the joined_word
                            if char == '\r' or char == '\n':
#if the current character is a new-line character, the line-count is off by +1
word_analysis[joined_word] = [
1,
[line_count-1],
[word_i],
[pos_on_line]#,
#[pos_in_text]
]
else:
word_analysis[joined_word] = [
1,
[line_count],
[word_i],
[pos_on_line]#,
#[pos_in_text]
]
#add new word to word list
word_list_append(joined_word)
#else if the new word has already been added to the dictionary,
#increment the frequency count and other information for that word
else:
#access the in-progress word data
word_data = word_analysis[joined_word]
#increment the word frequency count
word_data[WORDCOUNT] += 1
#append the next valid line number
                            if char == '\r' or char == '\n':
word_data[LINENUMBERS].append(line_count-1)
else:
word_data[LINENUMBERS].append(line_count)
#append the ith word value for the current instance of the word
word_data[IWORDINTEXT].append(word_i)
#append the starting position/index of the current word instance with respect to the current line
word_data[ICHARONLINE].append(pos_on_line)
#UNUSED
#append the starting position/index of the current word instance with respect to the whole text
#word_data[ICHARINTEXT].append(pos_in_text)
#reset the word string
del new_word[:]
#read a line from the text
line = text_file.readline()
#append the first position beyond the end of the text
line_start_pos_(pos_in_text + in_bytes)
#print(word_analysis)
#print('cleaned\n')
#else create a word frequency dictionary of words in all_text including punctuation
else:
#hasalpha_w is true if the current word under construction has an alphabetical character
#this prevents non-words such as a hyphen '-' from being recognized as words
hasalpha_w = False
#save reference to str.isspace()
_is_space = str.isspace
        #read the entire input text into a single string for character iteration
        all_text = text_file.read()
        #iterate through each character in the input text
        for char in all_text:
#if the current word under construction has no alphabetical characters
#and the character is alphabetical, set hasalpha_w to True
if hasalpha_w == False and isalpha_(char):
hasalpha_w = True
#if the character has no whitespace and is not the empty string,
#add the character to the word under construction
if _is_space(char) == False and char != '':
new_word_append_(lower_(char))
#else check whether the current string is a completed word
else:
#check the current string only if it has at least one alphabetical character
if hasalpha_w:
joined_word = ''.join(new_word)
#if the new word has not been added to the dictionary,
#add an entry for the word in the word list and in the dictionary with a count of 1
if joined_word not in word_analysis:
word_list_append(joined_word)
word_analysis[joined_word] = [1]
#else if the new word has already been added to the dictionary,
#increment the frequency count for that word
elif joined_word in word_analysis:
word_analysis[joined_word][0] += 1
#reset the word string
del new_word[:]
hasalpha_w = False
#print(word_analysis)
#print('not cleaned\n')
####################################
#if no words, quit
if len(word_analysis) == 0:
return analysis_dict
#if a maximum word length is set,
#overwrite the word_analysis dictionary with a dictionary that
#omits words of length greater than max_len
if max_len > 0:
temp_dict = {}
#iterate through the words and copy only entries of valid length
for key in word_analysis:
if len(lower_(key)) <= max_len:
temp_dict[key] = word_analysis[key]
#overwrite the word_analysis dictionary
word_analysis = temp_dict
#print(word_analysis)
#print('maxlen-ed\n')
#if trivial words are to be omitted
#overwrite the word_analysis dictionary with a dictionary that
#omits trivial words, where trivial words are defined in the input list trivial_list
#(or the default list if trivial_list is empty)
if trivial == 0:
if len(trivial_list) == 0:
trivial_list = ['a', 'an', 'the', 'it', 'its', "it's", 'is', 'I', 'you', 'he', 'she', 'we', 'our']
temp_dict = {}
#iterate through the words and copy only non-trivial entries
for key in word_analysis:
if key not in trivial_list:
temp_dict[key] = word_analysis[key]
#overwrite the word_analysis dictionary
word_analysis = temp_dict
#print(word_analysis)
#print('detrivialized\n')
#if gender terms are to be counted:
if gender:
gender_stat['m'] = 0
gender_stat['f'] = 0
gender = ''
#if no list of gender terms specified, the default list is used
if len(gender_terms) == 0:
gender_terms = {
"he":'m', "him":'m', "his":'m', "gentleman":'m',
"she":'f', "her":'f', "hers":'f', "lady":'f'
}
#iterate through the keys in the word frequency dictionary,
#increment the count for each masculine or feminine word
for key in word_analysis:
if key in gender_terms:
gender = gender_terms[key]
if gender == 'm':
gender_stat['m'] += 1
else:
gender_stat['f'] += 1
#percent of text identified as masculine
gender_stat['%_m'] = (gender_stat['m'] / len(word_analysis))*100
#percent of text identified as feminine
gender_stat['%_f'] = (gender_stat['f'] / len(word_analysis))*100
#percent of text identified as either masculine or feminine
gender_stat['%_indentifiable'] = ((gender_stat['m'] + gender_stat['f']) / len(word_analysis))*100
#print(gender_stat)
        #print('gendered\n')
if mood:
mood_stat[':D'] = 0
mood_stat['D:'] = 0
mood = ''
#if no list of mood terms specified, the default list is used
if len(mood_terms) == 0:
mood_terms = {
"yay":':D', "wonderful":':D', "splendid":':D', "lovely":':D',
"aw":'D:', "terrible":'D:', "horrific":'D:', "unfortunately":'D:'
}
#iterate through the keys in the word frequency dictionary,
#increment the count for each happy or sad word
for key in word_analysis:
if key in mood_terms:
mood = mood_terms[key]
if mood == ':D':
mood_stat[':D'] += 1
else:
mood_stat['D:'] += 1
#percent of text identified as happy
mood_stat['%_:D'] = (mood_stat[':D'] / len(word_analysis))*100
#percent of text identified as sad
mood_stat['%_D:'] = (mood_stat['D:'] / len(word_analysis))*100
#percent of text identified as either happy or sad
mood_stat['%_indentifiable'] = ((mood_stat[':D'] + mood_stat['D:']) / len(word_analysis))*100
#print(mood_stat)
#print('mooded\n')
#add specific dictionaries to the analysis dictionary for output
#word analysis
analysis_dict["word analysis"] = word_analysis
#text as indices of characters, positions of beginning of line
    #print(line_start_pos)
analysis_dict["line start pos"] = line_start_pos
#word list
analysis_dict["word list"] = word_list
#gender statistics
analysis_dict["gender stat"] = gender_stat
#mood statistics
analysis_dict["mood stat"] = mood_stat
#return the analysis dictionary
return analysis_dict
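#The helper below is an illustrative sketch only (it is not called anywhere in
#this program): it shows one way to read the dictionary returned by
#calc_w_analysis for a cleaned text. It assumes the index constants WORDCOUNT,
#LINENUMBERS, IWORDINTEXT and ICHARONLINE declared at the top of this file.
def example_print_word_summary(analysis_dict, word):
    #per-word data is stored under the "word analysis" key
    word_analysis = analysis_dict["word analysis"]
    if word not in word_analysis:
        print(word + " was not found in the text")
        return
    word_data = word_analysis[word]
    #total number of occurrences of the word
    print("count: " + str(word_data[WORDCOUNT]))
    #line numbers on which each occurrence appears
    print("line numbers: " + str(word_data[LINENUMBERS]))
    #index of each occurrence within the text viewed as a list of words
    print("word indices: " + str(word_data[IWORDINTEXT]))
    #starting character position of each occurrence on its own line
    print("positions on line: " + str(word_data[ICHARONLINE]))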
"""
configure
choose settings for analysis
returns the list of choices
"""
def configure():
#list of option strings for prompt, answers to questions stored as 1 or 0 in choices_list
prompt_list = [
"Clean text? (enter or 1/0) ",
"Specify a maximum word length? (enter 0 for no limit or a positive number) ",
"Include trivial words? (enter or 1/0) ",
"Analyze gender? (enter or 1/0) ",
"Analyze mood? (enter or 1/0) "
]
#list of default on/off choices for calc_w_frequency function
choices_list = [0, 0, 0, 0, 0]
#cycle through options in prompt,
    #set all settings by updating the values in choices_list according to the user's choices
count = 0
for option in prompt_list:
valid_choice = False
while valid_choice == False:
choice = input(option).lower()
if choice == ENTER:
choices_list[count] = 1
valid_choice = True
elif choice.isnumeric():
choices_list[count] = int(choice)
valid_choice = True
elif choice == '0':
valid_choice = True
else:
print("Please select a valid option\n")
count += 1
#return the updated list of choices
return choices_list
"""""""""""""""""""""
MAIN
"""""""""""""""""""""
def main():
"""
USER OPTIONS:
set directory, function options, and file - default options available
"""
choices_list = []
input_incomplete = True
#confirm or set the working directory
while input_incomplete:
option = input("Specify a working directory. Press enter for the default directory: ")
if option == ENTER:
try:
os.chdir(os.path.dirname(os.path.realpath(__file__)))
except:
option = input("ERROR, would you like to retry? (1 or enter/0): ")
if option == ENTER or option == '1':
continue
sys.exit("quitting")
else:
try:
os.chdir(option)
except:
print("Directory invalid, please select the default working directory or choose a valid directory\n")
continue
print(os.getcwd())
#set text analysis options to default or configure them
while input_incomplete:
option = input("Set all to default? (enter or 1/0): ")
if option == ENTER or option == '1':
choices_list.extend([1,0,1,1,1])
elif option == '0':
choices_list.extend(configure())
else:
print("Please choose a valid option.\n")
continue
input_incomplete = False
"""
FILE SELECTION, OPEN FILE FOR READING
"""
    #stores the input text file
    text_file = ''
    #default character encoding, used when the default file is selected below
    encoding_ = "utf-8"
#option/loop checks
try_new_file = False
choose_file = True
#file selection prompt
option = input("Enter '1' or the enter key to choose a file,\notherwise enter '0' or another key to choose the default file (must be named 'default.txt'): ")
#default file selection
if option != ENTER and option != '1':
try:
text_file = open("default.txt", 'r')
choose_file = False
except:
print("Unable to open the default file, please specify a file:")
choose_file = True
time.sleep(1)
else:
try_new_file = True
if choose_file:
#display files in current directory
print("\nFILES:\n")
file_options = next(os.walk(os.getcwd()))[2]
count = 0
for file_name in file_options:
print(str(count) + ' ' + file_name)
count += 1
print("\n")
#try to open a file (specified with an index) and its encoding
while choose_file:
if try_new_file:
option = ''
else:
option = input("Would you like to try a different file? (enter or 1/0 or any other entry): ")
if option == '' or option == '1':
option = input("Enter the index of a file in the current working directory: ")
encoding_ = input("Enter the encoding of the file, (enter or 1 for ascii default): ")
if encoding_ == '' or encoding_ == '1':
encoding_ = "ascii"
elif encoding_ == '2':
encoding_ = "utf-8"
elif encoding_ == '3':
encoding_ = "mac-roman"
try:
text_file = open(file_options[int(option)], 'r', encoding=encoding_, newline='')
except:
print("ERROR: unable to open the file\n")
else:
choose_file = False
else:
sys.exit("quitting")
try_new_file = False
#try to read the text file
#(call call_calc_w_analysis()) and save the analysis list that it returns
try:
analysis_dict = calc_w_analysis(text_file, choices_list[0], choices_list[1], choices_list[2], [], choices_list[3], [], choices_list[4], [], encoding_)
except:
sys.exit("ERROR in calc_w_analysis")
"""
OUTPUT DISPLAY
"""
if len(analysis_dict["word list"]) == 0:
print("Nothing\n")
sys.exit(0)
#print(analysis_dict)
print("////All Words in List////\n\n")
all_words = analysis_dict["word list"]
#track the longest word
w_longest = []
len_longest = 0
for w in all_words:
if len(w) > len_longest:
del w_longest[:]
w_longest.append(w)
len_longest = len(w)
elif len(w) == len_longest:
w_longest.append(w)
print(w)
print('\n\n')
print("////All Word Counts////\n\n")
word_analysis = analysis_dict["word analysis"]
count = 0
line_number = 0
format_start = '{:<' + str(len_longest) + '} {:>'
#format words and counts nicely
for word in sorted(word_analysis.keys()):
count = word_analysis[word][WORDCOUNT]
print(str( format_start + str(len(str(count))) + '}').format(word, count))
print("\nNumber of unique words found: " + str(len(all_words)) + '\n')
if len(w_longest) > 1:
print("Longest words: ",w_longest, " character length:",str(len_longest), "\n\n")
else:
print("Longest word: ",w_longest[0], " character length:",str(len_longest), "\n\n")
print("-------------------------------------------------------------------------------")
if choices_list[3] > 0:
print("////Gender Information////\n\n")
gender_stat = analysis_dict["gender stat"]
print("number of words identified as masculine: " + str(gender_stat['m']) + '\n')
print("percent of text identified as masculine: " + str(gender_stat['%_m']) + '\n')
print("number of words identified as feminine: " + str(gender_stat['f']) + '\n')
print("percent of text identified as feminine: " + str(gender_stat['%_f']) + '\n')
print("percent of text identified as either masculine or feminine: " + str(gender_stat['%_indentifiable']) + '\n\n')
if choices_list[4] > 0:
print("////Mood Information////\n\n")
mood_stat = analysis_dict["mood stat"]
print("number of words identified as happy: " + str(mood_stat[':D']) + '\n')
print("percent of text identified as happy: " + str(mood_stat['%_:D']) + '\n')
print("number of words identified as sad: " + str(mood_stat['D:']) + '\n')
print("percent of text identified as sad: " + str(mood_stat['%_D:']) + '\n')
print("percent of text identified as either happy or sad: " + str(mood_stat['%_indentifiable']) + '\n\n')
#step through the text and between instances of a chosen word that appears in the text
#(currently only with a cleaned text)
if choices_list[0] == 1:
print("////Step through Text////\n")
prompt = True
word_analysis = analysis_dict["word analysis"]
line_start_pos = analysis_dict["line start pos"]
word_list = analysis_dict["word list"]
while prompt:
#print(word_counts)
word = input("Please select a word (enter 0 to quit): ").lower()
if word == '0':
prompt = False
elif word in word_list:
text_step(text_file, line_start_pos, word_analysis[word])
else:
print("Error: word cannot be found\n")
text_file.close()
#main function
if __name__ == "__main__":
try:
main()
#allow control+d
except EOFError:
pass
| {
"content_hash": "fd11dd3eba70dc5deb18d6427d379945",
"timestamp": "",
"source": "github",
"line_count": 1213,
"max_line_length": 161,
"avg_line_length": 41.37757625721352,
"alnum_prop": 0.551035046123807,
"repo_name": "KTRosenberg/Text-Analyzer-and-Step-through-Viewer",
"id": "fc7452c926b2649417315e8778d16ea2b5a36246",
"size": "50217",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "old_summer_2016/v3_main_Text_Analyzer_Step_through_Viewer.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "964456"
}
],
"symlink_target": ""
} |
from os.path import dirname, isdir, isfile, join
from os import makedirs
from tempfile import mkdtemp
from mock import Mock, patch
from jig.tests.testcase import (
CommandTestCase, PluginTestCase,
cd_gitrepo, cwd_bounce, result_with_hint)
from jig.tests.mocks import MockPlugin
from jig.tools import NumberedDirectoriesToGit
from jig.exc import ForcedExit
from jig.plugins import (
set_jigconfig, get_jigconfig, create_plugin,
PluginManager)
from jig.plugins.testrunner import (
Expectation, SuccessResult,
FailureResult, REPORTER_HORIZONTAL_DIVIDER)
from jig.gitutils.remote import clone
from jig.commands import plugin
from jig.commands.hints import (
FORK_PROJECT_GITHUB, NO_PLUGINS_INSTALLED, USE_RUNNOW, INVALID_RANGE)
class TestPluginCommand(CommandTestCase, PluginTestCase):
"""
Test the plugin command.
"""
command = plugin.Command
def setUp(self):
super(TestPluginCommand, self).setUp()
self.plugindir = mkdtemp()
def _add_plugin(self, plugin_dir):
"""
Adds a plugin to the jig initialized Git repository.
"""
config = get_jigconfig(self.gitrepodir)
pm = PluginManager(config)
pm.add(plugin_dir)
set_jigconfig(self.gitrepodir, pm.config)
@cd_gitrepo
def test_list_no_plugins(self):
"""
No plugins are installed and the list is empty.
"""
# Without the path argument, this should default to the current working
# directory.
self.run_command('list')
self.assertResults(
result_with_hint(
u'No plugins installed.',
NO_PLUGINS_INSTALLED),
self.output)
def test_list_plugins_different_bundle(self):
"""
Lists plugins correctly if they are in different bundles.
"""
self._add_plugin(create_plugin(
self.plugindir, template='python',
bundle='test01', name='plugin01'))
self._add_plugin(create_plugin(
self.plugindir, template='python',
bundle='test02', name='plugin02'))
self._add_plugin(create_plugin(
self.plugindir, template='python',
bundle='test03', name='plugin03'))
self.run_command('list -r {0}'.format(self.gitrepodir))
self.assertResults(result_with_hint(u'''
Installed plugins
Plugin name Bundle name
plugin01................. test01
plugin02................. test02
plugin03................. test03
''', USE_RUNNOW), self.output)
def test_list_plugins_same_bundle(self):
"""
Lists plugins correctly if they are in the same bundle.
"""
self._add_plugin(create_plugin(
self.plugindir, template='python',
bundle='test', name='plugin01'))
self._add_plugin(create_plugin(
self.plugindir, template='python',
bundle='test', name='plugin02'))
self._add_plugin(create_plugin(
self.plugindir, template='python',
bundle='test', name='plugin03'))
self.run_command('list -r {0}'.format(self.gitrepodir))
self.assertResults(result_with_hint(u'''
Installed plugins
Plugin name Bundle name
plugin01................. test
plugin02................. test
plugin03................. test
''', USE_RUNNOW), self.output)
def test_lists_alphabetically(self):
"""
Will list bundles and plugin names alphabetically.
"""
# Add these in reverse order of alphabetical
self._add_plugin(create_plugin(
self.plugindir, template='python',
bundle='c', name='c'))
self._add_plugin(create_plugin(
self.plugindir, template='python',
bundle='b', name='b'))
self._add_plugin(create_plugin(
self.plugindir, template='python',
bundle='a', name='a'))
self.run_command('list -r {0}'.format(self.gitrepodir))
self.assertResults(result_with_hint(u'''
Installed plugins
Plugin name Bundle name
a........................ a
b........................ b
c........................ c
''', USE_RUNNOW), self.output)
@cd_gitrepo
def test_add_bad_plugin(self):
"""
Only adds a plugin if it's valid.
"""
# This is not a valid plugin directory, it's empty
tmp_dir = mkdtemp()
with self.assertRaises(ForcedExit):
self.run_command('add {0}'.format(tmp_dir))
self.assertRegexpMatches(
self.error,
u'The plugin file (.+)config.cfg is missing')
@cd_gitrepo
def test_add_plugin(self):
"""
Adds a valid plugin.
"""
plugin_dir = create_plugin(
self.plugindir, template='python',
bundle='a', name='a')
# We are going to test whether it defaults --gitrepo to cwd
self.run_command('add {0}'.format(plugin_dir))
config = get_jigconfig(self.gitrepodir)
# The config now contains our section
self.assertTrue(config.has_section('plugin:a:a'))
self.assertResults(
u'''
Added plugin a in bundle a to the repository.
Run the plugins in the current repository with this command:
$ jig runnow
Jig works off of your staged files in the Git repository index.
You place things in the index with `git add`. You will need to stage
some files before you can run Jig.''',
self.output)
def test_add_plugin_to_git_repo(self):
"""
Add a plugin when not inside the Git repository.
"""
plugin_dir = create_plugin(
self.plugindir, template='python',
bundle='a', name='a')
self.run_command('add --gitrepo {0} {1}'.format(
self.gitrepodir, plugin_dir))
self.assertResults(
u'''
Added plugin a in bundle a to the repository.
Run the plugins in the current repository with this command:
$ jig runnow
Jig works off of your staged files in the Git repository index.
You place things in the index with `git add`. You will need to stage
some files before you can run Jig.''',
self.output)
def test_add_plugin_by_url(self):
"""
Add a plugin from a Git URL.
"""
def clone_fake(plugin, to_dir, branch=None):
makedirs(to_dir)
create_plugin(
to_dir, template='python',
bundle='a', name='a')
with patch('jig.commands.base.clone') as c:
c.side_effect = clone_fake
self.run_command(
'add --gitrepo {0} http://repo'.format(
self.gitrepodir))
# And clone was called with our URL and would have performed the
# operation in our test directory.
self.assertEqual('http://repo', c.call_args[0][0])
self.assertIn(
'{0}/.jig/plugins/'.format(self.gitrepodir),
c.call_args[0][1])
self.assertEqual(None, c.call_args[0][2])
def test_add_plugin_by_url_with_branch(self):
"""
Add a plugin from a Git URL, targeting a specific branch.
"""
def clone_fake(plugin, to_dir, branch=None):
makedirs(to_dir)
create_plugin(
to_dir, template='python',
bundle='a', name='a')
with patch('jig.commands.base.clone') as c:
c.side_effect = clone_fake
self.run_command(
'add --gitrepo {0} http://url.com/repo@alternate'.format(
self.gitrepodir))
# And the branch name was passed to clone
self.assertEqual('alternate', c.call_args[0][2])
def test_update_existing_plugins(self):
"""
Can update an existing plugin.
"""
# Make our remote repository so we have something to pull from
origin_repo = mkdtemp()
root_commit_dir = join(origin_repo, '01')
makedirs(root_commit_dir)
# Create a plugin in the repo
create_plugin(
root_commit_dir, template='python',
bundle='a', name='a')
create_plugin(
root_commit_dir, template='python',
bundle='b', name='b')
# This is the directory we will clone
ngd = NumberedDirectoriesToGit(origin_repo)
dir_to_clone = ngd.repo.working_dir
# This is a trick, we give it the dir_to_clone when asked to install it
def clone_local(plugin, to_dir, branch):
# Instead of jumping on the Internet to clone this, we will use the
# local numbered directory repository we setup above. This will
# allow our update to occur with a git pull and avoid network
# traffic which is always faster for tests.
clone(dir_to_clone, to_dir)
        # First thing is to install the plugin
with patch('jig.commands.base.clone') as c:
c.side_effect = clone_local
self.run_command(
'add --gitrepo {0} http://repo'.format(
self.gitrepodir))
self.run_command(
'update --gitrepo {0}'.format(
self.gitrepodir))
self.assertResults(
"""
Updating plugins
Plugin a, b in bundle a, b
Already up-to-date.""",
self.output)
def test_update_existing_plugins_no_plugins(self):
"""
If an attempt is made to update plugins when none are installed.
"""
self.run_command('update --gitrepo {0}'.format(
self.gitrepodir))
self.assertResults("No plugins to update.", self.output)
@cd_gitrepo
def test_remove_bad_plugin(self):
"""
Only removes a plugin that has been installed.
"""
with self.assertRaises(ForcedExit):
self.run_command('remove a')
self.assertEqual(
u'This plugin does not exist.\n',
self.error)
@cd_gitrepo
def test_remove_plugin(self):
"""
Removes an installed plugin.
"""
plugin_dir = create_plugin(
self.plugindir, template='python',
bundle='bundle', name='name')
self.run_command('add -r {0} {1}'.format(self.gitrepodir, plugin_dir))
# Remove with the --gitrepo defaulting to cwd again
self.run_command('remove name bundle')
config = get_jigconfig(self.gitrepodir)
# It should be removed from our config now
self.assertFalse(config.has_section('plugin:bundle:name'))
self.assertEqual(
u'Removed plugin name\n',
self.output)
@cd_gitrepo
def test_remove_plugin_guesses_bundle(self):
"""
Removes an installed plugin.
"""
plugin_dir = create_plugin(
self.plugindir, template='python',
bundle='bundle', name='name')
self.run_command('add -r {0} {1}'.format(self.gitrepodir, plugin_dir))
# Leave the bundle name off so it can be guessed.
self.run_command('remove name')
self.assertEqual(
u'Removed plugin name\n',
self.output)
def test_remove_plugin_same_name(self):
"""
Exits because more than one plugin has the same name.
If the bundle is not specified and more than one plugin has the same
name, we can't assume which plugin they wish to remove. Error out and
suggest they use the list command.
"""
plugin_dir1 = create_plugin(
mkdtemp(), template='python',
bundle='bundle1', name='name')
plugin_dir2 = create_plugin(
mkdtemp(), template='python',
bundle='bundle2', name='name')
self.run_command('add -r {0} {1}'.format(self.gitrepodir, plugin_dir1))
self.run_command('add -r {0} {1}'.format(self.gitrepodir, plugin_dir2))
with self.assertRaises(ForcedExit):
# Leave the bundle out, this should make our command error out
self.run_command('remove -r {0} name'.format(self.gitrepodir))
self.assertEqual(
u'More than one plugin has the name of name. Use the list '
u'command to see installed plugins.\n',
self.error)
def test_create_with_bad_language(self):
"""
Cannot create a plugin if the language is unavailable
"""
with self.assertRaises(ForcedExit):
# We just created a plugin in this directory, so it should fail
self.run_command('create -l php name bundle')
self.assertResults(
result_with_hint(
u'Language php is not supported yet.',
FORK_PROJECT_GITHUB),
self.error)
def test_create_plugin_already_exists(self):
"""
Cannot create a plugin if the destination already exists.
"""
save_dir = dirname(create_plugin(
mkdtemp(), template='python',
bundle='bundle', name='name'))
with self.assertRaises(ForcedExit):
# We just created a plugin in this directory, so it should fail
self.run_command('create --dir {0} name bundle'.format(save_dir))
self.assertEqual(
u'A plugin with this name already exists in this '
u'directory: {0}.\n'.format(save_dir),
self.error)
def test_create_plugin(self):
"""
Can create a plugin.
"""
with cwd_bounce(self.plugindir):
self.run_command('create name bundle')
self.assertTrue(isdir(join(self.plugindir, 'name')))
self.assertTrue(isfile(join(self.plugindir, 'name', 'config.cfg')))
def test_create_plugin_in_directory(self):
"""
Creates a plugin in a given directory.
"""
self.run_command('create --dir {0} name bundle'.format(self.plugindir))
self.assertTrue(isdir(join(self.plugindir, 'name')))
def test_create_plugin_defaults_python(self):
"""
Creates a plugin with the default language of python.
"""
self.run_command(
'create --dir {0} --language python name bundle'.format(
self.plugindir))
with open(join(self.plugindir, 'name', 'pre-commit')) as fh:
pre_commit = fh.readlines()
self.assertEqual('#!/usr/bin/env python\n', pre_commit[0])
def test_plugin_tests_none_found(self):
"""
Run tests for a plugin where no tests are found.
"""
plugin_dir = create_plugin(
mkdtemp(), template='python',
bundle='bundle', name='name')
with self.assertRaises(ForcedExit):
self.run_command('test {0}'.format(plugin_dir))
self.assertIn('Could not find any tests:', self.error)
self.assertIn('{0}/tests'.format(plugin_dir), self.error)
def test_formats_results(self):
"""
Will return test results.
"""
plugin_dir = create_plugin(
mkdtemp(), template='python',
bundle='bundle', name='name')
expectation = Expectation((1, 2), None, u'aaa')
results = [
SuccessResult(
actual=u'aaa', expectation=expectation,
plugin=MockPlugin())]
with patch('jig.commands.plugin.PluginTestRunner') as ptr:
ptr.return_value = Mock()
ptr.return_value.run = Mock(return_value=results)
self.run_command('test {0}'.format(plugin_dir))
self.assertResults(
u'''
01 – 02 Pass
Pass 1, Fail 0''', self.output)
def test_runs_specific_test(self):
"""
Will run a specific test.
"""
plugin_dir = create_plugin(
mkdtemp(), template='python',
bundle='bundle', name='name')
with patch('jig.commands.plugin.PluginTestRunner') as ptr:
ptr.return_value = Mock()
ptr.return_value.run = Mock(return_value=[])
self.run_command('test -r 4..5 {0}'.format(plugin_dir))
ptr.return_value.run.assert_called_with(test_range=[(4, 5)])
def test_handles_range_error(self):
"""
If an improper range is given, provides a helpful error message.
"""
plugin_dir = create_plugin(
mkdtemp(), template='python',
bundle='bundle', name='name')
with self.assertRaises(ForcedExit):
# Bad range "a.b"
self.run_command('test -r a.b {0}'.format(plugin_dir))
self.assertResults(
result_with_hint(
u'a.b is an invalid numbered test range',
INVALID_RANGE),
self.error)
def test_plugin_test_failure(self):
"""
Fails with exit code other than 0.
"""
plugin_dir = create_plugin(
mkdtemp(), template='python',
bundle='bundle', name='name')
expectation = Expectation((1, 2), None, u'bbb')
results = [
FailureResult(
actual=u'aaa', expectation=expectation,
plugin=MockPlugin())]
with patch('jig.commands.plugin.PluginTestRunner') as ptr:
ptr.return_value = Mock()
ptr.return_value.run = Mock(return_value=results)
with self.assertRaises(ForcedExit):
self.run_command('test {0}'.format(plugin_dir))
self.assertResults(
u'''
01 – 02 Fail
Actual
{0}
aaa
Diff
{0}
- bbb
+ aaa
Pass 0, Fail 1'''.format(REPORTER_HORIZONTAL_DIVIDER),
self.error)
def test_plugin_defaults_to_cwd(self):
"""
Running the plugins tests defaults to the current working directory.
"""
plugin_dir = create_plugin(
mkdtemp(), template='python',
bundle='bundle', name='name')
expectation = Expectation((1, 2), None, u'aaa')
results = [
SuccessResult(
actual=u'aaa', expectation=expectation,
plugin=MockPlugin())]
with patch('jig.commands.plugin.PluginTestRunner') as ptr:
ptr.return_value = Mock()
ptr.return_value.run = Mock(return_value=results)
with cwd_bounce(plugin_dir):
self.run_command('test')
self.assertResults(
u'''
01 – 02 Pass
Pass 1, Fail 0''', self.output)
def test_formats_results_verbose(self):
"""
Will return test results with stdin and stdout.
"""
plugin_dir = create_plugin(
mkdtemp(), template='python',
bundle='bundle', name='name')
expectation = Expectation((1, 2), None, u'aaa')
results = [
SuccessResult(
actual=u'aaa', expectation=expectation,
plugin=MockPlugin(), stdin='a\n', stdout='b\n')]
with patch('jig.commands.plugin.PluginTestRunner') as ptr:
ptr.return_value = Mock()
ptr.return_value.run = Mock(return_value=results)
self.run_command('test -v {0}'.format(plugin_dir))
self.assertResults(
u'''
01 – 02 Pass
stdin (sent to the plugin)
a
stdout (received from the plugin)
b
{0}
Pass 1, Fail 0'''.format(REPORTER_HORIZONTAL_DIVIDER),
self.output)
| {
"content_hash": "56aa228273a2c47cf6f98e0a299665fc",
"timestamp": "",
"source": "github",
"line_count": 626,
"max_line_length": 80,
"avg_line_length": 31.98083067092652,
"alnum_prop": 0.5548451548451548,
"repo_name": "robmadole/jig",
"id": "63368b12aac420f637f726903abeccd34cb58c66",
"size": "20043",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/jig/commands/tests/test_plugin.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "141"
},
{
"name": "Python",
"bytes": "453168"
},
{
"name": "Shell",
"bytes": "1153"
}
],
"symlink_target": ""
} |
from proteus import *
from redist_p import *
from dambreak_Ubbink_coarse import *
tolFac = 0.0
nl_atol_res = rd_nl_atol_res
linTolFac = 0.01
l_atol_res = 0.01*rd_nl_atol_res
useEisenstatWalker = False
if redist_Newton:
timeIntegration = NoIntegration
stepController = Newton_controller
maxNonlinearIts = 50
maxLineSearches = 0
nonlinearSolverConvergenceTest = 'rits'
levelNonlinearSolverConvergenceTest = 'rits'
linearSolverConvergenceTest = 'r-true'
else:
timeIntegration = BackwardEuler_cfl
stepController = RDLS.PsiTC
runCFL=2.0
psitc['nStepsForce']=3
psitc['nStepsMax']=50
psitc['reduceRatio']=10.0
psitc['startRatio']=1.0
rtol_res[0] = 0.0
atol_res[0] = rd_nl_atol_res
useEisenstatWalker = False#True
maxNonlinearIts = 1
maxLineSearches = 0
nonlinearSolverConvergenceTest = 'rits'
levelNonlinearSolverConvergenceTest = 'rits'
linearSolverConvergenceTest = 'r-true'
femSpaces = {0:basis}
massLumping = False
numericalFluxType = DoNothing
conservativeFlux = None
subgridError = RDLS.SubgridError(coefficients,nd)
shockCapturing = RDLS.ShockCapturing(coefficients,nd,shockCapturingFactor=rd_shockCapturingFactor,lag=rd_lag_shockCapturing)
fullNewtonFlag = True
multilevelNonlinearSolver = Newton
levelNonlinearSolver = Newton
nonlinearSmoother = NLGaussSeidel
linearSmoother = None
matrix = SparseMatrix
if useOldPETSc:
multilevelLinearSolver = PETSc
levelLinearSolver = PETSc
else:
multilevelLinearSolver = KSP_petsc4py
levelLinearSolver = KSP_petsc4py
if useSuperlu:
multilevelLinearSolver = LU
levelLinearSolver = LU
linear_solver_options_prefix = 'rdls_'
#auxiliaryVariables=[lineGauges_phi]
| {
"content_hash": "0e3c95e500ce10d08d31722ad1d5497a",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 127,
"avg_line_length": 26.46268656716418,
"alnum_prop": 0.7326565143824028,
"repo_name": "erdc-cm/air-water-vv",
"id": "ad8b0520e4c4470c8e1be9cd19a4039226f44ea7",
"size": "1773",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "3d/dambreak_Ubbink/dambreak_Ubbink_coarse/redist_n.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "1128"
},
{
"name": "GLSL",
"bytes": "3787"
},
{
"name": "Jupyter Notebook",
"bytes": "8264154"
},
{
"name": "M",
"bytes": "435"
},
{
"name": "Python",
"bytes": "1992474"
},
{
"name": "Shell",
"bytes": "14414"
}
],
"symlink_target": ""
} |
__author__ = "Danilo S. Carvalho <danilo@jaist.ac.jp>"
import json
import argparse
def main(args):
print "Loading DB..."
with open(args.ifilepath, "r") as wiktdb_file:
db = json.load(wiktdb_file)
langs = args.langs.split(",")
selected = []
print "Filtering..."
for doc in db:
exist_langs = []
for lang in langs:
if (lang in doc["langs"].keys()):
exist_langs.append(lang)
if (exist_langs or "redirect" in doc.keys()):
seldoc = dict()
seldoc["wikid"] = doc["wikid"]
seldoc["title"] = doc["title"]
if ("redirect" in doc.keys()):
seldoc["redirect"] = doc["redirect"]
continue
seldoc["langs"] = {}
for lang in exist_langs:
seldoc["langs"][lang] = doc["langs"][lang]
selected.append(seldoc)
print "Writing..."
with open(args.ifilepath.replace(".json", "") + "_" + "-".join([lang.lower()[0:2] for lang in langs]) + ".json", "w") as filtered_file:
json.dump(selected, filtered_file, indent=2)
def parse_args():
argparser = argparse.ArgumentParser()
argparser.add_argument("ifilepath", type=str, help="Input file path (Wiktionary preprocessed JSON file)")
argparser.add_argument("langs", type=str, help="Selected languages separated by comma (e.g., 'English,Japanese'). Entries without sense info on the selected language will be ommited from the output")
return argparser.parse_args()
if __name__ == "__main__":
main(parse_args())
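# Example invocation (illustrative only; the input file name is hypothetical):
#   python langfilter.py wiktdb.json English,Japanese
# This would write wiktdb_en-ja.json containing only the entries that carry
# sense information for English or Japanese.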
| {
"content_hash": "bd67a52232654525f8b2ed800767baee",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 203,
"avg_line_length": 32.89795918367347,
"alnum_prop": 0.5694789081885856,
"repo_name": "dscarvalho/tdv",
"id": "dea4845f7770bd999afa5a7e95bd1e820a2a99b6",
"size": "1659",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wiktparser/langfilter.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "820060"
},
{
"name": "Makefile",
"bytes": "2419"
},
{
"name": "Python",
"bytes": "29610"
},
{
"name": "Shell",
"bytes": "533"
}
],
"symlink_target": ""
} |
from tornado.web import url
from functools import reduce
class Route(object):
_routes = {}
def __init__(self, pattern, kwargs=None, name=None, host='.*$'):
self.pattern = pattern
self.kwargs = kwargs if kwargs else {}
self.name = name
self.host = host
def __call__(self, handler_class):
spec = url(self.pattern, handler_class, self.kwargs, name=self.name)
self._routes.setdefault(self.host, []).append(spec)
return handler_class
@classmethod
def routes(cls, application=None):
if application:
for host, handlers in cls._routes.items():
application.add_handlers(host, handlers)
else:
return reduce(lambda x, y: x+y, cls._routes.values()) if cls._routes else []
route = Route
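# Hedged usage sketch (illustrative only; the handler class and application
# below are not part of this module):
#
#     @route(r'/example', name='example')
#     class ExampleHandler(tornado.web.RequestHandler):
#         def get(self):
#             self.write('ok')
#
#     app = tornado.web.Application(route.routes())
#     # or register the collected handlers on an existing application:
#     route.routes(app)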
| {
"content_hash": "7135eef84bcc9b8701460b5e3abcf72a",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 88,
"avg_line_length": 28.06896551724138,
"alnum_prop": 0.6056511056511057,
"repo_name": "blackmatrix7/apizen-tornado",
"id": "e0d7eb5db5d958cc06413b41fb297c26c52f06a7",
"size": "852",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "toolkit/router.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "96966"
}
],
"symlink_target": ""
} |
import math
from decimal import Decimal
from django.db.models import DecimalField
from django.db.models.functions import Sqrt
from django.test import TestCase
from django.test.utils import register_lookup
from ..models import DecimalModel, FloatModel, IntegerModel
class SqrtTests(TestCase):
def test_null(self):
IntegerModel.objects.create()
obj = IntegerModel.objects.annotate(null_sqrt=Sqrt("normal")).first()
self.assertIsNone(obj.null_sqrt)
def test_decimal(self):
DecimalModel.objects.create(n1=Decimal("12.9"), n2=Decimal("0.6"))
obj = DecimalModel.objects.annotate(
n1_sqrt=Sqrt("n1"), n2_sqrt=Sqrt("n2")
).first()
self.assertIsInstance(obj.n1_sqrt, Decimal)
self.assertIsInstance(obj.n2_sqrt, Decimal)
self.assertAlmostEqual(obj.n1_sqrt, Decimal(math.sqrt(obj.n1)))
self.assertAlmostEqual(obj.n2_sqrt, Decimal(math.sqrt(obj.n2)))
def test_float(self):
FloatModel.objects.create(f1=27.5, f2=0.33)
obj = FloatModel.objects.annotate(
f1_sqrt=Sqrt("f1"), f2_sqrt=Sqrt("f2")
).first()
self.assertIsInstance(obj.f1_sqrt, float)
self.assertIsInstance(obj.f2_sqrt, float)
self.assertAlmostEqual(obj.f1_sqrt, math.sqrt(obj.f1))
self.assertAlmostEqual(obj.f2_sqrt, math.sqrt(obj.f2))
def test_integer(self):
IntegerModel.objects.create(small=20, normal=15, big=1)
obj = IntegerModel.objects.annotate(
small_sqrt=Sqrt("small"),
normal_sqrt=Sqrt("normal"),
big_sqrt=Sqrt("big"),
).first()
self.assertIsInstance(obj.small_sqrt, float)
self.assertIsInstance(obj.normal_sqrt, float)
self.assertIsInstance(obj.big_sqrt, float)
self.assertAlmostEqual(obj.small_sqrt, math.sqrt(obj.small))
self.assertAlmostEqual(obj.normal_sqrt, math.sqrt(obj.normal))
self.assertAlmostEqual(obj.big_sqrt, math.sqrt(obj.big))
def test_transform(self):
with register_lookup(DecimalField, Sqrt):
DecimalModel.objects.create(n1=Decimal("6.0"), n2=Decimal("0"))
DecimalModel.objects.create(n1=Decimal("1.0"), n2=Decimal("0"))
obj = DecimalModel.objects.filter(n1__sqrt__gt=2).get()
self.assertEqual(obj.n1, Decimal("6.0"))
| {
"content_hash": "1714176aa8f427aaad244a522bfc9ada",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 77,
"avg_line_length": 41.1578947368421,
"alnum_prop": 0.6572890025575447,
"repo_name": "pauloxnet/django",
"id": "b7751d929e3f0adf93986163d138cb2a2f1e30e8",
"size": "2346",
"binary": false,
"copies": "17",
"ref": "refs/heads/master",
"path": "tests/db_functions/math/test_sqrt.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "91756"
},
{
"name": "HTML",
"bytes": "238967"
},
{
"name": "JavaScript",
"bytes": "157514"
},
{
"name": "Makefile",
"bytes": "125"
},
{
"name": "Procfile",
"bytes": "47"
},
{
"name": "Python",
"bytes": "16138366"
},
{
"name": "Shell",
"bytes": "809"
},
{
"name": "Smarty",
"bytes": "392"
}
],
"symlink_target": ""
} |
import time
import gevent
from util import RateLimit
# Time is around limit +/- 0.05 sec
def around(t, limit):
return t >= limit - 0.05 and t <= limit + 0.05
class ExampleClass(object):
def __init__(self):
self.counted = 0
self.last_called = None
def count(self, back="counted"):
self.counted += 1
self.last_called = back
return back
class TestRateLimit:
def testCall(self):
obj1 = ExampleClass()
obj2 = ExampleClass()
s = time.time()
assert RateLimit.call("counting", allowed_again=0.1, func=obj1.count) == "counted"
assert around(time.time() - s, 0.0) # First allow to call instantly
assert obj1.counted == 1
# Call again
assert not RateLimit.isAllowed("counting", 0.1)
assert RateLimit.isAllowed("something else", 0.1)
assert RateLimit.call("counting", allowed_again=0.1, func=obj1.count) == "counted"
assert around(time.time() - s, 0.1) # Delays second call within interval
assert obj1.counted == 2
# Call 3 times async
s = time.time()
assert obj2.counted == 0
threads = [
gevent.spawn(lambda: RateLimit.call("counting", allowed_again=0.1, func=obj2.count)), # Instant
gevent.spawn(lambda: RateLimit.call("counting", allowed_again=0.1, func=obj2.count)), # 0.1s delay
gevent.spawn(lambda: RateLimit.call("counting", allowed_again=0.1, func=obj2.count)) # 0.2s delay
]
gevent.joinall(threads)
assert [thread.value for thread in threads] == ["counted", "counted", "counted"]
assert around(time.time() - s, 0.2)
# No queue = instant again
s = time.time()
assert RateLimit.isAllowed("counting", 0.1)
assert RateLimit.call("counting", allowed_again=0.1, func=obj2.count) == "counted"
assert around(time.time() - s, 0.0)
assert obj2.counted == 4
def testCallAsync(self):
obj1 = ExampleClass()
obj2 = ExampleClass()
s = time.time()
RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #1").join()
assert obj1.counted == 1 # First instant
assert around(time.time() - s, 0.0)
# After that the calls delayed
s = time.time()
t1 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #2") # Dumped by the next call
time.sleep(0.03)
t2 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #3") # Dumped by the next call
time.sleep(0.03)
t3 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #4") # Will be called
assert obj1.counted == 1 # Delay still in progress: Not called yet
t3.join()
assert t3.value == "call #4"
assert around(time.time() - s, 0.1)
# Only the last one called
assert obj1.counted == 2
assert obj1.last_called == "call #4"
# Allowed again instantly
assert RateLimit.isAllowed("counting async", 0.1)
s = time.time()
RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #5").join()
assert obj1.counted == 3
assert around(time.time() - s, 0.0)
assert not RateLimit.isAllowed("counting async", 0.1)
time.sleep(0.11)
assert RateLimit.isAllowed("counting async", 0.1)
| {
"content_hash": "4b97676429ad54a39a50f76a764b9b2b",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 129,
"avg_line_length": 37.297872340425535,
"alnum_prop": 0.6041072447233314,
"repo_name": "kustomzone/Fuzium",
"id": "b0a91ba06b50e54cdfb2c13a84a4946161f780cb",
"size": "3506",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "bin/core/src/Test/TestRateLimit.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1204"
},
{
"name": "C",
"bytes": "34092"
},
{
"name": "CSS",
"bytes": "373182"
},
{
"name": "CoffeeScript",
"bytes": "88917"
},
{
"name": "HTML",
"bytes": "123191"
},
{
"name": "JavaScript",
"bytes": "2133526"
},
{
"name": "Python",
"bytes": "2843920"
},
{
"name": "Shell",
"bytes": "898"
}
],
"symlink_target": ""
} |
"""
=============================================
Density Estimation for a mixture of Gaussians
=============================================
Plot the density estimation of a mixture of two gaussians. Data is
generated from two gaussians with different centers and covariance
matrices.
"""
import numpy as np
import pylab as pl
from sklearn import mixture
n_samples = 300
# generate random sample, two components
np.random.seed(0)
C = np.array([[0., -0.7], [3.5, .7]])
X_train = np.r_[np.dot(np.random.randn(n_samples, 2), C),
np.random.randn(n_samples, 2) + np.array([20, 20])]
clf = mixture.GMM(n_components=2, covariance_type='full')
clf.fit(X_train)
x = np.linspace(-20.0, 30.0)
y = np.linspace(-20.0, 40.0)
X, Y = np.meshgrid(x, y)
XX = np.c_[X.ravel(), Y.ravel()]
Z = np.log(-clf.eval(XX)[0])
Z = Z.reshape(X.shape)
CS = pl.contour(X, Y, Z)
CB = pl.colorbar(CS, shrink=0.8, extend='both')
pl.scatter(X_train[:, 0], X_train[:, 1], .8)
pl.axis('tight')
pl.show()
| {
"content_hash": "40ab6ae74d53e5045a67bb0c799491fe",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 67,
"avg_line_length": 26.07894736842105,
"alnum_prop": 0.5973763874873865,
"repo_name": "kmike/scikit-learn",
"id": "ec690c8adfaab0e141c40b86183d2a0a9e17aa23",
"size": "991",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "examples/mixture/plot_gmm_pdf.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "11070763"
},
{
"name": "C++",
"bytes": "257092"
},
{
"name": "JavaScript",
"bytes": "4775"
},
{
"name": "Python",
"bytes": "3808272"
},
{
"name": "Shell",
"bytes": "3770"
}
],
"symlink_target": ""
} |
import urlparse
from lxml import etree
from tempest.common import http
from tempest.common.rest_client import RestClientXML
from tempest.services.compute.xml.common import Document
from tempest.services.compute.xml.common import Element
from tempest.services.compute.xml.common import xml_to_json
XMLNS = "http://docs.openstack.org/identity/api/v3"
class EndPointClientXML(RestClientXML):
def __init__(self, config, username, password, auth_url, tenant_name=None):
super(EndPointClientXML, self).__init__(config, username, password,
auth_url, tenant_name)
self.service = self.config.identity.catalog_type
self.endpoint_url = 'adminURL'
def _parse_array(self, node):
array = []
for child in node.getchildren():
tag_list = child.tag.split('}', 1)
if tag_list[1] == "endpoint":
array.append(xml_to_json(child))
return array
def _parse_body(self, body):
json = xml_to_json(body)
return json
def request(self, method, url, headers=None, body=None, wait=None):
"""Overriding the existing HTTP request in super class RestClient."""
dscv = self.config.identity.disable_ssl_certificate_validation
self.http_obj = http.ClosingHttp(
disable_ssl_certificate_validation=dscv)
self._set_auth()
self.base_url = self.base_url.replace(
urlparse.urlparse(self.base_url).path, "/v3")
return super(EndPointClientXML, self).request(method, url,
headers=headers,
body=body)
def list_endpoints(self):
"""Get the list of endpoints."""
resp, body = self.get("endpoints", self.headers)
body = self._parse_array(etree.fromstring(body))
return resp, body
def create_endpoint(self, service_id, interface, url, **kwargs):
"""Create endpoint."""
region = kwargs.get('region', None)
enabled = kwargs.get('enabled', None)
create_endpoint = Element("endpoint",
xmlns=XMLNS,
service_id=service_id,
interface=interface,
url=url, region=region,
enabled=enabled)
resp, body = self.post('endpoints', str(Document(create_endpoint)),
self.headers)
body = self._parse_body(etree.fromstring(body))
return resp, body
def update_endpoint(self, endpoint_id, service_id=None, interface=None,
url=None, region=None, enabled=None):
"""Updates an endpoint with given parameters."""
doc = Document()
endpoint = Element("endpoint")
doc.append(endpoint)
if service_id:
endpoint.add_attr("service_id", service_id)
if interface:
endpoint.add_attr("interface", interface)
if url:
endpoint.add_attr("url", url)
if region:
endpoint.add_attr("region", region)
if enabled is not None:
endpoint.add_attr("enabled", enabled)
resp, body = self.patch('endpoints/%s' % str(endpoint_id),
str(doc), self.headers)
body = self._parse_body(etree.fromstring(body))
return resp, body
def delete_endpoint(self, endpoint_id):
"""Delete endpoint."""
resp_header, resp_body = self.delete('endpoints/%s' % endpoint_id)
return resp_header, resp_body
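# Hedged usage sketch (illustrative only): a tempest config object, admin
# credentials and a valid service_id are assumed to exist; none of the
# literal values below are real.
#
#     client = EndPointClientXML(CONF, 'admin', 'secret',
#                                'http://keystone:35357/v2.0', 'admin')
#     resp, endpoints = client.list_endpoints()
#     resp, endpoint = client.create_endpoint(service_id, 'public',
#                                             'http://glance:9292',
#                                             region='RegionOne')
#     client.delete_endpoint(endpoint['id'])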
| {
"content_hash": "1f5d51087d628947ad9ebe7635981052",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 79,
"avg_line_length": 39.94565217391305,
"alnum_prop": 0.5706122448979591,
"repo_name": "armando-migliaccio/tempest",
"id": "e211ceeeb7ff246b106d51ed321da597a239faad",
"size": "4356",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "tempest/services/identity/v3/xml/endpoints_client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1966096"
},
{
"name": "Shell",
"bytes": "5228"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import numpy as np
from pyti import catch_errors
from pyti.function_helper import fill_for_noncomputable_vals
from six.moves import range
def true_range(close_data, period):
"""
True Range.
Formula:
TRt = MAX(abs(Ht - Lt), abs(Ht - Ct-1), abs(Lt - Ct-1))
"""
catch_errors.check_for_period_error(close_data, period)
tr = [np.max([np.max(close_data[idx+1-period:idx+1]) -
np.min(close_data[idx+1-period:idx+1]),
abs(np.max(close_data[idx+1-period:idx+1]) -
close_data[idx-1]),
abs(np.min(close_data[idx+1-period:idx+1]) -
close_data[idx-1])]) for idx in range(period-1, len(close_data))]
tr = fill_for_noncomputable_vals(close_data, tr)
return tr
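# Minimal usage sketch (guarded so importing this module is unaffected); the
# closing prices below are made-up sample data.
if __name__ == "__main__":
    sample_close = [10.0, 10.5, 10.2, 10.8, 11.0, 10.7, 11.2, 11.5, 11.3, 11.8]
    # true range over a 3-period window; the first period-1 entries are
    # placeholders filled in by fill_for_noncomputable_vals
    print(true_range(sample_close, 3))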
| {
"content_hash": "8f224c32a2924326194c3f5ac73404c0",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 81,
"avg_line_length": 33.5,
"alnum_prop": 0.6094527363184079,
"repo_name": "kylejusticemagnuson/pyti",
"id": "fbf6acd14ddeee309e3aec8c09ae2f90947eb0be",
"size": "804",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyti/true_range.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "483384"
}
],
"symlink_target": ""
} |
from copy import deepcopy
import numpy as np
from scipy import linalg
from .constants import FIFF
from .proj import _has_eeg_average_ref_proj, make_eeg_average_ref_proj
from .proj import setup_proj
from .pick import pick_types, pick_channels, pick_channels_forward
from .base import BaseRaw
from ..evoked import Evoked
from ..epochs import BaseEpochs
from ..utils import (logger, warn, verbose, _validate_type, _check_preload,
_check_option)
from ..defaults import DEFAULTS
def _copy_channel(inst, ch_name, new_ch_name):
"""Add a copy of a channel specified by ch_name.
Input data can be in the form of Raw, Epochs or Evoked.
The instance object is modified inplace.
Parameters
----------
inst : instance of Raw | Epochs | Evoked
Data containing the EEG channels
ch_name : str
Name of the channel to copy.
new_ch_name : str
Name given to the copy of the channel.
Returns
-------
inst : instance of Raw | Epochs | Evoked
The data with a copy of a given channel.
"""
new_inst = inst.copy().pick_channels([ch_name])
new_inst.rename_channels({ch_name: new_ch_name})
inst.add_channels([new_inst], force_update_info=True)
return inst
def _apply_reference(inst, ref_from, ref_to=None, forward=None):
"""Apply a custom EEG referencing scheme."""
# Check to see that data is preloaded
_check_preload(inst, "Applying a reference")
eeg_idx = pick_types(inst.info, eeg=True, meg=False, ref_meg=False)
if ref_to is None:
ref_to = [inst.ch_names[i] for i in eeg_idx]
extra = 'EEG channels found'
else:
extra = 'channels supplied'
if len(ref_to) == 0:
raise ValueError('No %s to apply the reference to' % (extra,))
# After referencing, existing SSPs might not be valid anymore.
projs_to_remove = []
for i, proj in enumerate(inst.info['projs']):
# Remove any average reference projections
if proj['desc'] == 'Average EEG reference' or \
proj['kind'] == FIFF.FIFFV_PROJ_ITEM_EEG_AVREF:
logger.info('Removing existing average EEG reference '
'projection.')
# Don't remove the projection right away, but do this at the end of
# this loop.
projs_to_remove.append(i)
# Inactive SSPs may block re-referencing
elif (not proj['active'] and
len([ch for ch in (ref_from + ref_to)
if ch in proj['data']['col_names']]) > 0):
raise RuntimeError(
'Inactive signal space projection (SSP) operators are '
'present that operate on sensors involved in the desired '
'referencing scheme. These projectors need to be applied '
'using the apply_proj() method function before the desired '
'reference can be set.'
)
for i in projs_to_remove:
del inst.info['projs'][i]
# Need to call setup_proj after changing the projs:
inst._projector, _ = \
setup_proj(inst.info, add_eeg_ref=False, activate=False)
# Compute reference
if len(ref_from) > 0:
# this is guaranteed below, but we should avoid the crazy pick_channels
# behavior that [] gives all. Also use ordered=True just to make sure
# that all supplied channels actually exist.
assert len(ref_to) > 0
ref_names = ref_from
ref_from = pick_channels(inst.ch_names, ref_from, ordered=True)
ref_to = pick_channels(inst.ch_names, ref_to, ordered=True)
data = inst._data
ref_data = data[..., ref_from, :].mean(-2, keepdims=True)
data[..., ref_to, :] -= ref_data
ref_data = ref_data[..., 0, :]
# If the reference touches EEG electrodes, note in the info that a
# non-CAR has been applied.
if len(np.intersect1d(ref_to, eeg_idx)) > 0:
inst.info['custom_ref_applied'] = FIFF.FIFFV_MNE_CUSTOM_REF_ON
# REST
if forward is not None:
# use ch_sel and the given forward
forward = pick_channels_forward(forward, ref_names, ordered=True)
# 1-3. Compute a forward (G) and avg-ref'ed data (done above)
G = forward['sol']['data']
assert G.shape[0] == len(ref_names)
# 4. Compute the forward (G) and average-reference it (Ga):
Ga = G - np.mean(G, axis=0, keepdims=True)
# 5. Compute the Ga_inv by SVD
Ga_inv = linalg.pinv(Ga, rcond=1e-6)
# 6. Compute Ra = (G @ Ga_inv) in eq (8) from G and Ga_inv
Ra = G @ Ga_inv
# 7-8. Compute Vp = Ra @ Va; then Vpa=average(Vp)
Vpa = np.mean(Ra @ data[..., ref_from, :], axis=-2, keepdims=True)
data[..., ref_to, :] += Vpa
else:
ref_data = None
return inst, ref_data
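# Illustrative numeric sketch of the in-place referencing above (hypothetical
# 3-channel, 2-sample array; not part of the original module):
#
#     data = np.array([[1., 2.], [3., 4.], [5., 6.]])
#     ref = data[[0, 1], :].mean(-2, keepdims=True)   # [[2., 3.]]
#     data[[0, 1, 2], :] -= ref                       # subtract from ref_to rows
#
# which mirrors the broadcasting applied to ``inst._data`` above.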
def add_reference_channels(inst, ref_channels, copy=True):
"""Add reference channels to data that consists of all zeros.
Adds reference channels to data that were not included during recording.
This is useful when you need to re-reference your data to different
channels. These added channels will consist of all zeros.
Parameters
----------
inst : instance of Raw | Epochs | Evoked
Instance of Raw or Epochs with EEG channels and reference channel(s).
ref_channels : str | list of str
Name of the electrode(s) which served as the reference in the
recording. If a name is provided, a corresponding channel is added
and its data is set to 0. This is useful for later re-referencing.
copy : bool
Specifies whether the data will be copied (True) or modified in-place
(False). Defaults to True.
Returns
-------
inst : instance of Raw | Epochs | Evoked
Data with added EEG reference channels.
"""
# Check to see that data is preloaded
if not inst.preload:
raise RuntimeError('Data needs to be preloaded.')
if isinstance(ref_channels, str):
ref_channels = [ref_channels]
elif not isinstance(ref_channels, list):
raise ValueError("`ref_channels` should be either str or list of str. "
"%s was provided." % type(ref_channels))
for ch in ref_channels:
if ch in inst.info['ch_names']:
raise ValueError("Channel %s already specified in inst." % ch)
# Once CAR is applied (active), don't allow adding channels
if _has_eeg_average_ref_proj(inst.info['projs'], check_active=True):
raise RuntimeError('Average reference already applied to data.')
if copy:
inst = inst.copy()
if isinstance(inst, (BaseRaw, Evoked)):
data = inst._data
refs = np.zeros((len(ref_channels), data.shape[1]))
data = np.vstack((data, refs))
inst._data = data
elif isinstance(inst, BaseEpochs):
data = inst._data
x, y, z = data.shape
refs = np.zeros((x * len(ref_channels), z))
data = np.vstack((data.reshape((x * y, z), order='F'), refs))
data = data.reshape(x, y + len(ref_channels), z, order='F')
inst._data = data
else:
raise TypeError("inst should be Raw, Epochs, or Evoked instead of %s."
% type(inst))
nchan = len(inst.info['ch_names'])
# only do this if we actually have digitisation points
if inst.info.get('dig', None) is not None:
# "zeroth" EEG electrode dig points is reference
ref_dig_loc = [dl for dl in inst.info['dig'] if (
dl['kind'] == FIFF.FIFFV_POINT_EEG and
dl['ident'] == 0)]
if len(ref_channels) > 1 or len(ref_dig_loc) != len(ref_channels):
ref_dig_array = np.zeros(12)
warn('The locations of multiple reference channels are ignored '
'(set to zero).')
else: # n_ref_channels == 1 and a single ref digitization exists
ref_dig_array = np.concatenate((ref_dig_loc[0]['r'],
ref_dig_loc[0]['r'], np.zeros(6)))
# Replace the (possibly new) Ref location for each channel
for idx in pick_types(inst.info, meg=False, eeg=True, exclude=[]):
inst.info['chs'][idx]['loc'][3:6] = ref_dig_loc[0]['r']
else:
# we should actually be able to do this from the montage, but
# it looks like the montage isn't stored, so we can't extract
# this information. The user will just have to call set_montage()
# by setting this to zero, we fall back to the old behavior
# when missing digitisation
ref_dig_array = np.zeros(12)
for ch in ref_channels:
chan_info = {'ch_name': ch,
'coil_type': FIFF.FIFFV_COIL_EEG,
'kind': FIFF.FIFFV_EEG_CH,
'logno': nchan + 1,
'scanno': nchan + 1,
'cal': 1,
'range': 1.,
'unit_mul': 0.,
'unit': FIFF.FIFF_UNIT_V,
'coord_frame': FIFF.FIFFV_COORD_HEAD,
'loc': ref_dig_array}
inst.info['chs'].append(chan_info)
inst.info._update_redundant()
if isinstance(inst, BaseRaw):
inst._cals = np.hstack((inst._cals, [1] * len(ref_channels)))
inst.info._check_consistency()
set_eeg_reference(inst, ref_channels=ref_channels, copy=False)
return inst
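# Illustrative usage sketch (assumes a preloaded Raw object named ``raw``; the
# reference channel name is hypothetical; not part of the original module):
#
#     raw = add_reference_channels(raw, ref_channels=['REF'], copy=False)
#
# The added 'REF' channel is all zeros and can later take part in a custom
# reference via set_eeg_reference().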
_ref_dict = {
FIFF.FIFFV_MNE_CUSTOM_REF_ON: 'on',
FIFF.FIFFV_MNE_CUSTOM_REF_OFF: 'off',
FIFF.FIFFV_MNE_CUSTOM_REF_CSD: 'CSD',
}
def _check_can_reref(inst):
_validate_type(inst, (BaseRaw, BaseEpochs, Evoked), "Instance")
current_custom = inst.info['custom_ref_applied']
if current_custom not in (FIFF.FIFFV_MNE_CUSTOM_REF_ON,
FIFF.FIFFV_MNE_CUSTOM_REF_OFF):
raise RuntimeError('Cannot set new reference on data with custom '
'reference type %r' % (_ref_dict[current_custom],))
@verbose
def set_eeg_reference(inst, ref_channels='average', copy=True,
projection=False, ch_type='auto', forward=None,
verbose=None):
"""Specify which reference to use for EEG data.
Use this function to explicitly specify the desired reference for EEG.
This can be either an existing electrode or a new virtual channel.
This function will re-reference the data according to the desired
reference.
Parameters
----------
inst : instance of Raw | Epochs | Evoked
Instance of Raw or Epochs with EEG channels and reference channel(s).
%(set_eeg_reference_ref_channels)s
copy : bool
Specifies whether the data will be copied (True) or modified in-place
(False). Defaults to True.
%(set_eeg_reference_projection)s
%(set_eeg_reference_ch_type)s
%(set_eeg_reference_forward)s
%(verbose)s
Returns
-------
inst : instance of Raw | Epochs | Evoked
Data with EEG channels re-referenced. If ``ref_channels='average'`` and
``projection=True`` a projection will be added instead of directly
re-referencing the data.
ref_data : array
Array of reference data subtracted from EEG channels. This will be
``None`` if ``projection=True`` or ``ref_channels='REST'``.
%(set_eeg_reference_see_also_notes)s
"""
from ..forward import Forward
_check_can_reref(inst)
if projection: # average reference projector
if ref_channels != 'average':
raise ValueError('Setting projection=True is only supported for '
'ref_channels="average", got %r.'
% (ref_channels,))
if _has_eeg_average_ref_proj(inst.info['projs']):
warn('An average reference projection was already added. The data '
'has been left untouched.')
else:
# Creating an average reference may fail. In this case, make
# sure that the custom_ref_applied flag is left untouched.
custom_ref_applied = inst.info['custom_ref_applied']
try:
inst.info['custom_ref_applied'] = FIFF.FIFFV_MNE_CUSTOM_REF_OFF
inst.add_proj(make_eeg_average_ref_proj(inst.info,
activate=False))
except Exception:
inst.info['custom_ref_applied'] = custom_ref_applied
raise
# If the data has been preloaded, projections will no
# longer be automatically applied.
if inst.preload:
logger.info('Average reference projection was added, '
'but has not been applied yet. Use the '
'apply_proj method to apply it.')
return inst, None
del projection # not used anymore
inst = inst.copy() if copy else inst
_check_option('ch_type', ch_type, ('auto', 'eeg', 'ecog', 'seeg'))
# if ch_type is 'auto', search through list to find first reasonable
# reference-able channel type.
possible_types = ['eeg', 'ecog', 'seeg']
if ch_type == 'auto':
for type_ in possible_types:
if type_ in inst:
ch_type = type_
logger.info('%s channel type selected for '
're-referencing' % DEFAULTS['titles'][type_])
break
# if auto comes up empty, or the user specifies a bad ch_type.
else:
raise ValueError('No EEG, ECoG or sEEG channels found '
'to rereference.')
else:
type_ = ch_type
ch_dict = {ch_type: True, 'meg': False, 'ref_meg': False}
eeg_idx = pick_types(inst.info, **ch_dict)
ch_sel = [inst.ch_names[i] for i in eeg_idx]
if ref_channels == 'REST':
_validate_type(forward, Forward, 'forward when ref_channels="REST"')
else:
forward = None # signal to _apply_reference not to do REST
if ref_channels in ('average', 'REST'):
logger.info(f'Applying {ref_channels} reference.')
ref_channels = ch_sel
if ref_channels == []:
logger.info('EEG data marked as already having the desired reference.')
else:
logger.info('Applying a custom %s '
'reference.' % DEFAULTS['titles'][type_])
return _apply_reference(inst, ref_channels, ch_sel, forward)
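# Illustrative usage sketch (independent alternatives; assumes a preloaded Raw
# object named ``raw`` and a hypothetical channel name; not part of the
# original module):
#
#     raw, ref_data = set_eeg_reference(raw, ref_channels='average')    # CAR
#     raw, ref_data = set_eeg_reference(raw, ref_channels=['EEG 001'])  # custom
#     raw, _ = set_eeg_reference(raw, 'average', projection=True)       # SSP only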
@verbose
def set_bipolar_reference(inst, anode, cathode, ch_name=None, ch_info=None,
drop_refs=True, copy=True, verbose=None):
"""Re-reference selected channels using a bipolar referencing scheme.
A bipolar reference takes the difference between two channels (the anode
minus the cathode) and adds it as a new virtual channel. The original
channels will be dropped.
Multiple anodes and cathodes can be specified, in which case multiple
    virtual channels will be created. The 1st cathode will be subtracted from
    the 1st anode, the 2nd cathode from the 2nd anode, etc.
By default, the virtual channels will be annotated with channel info of
the anodes, their locations set to (0, 0, 0) and coil types set to
EEG_BIPOLAR.
Parameters
----------
inst : instance of Raw | Epochs | Evoked
Data containing the unreferenced channels.
anode : str | list of str
The name(s) of the channel(s) to use as anode in the bipolar reference.
cathode : str | list of str
The name(s) of the channel(s) to use as cathode in the bipolar
reference.
ch_name : str | list of str | None
The channel name(s) for the virtual channel(s) containing the resulting
signal. By default, bipolar channels are named after the anode and
cathode, but it is recommended to supply a more meaningful name.
ch_info : dict | list of dict | None
This parameter can be used to supply a dictionary (or a dictionary for
each bipolar channel) containing channel information to merge in,
overwriting the default values. Defaults to None.
drop_refs : bool
Whether to drop the anode/cathode channels from the instance.
copy : bool
Whether to operate on a copy of the data (True) or modify it in-place
(False). Defaults to True.
%(verbose)s
Returns
-------
inst : instance of Raw | Epochs | Evoked
Data with the specified channels re-referenced.
See Also
--------
set_eeg_reference : Convenience function for creating an EEG reference.
Notes
-----
1. If the anodes contain any EEG channels, this function removes
any pre-existing average reference projections.
2. During source localization, the EEG signal should have an average
reference.
3. The data must be preloaded.
.. versionadded:: 0.9.0
"""
_check_can_reref(inst)
if not isinstance(anode, list):
anode = [anode]
if not isinstance(cathode, list):
cathode = [cathode]
if len(anode) != len(cathode):
raise ValueError('Number of anodes (got %d) must equal the number '
'of cathodes (got %d).' % (len(anode), len(cathode)))
if ch_name is None:
ch_name = ['%s-%s' % ac for ac in zip(anode, cathode)]
elif not isinstance(ch_name, list):
ch_name = [ch_name]
if len(ch_name) != len(anode):
raise ValueError('Number of channel names must equal the number of '
'anodes/cathodes (got %d).' % len(ch_name))
# Check for duplicate channel names (it is allowed to give the name of the
# anode or cathode channel, as they will be replaced).
for ch, a, c in zip(ch_name, anode, cathode):
if ch not in [a, c] and ch in inst.ch_names:
raise ValueError('There is already a channel named "%s", please '
'specify a different name for the bipolar '
'channel using the ch_name parameter.' % ch)
if ch_info is None:
ch_info = [{} for _ in anode]
elif not isinstance(ch_info, list):
ch_info = [ch_info]
if len(ch_info) != len(anode):
raise ValueError('Number of channel info dictionaries must equal the '
'number of anodes/cathodes.')
# Merge specified and anode channel information dictionaries
new_chs = []
for an, ci in zip(anode, ch_info):
an_idx = inst.ch_names.index(an)
this_chs = deepcopy(inst.info['chs'][an_idx])
# Set channel location and coil type
this_chs['loc'] = np.zeros(12)
this_chs['coil_type'] = FIFF.FIFFV_COIL_EEG_BIPOLAR
this_chs.update(ci)
new_chs.append(this_chs)
if copy:
inst = inst.copy()
for i, (an, ca, name, chs) in enumerate(
zip(anode, cathode, ch_name, new_chs)):
if an in anode[i + 1:] or an in cathode[i + 1:] or not drop_refs:
# Make a copy of the channel if it's still needed later
# otherwise it's modified inplace
_copy_channel(inst, an, 'TMP')
an = 'TMP'
_apply_reference(inst, [ca], [an]) # ensures preloaded
an_idx = inst.ch_names.index(an)
inst.info['chs'][an_idx] = chs
inst.info['chs'][an_idx]['ch_name'] = name
logger.info('Bipolar channel added as "%s".' % name)
inst.info._update_redundant()
# Drop remaining channels.
if drop_refs:
drop_channels = list((set(anode) | set(cathode)) & set(inst.ch_names))
inst.drop_channels(drop_channels)
return inst
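# Illustrative usage sketch (assumes a preloaded Raw object named ``raw``; the
# channel names are hypothetical; not part of the original module):
#
#     raw_bip = set_bipolar_reference(raw, anode='EEG 001', cathode='EEG 002',
#                                     ch_name='EEG 001-002')
#
# The virtual 'EEG 001-002' channel holds anode minus cathode; the original
# channels are dropped because drop_refs defaults to True.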
| {
"content_hash": "f2b39e1ffed589289ac4387f009eae86",
"timestamp": "",
"source": "github",
"line_count": 493,
"max_line_length": 79,
"avg_line_length": 40.00811359026369,
"alnum_prop": 0.5956702494423038,
"repo_name": "cjayb/mne-python",
"id": "42645d67ba98f41e8e15f7c43a5743fa87e324e2",
"size": "19913",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mne/io/reference.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Csound Document",
"bytes": "24999"
},
{
"name": "Makefile",
"bytes": "4450"
},
{
"name": "Python",
"bytes": "7901053"
},
{
"name": "Shell",
"bytes": "936"
}
],
"symlink_target": ""
} |
import numpy as np
import tensorflow as tf
from agents import TabularQAgent, capacities
class TabularQERAgent(TabularQAgent):
"""
Agent implementing tabular Q-learning with experience replay.
"""
def set_agent_props(self):
super(TabularQERAgent, self).set_agent_props()
self.er_batch_size = self.config['er_batch_size']
self.er_rm_size = self.config['er_rm_size']
self.replayMemoryDt = np.dtype([('states', 'int32'), ('actions', 'int32'), ('rewards', 'float32'), ('next_states', 'int32')])
self.replayMemory = np.array([], dtype=self.replayMemoryDt)
def get_best_config(self, env_name=""):
return {
'lr': 0.03
, 'lr_decay_steps': 40000
, 'discount': 0.999
, 'N0': 75
, 'min_eps': 0.005
, 'initial_q_value': 0
, 'er_batch_size': 783
, 'er_rm_size': 36916
}
@staticmethod
def get_random_config(fixed_params={}):
get_lr = lambda: 1e-3 + (1 - 1e-3) * np.random.random(1)[0]
get_lr_decay_steps = lambda: np.random.randint(1e3, 1e5)
get_discount = lambda: 0.5 + (1 - 0.5) * np.random.random(1)[0]
get_N0 = lambda: np.random.randint(1, 5e3)
get_min_eps = lambda: 1e-4 + (1e-1 - 1e-4) * np.random.random(1)[0]
get_initial_q_value = lambda: 0 # int(np.random.random(1)[0] * 200)
get_er_batch_size = lambda: np.random.randint(16, 1024)
get_er_rm_size = lambda: np.random.randint(1000, 50000)
random_config = {
'lr': get_lr()
, 'lr_decay_steps': get_lr_decay_steps()
, 'discount': get_discount()
, 'N0': get_N0()
, 'min_eps': get_min_eps()
, 'initial_q_value': get_initial_q_value()
, 'er_batch_size': get_er_batch_size()
, 'er_rm_size': get_er_rm_size()
}
random_config.update(fixed_params)
return random_config
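    # Illustrative sketch of the replay-memory mechanics used in
    # learn_from_episode below (hypothetical transition values; not part of
    # the original class):
    #
    #     dt = np.dtype([('states', 'int32'), ('actions', 'int32'),
    #                    ('rewards', 'float32'), ('next_states', 'int32')])
    #     rm = np.array([(0, 1, 0.5, 2), (2, 0, 1.0, 3)], dtype=dt)
    #     batch = np.random.choice(rm, 4)       # sample with replacement
    #     batch['states'], batch['rewards']     # column access for the feed_dict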
def learn_from_episode(self, env, render=False):
score = 0
av_loss = []
done = False
obs = env.reset()
while not done:
if render:
env.render()
act, state_id = self.act(obs)
next_obs, reward, done, info = env.step(act)
next_state_id = self.phi(next_obs, done)
memory = np.array([(state_id, act, reward, next_state_id)], dtype=self.replayMemoryDt)
if self.replayMemory.shape[0] >= self.er_rm_size:
self.replayMemory = np.delete(self.replayMemory, 0)
self.replayMemory = np.append(self.replayMemory, memory)
memories = np.random.choice(self.replayMemory, self.er_batch_size)
loss, _ = self.sess.run([self.loss, self.train_op], feed_dict={
self.inputs_plh: memories['states'],
self.actions_t: memories['actions'],
self.rewards_plh: memories['rewards'],
self.next_states_plh: memories['next_states'],
})
av_loss.append(loss)
score += reward
obs = next_obs
summary, _, episode_id = self.sess.run([self.all_summary_t, self.inc_ep_id_op, self.episode_id], feed_dict={
self.score_plh: score,
self.loss_plh: np.mean(av_loss)
})
self.sw.add_summary(summary, episode_id)
        return
| {
"content_hash": "aae4f05e97b8f101ed6b8bb995a3e0d8",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 133,
"avg_line_length": 36.634408602150536,
"alnum_prop": 0.5444672732609334,
"repo_name": "morgangiraud/openai-rl",
"id": "cd84682dfe6b501f9ed35612d21980de194dbb05",
"size": "3407",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "agents/tabular_q_er_agent.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "237116"
},
{
"name": "Shell",
"bytes": "981"
}
],
"symlink_target": ""
} |
"""Endpoint action implementations"""
import logging
from osc_lib.command import command
from osc_lib import exceptions
from osc_lib import utils
from openstackclient.i18n import _
from openstackclient.identity import common
LOG = logging.getLogger(__name__)
class CreateEndpoint(command.ShowOne):
_description = _("Create new endpoint")
def get_parser(self, prog_name):
parser = super(CreateEndpoint, self).get_parser(prog_name)
parser.add_argument(
'service',
metavar='<service>',
help=_('Service to be associated with new endpoint (name or ID)'),
)
parser.add_argument(
'--publicurl',
metavar='<url>',
required=True,
help=_('New endpoint public URL (required)'),
)
parser.add_argument(
'--adminurl',
metavar='<url>',
help=_('New endpoint admin URL'),
)
parser.add_argument(
'--internalurl',
metavar='<url>',
help=_('New endpoint internal URL'),
)
parser.add_argument(
'--region',
metavar='<region-id>',
help=_('New endpoint region ID'),
)
return parser
def take_action(self, parsed_args):
identity_client = self.app.client_manager.identity
service = common.find_service(identity_client, parsed_args.service)
endpoint = identity_client.endpoints.create(
parsed_args.region,
service.id,
parsed_args.publicurl,
parsed_args.adminurl,
parsed_args.internalurl,)
info = {}
info.update(endpoint._info)
info['service_name'] = service.name
info['service_type'] = service.type
return zip(*sorted(info.items()))
class DeleteEndpoint(command.Command):
_description = _("Delete endpoint(s)")
def get_parser(self, prog_name):
parser = super(DeleteEndpoint, self).get_parser(prog_name)
parser.add_argument(
'endpoints',
metavar='<endpoint-id>',
nargs='+',
help=_('Endpoint(s) to delete (ID only)'),
)
return parser
def take_action(self, parsed_args):
identity_client = self.app.client_manager.identity
result = 0
for endpoint in parsed_args.endpoints:
try:
identity_client.endpoints.delete(endpoint)
except Exception as e:
result += 1
LOG.error(_("Failed to delete endpoint with "
"ID '%(endpoint)s': %(e)s"),
{'endpoint': endpoint, 'e': e})
if result > 0:
total = len(parsed_args.endpoints)
msg = (_("%(result)s of %(total)s endpoints failed "
"to delete.") % {'result': result, 'total': total})
raise exceptions.CommandError(msg)
class ListEndpoint(command.Lister):
_description = _("List endpoints")
def get_parser(self, prog_name):
parser = super(ListEndpoint, self).get_parser(prog_name)
parser.add_argument(
'--long',
action='store_true',
default=False,
help=_('List additional fields in output'),
)
return parser
def take_action(self, parsed_args):
identity_client = self.app.client_manager.identity
if parsed_args.long:
columns = ('ID', 'Region', 'Service Name', 'Service Type',
'PublicURL', 'AdminURL', 'InternalURL')
else:
columns = ('ID', 'Region', 'Service Name', 'Service Type')
data = identity_client.endpoints.list()
for ep in data:
service = common.find_service(identity_client, ep.service_id)
ep.service_name = service.name
ep.service_type = service.type
return (columns,
(utils.get_item_properties(
s, columns,
formatters={},
) for s in data))
class ShowEndpoint(command.ShowOne):
_description = _("Display endpoint details")
def get_parser(self, prog_name):
parser = super(ShowEndpoint, self).get_parser(prog_name)
parser.add_argument(
'endpoint_or_service',
metavar='<endpoint>',
help=_('Endpoint to display (endpoint ID, service ID,'
' service name, service type)'),
)
return parser
def take_action(self, parsed_args):
identity_client = self.app.client_manager.identity
data = identity_client.endpoints.list()
match = None
for ep in data:
if ep.id == parsed_args.endpoint_or_service:
match = ep
service = common.find_service(identity_client, ep.service_id)
if match is None:
service = common.find_service(identity_client,
parsed_args.endpoint_or_service)
for ep in data:
if ep.service_id == service.id:
match = ep
if match is None:
return None
info = {}
info.update(match._info)
info['service_name'] = service.name
info['service_type'] = service.type
return zip(*sorted(info.items()))
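# Illustrative CLI usage sketch for the commands above (service name, URLs and
# region are hypothetical; not part of the original module):
#
#     openstack endpoint create keystone \
#         --publicurl http://controller:5000/v2.0 \
#         --adminurl http://controller:35357/v2.0 \
#         --internalurl http://controller:5000/v2.0 \
#         --region RegionOne
#     openstack endpoint list --long
#     openstack endpoint show keystone
#     openstack endpoint delete <endpoint-id>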
| {
"content_hash": "475eeb89f27717a228da17d800409347",
"timestamp": "",
"source": "github",
"line_count": 165,
"max_line_length": 78,
"avg_line_length": 32.63636363636363,
"alnum_prop": 0.5463324048282265,
"repo_name": "openstack/python-openstackclient",
"id": "57906ddff6356506fb27a58eb1395fb14af771d9",
"size": "5998",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "openstackclient/identity/v2_0/endpoint.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "923"
},
{
"name": "Python",
"bytes": "5016301"
},
{
"name": "Shell",
"bytes": "299"
}
],
"symlink_target": ""
} |
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.backend.jvm.targets.jvm_target import JvmTarget
from pants.base.deprecated import deprecated_conditional
from pants.base.exceptions import TargetDefinitionException
class JavaThriftLibrary(JvmTarget):
"""A Java library generated from Thrift IDL files.
:API: public
"""
# TODO(John Sirois): Tasks should register the values they support in a plugin-registration goal.
# In general a plugin will contribute a target and a task, but in this case we have a shared
# target that can be used by at least 2 tasks - ThriftGen and ScroogeGen. This is likely not
# uncommon (gcc & clang) so the arrangement needs to be cleaned up and supported well.
_COMPILERS = frozenset(['thrift', 'scrooge'])
def __init__(self,
compiler=None,
language=None,
rpc_style=None,
namespace_map=None,
thrift_linter_strict=None,
default_java_namespace=None,
include_paths=None,
compiler_args=None,
**kwargs):
"""
:API: public
:param compiler: The compiler used to compile the thrift files. The default is defined in
the global options under ``--thrift-default-compiler``.
:param language: The language used to generate the output files. The default is defined in
the global options under ``--thrift-default-language``.
:param rpc_style: An optional rpc style to generate service stubs with. The default is defined
in the global options under ``--thrift-default-rpc-style``.
:param namespace_map: An optional dictionary of namespaces to remap {old: new}
:param thrift_linter_strict: If True, fail if thrift linter produces any warnings.
:param default_java_namespace: The namespace used for Java generated code when a Java
namespace is not explicitly specified in the IDL. The default is defined in the global
options under ``--thrift-default-default-java-namespace``.
:param compiler_args: Extra arguments to the compiler.
"""
super(JavaThriftLibrary, self).__init__(**kwargs)
def check_value_for_arg(arg, value, values):
if value and value not in values:
raise TargetDefinitionException(self, "{} may only be set to {} ('{}' not valid)"
.format(arg, ', or '.join(map(repr, values)), value))
return value
# The following fields are only added to the fingerprint via FingerprintStrategy when their
# values impact the outcome of the task. See JavaThriftLibraryFingerprintStrategy.
self._compiler = check_value_for_arg('compiler', compiler, self._COMPILERS)
self._language = language
deprecated_conditional(
lambda: rpc_style is not None,
'1.6.0.dev0',
'rpc_style',
'''
Deprecated property rpc_style used for {target}, use compiler_args instead.
e.g. [ \'--finagle\'] for \'finagle\'
and [\'--finagle\', \'--ostrich\'] for \'ostrich\'.
If both rpc_style and compiler_args are set then only compiler_args is used
and rpc_style is discarded.
'''.format(target=self.address.spec)
)
self._rpc_style = rpc_style
self.namespace_map = namespace_map
self.thrift_linter_strict = thrift_linter_strict
self._default_java_namespace = default_java_namespace
self._include_paths = include_paths
self._compiler_args = compiler_args
@property
def compiler(self):
return self._compiler
@property
def language(self):
return self._language
@property
def rpc_style(self):
return self._rpc_style
@property
def compiler_args(self):
return self._compiler_args
@property
def default_java_namespace(self):
return self._default_java_namespace
@property
def include_paths(self):
return self._include_paths
# TODO(Eric Ayers) As of 2/5/2015 this call is DEPRECATED and should be removed soon
@property
def is_thrift(self):
return True
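# Illustrative BUILD file sketch (assumes the ``java_thrift_library`` alias is
# registered for this target; name, sources and options are hypothetical; not
# part of the original module):
#
#     java_thrift_library(
#       name='thrift-java',
#       sources=['service.thrift'],
#       compiler='scrooge',
#       language='java',
#       compiler_args=['--finagle'],
#     )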
| {
"content_hash": "492f5b7c5355aa7cf071ae6aa7cf7dae",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 99,
"avg_line_length": 38.120370370370374,
"alnum_prop": 0.675491863007044,
"repo_name": "fkorotkov/pants",
"id": "b453952383393a3bb0e2133dacc28b94bad6604e",
"size": "4264",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/python/pants/backend/codegen/thrift/java/java_thrift_library.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "781"
},
{
"name": "CSS",
"bytes": "9444"
},
{
"name": "GAP",
"bytes": "1283"
},
{
"name": "Gherkin",
"bytes": "919"
},
{
"name": "Go",
"bytes": "1805"
},
{
"name": "HTML",
"bytes": "79866"
},
{
"name": "Java",
"bytes": "481460"
},
{
"name": "JavaScript",
"bytes": "35417"
},
{
"name": "Python",
"bytes": "5931594"
},
{
"name": "Rust",
"bytes": "271643"
},
{
"name": "Scala",
"bytes": "76239"
},
{
"name": "Shell",
"bytes": "74734"
},
{
"name": "Thrift",
"bytes": "2795"
}
],
"symlink_target": ""
} |
from diogenis.tests import *
class SchoolTestCase(DiogenisTestCase):
def test_lessons_count_for_informatics(self):
self.assertEqual(self.school_1.lessons.all().count(), 2)
def test_schools_count_for_second_lesson(self):
self.assertEqual(self.lesson_2.school_set.all().count(), 2)
| {
"content_hash": "2bf0566d1b578b00d9193a4df251a246",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 67,
"avg_line_length": 32.8,
"alnum_prop": 0.6676829268292683,
"repo_name": "gtsiokos/diogenis",
"id": "1cce7e85960aaf6237556d8d6b7d9aa6f7ddf31d",
"size": "352",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "schools/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "445116"
},
{
"name": "Python",
"bytes": "119388"
}
],
"symlink_target": ""
} |
from django.conf.urls.defaults import *
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Example:
# (r'^idptest/', include('idptest.foo.urls')),
# Uncomment the admin/doc line below and add 'django.contrib.admindocs'
# to INSTALLED_APPS to enable admin documentation:
# (r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
(r'^admin/', include(admin.site.urls)),
# Required for login:
(r'^accounts/login/$', 'django.contrib.auth.views.login'),
# URLs for the IDP:
(r'^idp/', include('saml2idp.urls')),
)
| {
"content_hash": "3db49c08b44730e5ab55ec462c46ab20",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 75,
"avg_line_length": 30.17391304347826,
"alnum_prop": 0.6729106628242075,
"repo_name": "anentropic/django-saml2-idp",
"id": "9ded19fa13195daf893c52df70546e7858463b25",
"size": "694",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "idptest/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "77585"
},
{
"name": "Shell",
"bytes": "659"
}
],
"symlink_target": ""
} |
import os
import hashlib
from io import BytesIO
from tests.utils import driver
from onitu_webdav.wd import get_WEBDAV_client, create_dirs
from onitu.utils import get_random_string, b, dirname
class Driver(driver.Driver):
def __init__(self, *args, **options):
options['hostname'] = os.getenv(
"ONITU_WEBDAV_HOSTNAME", "http://localhost"
)
options['username'] = os.getenv("ONITU_WEBDAV_USERNAME", "")
options['password'] = os.getenv("ONITU_WEBDAV_PASSWORD", "")
options['changes_timer'] = os.getenv("ONITU_WEBDAV_CHANGES_TIMER", 5)
root = os.getenv("ONITU_WEBDAV_ROOT", u"/")
self._root = root + get_random_string(10)
hostname = options['hostname']
username = options['username']
password = options['password']
super(Driver, self).__init__('webdav', *args, **options)
self.webd = get_WEBDAV_client(hostname, username, password)
create_dirs(self.webd, self._root)
@property
def root(self):
return self._root
def close(self):
self.rmdir(self.root)
def mkdir(self, subdirs):
create_dirs(self.webd, subdirs)
def rmdir(self, path):
self.webd.clean(b(path))
def write(self, filename, content):
create_dirs(self.webd, dirname(filename))
buff = BytesIO(content)
self.webd.upload_from(buff, b(filename))
def generate(self, filename, size):
self.write(filename, os.urandom(size))
def exists(self, filename):
try:
self.webd.info(b(filename))
except:
return False
return True
def unlink(self, filename):
self.webd.clean(b(filename))
def rename(self, source, target):
self.webd.move(remote_path_from=b(source), remote_path_to=b(target))
def checksum(self, filename):
buff = BytesIO()
self.webd.download_to(buff, b(filename))
data = buff.getvalue()
md5 = hashlib.md5(data).hexdigest()
return md5
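    # Illustrative usage sketch (assumes a reachable WebDAV server configured
    # through the ONITU_WEBDAV_* environment variables read above; the file
    # name is hypothetical; not part of the original class):
    #
    #     d = Driver()
    #     d.write(d.root + u'/foo.txt', b'hello')
    #     assert d.exists(d.root + u'/foo.txt')
    #     print(d.checksum(d.root + u'/foo.txt'))
    #     d.close()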
class DriverFeatures(driver.DriverFeatures):
move_file_to_onitu = False
| {
"content_hash": "278b8bf82654e7d1b5e26f25a1be83eb",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 77,
"avg_line_length": 27.75,
"alnum_prop": 0.6164058795637744,
"repo_name": "onitu/onitu",
"id": "3f24ee25b87bb644ac66e74af91d3fa94a233baf",
"size": "2109",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "drivers/webdav/onitu_webdav/tests/driver.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1930"
},
{
"name": "HTML",
"bytes": "18974"
},
{
"name": "JavaScript",
"bytes": "18192"
},
{
"name": "Python",
"bytes": "402320"
}
],
"symlink_target": ""
} |
from __future__ import (absolute_import, division, print_function)
# Copyright 2018 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# the lib use python logging can get it if the following is set in your
# Ansible config.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_system_sdn_connector
short_description: Configure connection to SDN Connector.
description:
    - This module is able to configure a FortiGate or FortiOS by allowing the
      user to set and modify system feature and sdn_connector category.
      Examples include all the options and need to be adjusted to datasources
      before usage.
Tested with FOS v6.0.2
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate ip address.
required: true
username:
description:
- FortiOS or FortiGate username.
required: true
password:
description:
- FortiOS or FortiGate password.
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS
protocol
type: bool
default: false
system_sdn_connector:
description:
- Configure connection to SDN Connector.
default: null
suboptions:
state:
description:
- Indicates whether to create or remove the object
choices:
- present
- absent
access-key:
description:
- AWS access key ID.
azure-region:
description:
- Azure server region.
choices:
- global
- china
- germany
- usgov
client-id:
description:
- Azure client ID (application ID).
client-secret:
description:
- Azure client secret (application key).
compartment-id:
description:
- Compartment ID.
external-ip:
description:
- Configure GCP external IP.
suboptions:
name:
description:
- External IP name.
required: true
gcp-project:
description:
- GCP project name.
key-passwd:
description:
- Private key password.
name:
description:
- SDN connector name.
required: true
nic:
description:
- Configure Azure network interface.
suboptions:
ip:
description:
- Configure IP configuration.
suboptions:
name:
description:
- IP configuration name.
required: true
public-ip:
description:
- Public IP name.
name:
description:
- Network interface name.
required: true
oci-cert:
description:
- OCI certificate. Source certificate.local.name.
oci-fingerprint:
description:
- OCI pubkey fingerprint.
oci-region:
description:
- OCI server region.
choices:
- phoenix
- ashburn
- frankfurt
- london
password:
description:
- Password of the remote SDN connector as login credentials.
private-key:
description:
- Private key of GCP service account.
region:
description:
- AWS region name.
resource-group:
description:
- Azure resource group.
route:
description:
- Configure GCP route.
suboptions:
name:
description:
- Route name.
required: true
route-table:
description:
- Configure Azure route table.
suboptions:
name:
description:
- Route table name.
required: true
route:
description:
- Configure Azure route.
suboptions:
name:
description:
- Route name.
required: true
next-hop:
description:
- Next hop address.
secret-key:
description:
- AWS secret access key.
server:
description:
- Server address of the remote SDN connector.
server-port:
description:
- Port number of the remote SDN connector.
service-account:
description:
- GCP service account email.
status:
description:
- Enable/disable connection to the remote SDN connector.
choices:
- disable
- enable
subscription-id:
description:
- Azure subscription ID.
tenant-id:
description:
- Tenant ID (directory ID).
type:
description:
- Type of SDN connector.
choices:
- aci
- aws
- azure
- nsx
- nuage
- oci
- gcp
update-interval:
description:
- Dynamic object update interval (0 - 3600 sec, 0 means disabled, default = 60).
use-metadata-iam:
description:
- Enable/disable using IAM role from metadata to call API.
choices:
- disable
- enable
user-id:
description:
- User ID.
username:
description:
- Username of the remote SDN connector as login credentials.
vpc-id:
description:
- AWS VPC ID.
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
tasks:
- name: Configure connection to SDN Connector.
fortios_system_sdn_connector:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
system_sdn_connector:
state: "present"
access-key: "<your_own_value>"
azure-region: "global"
client-id: "<your_own_value>"
client-secret: "<your_own_value>"
compartment-id: "<your_own_value>"
external-ip:
-
name: "default_name_9"
gcp-project: "<your_own_value>"
key-passwd: "<your_own_value>"
name: "default_name_12"
nic:
-
ip:
-
name: "default_name_15"
public-ip: "<your_own_value>"
name: "default_name_17"
oci-cert: "<your_own_value> (source certificate.local.name)"
oci-fingerprint: "<your_own_value>"
oci-region: "phoenix"
password: "<your_own_value>"
private-key: "<your_own_value>"
region: "<your_own_value>"
resource-group: "<your_own_value>"
route:
-
name: "default_name_26"
route-table:
-
name: "default_name_28"
route:
-
name: "default_name_30"
next-hop: "<your_own_value>"
secret-key: "<your_own_value>"
server: "192.168.100.40"
server-port: "34"
service-account: "<your_own_value>"
status: "disable"
subscription-id: "<your_own_value>"
tenant-id: "<your_own_value>"
type: "aci"
update-interval: "40"
use-metadata-iam: "disable"
user-id: "<your_own_value>"
username: "<your_own_value>"
vpc-id: "<your_own_value>"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "key1"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
fos = None
def login(data):
host = data['host']
username = data['username']
password = data['password']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password)
def filter_system_sdn_connector_data(json):
option_list = ['access-key', 'azure-region', 'client-id',
'client-secret', 'compartment-id', 'external-ip',
'gcp-project', 'key-passwd', 'name',
'nic', 'oci-cert', 'oci-fingerprint',
'oci-region', 'password', 'private-key',
'region', 'resource-group', 'route',
'route-table', 'secret-key', 'server',
'server-port', 'service-account', 'status',
'subscription-id', 'tenant-id', 'type',
'update-interval', 'use-metadata-iam', 'user-id',
'username', 'vpc-id']
dictionary = {}
for attribute in option_list:
if attribute in json:
dictionary[attribute] = json[attribute]
return dictionary
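# Illustrative behaviour sketch (hypothetical payload; not part of the original
# module): keys outside option_list are dropped, so
#
#     filter_system_sdn_connector_data({'name': 'aws1', 'state': 'present'})
#
# returns {'name': 'aws1'}; the 'state' key is consumed separately by
# system_sdn_connector() below.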
def system_sdn_connector(data, fos):
vdom = data['vdom']
system_sdn_connector_data = data['system_sdn_connector']
filtered_data = filter_system_sdn_connector_data(system_sdn_connector_data)
if system_sdn_connector_data['state'] == "present":
return fos.set('system',
'sdn-connector',
data=filtered_data,
vdom=vdom)
elif system_sdn_connector_data['state'] == "absent":
return fos.delete('system',
'sdn-connector',
mkey=filtered_data['name'],
vdom=vdom)
def fortios_system(data, fos):
login(data)
methodlist = ['system_sdn_connector']
for method in methodlist:
if data[method]:
resp = eval(method)(data, fos)
break
fos.logout()
return not resp['status'] == "success", resp['status'] == "success", resp
def main():
fields = {
"host": {"required": True, "type": "str"},
"username": {"required": True, "type": "str"},
"password": {"required": False, "type": "str", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": "False"},
"system_sdn_connector": {
"required": False, "type": "dict",
"options": {
"state": {"required": True, "type": "str",
"choices": ["present", "absent"]},
"access-key": {"required": False, "type": "str"},
"azure-region": {"required": False, "type": "str",
"choices": ["global", "china", "germany",
"usgov"]},
"client-id": {"required": False, "type": "str"},
"client-secret": {"required": False, "type": "str"},
"compartment-id": {"required": False, "type": "str"},
"external-ip": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"gcp-project": {"required": False, "type": "str"},
"key-passwd": {"required": False, "type": "str"},
"name": {"required": True, "type": "str"},
"nic": {"required": False, "type": "list",
"options": {
"ip": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"},
"public-ip": {"required": False, "type": "str"}
}},
"name": {"required": True, "type": "str"}
}},
"oci-cert": {"required": False, "type": "str"},
"oci-fingerprint": {"required": False, "type": "str"},
"oci-region": {"required": False, "type": "str",
"choices": ["phoenix", "ashburn", "frankfurt",
"london"]},
"password": {"required": False, "type": "str"},
"private-key": {"required": False, "type": "str"},
"region": {"required": False, "type": "str"},
"resource-group": {"required": False, "type": "str"},
"route": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"route-table": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"},
"route": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"},
"next-hop": {"required": False, "type": "str"}
}}
}},
"secret-key": {"required": False, "type": "str"},
"server": {"required": False, "type": "str"},
"server-port": {"required": False, "type": "int"},
"service-account": {"required": False, "type": "str"},
"status": {"required": False, "type": "str",
"choices": ["disable", "enable"]},
"subscription-id": {"required": False, "type": "str"},
"tenant-id": {"required": False, "type": "str"},
"type": {"required": False, "type": "str",
"choices": ["aci", "aws", "azure",
"nsx", "nuage", "oci",
"gcp"]},
"update-interval": {"required": False, "type": "int"},
"use-metadata-iam": {"required": False, "type": "str",
"choices": ["disable", "enable"]},
"user-id": {"required": False, "type": "str"},
"username": {"required": False, "type": "str"},
"vpc-id": {"required": False, "type": "str"}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
global fos
fos = FortiOSAPI()
is_error, has_changed, result = fortios_system(module.params, fos)
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
| {
"content_hash": "7ee3870c0f690e16f8b243273be2b8f6",
"timestamp": "",
"source": "github",
"line_count": 539,
"max_line_length": 100,
"avg_line_length": 35.08163265306123,
"alnum_prop": 0.4639060764715215,
"repo_name": "SergeyCherepanov/ansible",
"id": "a9bbbdfd40056fd5b7d6b5764f4a83330c046fb5",
"size": "18927",
"binary": false,
"copies": "24",
"ref": "refs/heads/master",
"path": "ansible/ansible/modules/network/fortios/fortios_system_sdn_connector.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Shell",
"bytes": "824"
}
],
"symlink_target": ""
} |
from oslo_log import log as logging
from trove.cluster import models as cluster_models
import trove.common.apischema as apischema
from trove.common import cfg
from trove.common import exception
from trove.common.i18n import _
from trove.common import notification
from trove.common.notification import StartNotification, EndNotification
from trove.common import pagination
from trove.common import policy
from trove.common import timeutils
from trove.common import wsgi
from trove.configuration import models
from trove.configuration.models import DBConfigurationParameter
from trove.configuration import views
from trove.datastore import models as ds_models
from trove.instance import models as instances_models
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class ConfigurationsController(wsgi.Controller):
schemas = apischema.configuration
@classmethod
def authorize_config_action(cls, context, config_rule_name, config):
policy.authorize_on_target(
context, 'configuration:%s' % config_rule_name,
{'tenant': config.tenant_id})
def index(self, req, tenant_id):
context = req.environ[wsgi.CONTEXT_KEY]
configs, marker = models.Configurations.load(context)
policy.authorize_on_tenant(context, 'configuration:index')
view = views.ConfigurationsView(configs)
paged = pagination.SimplePaginatedDataView(req.url, 'configurations',
view, marker)
return wsgi.Result(paged.data(), 200)
def show(self, req, tenant_id, id):
LOG.debug("Showing configuration group %(id)s on tenant %(tenant)s",
{"tenant": tenant_id, "id": id})
context = req.environ[wsgi.CONTEXT_KEY]
configuration = models.Configuration.load(context, id)
self.authorize_config_action(context, 'show', configuration)
configuration_items = models.Configuration.load_items(context, id)
find_instance = {
'configuration_id': configuration.id,
'deleted': False
}
if not context.is_admin:
find_instance['tenant_id'] = context.project_id
configuration.instance_count = instances_models.DBInstance.find_all(
**find_instance).count()
return wsgi.Result(views.DetailedConfigurationView(
configuration,
configuration_items).data(), 200)
def instances(self, req, tenant_id, id):
context = req.environ[wsgi.CONTEXT_KEY]
configuration = models.Configuration.load(context, id)
self.authorize_config_action(context, 'instances', configuration)
kwargs = {
'configuration_id': configuration.id,
'deleted': False
}
if not context.is_admin:
kwargs['tenant_id'] = context.project_id
instances = instances_models.DBInstance.find_all(**kwargs)
limit = int(context.limit or CONF.instances_page_size)
if limit > CONF.instances_page_size:
limit = CONF.instances_page_size
data_view = instances_models.DBInstance.find_by_pagination(
'instances', instances, "foo",
limit=limit,
marker=context.marker)
view = views.DetailedConfigurationInstancesView(data_view.collection)
paged = pagination.SimplePaginatedDataView(req.url, 'instances', view,
data_view.next_page_marker)
return wsgi.Result(paged.data(), 200)
def create(self, req, body, tenant_id):
LOG.debug("req : '%s'\n\n", req)
LOG.debug("body : '%s'\n\n", req)
context = req.environ[wsgi.CONTEXT_KEY]
policy.authorize_on_tenant(context, 'configuration:create')
context.notification = notification.DBaaSConfigurationCreate(
context, request=req)
name = body['configuration']['name']
description = body['configuration'].get('description')
values = body['configuration']['values']
msg = ("Creating configuration group on tenant "
"%(tenant_id)s with name: %(cfg_name)s")
LOG.info(msg, {"tenant_id": tenant_id, "cfg_name": name})
datastore_args = body['configuration'].get('datastore', {})
datastore, datastore_version = (
ds_models.get_datastore_version(**datastore_args))
with StartNotification(context, name=name, datastore=datastore.name,
datastore_version=datastore_version.name):
configItems = []
if values:
# validate that the values passed in are permitted by the
# operator.
ConfigurationsController._validate_configuration(
body['configuration']['values'],
datastore_version,
models.DatastoreConfigurationParameters.load_parameters(
datastore_version.id))
for k, v in values.items():
configItems.append(DBConfigurationParameter(
configuration_key=k,
configuration_value=v))
cfg_group = models.Configuration.create(name, description,
tenant_id, datastore.id,
datastore_version.id)
with EndNotification(context, configuration_id=cfg_group.id):
cfg_group_items = models.Configuration.create_items(
cfg_group.id, values)
view_data = views.DetailedConfigurationView(cfg_group,
cfg_group_items)
return wsgi.Result(view_data.data(), 200)
def delete(self, req, tenant_id, id):
msg = ("Deleting configuration group %(cfg_id)s on tenant: "
"%(tenant_id)s")
LOG.info(msg, {"tenant_id": tenant_id, "cfg_id": id})
context = req.environ[wsgi.CONTEXT_KEY]
group = models.Configuration.load(context, id)
self.authorize_config_action(context, 'delete', group)
context.notification = notification.DBaaSConfigurationDelete(
context, request=req)
with StartNotification(context, configuration_id=id):
instances = instances_models.DBInstance.find_all(
tenant_id=context.project_id,
configuration_id=id,
deleted=False).all()
if instances:
raise exception.InstanceAssignedToConfiguration()
models.Configuration.delete(context, group)
return wsgi.Result(None, 202)
def update(self, req, body, tenant_id, id):
msg = ("Updating configuration group %(cfg_id)s for tenant "
"id %(tenant_id)s")
LOG.info(msg, {"tenant_id": tenant_id, "cfg_id": id})
context = req.environ[wsgi.CONTEXT_KEY]
group = models.Configuration.load(context, id)
# Note that changing the configuration group will also
# indirectly affect all the instances which attach it.
#
# The Trove instance itself won't be changed (the same group is still
# attached) but the configuration values will.
#
# The operator needs to keep this in mind when defining the related
# policies.
self.authorize_config_action(context, 'update', group)
# if name/description are provided in the request body, update the
# model with these values as well.
if 'name' in body['configuration']:
group.name = body['configuration']['name']
if 'description' in body['configuration']:
group.description = body['configuration']['description']
context.notification = notification.DBaaSConfigurationUpdate(
context, request=req)
with StartNotification(context, configuration_id=id,
name=group.name, description=group.description):
items = self._configuration_items_list(group,
body['configuration'])
deleted_at = timeutils.utcnow()
models.Configuration.remove_all_items(context, group.id,
deleted_at)
models.Configuration.save(group, items)
self._refresh_on_all_instances(context, id)
self._refresh_on_all_clusters(context, id)
return wsgi.Result(None, 202)
def edit(self, req, body, tenant_id, id):
context = req.environ[wsgi.CONTEXT_KEY]
group = models.Configuration.load(context, id)
self.authorize_config_action(context, 'edit', group)
context.notification = notification.DBaaSConfigurationEdit(
context, request=req)
with StartNotification(context, configuration_id=id):
items = self._configuration_items_list(group,
body['configuration'])
models.Configuration.save(group, items)
self._refresh_on_all_instances(context, id)
self._refresh_on_all_clusters(context, id)
def _refresh_on_all_instances(self, context, configuration_id):
"""Refresh a configuration group on all single instances.
"""
single_instances = instances_models.DBInstance.find_all(
tenant_id=context.project_id,
configuration_id=configuration_id,
cluster_id=None,
deleted=False).all()
config = models.Configuration(context, configuration_id)
for dbinstance in single_instances:
LOG.info("Re-applying configuration %s to instance: %s",
configuration_id, dbinstance.id)
instance = instances_models.Instance.load(context, dbinstance.id)
instance.update_configuration(config)
def _refresh_on_all_clusters(self, context, configuration_id):
"""Refresh a configuration group on all clusters.
"""
LOG.debug("Re-applying configuration group '%s' to all clusters.",
configuration_id)
clusters = cluster_models.DBCluster.find_all(
tenant_id=context.project_id,
configuration_id=configuration_id,
deleted=False).all()
for dbcluster in clusters:
LOG.debug("Re-applying configuration to cluster: %s", dbcluster.id)
cluster = cluster_models.Cluster.load(context, dbcluster.id)
cluster.configuration_attach(configuration_id)
def _configuration_items_list(self, group, configuration):
ds_version_id = group.datastore_version_id
ds_version = ds_models.DatastoreVersion.load_by_uuid(ds_version_id)
items = []
if 'values' in configuration:
# validate that the values passed in are permitted by the operator.
ConfigurationsController._validate_configuration(
configuration['values'],
ds_version,
models.DatastoreConfigurationParameters.load_parameters(
ds_version.id))
for k, v in configuration['values'].items():
items.append(DBConfigurationParameter(
configuration_id=group.id,
configuration_key=k,
configuration_value=v,
deleted=False))
return items
@staticmethod
def _validate_configuration(values, datastore_version, config_rules):
LOG.info("Validating configuration values")
# create rules dictionary based on parameter name
rules_lookup = {}
for item in config_rules:
rules_lookup[item.name.lower()] = item
# checking if there are any rules for the datastore
if not rules_lookup:
output = {"version": datastore_version.name,
"name": datastore_version.datastore_name}
msg = _("Configuration groups are not supported for this "
"datastore: %(name)s %(version)s") % output
raise exception.UnprocessableEntity(message=msg)
for k, v in values.items():
key = k.lower()
# parameter name validation
if key not in rules_lookup:
output = {"key": k,
"version": datastore_version.name,
"name": datastore_version.datastore_name}
msg = _("The configuration parameter %(key)s is not "
"supported for this datastore: "
"%(name)s %(version)s.") % output
raise exception.UnprocessableEntity(message=msg)
rule = rules_lookup[key]
# type checking
value_type = rule.data_type
if not isinstance(v, ConfigurationsController._find_type(
value_type)):
output = {"key": k, "type": value_type}
msg = _("The value provided for the configuration "
"parameter %(key)s is not of type %(type)s.") % output
raise exception.UnprocessableEntity(message=msg)
# integer min/max checking
if isinstance(v, int) and not isinstance(v, bool):
if rule.min_size is not None:
try:
min_value = int(rule.min_size)
except ValueError:
raise exception.TroveError(_(
"Invalid or unsupported min value defined in the "
"configuration-parameters configuration file. "
"Expected integer."))
if v < min_value:
output = {"key": k, "min": min_value}
message = _(
"The value for the configuration parameter "
"%(key)s is less than the minimum allowed: "
"%(min)s") % output
raise exception.UnprocessableEntity(message=message)
if rule.max_size is not None:
try:
max_value = int(rule.max_size)
except ValueError:
raise exception.TroveError(_(
"Invalid or unsupported max value defined in the "
"configuration-parameters configuration file. "
"Expected integer."))
if v > max_value:
output = {"key": k, "max": max_value}
message = _(
"The value for the configuration parameter "
"%(key)s is greater than the maximum "
"allowed: %(max)s") % output
raise exception.UnprocessableEntity(message=message)
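    # Illustrative behaviour sketch (hypothetical rule and payload; not part of
    # the original class): given a rule with name 'max_connections',
    # data_type 'integer', min_size '1' and max_size '100000', a payload
    # {'max_connections': 'many'} fails the type check and
    # {'max_connections': 0} fails the minimum check; both raise
    # UnprocessableEntity.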
@staticmethod
def _find_type(value_type):
if value_type == "boolean":
return bool
elif value_type == "string":
return str
elif value_type == "integer":
return int
elif value_type == "float":
return float
else:
raise exception.TroveError(_(
"Invalid or unsupported type defined in the "
"configuration-parameters configuration file."))
@staticmethod
def _get_item(key, dictList):
for item in dictList:
if key == item.get('name'):
return item
raise exception.UnprocessableEntity(
message=_("%s is not a supported configuration parameter.") % key)
class ParametersController(wsgi.Controller):
@classmethod
def authorize_request(cls, req, rule_name):
"""Parameters (configuration templates) bind to a datastore.
Datastores are not owned by any particular tenant so we only check
the current tenant is allowed to perform the action.
"""
context = req.environ[wsgi.CONTEXT_KEY]
policy.authorize_on_tenant(context, 'configuration-parameter:%s'
% rule_name)
def index(self, req, tenant_id, datastore, id):
self.authorize_request(req, 'index')
ds, ds_version = ds_models.get_datastore_version(
type=datastore, version=id)
rules = models.DatastoreConfigurationParameters.load_parameters(
ds_version.id)
return wsgi.Result(views.ConfigurationParametersView(rules).data(),
200)
def show(self, req, tenant_id, datastore, id, name):
self.authorize_request(req, 'show')
ds, ds_version = ds_models.get_datastore_version(
type=datastore, version=id)
rule = models.DatastoreConfigurationParameters.load_parameter_by_name(
ds_version.id, name)
return wsgi.Result(views.ConfigurationParameterView(rule).data(), 200)
def index_by_version(self, req, tenant_id, version):
self.authorize_request(req, 'index_by_version')
ds_version = ds_models.DatastoreVersion.load_by_uuid(version)
rules = models.DatastoreConfigurationParameters.load_parameters(
ds_version.id)
return wsgi.Result(views.ConfigurationParametersView(rules).data(),
200)
def show_by_version(self, req, tenant_id, version, name):
self.authorize_request(req, 'show_by_version')
ds_models.DatastoreVersion.load_by_uuid(version)
rule = models.DatastoreConfigurationParameters.load_parameter_by_name(
version, name)
return wsgi.Result(views.ConfigurationParameterView(rule).data(), 200)
| {
"content_hash": "6a669a8a5191aca77eda2caaa297b26a",
"timestamp": "",
"source": "github",
"line_count": 403,
"max_line_length": 79,
"avg_line_length": 44.20347394540943,
"alnum_prop": 0.5859436398338386,
"repo_name": "openstack/trove",
"id": "50b4138b994b3c4471f4333aa825734d633c3ce3",
"size": "18439",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "trove/configuration/service.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1166"
},
{
"name": "Python",
"bytes": "3667406"
},
{
"name": "Shell",
"bytes": "136049"
}
],
"symlink_target": ""
} |
import time
def RateLimited(maxPerSecond):
minInterval = 1.0 / float(maxPerSecond)
def decorate(func):
lastTimeCalled = [0.0]
def rateLimitedFunction(*args,**kargs):
            elapsed = time.time() - lastTimeCalled[0]  # wall-clock seconds; time.clock() measured CPU time on Unix
leftToWait = minInterval - elapsed
if leftToWait>0:
time.sleep(leftToWait)
ret = func(*args,**kargs)
            lastTimeCalled[0] = time.time()
return ret
return rateLimitedFunction
return decorate
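# Note (added commentary, not in the original script): lastTimeCalled is a
# one-element list so the nested function can mutate it under Python 2, which
# lacks `nonlocal`; the decorator keeps no lock, so concurrent callers may
# briefly exceed the limit.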
@RateLimited(2) # 2 per second at most
def PrintNumber(num):
print num
if __name__ == "__main__":
print "This should print 1,2,3... at about 2 per second."
for i in range(1,100):
PrintNumber(i)
| {
"content_hash": "8f3ede59eb40802216311e061df6b0c3",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 61,
"avg_line_length": 29.56,
"alnum_prop": 0.591339648173207,
"repo_name": "voidabhi/python-scripts",
"id": "098a57975a29841881307544041530ccaa225a37",
"size": "739",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rate-limiter.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "446"
},
{
"name": "Go",
"bytes": "330"
},
{
"name": "JavaScript",
"bytes": "1728"
},
{
"name": "Python",
"bytes": "282732"
},
{
"name": "Shell",
"bytes": "794"
}
],
"symlink_target": ""
} |
import logging
import json
import six
import copy
import datetime
from django.http import HttpResponse
from django.utils.decorators import method_decorator, classonlymethod
from django.views.generic import View
from elasticsearch.exceptions import ElasticsearchException
from casexml.apps.case.models import CommCareCase
from corehq.apps.es.utils import flatten_field_dict
from corehq.apps.api.resources.v0_1 import TASTYPIE_RESERVED_GET_PARAMS
from corehq.pillows.mappings.case_mapping import CASE_INDEX
from corehq.pillows.mappings.reportcase_mapping import REPORT_CASE_INDEX
from corehq.pillows.mappings.reportxform_mapping import REPORT_XFORM_INDEX
from corehq.pillows.mappings.user_mapping import USER_INDEX
from corehq.pillows.mappings.xform_mapping import XFORM_INDEX
from dimagi.utils.parsing import ISO_DATE_FORMAT
from dimagi.utils.logging import notify_exception
from corehq.apps.domain.decorators import login_and_domain_required
from corehq.apps.reports.filters.forms import FormsByApplicationFilter
from corehq.elastic import ESError, get_es_new
from corehq.pillows.base import restore_property_dict, VALUE_TAG
from no_exceptions.exceptions import Http400
logger = logging.getLogger('es')
DEFAULT_SIZE = 10
class ESUserError(Http400):
pass
class DateTimeError(ValueError):
pass
class ESView(View):
"""
Generic CBV for interfacing with the Elasticsearch REST api.
This is necessary because tastypie's built in REST assumptions don't like
ES's POST for querying, which we can set explicitly here.
For security purposes, queries ought to be domain'ed by the requesting user, so a base_query
is encouraged to be added.
Access to the APIs can be done via url endpoints which are attached to the corehq.api.urls
or programmatically via the self.run_query() method.
This current iteration of the ESView must require a domain for its usage for security purposes.
"""
#note - for security purposes, csrf protection is ENABLED
#search POST queries must take the following format:
#query={query_json}
#csrfmiddlewaretoken=token
#in curl, this is:
#curl -b "csrftoken=<csrftoken>;sessionid=<session_id>" -H "Content-Type: application/json" -XPOST http://server/a/domain/api/v0.1/xform_es/
# -d"query=@myquery.json&csrfmiddlewaretoken=<csrftoken>"
#or, call this programmatically to avoid CSRF issues.
index = ""
domain = ""
es = None
http_method_names = ['get', 'post', 'head', ]
def __init__(self, domain):
super(ESView, self).__init__()
self.domain = domain.lower()
self.es = get_es_new()
def head(self, *args, **kwargs):
raise NotImplementedError("Not implemented")
@method_decorator(login_and_domain_required)
#@method_decorator(csrf_protect)
# todo: csrf_protect temporarily removed and left to implementor's prerogative
# getting ajax'ed csrf token method needs revisit.
def dispatch(self, *args, **kwargs):
req = args[0]
self.pretty = req.GET.get('pretty', False)
if self.pretty:
self.indent = 4
else:
self.indent = None
ret = super(ESView, self).dispatch(*args, **kwargs)
return ret
@classonlymethod
def as_view(cls, **initkwargs):
"""
Django as_view cannot be used since the constructor requires information only present in the request.
"""
raise Exception('as_view not supported for domain-specific ESView')
@classonlymethod
def as_domain_specific_view(cls, **initkwargs):
"""
Creates a simple domain-specific class-based view for passing through ES requests.
"""
def view(request, domain, *args, **kwargs):
self = cls(domain)
return self.dispatch(request, domain, *args, **kwargs)
return view
def run_query(self, es_query, es_type=None):
"""
Run a more advanced POST based ES query
Returns the raw query json back, or None if there's an error
"""
logger.info("ESlog: [%s.%s] ESquery: %s" % (self.__class__.__name__, self.domain, json.dumps(es_query)))
if 'fields' in es_query or 'script_fields' in es_query:
#nasty hack to add domain field to query that does specific fields.
#do nothing if there's no field query because we get everything
fields = es_query.get('fields', [])
fields.append('domain')
es_query['fields'] = fields
try:
es_results = self.es.search(self.index, es_type, body=es_query)
except ElasticsearchException as e:
if 'query_string' in es_query.get('query', {}).get('filtered', {}).get('query', {}):
# the error may have been caused by a bad query string
# re-run with no query string to check
querystring = es_query['query']['filtered']['query']['query_string']['query']
new_query = es_query
new_query['query']['filtered']['query'] = {"match_all": {}}
new_results = self.run_query(new_query)
if new_results:
# the request succeeded without that query string
# an error with a blank query will return None
raise ESUserError("Error with elasticsearch query: %s" %
querystring)
msg = "Error in elasticsearch query [%s]: %s\nquery: %s" % (self.index, str(e), es_query)
notify_exception(None, message=msg)
raise ESError(msg)
hits = []
for res in es_results['hits']['hits']:
if '_source' in res:
res_domain = res['_source'].get('domain', None)
elif 'fields' in res:
res['fields'] = flatten_field_dict(res)
res_domain = res['fields'].get('domain', None)
# security check
if res_domain == self.domain:
hits.append(res)
else:
logger.info("Requester domain %s does not match result domain %s" % (
self.domain, res_domain))
es_results['hits']['hits'] = hits
return es_results
def base_query(self, terms=None, fields=None, start=0, size=DEFAULT_SIZE):
"""
The standard query to run across documents of a certain index.
domain = exact match domain string
        terms = k,v pairs of terms you want to match against. You can dive down into properties like form.foo for an xform, e.g. { "username": "foo", "type": "bar" } - each pair becomes a {k: v} term filter
fields = field properties to report back in the results['fields'] array. if blank, you will need to read the _source
start = where to start the results from
size = default size in ES is 10, also explicitly set here.
"""
fields = fields or []
query = {
"filter": {
"and": [
{"term": {"domain.exact": self.domain}}
]
},
"from": start,
"size": size
}
use_terms = terms or {}
if len(fields) > 0:
query['fields'] = fields
for k, v in use_terms.items():
query['filter']['and'].append({"term": {k: v}})
return query
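    # Added illustration (hypothetical values, not part of the original module):
    # base_query(terms={"type": "exam"}, fields=["name"], size=5) on a view for
    # domain "demo" would produce
    #     {"filter": {"and": [{"term": {"domain.exact": "demo"}},
    #                         {"term": {"type": "exam"}}]},
    #      "from": 0, "size": 5, "fields": ["name"]}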
def get(self, request, *args, **kwargs):
"""
Very basic querying based upon GET parameters.
todo: apply GET params as lucene query_string params to base_query
"""
size = request.GET.get('size', DEFAULT_SIZE)
start = request.GET.get('start', 0)
query_results = self.run_query(self.base_query(start=start, size=size))
query_output = json.dumps(query_results, indent=self.indent)
response = HttpResponse(query_output, content_type="application/json")
return response
def post(self, request, *args, **kwargs):
"""
More powerful ES querying using POST params.
"""
try:
raw_post = request.body
raw_query = json.loads(raw_post)
        except Exception as ex:
content_response = dict(message="Error parsing query request", exception=ex.message)
response = HttpResponse(status=406, content=json.dumps(content_response))
return response
#ensure that the domain is filtered in implementation
query_results = self.run_query(raw_query)
query_output = json.dumps(query_results, indent=self.indent)
response = HttpResponse(query_output, content_type="application/json")
return response
class CaseES(ESView):
"""
    Expressive CaseES interface. This is redundant with pieces of the v0_1.py CaseAPI -
    todo: merge these, with this class becoming the final say on ES access for case docs.
"""
index = CASE_INDEX
class ReportCaseES(ESView):
index = REPORT_CASE_INDEX
class XFormES(ESView):
index = XFORM_INDEX
def base_query(self, terms=None, doc_type='xforminstance', fields=None, start=0, size=DEFAULT_SIZE):
"""
Somewhat magical enforcement that the basic query for XForms will only return XFormInstance
docs by default.
"""
new_terms = terms or {}
use_fields = fields or []
if 'doc_type' not in new_terms:
#let the terms override the kwarg - the query terms trump the magic
new_terms['doc_type'] = doc_type
return super(XFormES, self).base_query(terms=new_terms, fields=use_fields, start=start, size=size)
def run_query(self, es_query, **kwargs):
es_results = super(XFormES, self).run_query(es_query)
# hack, walk the results again, and if we have xmlns, populate human readable names
# Note that `get_unknown_form_name` does not require the request, which is also
# not necessarily available here. So `None` is passed here.
form_filter = FormsByApplicationFilter(None, domain=self.domain)
for res in es_results.get('hits', {}).get('hits', []):
if '_source' in res:
xmlns = res['_source'].get('xmlns', None)
name = None
if xmlns:
name = form_filter.get_unknown_form_name(xmlns,
app_id=res['_source'].get('app_id',
None),
none_if_not_found=True)
if not name:
name = 'unknown' # try to fix it below but this will be the default
# fall back
try:
if res['_source']['form'].get('@name', None):
name = res['_source']['form']['@name']
else:
backup = res['_source']['form'].get('#type', 'data')
if backup != 'data':
name = backup
except (TypeError, KeyError):
pass
res['_source']['es_readable_name'] = name
return es_results
class UserES(ESView):
"""
self.run_query accepts a structured elasticsearch query
"""
index = USER_INDEX
def validate_query(self, query):
if 'password' in query['fields']:
raise ESUserError("You cannot include password in the results")
def run_query(self, es_query, es_type=None, security_check=True):
"""
Must be called with a "fields" parameter
Returns the raw query json back, or None if there's an error
"""
logger.info("ESlog: [%s.%s] ESquery: %s" % (
self.__class__.__name__, self.domain, json.dumps(es_query)))
self.validate_query(es_query)
try:
es_results = self.es.search(self.index, es_type, body=es_query)
except ElasticsearchException as e:
msg = "Error in elasticsearch query [%s]: %s\nquery: %s" % (
self.index, str(e), es_query)
notify_exception(None, message=msg)
return None
hits = []
for res in es_results['hits']['hits']:
if '_source' in res:
raise ESUserError(
"This query does not support full document lookups")
# security check
if security_check:
res_domain = res['fields'].get('domain_memberships.domain', None)
if res_domain == self.domain:
hits.append(res)
else:
logger.info(
"Requester domain %s does not match result domain %s" % (
self.domain, res_domain))
else:
hits.append(res)
es_results['hits']['hits'] = hits
return es_results
def report_term_filter(terms, mapping):
"""convert terms to correct #value term queries based upon the mapping
does it match up with pre-defined stuff in the mapping?
"""
ret_terms = []
for orig_term in terms:
curr_mapping = mapping.get('properties')
split_term = orig_term.split('.')
for ix, sub_term in enumerate(split_term, start=1):
is_property = sub_term in curr_mapping
if ix == len(split_term):
#it's the last one, and if it's still not in it, then append a value
if is_property:
ret_term = orig_term
else:
ret_term = '%s.%s' % (orig_term, VALUE_TAG)
ret_terms.append(ret_term)
if is_property and 'properties' in curr_mapping[sub_term]:
curr_mapping = curr_mapping[sub_term]['properties']
return ret_terms
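# Added sketch (hypothetical mapping, not part of the original module): with a
# mapping that only declares form.meta as an object property,
#     report_term_filter(["form.meta.username", "doc_type"], mapping)
# returns ["form.meta.username.%s" % VALUE_TAG, "doc_type.%s" % VALUE_TAG],
# while a term whose full path is declared in the mapping is left unchanged.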
class ReportXFormES(XFormES):
index = REPORT_XFORM_INDEX
def base_query(self, terms=None, doc_type='xforminstance', fields=None, start=0, size=DEFAULT_SIZE):
"""
Somewhat magical enforcement that the basic query for XForms will only return XFormInstance
docs by default.
"""
raw_terms = terms or {}
query_terms = {}
if 'doc_type' not in raw_terms:
#let the terms override the kwarg - the query terms trump the magic
query_terms['doc_type'] = doc_type
for k, v in raw_terms.items():
query_terms['%s.%s' % (k, VALUE_TAG)] = v
        return super(ReportXFormES, self).base_query(terms=query_terms, fields=fields, start=start, size=size)
def run_query(self, es_query):
es_results = super(XFormES, self).run_query(es_query)
#hack, walk the results again, and if we have xmlns, populate human readable names
# Note that `get_unknown_form_name` does not require the request, which is also
# not necessarily available here. So `None` is passed here.
form_filter = FormsByApplicationFilter(None, domain=self.domain)
for res in es_results.get('hits', {}).get('hits', []):
if '_source' in res:
res_source = restore_property_dict(res['_source'])
res['_source'] = res_source
xmlns = res['_source'].get('xmlns', None)
name = None
if xmlns:
name = form_filter.get_unknown_form_name(xmlns,
app_id=res['_source'].get('app_id',
None),
none_if_not_found=True)
if not name:
name = 'unknown' # try to fix it below but this will be the default
# fall back
try:
if res['_source']['form'].get('@name', None):
name = res['_source']['form']['@name']
else:
backup = res['_source']['form'].get('#type', 'data')
if backup != 'data':
name = backup
except (TypeError, KeyError):
pass
res['_source']['es_readable_name'] = name
return es_results
@classmethod
def by_case_id_query(cls, domain, case_id, terms=None, doc_type='xforminstance',
date_field=None, startdate=None, enddate=None,
date_format=ISO_DATE_FORMAT):
"""
Run a case_id query on both case properties (supporting old and new) for xforms.
        datetime filtering on submission ranges is also possible by passing datetime startdate and/or enddate
args:
domain: string domain, required exact
case_id: string
terms: k,v of additional filters to apply as terms and block of filter
doc_type: explicit xforminstance doc_type term query (only search active, legit items)
            date_field: string property of the xform submission to use for date filtering; make sure the field in question is indexed as a datetime
startdate, enddate: datetime interval values
date_format: string of the date format to filter based upon, defaults to yyyy-mm-dd
"""
use_terms = terms or {}
query = {
"query": {
"filtered": {
"filter": {
"and": [
{"term": {"domain.exact": domain.lower()}},
{"term": {"doc_type": doc_type}},
{
"nested": {
"path": "form.case",
"filter": {
"or": [
{"term": {"form.case.@case_id": "%s" % case_id}},
{"term": {"form.case.case_id": "%s" % case_id}}
]
}
}
}
]
},
"query": {
"match_all": {}
}
}
}
}
if date_field is not None:
range_query = {
"range": {
date_field: {}
}
}
if startdate is not None:
range_query['range'][date_field]["gte"] = startdate.strftime(date_format)
if enddate is not None:
range_query['range'][date_field]["lte"] = enddate.strftime(date_format)
query['query']['filtered']['filter']['and'].append(range_query)
for k, v in use_terms.items():
query['query']['filtered']['filter']['and'].append({"term": {k.lower(): v.lower()}})
return query
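# Added usage sketch (hypothetical domain/case id, not part of the original
# module): by_case_id_query only builds the query dict, so it can be shown
# without touching ES, e.g.
#     ReportXFormES.by_case_id_query("demo", "abc-123")
# yields a filtered match_all query whose "and" block holds the domain.exact
# and doc_type terms plus a nested "or" filter matching either
# form.case.@case_id or form.case.case_id against "abc-123".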
class ElasticAPIQuerySet(object):
"""
An abstract representation of an elastic search query,
modeled somewhat after Django's QuerySet but with
the only important goal being compatibility
with Tastypie's classes. Key capabilities, by piece of
Tastypie:
Pagination:
- `__getitem__([start:stop])` which should efficiently pass the bounds on to ES
- `count()` which should efficiently ask ES for the total matching (regardless of slice)
Sorting:
- order_by('field') or order_by('-field') both become ES service-side sort directives
Serialization:
- `__iter__()`
"""
# Also note https://github.com/llonchj/django-tastypie-elasticsearch/ which is
    # not very mature, plus this code below may involve Dimagi-specific assumptions
def __init__(self, es_client, payload=None, model=None):
"""
Instantiate with an entire ElasticSearch payload,
since "query", "filter", etc, all exist alongside
each other.
"""
self.es_client = es_client
self.payload = payload
self.model = model
self.__results = None
def with_fields(self, es_client=None, payload=None, model=None):
"Clones this queryset, optionally changing some fields"
return ElasticAPIQuerySet(es_client=es_client or self.es_client,
payload=payload or self.payload,
model=model or self.model)
@property
def results(self):
if self.__results is None:
self.__results = self.es_client.run_query(self.payload)
return self.__results
def count(self):
# Just asks ES for the count by limiting results to zero, leveraging slice implementation
return self[0:0].results['hits']['total']
def order_by(self, *fields):
new_payload = copy.deepcopy(self.payload)
new_payload['sort'] = []
for field in fields:
if not field:
continue
direction = 'asc'
if field[0] == '-':
direction = 'desc'
field = field[1:]
new_payload['sort'].append({field: direction})
return self.with_fields(payload=new_payload)
def __len__(self):
# Note that this differs from `count` in that it actually performs the query and measures
# only those objects returned
return len(self.results['hits']['hits'])
def __iter__(self):
for jvalue in self.results['hits']['hits']:
if self.model:
# HACK: Sometimes the model is a class w/ a wrap method, sometimes just a function
if hasattr(self.model, 'wrap'):
if self.model == CommCareCase:
jvalue['_source'].pop('modified_by', None)
yield self.model.wrap(jvalue['_source'])
else:
yield self.model(jvalue['_source'])
else:
yield jvalue['_source']
def __getitem__(self, idx):
if isinstance(idx, slice):
if idx.start < 0 or idx.stop < 0:
# This actually could be supported with varying degrees of efficiency
raise NotImplementedError('Negative index in slice not supported.')
new_payload = copy.deepcopy(self.payload)
new_payload['from'] = new_payload.get('from', 0) + (idx.start or 0)
if idx.stop is not None:
new_payload['size'] = max(0, idx.stop - (idx.start or 0))
return self.with_fields(payload=new_payload)
elif isinstance(idx, six.integer_types):
if idx >= 0:
                # Leverage efficient backend slicing
return list(self[idx:idx+1])[0]
else:
# This actually could be supported with varying degrees of efficiency
raise NotImplementedError('Negative index not supported.')
else:
raise TypeError('Unsupported type: %s', type(idx))
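# Added usage sketch (hypothetical request/domain objects, not part of the
# original module): the queryset defers to ES only when counted, sliced or
# iterated, which is what Tastypie's paginator expects.
#     es_view = XFormES(domain)
#     qs = ElasticAPIQuerySet(es_view, payload=es_search(request, domain))
#     page = qs.order_by('-received_on')[20:40]   # adds sort, from=20, size=20
#     total = qs.count()                          # size-0 query, reads hits.total
#     docs = list(page)                           # actually runs the sliced query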
def validate_date(date):
try:
datetime.datetime.strptime(date, ISO_DATE_FORMAT)
except ValueError:
try:
datetime.datetime.strptime(date, '%Y-%m-%dT%H:%M:%S')
except ValueError:
try:
datetime.datetime.strptime(date, '%Y-%m-%dT%H:%M:%S.%f')
except ValueError:
raise DateTimeError("Date not in the correct format")
return date
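# Added note (illustrative values): validate_date accepts the three formats
# tried above, e.g. "2015-01-01" or "2015-01-01T12:30:00", and raises
# DateTimeError for anything else, which es_search below turns into an HTTP 400.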
RESERVED_QUERY_PARAMS = set(['limit', 'offset', 'order_by', 'q', '_search'] + TASTYPIE_RESERVED_GET_PARAMS)
# Note that dates are already in a string format when they arrive as query params
query_param_transforms = {
'xmlns': lambda v: {'term': {'xmlns.exact': v}},
'received_on_start': lambda v: {'range': {'received_on': {'from': validate_date(v)}}},
'received_on_end': lambda v: {'range': {'received_on': {'to': validate_date(v)}}},
}
def es_search(request, domain, reserved_query_params=None):
payload = {
"filter": {
"and": [
{"term": {"domain.exact": domain}}
]
},
}
# ?_search=<json> for providing raw ES query, which is nonetheless restricted here
# NOTE: The fields actually analyzed into ES indices differ somewhat from the raw
# XML / JSON.
if '_search' in request.GET:
additions = json.loads(request.GET['_search'])
if 'filter' in additions:
payload['filter']['and'].append(additions['filter'])
if 'query' in additions:
payload['query'] = additions['query']
# ?q=<lucene>
if 'q' in request.GET:
payload['query'] = payload.get('query', {})
payload['query']['query_string'] = {'query': request.GET['q']} # A bit indirect?
# filters are actually going to be a more common case
reserved_query_params = RESERVED_QUERY_PARAMS | set(reserved_query_params or [])
for key in set(request.GET.keys()) - reserved_query_params:
if key.endswith('__full'): continue
value = request.GET[key]
if key in query_param_transforms:
try:
payload["filter"]["and"].append(query_param_transforms[key](value))
except DateTimeError:
raise Http400("Bad query parameter")
else:
payload["filter"]["and"].append({"term": {key: value}})
return payload
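# Added sketch (hypothetical GET parameters, not part of the original module):
# a request such as
#     ?xmlns=http://example.org/f&received_on_start=2015-01-01&limit=10
# against domain "demo" would produce roughly
#     {"filter": {"and": [{"term": {"domain.exact": "demo"}},
#                         {"term": {"xmlns.exact": "http://example.org/f"}},
#                         {"range": {"received_on": {"from": "2015-01-01"}}}]}}
# ("limit" is in RESERVED_QUERY_PARAMS, so the caller handles it separately).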
| {
"content_hash": "7d317ae20f279c1c3ba7b08fe23e2080",
"timestamp": "",
"source": "github",
"line_count": 660,
"max_line_length": 208,
"avg_line_length": 38.807575757575755,
"alnum_prop": 0.5589739585366806,
"repo_name": "qedsoftware/commcare-hq",
"id": "90ea3d1624d36c242f5c54fb178f487f3d5dc93b",
"size": "25613",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "corehq/apps/api/es.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ActionScript",
"bytes": "15950"
},
{
"name": "CSS",
"bytes": "508392"
},
{
"name": "HTML",
"bytes": "2869325"
},
{
"name": "JavaScript",
"bytes": "2395360"
},
{
"name": "PHP",
"bytes": "2232"
},
{
"name": "PLpgSQL",
"bytes": "125298"
},
{
"name": "Python",
"bytes": "14670713"
},
{
"name": "Shell",
"bytes": "37514"
}
],
"symlink_target": ""
} |
"""
Merge runs of KLEE. Optionally merging coverage and bug replay
information
"""
import argparse
import logging
import pprint
import copy
import os
import sys
import yaml
from load_klee_analysis import add_kleeanalysis_to_module_search_path
from load_klee_runner import add_KleeRunner_to_module_search_path
add_kleeanalysis_to_module_search_path()
import KleeRunner.ResultInfo
import KleeRunner.DriverUtil as DriverUtil
import KleeRunner.ResultInfoUtil
import KleeRunner.InvocationInfo
import kleeanalysis
import kleeanalysis.rank
_logger = logging.getLogger(__name__)
def handle_rejected_result_infos(rejected_result_infos, index_to_name_fn):
assert isinstance(rejected_result_infos, list)
had_rejected_result_infos = False
for index, rejected_result_infos_list in enumerate(rejected_result_infos):
name = index_to_name_fn(index)
assert(isinstance(rejected_result_infos_list, list))
for result_info in rejected_result_infos_list:
had_rejected_result_infos = True
_logger.warning('"{}" was rejected from "{}"'.format(
KleeRunner.ResultInfoUtil.get_result_info_key(result_info),
name))
return had_rejected_result_infos
def handle_error_result_infos(result_infos_list, index_to_name_fn):
for index, result_info in enumerate(result_infos_list):
for ri in result_info['results']:
if 'error' in ri:
_logger.error('{} contains an error result: {}'.format(
index_to_name_fn(index), ri))
# FIXME: This does not handle the problem in a very elegant way
sys.exit(1)
def report_missing_result_infos(key_to_result_infos, index_to_name_fn):
assert isinstance(key_to_result_infos, dict)
had_missing_result_infos = False
for key, result_infos in key_to_result_infos.items():
assert(isinstance(result_infos, list))
for index, result_info in enumerate(result_infos):
if result_info is None:
had_missing_result_infos = True
name = index_to_name_fn(index)
_logger.warning('"{}" is missing from "{}"'.format(
key,
name))
return had_missing_result_infos
# TODO: Remove when we are sure we don't need this.
def longest_common_prefix(list_of_strings):
assert isinstance(list_of_strings, list)
first_string = list_of_strings[0]
prefix_end_index = len(first_string) - 1
for string_index, s in enumerate(list_of_strings):
assert isinstance(s, str)
assert len(s) > 0
if string_index == 0:
# No need to compare the first string to itself
continue
if prefix_end_index == -1:
break
for char_index, char_value in enumerate(s):
if char_index > prefix_end_index:
# No need to look past this string.
# We already know looking at other strings
# that the prefix is shorter
break
if first_string[char_index] != char_value:
# Character mismatch.
# Largest prefix must include last successful character
prefix_end_index = char_index -1
if prefix_end_index >= 0:
return first_string[0:(prefix_end_index + 1)]
else:
return None
def sort_paths(directories):
assert isinstance(directories, list)
#lcp = longest_common_prefix(directories)
#if lcp is not None:
# raise Exception('Not implemented')
# HACK: Check if all directory names are integers
# For directories named like `0` and `10`.
all_ints = True
for d in directories:
assert isinstance(d, str)
try:
_ = int(d)
except ValueError:
all_ints = False
if all_ints:
# Sort numerically rather than lexographically
return sorted(directories, key=lambda v: int(v))
# Sort lexographically
return sorted(directories)
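def _sort_paths_example():
    """Added sketch, not part of the original tool: numeric run directories
    (e.g. repeat runs named "0", "1", ..., "10") sort numerically, anything
    else falls back to lexicographic order."""
    assert sort_paths(["10", "2", "1"]) == ["1", "2", "10"]
    assert sort_paths(["run_b", "run_a"]) == ["run_a", "run_b"]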
def main(argv):
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("klee_repeat_output_dir",
help="Directory containing directories where each directory is a repeat run of KLEE"
)
parser.add_argument("--repeat-replay-coverage-output-dir",
default=None,
dest="repeat_replay_coverage_output_dir",
help="Directory containing directories where each directory contains coverage info file")
parser.add_argument("--replay-coverage-info-file",
dest="replay_coverage_info_file",
default="coverage_info.yml"
)
parser.add_argument("--klee-result-info-file-name",
dest="klee_result_info_file_name",
default="output.yml",
help="File name to look for when looking for mergable result-info")
parser.add_argument("--output-dir",
dest="output_dir",
required=True)
parser.add_argument("--repeat-bug-replay-output-dir",
default=None,
dest="repeat_bug_replay_output_dir",
help="Directory containing directories where each directory contages a bug replay info file")
parser.add_argument("--bug-replay-info-file-name",
dest="bug_replay_info_file",
default="bug_replay_info.yml")
DriverUtil.parserAddLoggerArg(parser)
args = parser.parse_args(args=argv)
DriverUtil.handleLoggerArgs(args, parser)
klee_result_info_repeat_run_dir = os.path.abspath(args.klee_repeat_output_dir)
if not os.path.exists(klee_result_info_repeat_run_dir):
_logger.error('"{}" does not exist'.format(klee_result_info_repeat_run_dir))
return 1
if args.repeat_replay_coverage_output_dir:
repeat_replay_coverage_output_dir = os.path.abspath(args.repeat_replay_coverage_output_dir)
if not os.path.exists(repeat_replay_coverage_output_dir):
_logger.error('"{}" does not exist'.format(repeat_replay_coverage_output_dir))
return 1
if args.repeat_bug_replay_output_dir:
repeat_bug_replay_output_dir = os.path.abspath(args.repeat_bug_replay_output_dir)
if not os.path.exists(repeat_bug_replay_output_dir):
_logger.error('"{}" does not exist'.format(repeat_bug_replay_output_dir))
return 1
# Setup output directory
output_dir, success = KleeRunner.DriverUtil.setupWorkingDirectory(args.output_dir)
if not success:
_logger.error('Failed to set up output directory "{}"'.format(args.output_dir))
return 1
# Find result info files
_, dirnames, _ = next(os.walk(klee_result_info_repeat_run_dir))
result_info_files = []
for d in sort_paths(dirnames):
result_info_file = os.path.join(klee_result_info_repeat_run_dir, d, args.klee_result_info_file_name)
_logger.info('Looking for ResultInfo file "{}"'.format(result_info_file))
if not os.path.exists(result_info_file):
_logger.warning('"{}" not found'.format(result_info_file))
continue
_logger.info('"{}" found'.format(result_info_file))
result_info_files.append(result_info_file)
if len(result_info_files) < 2:
_logger.error('Need two or more result info files')
return 1
# Find coverage info files
coverage_info_files = []
if args.repeat_replay_coverage_output_dir:
_, dirnames, _ = next(os.walk(repeat_replay_coverage_output_dir))
for d in sort_paths(dirnames):
coverage_info_file = os.path.join(
repeat_replay_coverage_output_dir,
d,
args.replay_coverage_info_file)
_logger.info('Looking for CoverageInfo file "{}"'.format(coverage_info_file))
if not os.path.exists(coverage_info_file):
_logger.warning('"{}" not found'.format(coverage_info_file))
continue
_logger.info('"{}" found'.format(coverage_info_file))
coverage_info_files.append(coverage_info_file)
if len(coverage_info_files) != len(result_info_files):
_logger.error('Found {} coverage info files but expected {}'.format(
len(coverage_info_files),
len(result_info_files)
))
return 1
# Find bug replay info files
bug_replay_info_files = []
if args.repeat_bug_replay_output_dir:
_, dirnames, _ = next(os.walk(repeat_bug_replay_output_dir))
for d in sort_paths(dirnames):
bug_replay_info_file = os.path.join(
repeat_bug_replay_output_dir,
d,
args.bug_replay_info_file)
_logger.info('Looking for BugReplayInfo file "{}"'.format(bug_replay_info_file))
if not os.path.exists(bug_replay_info_file):
_logger.warning('"{}" not found'.format(bug_replay_info_file))
continue
_logger.info('"{}" found'.format(bug_replay_info_file))
bug_replay_info_files.append(bug_replay_info_file)
if len(bug_replay_info_files) != len(result_info_files):
_logger.error('Found {} bug replay info files but expected {}'.format(
len(bug_replay_info_files),
len(result_info_files)
))
return 1
# Open result info files
result_infos_list = []
for result_info_file in result_info_files:
with open(result_info_file, 'r') as f:
_logger.info('Loading "{}"'.format(f.name))
result_info = KleeRunner.ResultInfo.loadRawResultInfos(f)
result_infos_list.append(result_info)
def index_to_name_fn(i):
return result_info_files[i]
# Warn about error results
handle_error_result_infos(result_infos_list, index_to_name_fn)
# Group result infos by key (program name)
key_to_result_infos, rejected_result_infos = (
KleeRunner.ResultInfoUtil.group_result_infos_by(result_infos_list)
)
had_rejected_result_infos = handle_rejected_result_infos(
rejected_result_infos,
index_to_name_fn
)
if had_rejected_result_infos:
_logger.error('Rejected ResultInfo(s) where found.')
return 1
if len(key_to_result_infos) == 0:
        _logger.error('No accepted result infos')
return 1
had_missing_result_infos = report_missing_result_infos(
key_to_result_infos,
index_to_name_fn)
if had_missing_result_infos:
_logger.error('Some result infos were missing')
return 1
# Open coverage info files
coverage_infos_list = []
for coverage_info_file in coverage_info_files:
with open(coverage_info_file, 'r') as f:
_logger.info('Loading "{}"'.format(f.name))
coverage_info = KleeRunner.util.loadYaml(f)
coverage_infos_list.append(coverage_info)
# Open bug replay info files
bug_replay_infos_list = []
for bug_replay_info_file in bug_replay_info_files:
with open(bug_replay_info_file, 'r') as f:
_logger.info('Loading "{}"'.format(f.name))
bug_replay_info = KleeRunner.util.loadYaml(f)
bug_replay_infos_list.append(bug_replay_info)
# Merge result infos and write data out
merged_result_info = get_merged_result_infos(
key_to_result_infos,
result_infos_list)
output_result_info_file_path = os.path.join(
output_dir,
args.klee_result_info_file_name
)
with open(output_result_info_file_path, 'w') as f:
KleeRunner.util.writeYaml(f, merged_result_info)
# Merge coverage data and write data out
if len(coverage_infos_list) > 0:
merged_coverage_info = get_merged_coverage_infos(
coverage_infos_list,
coverage_info_files)
merged_coverage_info_file_path = os.path.join(
output_dir,
args.replay_coverage_info_file)
with open(merged_coverage_info_file_path, 'w') as f:
KleeRunner.util.writeYaml(f, merged_coverage_info)
# Merge bug replay info and write data out
if len(bug_replay_infos_list) > 0:
merged_bug_replay_info = get_merged_bug_replay_infos(
bug_replay_infos_list,
bug_replay_info_files)
merged_bug_replay_info_file_path = os.path.join(
output_dir,
args.bug_replay_info_file)
with open(merged_bug_replay_info_file_path, 'w') as f:
KleeRunner.util.writeYaml(f, merged_bug_replay_info)
return 0
def get_merged_bug_replay_infos(bug_replay_infos_list, bug_replay_info_files):
merged_bug_replay_info = {}
def get_bug_replay_info():
template_bug_replay_info = {
'augmented_spec_file': None,
'test_cases': {},
}
return template_bug_replay_info
# Insert all the keys (program names)
for cov_info in bug_replay_infos_list:
assert isinstance(cov_info, dict)
for program_name in cov_info.keys():
if program_name not in merged_bug_replay_info:
merged_bug_replay_info[program_name] = get_bug_replay_info()
# Iterate over each program
for program_name, prog_bug_replay_info in merged_bug_replay_info.items():
for index, bug_replay_info in enumerate(bug_replay_infos_list):
individual_bug_replay_info = None
try:
individual_bug_replay_info = bug_replay_info[program_name]
except KeyError:
_logger.warning('Missing bug replay info for "{}" in {}'.format(
program_name,
bug_replay_info_files[index]))
continue
prog_bug_replay_info['augmented_spec_file'] = individual_bug_replay_info['augmented_spec_file']
_logger.debug('Adding {} to {}'.format(individual_bug_replay_info['test_cases'], program_name))
prog_bug_replay_info['test_cases'].update(individual_bug_replay_info['test_cases'])
return merged_bug_replay_info
def get_merged_result_infos(key_to_result_infos, result_infos_list):
merged_result_info = {
'results': [],
'schema_version': KleeRunner.ResultInfo.getSchema()['__version__'],
'misc': {
'merged': True,
'individual_misc': {}
}
}
# TODO: Merge misc data
# Merge result infos
for program_name, result_infos in key_to_result_infos.items():
assert isinstance(program_name, str)
assert isinstance(result_infos, list)
assert len(result_infos) == len(result_infos_list)
_logger.info('Merging "{}"'.format(program_name))
combined_result_info = merge_result_infos(result_infos)
merged_result_info['results'].append(combined_result_info)
return merged_result_info
def get_merged_coverage_infos(coverage_infos_list, coverage_info_files):
merged_coverage_info = {}
def get_coverage_info():
template_coverage_info = {
'branch_coverage': [],
'line_coverage': [],
'raw_data': [],
}
return template_coverage_info
# Insert all the keys (program names)
for cov_info in coverage_infos_list:
assert isinstance(cov_info, dict)
for program_name in cov_info.keys():
if program_name not in merged_coverage_info:
merged_coverage_info[program_name] = get_coverage_info()
# Iterate over each program
for program_name, prog_coverage_info in merged_coverage_info.items():
for index, cov_info in enumerate(coverage_infos_list):
individual_cov_info = None
try:
individual_cov_info = cov_info[program_name]
except KeyError:
# Assume no coverage
_logger.warning('Assuming no coverage for "{}" from "{}"'.format(
program_name,
coverage_info_files[index]))
individual_cov_info = {
'branch_coverage': 0.0,
'line_coverage': 0.0,
'raw_data': None
}
prog_coverage_info['branch_coverage'].append(
individual_cov_info['branch_coverage'])
prog_coverage_info['line_coverage'].append(
individual_cov_info['line_coverage'])
prog_coverage_info['raw_data'].append(
individual_cov_info['raw_data'])
# Warn if any coverage values differ
same_branch_cov = same_values_or_list_of_values(
prog_coverage_info['branch_coverage'])
if isinstance(same_branch_cov, list):
_logger.warning('Branch coverage ({}) differs for "{}"'.format(
prog_coverage_info['branch_coverage'],
program_name))
same_line_cov = same_values_or_list_of_values(
prog_coverage_info['line_coverage'])
if isinstance(same_line_cov, list):
            _logger.warning('Line coverage ({}) differs for "{}"'.format(
prog_coverage_info['line_coverage'],
program_name))
return merged_coverage_info
def same_values_or_list_of_values(l):
"""
    If all elements of list `l` have the same value, return that value;
    otherwise return the original list.
"""
assert isinstance(l, list)
assert len(l) > 0
same_value = None
for index, v in enumerate(l):
if index == 0:
same_value = v
continue
if v != same_value:
return l
return same_value
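def _same_values_example():
    """Added sketch, not part of the original tool: repeated identical values
    collapse to a single value, any disagreement returns the full list (used
    above to warn when repeat runs report different coverage)."""
    assert same_values_or_list_of_values([75.0, 75.0, 75.0]) == 75.0
    assert same_values_or_list_of_values([75.0, 80.0]) == [75.0, 80.0]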
def merge_result_infos(result_infos):
assert isinstance(result_infos, list)
assert len(result_infos) > 1
# Make a deep copy of the first to use as a template for the merged result
merged_result_info = copy.deepcopy(result_infos[0])
# Merge backend-timeout
backed_timeout_values = [ r['backend_timeout'] for r in result_infos ]
_logger.debug('Got backend timeout values: {}'.format(backed_timeout_values))
#backed_timeout_values = same_values_or_list_of_values(backed_timeout_values)
#_logger.debug('Merged backend timeout values: {}'.format(backed_timeout_values))
merged_result_info['backend_timeout'] = backed_timeout_values
# Merge exit code
exit_code_values = [ r['exit_code'] for r in result_infos ]
_logger.debug('Got exit code values: {}'.format(exit_code_values))
#exit_code_values = same_values_or_list_of_values(exit_code_values)
#_logger.debug('Merged exit code values: {}'.format(exit_code_values))
merged_result_info['exit_code'] = exit_code_values
# Merge klee_dir
klee_dir_values = [ r['klee_dir'] for r in result_infos ]
_logger.debug('Got klee_dir values: {}'.format(klee_dir_values))
merged_result_info['klee_dir'] = klee_dir_values
# Merge log_file
log_file_values = [ r['log_file'] for r in result_infos ]
_logger.debug('Got log_file values: {}'.format(log_file_values))
merged_result_info['log_file'] = log_file_values
# Merge out of memory
out_of_memory_values = [ r['out_of_memory'] for r in result_infos]
_logger.debug('Got out_of_memory values: {}'.format(out_of_memory_values))
#out_of_memory_values = same_values_or_list_of_values(out_of_memory_values)
#_logger.debug('Merged out_of_memory values: {}'.format(out_of_memory_values))
merged_result_info['out_of_memory'] = out_of_memory_values
# Merge working directory
working_directory_values = [ r['working_directory'] for r in result_infos]
merged_result_info['working_directory'] = working_directory_values
# Merge numeric values
# DL: Design decision. We could compute stats here but given this is
    # cheap to compute and I don't want to have to re-generate the merged files
    # every time we change what stats we compute, we will just make these the list
# of values and let other tools worry about how to analyse these values.
# merge sys_cpu_time
sys_cpu_time_values = [r['sys_cpu_time'] for r in result_infos]
merged_result_info['sys_cpu_time'] = sys_cpu_time_values
# merge user_cpu_time
user_cpu_time_values = [r['user_cpu_time'] for r in result_infos]
merged_result_info['user_cpu_time'] = user_cpu_time_values
# merge wallclock_time
wallclock_time_values = [r['wallclock_time'] for r in result_infos]
merged_result_info['wallclock_time'] = wallclock_time_values
# Add an attribute that hints that this is a merged result
merged_result_info['merged_result'] = True
return merged_result_info
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| {
"content_hash": "0c55e2e3ab0aaa466508024ea5b5e4ec",
"timestamp": "",
"source": "github",
"line_count": 513,
"max_line_length": 108,
"avg_line_length": 40.12865497076023,
"alnum_prop": 0.6250850092295736,
"repo_name": "delcypher/klee-runner",
"id": "4cd66121c05c002fd3b793f2c6db0ebaf352add4",
"size": "20654",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/result-info-klee-merge.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "491174"
},
{
"name": "Shell",
"bytes": "103"
}
],
"symlink_target": ""
} |
import os
import sys
### Make sure to find the Spirit modules
### This is only needed if you did not install the package
spirit_py_dir = os.path.abspath(os.path.join(os.path.dirname( __file__ ), "../core/python"))
sys.path.insert(0, spirit_py_dir)
### Import Spirit modules
from spirit import state
from spirit import configuration
from spirit import simulation
from spirit import io
cfgfile = "input/input.cfg"
quiet = False
with state.State(cfgfile, quiet) as p_state:
### Read Image from file
# spinsfile = "input/spins.ovf"
# io.image_from_file(state.get(), spinsfile, idx_image=0);
### First image is homogeneous with a skyrmion in the center
configuration.plus_z(p_state, idx_image=0)
configuration.skyrmion(p_state, 5.0, phase=-90.0, idx_image=0)
### LLG dynamics simulation
LLG = simulation.METHOD_LLG
    DEPONDT = simulation.SOLVER_DEPONDT # Depondt (Heun-like) solver for LLG dynamics
simulation.start(p_state, LLG, DEPONDT) | {
"content_hash": "676fbe819587895f6e6c8a52adac984e",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 92,
"avg_line_length": 32.1,
"alnum_prop": 0.7175493250259606,
"repo_name": "spirit-code/spirit",
"id": "37aafc2bfe27d5fe8f207d5d041d18d37c8e8f82",
"size": "963",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ui-python/llg.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "57746"
},
{
"name": "C++",
"bytes": "2145028"
},
{
"name": "CMake",
"bytes": "124764"
},
{
"name": "CSS",
"bytes": "3891"
},
{
"name": "Cuda",
"bytes": "105957"
},
{
"name": "Dockerfile",
"bytes": "1009"
},
{
"name": "HTML",
"bytes": "44989"
},
{
"name": "JavaScript",
"bytes": "82659"
},
{
"name": "Julia",
"bytes": "984"
},
{
"name": "Objective-C",
"bytes": "2871"
},
{
"name": "Python",
"bytes": "207945"
},
{
"name": "Shell",
"bytes": "1090"
}
],
"symlink_target": ""
} |
from oslo.serialization import jsonutils
from oslo_config import cfg
from oslo_log import log as logging
import six
import webob.exc
from wsme.rest import json
from daisy.api import policy
from daisy.api.v2 import metadef_namespaces as namespaces
from daisy.api.v2.model.metadef_object import MetadefObject
from daisy.api.v2.model.metadef_object import MetadefObjects
from daisy.common import exception
from daisy.common import utils
from daisy.common import wsgi
from daisy.common import wsme_utils
import daisy.db
from daisy import i18n
import daisy.notifier
import daisy.schema
LOG = logging.getLogger(__name__)
_ = i18n._
_LE = i18n._LE
_LI = i18n._LI
CONF = cfg.CONF
class MetadefObjectsController(object):
def __init__(self, db_api=None, policy_enforcer=None, notifier=None):
self.db_api = db_api or daisy.db.get_api()
self.policy = policy_enforcer or policy.Enforcer()
self.notifier = notifier or daisy.notifier.Notifier()
self.gateway = daisy.gateway.Gateway(db_api=self.db_api,
notifier=self.notifier,
policy_enforcer=self.policy)
self.obj_schema_link = '/v2/schemas/metadefs/object'
def create(self, req, metadata_object, namespace):
object_factory = self.gateway.get_metadef_object_factory(req.context)
object_repo = self.gateway.get_metadef_object_repo(req.context)
try:
new_meta_object = object_factory.new_object(
namespace=namespace,
**metadata_object.to_dict())
object_repo.add(new_meta_object)
except exception.Forbidden as e:
raise webob.exc.HTTPForbidden(explanation=e.msg)
except exception.NotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.msg)
except exception.Duplicate as e:
raise webob.exc.HTTPConflict(explanation=e.msg)
except Exception as e:
LOG.error(utils.exception_to_str(e))
raise webob.exc.HTTPInternalServerError()
return MetadefObject.to_wsme_model(
new_meta_object,
get_object_href(namespace, new_meta_object),
self.obj_schema_link)
def index(self, req, namespace, marker=None, limit=None,
sort_key='created_at', sort_dir='desc', filters=None):
try:
filters = filters or dict()
filters['namespace'] = namespace
object_repo = self.gateway.get_metadef_object_repo(req.context)
db_metaobject_list = object_repo.list(
marker=marker, limit=limit, sort_key=sort_key,
sort_dir=sort_dir, filters=filters)
object_list = [MetadefObject.to_wsme_model(
db_metaobject,
get_object_href(namespace, db_metaobject),
self.obj_schema_link) for db_metaobject in db_metaobject_list]
metadef_objects = MetadefObjects()
metadef_objects.objects = object_list
except exception.Forbidden as e:
raise webob.exc.HTTPForbidden(explanation=e.msg)
except exception.NotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.msg)
except Exception as e:
LOG.error(utils.exception_to_str(e))
raise webob.exc.HTTPInternalServerError()
return metadef_objects
def show(self, req, namespace, object_name):
meta_object_repo = self.gateway.get_metadef_object_repo(
req.context)
try:
metadef_object = meta_object_repo.get(namespace, object_name)
return MetadefObject.to_wsme_model(
metadef_object,
get_object_href(namespace, metadef_object),
self.obj_schema_link)
except exception.Forbidden as e:
raise webob.exc.HTTPForbidden(explanation=e.msg)
except exception.NotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.msg)
except Exception as e:
LOG.error(utils.exception_to_str(e))
raise webob.exc.HTTPInternalServerError()
def update(self, req, metadata_object, namespace, object_name):
meta_repo = self.gateway.get_metadef_object_repo(req.context)
try:
metadef_object = meta_repo.get(namespace, object_name)
metadef_object._old_name = metadef_object.name
metadef_object.name = wsme_utils._get_value(
metadata_object.name)
metadef_object.description = wsme_utils._get_value(
metadata_object.description)
metadef_object.required = wsme_utils._get_value(
metadata_object.required)
metadef_object.properties = wsme_utils._get_value(
metadata_object.properties)
updated_metadata_obj = meta_repo.save(metadef_object)
except exception.Forbidden as e:
raise webob.exc.HTTPForbidden(explanation=e.msg)
except exception.NotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.msg)
except exception.Duplicate as e:
raise webob.exc.HTTPConflict(explanation=e.msg)
except Exception as e:
LOG.error(utils.exception_to_str(e))
raise webob.exc.HTTPInternalServerError()
return MetadefObject.to_wsme_model(
updated_metadata_obj,
get_object_href(namespace, updated_metadata_obj),
self.obj_schema_link)
def delete(self, req, namespace, object_name):
meta_repo = self.gateway.get_metadef_object_repo(req.context)
try:
metadef_object = meta_repo.get(namespace, object_name)
metadef_object.delete()
meta_repo.remove(metadef_object)
except exception.Forbidden as e:
raise webob.exc.HTTPForbidden(explanation=e.msg)
except exception.NotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.msg)
except Exception as e:
LOG.error(utils.exception_to_str(e))
raise webob.exc.HTTPInternalServerError()
def _get_base_definitions():
return namespaces.get_schema_definitions()
def _get_base_properties():
return {
"name": {
"type": "string"
},
"description": {
"type": "string"
},
"required": {
"$ref": "#/definitions/stringArray"
},
"properties": {
"$ref": "#/definitions/property"
},
"schema": {
"type": "string"
},
"self": {
"type": "string"
},
"created_at": {
"type": "string",
"description": _("Date and time of object creation"
" (READ-ONLY)"),
"format": "date-time"
},
"updated_at": {
"type": "string",
"description": _("Date and time of the last object modification"
" (READ-ONLY)"),
"format": "date-time"
}
}
def get_schema():
definitions = _get_base_definitions()
properties = _get_base_properties()
mandatory_attrs = MetadefObject.get_mandatory_attrs()
schema = daisy.schema.Schema(
'object',
properties,
required=mandatory_attrs,
definitions=definitions,
)
return schema
def get_collection_schema():
object_schema = get_schema()
return daisy.schema.CollectionSchema('objects', object_schema)
class RequestDeserializer(wsgi.JSONRequestDeserializer):
_disallowed_properties = ['self', 'schema', 'created_at', 'updated_at']
def __init__(self, schema=None):
super(RequestDeserializer, self).__init__()
self.schema = schema or get_schema()
def _get_request_body(self, request):
output = super(RequestDeserializer, self).default(request)
if 'body' not in output:
msg = _('Body expected in request.')
raise webob.exc.HTTPBadRequest(explanation=msg)
return output['body']
def create(self, request):
body = self._get_request_body(request)
self._check_allowed(body)
try:
self.schema.validate(body)
except exception.InvalidObject as e:
raise webob.exc.HTTPBadRequest(explanation=e.msg)
metadata_object = json.fromjson(MetadefObject, body)
return dict(metadata_object=metadata_object)
def update(self, request):
body = self._get_request_body(request)
self._check_allowed(body)
try:
self.schema.validate(body)
except exception.InvalidObject as e:
raise webob.exc.HTTPBadRequest(explanation=e.msg)
metadata_object = json.fromjson(MetadefObject, body)
return dict(metadata_object=metadata_object)
def index(self, request):
params = request.params.copy()
limit = params.pop('limit', None)
marker = params.pop('marker', None)
sort_dir = params.pop('sort_dir', 'desc')
query_params = {
'sort_key': params.pop('sort_key', 'created_at'),
'sort_dir': self._validate_sort_dir(sort_dir),
'filters': self._get_filters(params)
}
if marker is not None:
query_params['marker'] = marker
if limit is not None:
query_params['limit'] = self._validate_limit(limit)
return query_params
def _validate_sort_dir(self, sort_dir):
if sort_dir not in ['asc', 'desc']:
msg = _('Invalid sort direction: %s') % sort_dir
raise webob.exc.HTTPBadRequest(explanation=msg)
return sort_dir
def _get_filters(self, filters):
visibility = filters.get('visibility')
if visibility:
if visibility not in ['public', 'private', 'shared']:
msg = _('Invalid visibility value: %s') % visibility
raise webob.exc.HTTPBadRequest(explanation=msg)
return filters
@classmethod
def _check_allowed(cls, image):
for key in cls._disallowed_properties:
if key in image:
msg = _("Attribute '%s' is read-only.") % key
raise webob.exc.HTTPForbidden(explanation=msg)
class ResponseSerializer(wsgi.JSONResponseSerializer):
def __init__(self, schema=None):
super(ResponseSerializer, self).__init__()
self.schema = schema or get_schema()
def create(self, response, metadata_object):
response.status_int = 201
self.show(response, metadata_object)
def show(self, response, metadata_object):
metadata_object_json = json.tojson(MetadefObject, metadata_object)
body = jsonutils.dumps(metadata_object_json, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.content_type = 'application/json'
def update(self, response, metadata_object):
response.status_int = 200
self.show(response, metadata_object)
def index(self, response, result):
result.schema = "v2/schemas/metadefs/objects"
metadata_objects_json = json.tojson(MetadefObjects, result)
body = jsonutils.dumps(metadata_objects_json, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.content_type = 'application/json'
def delete(self, response, result):
response.status_int = 204
def get_object_href(namespace_name, metadef_object):
base_href = ('/v2/metadefs/namespaces/%s/objects/%s' %
(namespace_name, metadef_object.name))
return base_href
def create_resource():
"""Metadef objects resource factory method"""
schema = get_schema()
deserializer = RequestDeserializer(schema)
serializer = ResponseSerializer(schema)
controller = MetadefObjectsController()
return wsgi.Resource(controller, deserializer, serializer)
| {
"content_hash": "8386e91e5a2f7585377ba442d5c707cc",
"timestamp": "",
"source": "github",
"line_count": 323,
"max_line_length": 78,
"avg_line_length": 37.030959752321984,
"alnum_prop": 0.6148315358247638,
"repo_name": "OpenDaisy/daisy-api",
"id": "aed3d8c1c8d9b8286869487276acfa5e04170cb6",
"size": "12572",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "daisy/api/v2/metadef_objects.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1475450"
},
{
"name": "Shell",
"bytes": "7860"
}
],
"symlink_target": ""
} |
"""Adds config flow for Tibber integration."""
import asyncio
import logging
import aiohttp
import tibber
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DOMAIN # pylint:disable=unused-import
_LOGGER = logging.getLogger(__name__)
DATA_SCHEMA = vol.Schema({vol.Required(CONF_ACCESS_TOKEN): str})
class TibberConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Tibber integration."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
async def async_step_import(self, import_info):
"""Set the config entry up from yaml."""
return await self.async_step_user(import_info)
async def async_step_user(self, user_input=None):
"""Handle the initial step."""
if self._async_current_entries():
return self.async_abort(reason="already_configured")
if user_input is not None:
access_token = user_input[CONF_ACCESS_TOKEN].replace(" ", "")
tibber_connection = tibber.Tibber(
access_token=access_token,
websession=async_get_clientsession(self.hass),
)
errors = {}
try:
await tibber_connection.update_info()
except asyncio.TimeoutError:
errors[CONF_ACCESS_TOKEN] = "timeout"
except aiohttp.ClientError:
errors[CONF_ACCESS_TOKEN] = "connection_error"
except tibber.InvalidLogin:
errors[CONF_ACCESS_TOKEN] = "invalid_access_token"
if errors:
return self.async_show_form(
step_id="user", data_schema=DATA_SCHEMA, errors=errors,
)
unique_id = tibber_connection.user_id
await self.async_set_unique_id(unique_id)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=tibber_connection.name, data={CONF_ACCESS_TOKEN: access_token},
)
return self.async_show_form(step_id="user", data_schema=DATA_SCHEMA, errors={},)
| {
"content_hash": "69fc15867b0691a09b73eb8b21e71c5c",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 88,
"avg_line_length": 33.13235294117647,
"alnum_prop": 0.6284953395472703,
"repo_name": "mKeRix/home-assistant",
"id": "b0115d84e2c8afead73189b1678cd574c576fac4",
"size": "2253",
"binary": false,
"copies": "5",
"ref": "refs/heads/dev",
"path": "homeassistant/components/tibber/config_flow.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1466026"
},
{
"name": "Python",
"bytes": "4770710"
},
{
"name": "Ruby",
"bytes": "379"
},
{
"name": "Shell",
"bytes": "12407"
}
],
"symlink_target": ""
} |