input
stringlengths 0
2.17k
| instruction
stringlengths 18
2.94k
| output
stringlengths 47
3.36k
|
---|---|---|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
class WPNetworksAdmin(WPJinjaMixin, WPAdminsBase):
template_prefix = 'networks/'
sidemenu_option = 'networks'
| Fix highlighting of "IP Networks" menu entry
| # This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
class WPNetworksAdmin(WPJinjaMixin, WPAdminsBase):
template_prefix = 'networks/'
sidemenu_option = 'ip_networks'
|
#! /usr/bin/env python
import os, sys
fontToolsDir = os.path.dirname(os.path.dirname(os.path.normpath(
os.path.join(os.getcwd(), sys.argv[0]))))
os.chdir(fontToolsDir)
os.system("svn2cl -o Doc/ChangeLog https://fonttools.svn.sourceforge.net/svnroot/fonttools/trunk")
print "done."
| Fix the location of the SVN repository
git-svn-id: 05b73559aeb8bace4cf49b5ea964569f1305eff8@618 4cde692c-a291-49d1-8350-778aa11640f8
| #! /usr/bin/env python
import os, sys
fontToolsDir = os.path.dirname(os.path.dirname(os.path.normpath(
os.path.join(os.getcwd(), sys.argv[0]))))
os.chdir(fontToolsDir)
os.system("svn2cl -o Doc/ChangeLog https://svn.code.sf.net/p/fonttools/code/trunk")
print "done."
|
import logging
from promgen.models import Project, Service
logger = logging.getLogger(__name__)
class SenderBase(object):
def send(self, data):
for alert in data['alerts']:
if 'project' in alert['labels']:
sent = 0
for project in Project.objects.filter(name=alert['labels']['project']):
for sender in project.sender.all():
if self._send(sender.value, alert, data):
sent += 1
if 'service' in alert['labels']:
for service in Service.objects.filter(name=alert['labels']['service']):
for sender in service.sender.all():
if self._send(sender.value, alert, data):
sent += 1
if sent == 0:
logger.debug('No senders configured for project or service %s', alert['labels']['project'])
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self._send(target, alert, {'externalURL': ''})
| Fix send count and add debug logging
| import logging
from promgen.models import Project, Service
logger = logging.getLogger(__name__)
class SenderBase(object):
def send(self, data):
sent = 0
for alert in data['alerts']:
if 'project' in alert['labels']:
logger.debug('Checking for projects')
for project in Project.objects.filter(name=alert['labels']['project']):
logger.debug('Checking %s', project)
for sender in project.sender.all():
logger.debug('Sending to %s', sender)
if self._send(sender.value, alert, data):
sent += 1
if 'service' in alert['labels']:
logger.debug('Checking for service')
for service in Service.objects.filter(name=alert['labels']['service']):
logger.debug('Checking %s', service)
for sender in service.sender.all():
logger.debug('Sending to %s', sender)
if self._send(sender.value, alert, data):
sent += 1
if sent == 0:
logger.debug('No senders configured for project or service %s', alert['labels']['project'])
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self._send(target, alert, {'externalURL': ''})
|
def printerr(msg, e=''):
print(msg.format(e), file=sys.stderr)
class CommunicationError(Exception):
def __str__(self):
return "Communication error: {}".format(self.args)
class CommandError(Exception):
def __str__(self):
return "Command error: {}".format(self.args)
class Syringe():
_events = set()
def __init__(self):
pass
def execRawCommand(self, msg):
"""
Send command and read reply.
"""
pass
# Read Perfusion related values
def readRate(self):
return 0
def readVolume(self):
return 0
# Infusion control
def setRate(self, rate):
pass
def bolus(self, volume, rate):
pass
# Events
def registerEvent(self, event):
self._events |= set([event])
def unregisterEvent(self, event):
self._events -= set([event])
def clearEvents(self):
self._events = set()
| Add marker to indicate backend error
| def printerr(msg, e=''):
msg = "Backend: " + str(msg)
print(msg.format(e), file=sys.stderr)
class CommunicationError(Exception):
def __str__(self):
return "Communication error: {}".format(self.args)
class CommandError(Exception):
def __str__(self):
return "Command error: {}".format(self.args)
class Syringe():
_events = set()
def __init__(self):
pass
def execRawCommand(self, msg):
"""
Send command and read reply.
"""
pass
# Read Perfusion related values
def readRate(self):
return 0
def readVolume(self):
return 0
# Infusion control
def setRate(self, rate):
pass
def bolus(self, volume, rate):
pass
# Events
def registerEvent(self, event):
self._events |= set([event])
def unregisterEvent(self, event):
self._events -= set([event])
def clearEvents(self):
self._events = set()
|
"""
Trip generation
===============
"""
| Implement the trip description classes
And a default list for trips has also been written.
| """
Trip generation
===============
This module can be roughtly devided into two parts, the trip description and
trip generation. The trip description part contains mostly class definitions
that can be used to describe kinds of trips, while the trip generation contains
the main driver function to generate a large list of trips based on the
travellers and places. This module is kind of at the centre of the simulation.
"""
import random
import collections
#
# Trip description
# ----------------
#
# The trips can be roughtly described by two data structures, Location and
# Trip. A location is a location in the ways of a trip, and a trip is a series
# of locations with a mean frequency and variation.
#
# The location can be an attribute of the traveller or a random selection in a
# category of places. It is stored in the ``source`` attribute as one of the
# two constant symbols in this module. And a trip has a frequency stored in the
# ``freq`` attribute in unit of times per week, and ``var`` stores the
# corresponding variation. The list of locations are given in the ``locations``
# attribute, while the actual route is given in the route attribute as a list
# of zero-based indices in the locations list.
#
# constants for the two kinds of locations
TRAVELLER_ATTR = 1
RANDOM_FROM_CAT = 2
# Location class definition
Location = collections.namedtuple(
'Location',
['source', 'value']
)
Trip = collections.namedtuple(
'Trip',
['freq', 'var', 'locations', 'route']
)
# The default trip
DEFAULT_TRIP = [
# Commuting to work
Trip(
freq=5.0, var=1.0,
locations=[
Location(source=TRAVELLER_ATTR, value='home'),
Location(source=TRAVELLER_ATTR, value='work'),
],
route=[0, 1, 0]
),
# Go to a leisure place
Trip(
freq=2.0, var=0.5,
locations=[
Location(source=TRAVELLER_ATTR, value='home'),
Location(source=RANDOM_FROM_CAT, value='leisure'),
],
route=[0, 1, 0]
),
# Go to a restaurant
Trip(
freq=4.0, var=1.0,
locations=[
Location(source=TRAVELLER_ATTR, value='home'),
Location(source=RANDOM_FROM_CAT, value='restaurant'),
],
route=[0, 1, 0]
),
# Go to a church
Trip(
freq=1.0, var=0.5,
locations=[
Location(source=TRAVELLER_ATTR, value='home'),
Location(source=TRAVELLER_ATTR, value='church'),
],
route=[0, 1, 0]
),
]
|
from .implicit import implicit
from .registry import ClassRegistry, Registry
from .lookup import Lookup, CachedLookup
| Include Interface in public API.
| from .implicit import implicit
from .registry import ClassRegistry, Registry
from .lookup import Lookup, CachedLookup
from .interface import Interface
|
import logging
import datetime
import decimal
import elasticsearch
from bson import ObjectId, DBRef
from nefertari.renderers import _JSONEncoder
log = logging.getLogger(__name__)
class JSONEncoder(_JSONEncoder):
def default(self, obj):
if isinstance(obj, (ObjectId, DBRef)):
return str(obj)
if isinstance(obj, decimal.Decimal):
return float(obj)
if isinstance(obj, (datetime.datetime, datetime.date)):
return obj.strftime("%Y-%m-%dT%H:%M:%SZ") # iso
if isinstance(obj, datetime.time):
return obj.strftime('%H:%M:%S')
if isinstance(obj, datetime.timedelta):
return obj.seconds
if hasattr(obj, 'to_dict'):
# If it got to this point, it means its a nested object.
# outter objects would have been handled with DataProxy.
return obj.to_dict(__nested=True)
return super(JSONEncoder, self).default(obj)
class ESJSONSerializer(elasticsearch.serializer.JSONSerializer):
def default(self, obj):
if isinstance(obj, (ObjectId, DBRef)):
return str(obj)
if isinstance(obj, (datetime.datetime, datetime.date)):
return obj.strftime("%Y-%m-%dT%H:%M:%SZ") # iso
if isinstance(obj, datetime.time):
return obj.strftime('%H:%M:%S')
if isinstance(obj, datetime.timedelta):
return obj.seconds
if isinstance(obj, decimal.Decimal):
return float(obj)
try:
return super(ESJSONSerializer, self).default(obj)
except:
import traceback
log.error(traceback.format_exc())
| Refactor encoders to have base class
| import logging
import datetime
import decimal
import elasticsearch
from bson import ObjectId, DBRef
from nefertari.renderers import _JSONEncoder
log = logging.getLogger(__name__)
class JSONEncoderMixin(object):
def default(self, obj):
if isinstance(obj, (ObjectId, DBRef)):
return str(obj)
if isinstance(obj, (datetime.datetime, datetime.date)):
return obj.strftime("%Y-%m-%dT%H:%M:%SZ") # iso
if isinstance(obj, datetime.time):
return obj.strftime('%H:%M:%S')
if isinstance(obj, datetime.timedelta):
return obj.seconds
if isinstance(obj, decimal.Decimal):
return float(obj)
return super(JSONEncoderMixin, self).default(obj)
class JSONEncoder(JSONEncoderMixin, _JSONEncoder):
def default(self, obj):
if hasattr(obj, 'to_dict'):
# If it got to this point, it means its a nested object.
# outter objects would have been handled with DataProxy.
return obj.to_dict(__nested=True)
return super(JSONEncoder, self).default(obj)
class ESJSONSerializer(JSONEncoderMixin,
elasticsearch.serializer.JSONSerializer):
def default(self, obj):
try:
return super(ESJSONSerializer, self).default(obj)
except:
import traceback
log.error(traceback.format_exc())
|
from wavvy import app
from flask import Flask, url_for, render_template, request, session, escape
@app.route('/hello')
@app.route('/hello/<name>')
def hello(name=None):
return render_template('hello.html', name=name)
@app.route('/')
def index():
if session.get('logged_in', False):
return 'Logged in as {}'.format(escape(session['username']))
return 'You are not logged in.'
@app.route('/login', methods=['POST', 'GET'])
def login():
error = None
if request.method == 'POST':
session['logged_in'] = True
session['username'] = request.form['username']
password = escape(request.form['password'])
return 'Validating a login! U:{} P:{}'.format(escape(session['username']), password)
return render_template('login.html', error=error)
@app.route('/logout')
def logout():
session['logged_in'] = False
if 'username' in session:
del session['username']
return 'You are logged out.'
| Generalize the logout a bit
This is on the road to removing auth from this file.
| from wavvy import app
from flask import Flask, url_for, render_template, request, session, escape
def clear_session(s):
if 'username' in s:
del s['username']
s['logged_in'] = False
@app.route('/hello')
@app.route('/hello/<name>')
def hello(name=None):
return render_template('hello.html', name=name)
@app.route('/')
def index():
if session.get('logged_in', False):
return 'Logged in as {}'.format(escape(session['username']))
return 'You are not logged in.'
@app.route('/login', methods=['POST', 'GET'])
def login():
error = None
if request.method == 'POST':
session['logged_in'] = True
session['username'] = request.form['username']
password = escape(request.form['password'])
return 'Validating a login! U:{} P:{}'.format(escape(session['username']), password)
return render_template('login.html', error=error)
@app.route('/logout')
def logout():
clear_session(session)
return 'You are logged out.'
|
from pyramda.function.curry import curry
from . import filter
@curry
def reject(p, xs):
"""
Acts as a complement of `filter`
:param p: predicate
:param xs: Iterable. A sequence, a container which supports iteration or an iterator
:return: list
"""
return list(set(xs) - set(filter(p, xs)))
| Refactor to use pyramdas complement function. The initial set implementaion did not respect the order of elements and removed duplicates
| from pyramda.function.curry import curry
from pyramda.logic import complement
from . import filter
@curry
def reject(p, xs):
"""
Acts as a complement of `filter`
:param p: predicate
:param xs: Iterable. A sequence, a container which supports iteration or an iterator
:return: list
"""
return filter(complement(p), xs)
|
from django.shortcuts import render
def _get_key_details(conn, db):
conn.execute_command('SELECT', db)
keys = conn.keys()
key_details = {}
for key in keys:
details = conn.execute_command('DEBUG', 'OBJECT', key)
key_details[key] = {
'type': conn.type(key),
'details': dict(
i.split(':') for i in details.split() if ':' in i
),
'ttl': conn.ttl(key),
}
return key_details
def inspect(request, server):
stats = server.stats
if stats['status'] == 'UP':
conn = server.connection
databases = [name[2:] for name in conn.info() if name.startswith('db')]
database_details = {}
for db in databases:
database_details[db] = _get_key_details(conn, db)
else:
database_details = {}
return render(request, "redisboard/inspect.html", {
'databases': database_details,
'original': server,
'stats': stats,
'app_label': 'redisboard',
})
| Sort the database order in the inspect page.
| from django.shortcuts import render
from django.utils.datastructures import SortedDict
def _get_key_details(conn, db):
conn.execute_command('SELECT', db)
keys = conn.keys()
key_details = {}
for key in keys:
details = conn.execute_command('DEBUG', 'OBJECT', key)
key_details[key] = {
'type': conn.type(key),
'details': dict(
i.split(':') for i in details.split() if ':' in i
),
'ttl': conn.ttl(key),
}
return key_details
def inspect(request, server):
stats = server.stats
if stats['status'] == 'UP':
conn = server.connection
databases = sorted(name[2:] for name in conn.info() if name.startswith('db'))
database_details = SortedDict()
for db in databases:
database_details[db] = _get_key_details(conn, db)
else:
database_details = {}
return render(request, "redisboard/inspect.html", {
'databases': database_details,
'original': server,
'stats': stats,
'app_label': 'redisboard',
})
|
"""Models for the response of the configuration object."""
from __future__ import division, print_function, unicode_literals
from readthedocs.config.utils import to_dict
class Base(object):
"""
Base class for every configuration.
Each inherited class should define
its attibutes in the `__slots__` attribute.
"""
def __init__(self, **kwargs):
for name in self.__slots__:
setattr(self, name, kwargs[name])
def as_dict(self):
return {
name: to_dict(getattr(self, name))
for name in self.__slots__
}
class Build(Base):
__slots__ = ('image',)
class Python(Base):
__slots__ = ('version', 'install', 'use_system_site_packages')
class PythonInstallRequirements(Base):
__slots__ = ('requirements',)
class PythonInstall(Base):
__slots__ = ('path', 'method', 'extra_requirements',)
class Conda(Base):
__slots__ = ('environment',)
class Sphinx(Base):
__slots__ = ('builder', 'configuration', 'fail_on_warning')
class Mkdocs(Base):
__slots__ = ('configuration', 'fail_on_warning')
class Submodules(Base):
__slots__ = ('include', 'exclude', 'recursive')
| Add explanation about using __slots__
| """Models for the response of the configuration object."""
from __future__ import division, print_function, unicode_literals
from readthedocs.config.utils import to_dict
class Base(object):
"""
Base class for every configuration.
Each inherited class should define
its attibutes in the `__slots__` attribute.
We are using `__slots__` so we can't add more attributes by mistake,
this is similar to a namedtuple.
"""
def __init__(self, **kwargs):
for name in self.__slots__:
setattr(self, name, kwargs[name])
def as_dict(self):
return {
name: to_dict(getattr(self, name))
for name in self.__slots__
}
class Build(Base):
__slots__ = ('image',)
class Python(Base):
__slots__ = ('version', 'install', 'use_system_site_packages')
class PythonInstallRequirements(Base):
__slots__ = ('requirements',)
class PythonInstall(Base):
__slots__ = ('path', 'method', 'extra_requirements',)
class Conda(Base):
__slots__ = ('environment',)
class Sphinx(Base):
__slots__ = ('builder', 'configuration', 'fail_on_warning')
class Mkdocs(Base):
__slots__ = ('configuration', 'fail_on_warning')
class Submodules(Base):
__slots__ = ('include', 'exclude', 'recursive')
|
from .base import RouteBase, NonIterableRouteBase
from .template import TemplateRoute, XHRPartialRoute, ROCARoute
from .forms import FormRoute, TemplateFormRoute, XHRPartialFormRoute
__version__ = '1.0'
__author__ = 'Outernet Inc'
__all__ = (
RouteBase,
NonIterableRouteBase,
TemplateRoute,
XHRPartialRoute,
ROCARoute,
FormRoute,
TemplateFormRoute,
XHRPartialFormRoute,
)
| Fix __all__ using objects instead of strings
Signed-off-by: Branko Vukelic <26059cc39872530f89fec69552bb1050e1cc2caa@outernet.is>
| from .base import RouteBase, NonIterableRouteBase
from .template import TemplateRoute, XHRPartialRoute, ROCARoute
from .forms import FormRoute, TemplateFormRoute, XHRPartialFormRoute
__version__ = '1.0'
__author__ = 'Outernet Inc'
__all__ = (
'RouteBase',
'NonIterableRouteBase',
'TemplateRoute',
'XHRPartialRoute',
'ROCARoute',
'FormRoute',
'TemplateFormRoute',
'XHRPartialFormRoute',
)
|
from django.test import TestCase
# Create your tests here.
class SanityTest(TestCase):
def test_one_plus_one_equals_two(self):
self.assertEqual(1+1, 2)
| Add whitespace around + (pep8 E225/E226)
| from django.test import TestCase
# Create your tests here.
class SanityTest(TestCase):
def test_one_plus_one_equals_two(self):
self.assertEqual(1 + 1, 2)
|
from IPython.display import Image
import boto
def s3img(uri):
if uri.startswith('s3://'):
uri = uri[5:]
bucket_name, key_name = uri.split('/', 1)
conn = boto.connect_s3()
bucket = conn.get_bucket(bucket_name)
key = bucket.get_key(key_name)
data = key.get_contents_as_string()
return Image(data=data)
def load_ipython_extension(ipython):
ipython.register_magic_function(s3img, 'line')
| Refactor S3 interactions for reusability
| from IPython.display import Image
import boto
def parse_s3_uri(uri):
if uri.startswith('s3://'):
uri = uri[5:]
return uri.split('/', 1)
def get_s3_key(uri):
bucket_name, key_name = parse_s3_uri(uri)
conn = boto.connect_s3()
bucket = conn.get_bucket(bucket_name)
return bucket.get_key(key_name)
def s3img(uri):
key = get_s3_key(uri)
data = key.get_contents_as_string()
return Image(data=data)
def load_ipython_extension(ipython):
ipython.register_magic_function(s3img, 'line')
|
"""
The ``foreign`` Devito backend is meant to be used by codes that don't
run Python natively. This backend is only capable of generating and compiling
kernels; however, kernels must be executed explicitly from outside Devito.
Further, with the ``foreign`` backed, Devito doesn't allocate any data.
"""
# The following used by backends.backendSelector
from devito.function import Constant, Function, TimeFunction, SparseFunction # noqa
from devito.foreign.operator import Operator # noqa
| Add leftover import due to disfunctional testing
| """
The ``foreign`` Devito backend is meant to be used by codes that don't
run Python natively. This backend is only capable of generating and compiling
kernels; however, kernels must be executed explicitly from outside Devito.
Further, with the ``foreign`` backed, Devito doesn't allocate any data.
"""
# The following used by backends.backendSelector
from devito.function import Constant, Function, TimeFunction, SparseFunction # noqa
from devito.foreign.operator import Operator # noqa
from devito.types import CacheManager # noqa
|
from __future__ import print_function
from jupyter_kernel import MagicKernel
class EvalKernel(MagicKernel):
implementation = 'Eval'
implementation_version = '1.0'
language = 'python'
language_version = '0.1'
banner = "Eval kernel - evaluates simple Python statements and expressions"
env = {}
def get_usage(self):
return "This is a usage statement."
def set_variable(self, name, value):
"""
Set a variable in the kernel language.
"""
self.env[name] = value
def get_variable(self, name):
"""
Get a variable from the kernel language.
"""
return self.env.get(name, None)
def do_execute_direct(self, code):
python_magic = self.line_magics['python']
resp = python_magic.eval(code.strip())
if not resp is None:
self.Print(str(resp))
def get_completions(self, token):
python_magic = self.line_magics['python']
return python_magic.get_completions(token)
def get_kernel_help_on(self, expr, level=0):
python_magic = self.line_magics['python']
return python_magic.get_help_on(expr, level)
if __name__ == '__main__':
from IPython.kernel.zmq.kernelapp import IPKernelApp
IPKernelApp.launch_instance(kernel_class=EvalKernel)
| Return python eval instead of printing it
| from __future__ import print_function
from jupyter_kernel import MagicKernel
class EvalKernel(MagicKernel):
implementation = 'Eval'
implementation_version = '1.0'
language = 'python'
language_version = '0.1'
banner = "Eval kernel - evaluates simple Python statements and expressions"
env = {}
def get_usage(self):
return "This is a usage statement."
def set_variable(self, name, value):
"""
Set a variable in the kernel language.
"""
self.env[name] = value
def get_variable(self, name):
"""
Get a variable from the kernel language.
"""
return self.env.get(name, None)
def do_execute_direct(self, code):
python_magic = self.line_magics['python']
return python_magic.eval(code.strip())
def get_completions(self, token):
python_magic = self.line_magics['python']
return python_magic.get_completions(token)
def get_kernel_help_on(self, expr, level=0):
python_magic = self.line_magics['python']
return python_magic.get_help_on(expr, level)
if __name__ == '__main__':
from IPython.kernel.zmq.kernelapp import IPKernelApp
IPKernelApp.launch_instance(kernel_class=EvalKernel)
|
from flask import jsonify, request
from ..main import app
@app.route('/api/ip')
def api_ip():
return jsonify({'Success': True, 'ipAddress': get_client_ip()})
def get_client_ip():
return request.headers.get('X-Forwarded-For') or request.remote_addr
| Add a standard API reply interface
| from flask import jsonify, request
from ..main import app
@app.route('/api/ip')
def api_ip():
"""Return client IP"""
return api_reply({'ipAddress': get_client_ip()})
def get_client_ip():
"""Return the client x-forwarded-for header or IP address"""
return request.headers.get('X-Forwarded-For') or request.remote_addr
def api_reply(body={}, success=True):
"""Create a standard API reply interface"""
return jsonify({**body, 'success': success})
|
#!/srv/python/venv/bin/ipython -i
import bqueryd
import os
import sys
import logging
import ConfigParser
config = ConfigParser.RawConfigParser()
config.read(['/etc/bqueryd.cfg', os.path.expanduser('~/.bqueryd.cfg')])
redis_url=config.get('Main', 'redis_url')
if __name__ == '__main__':
if '-v' in sys.argv:
loglevel = logging.DEBUG
else:
loglevel = logging.INFO
if 'controller' in sys.argv:
bqueryd.ControllerNode(redis_url=redis_url, loglevel=loglevel).go()
elif 'worker' in sys.argv:
bqueryd.WorkerNode(redis_url=redis_url, loglevel=loglevel).go()
elif 'downloader' in sys.argv:
bqueryd.DownloaderNode(redis_url=redis_url, loglevel=loglevel).go()
else:
if len(sys.argv) > 1 and sys.argv[1].startswith('tcp:'):
rpc = bqueryd.RPC(address=sys.argv[1], redis_url=redis_url, loglevel=loglevel)
else:
rpc = bqueryd.RPC(redis_url=redis_url, loglevel=loglevel)
sys.stderr.write('Run this script with python -i , and then you will have a variable named "rpc" as a connection.\n')
| Use configobj for config handling
| #!/srv/python/venv/bin/ipython -i
import bqueryd
import os
import sys
import logging
import configobj
config = configobj.ConfigObj('/etc/bqueryd.cfg')
redis_url = config.get('redis_url')
if __name__ == '__main__':
if '-v' in sys.argv:
loglevel = logging.DEBUG
else:
loglevel = logging.INFO
if 'controller' in sys.argv:
bqueryd.ControllerNode(redis_url=redis_url, loglevel=loglevel).go()
elif 'worker' in sys.argv:
bqueryd.WorkerNode(redis_url=redis_url, loglevel=loglevel).go()
elif 'downloader' in sys.argv:
bqueryd.DownloaderNode(redis_url=redis_url, loglevel=loglevel).go()
else:
if len(sys.argv) > 1 and sys.argv[1].startswith('tcp:'):
rpc = bqueryd.RPC(address=sys.argv[1], redis_url=redis_url, loglevel=loglevel)
else:
rpc = bqueryd.RPC(redis_url=redis_url, loglevel=loglevel)
sys.stderr.write('Run this script with python -i , and then you will have a variable named "rpc" as a connection.\n')
|
"""Alternate way of running the unittests, for Python 2.5 or Windows."""
__author__ = 'Beech Horn'
import sys
import unittest
def suite():
mods = ['context', 'eventloop', 'key', 'model', 'query', 'tasklets', 'thread']
test_mods = ['%s_test' % name for name in mods]
ndb = __import__('ndb', fromlist=test_mods, level=1)
loader = unittest.TestLoader()
suite = unittest.TestSuite()
for mod in [getattr(ndb, name) for name in test_mods]:
for name in set(dir(mod)):
if name.endswith('Tests'):
test_module = getattr(mod, name)
tests = loader.loadTestsFromTestCase(test_module)
suite.addTests(tests)
return suite
def main():
v = 0
q = 0
for arg in sys.argv[1:]:
if arg.startswith('-v'):
v += arg.count('v')
elif arg == '-q':
q += 1
if q:
v = 0
else:
v = max(v, 1)
unittest.TextTestRunner(verbosity=v).run(suite())
if __name__ == '__main__':
main()
| Simplify and improve -v/-q handling.
| """Alternate way of running the unittests, for Python 2.5 or Windows."""
__author__ = 'Beech Horn'
import sys
import unittest
def suite():
mods = ['context', 'eventloop', 'key', 'model', 'query', 'tasklets', 'thread']
test_mods = ['%s_test' % name for name in mods]
ndb = __import__('ndb', fromlist=test_mods, level=1)
loader = unittest.TestLoader()
suite = unittest.TestSuite()
for mod in [getattr(ndb, name) for name in test_mods]:
for name in set(dir(mod)):
if name.endswith('Tests'):
test_module = getattr(mod, name)
tests = loader.loadTestsFromTestCase(test_module)
suite.addTests(tests)
return suite
def main():
v = 1
for arg in sys.argv[1:]:
if arg.startswith('-v'):
v += arg.count('v')
elif arg == '-q':
v = 0
unittest.TextTestRunner(verbosity=v).run(suite())
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
from os.path import dirname, abspath
import sys
from django.conf import settings
if not settings.configured:
from django import VERSION
settings_dict = dict(
INSTALLED_APPS=(
'localeurl',
'localeurl.tests',
'django.contrib.sites', # for sitemap test
'django.contrib.sessions', # for USE_SESSION
),
ROOT_URLCONF='localeurl.tests.test_urls',
SITE_ID=1,
)
if VERSION >= (1, 2):
settings_dict["DATABASES"] = {
"default": {
"ENGINE": "django.db.backends.sqlite3"
}}
else:
settings_dict["DATABASE_ENGINE"] = "sqlite3"
settings.configure(**settings_dict)
def runtests(*test_args):
if not test_args:
test_args = ['tests']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
try:
from django.test.simple import DjangoTestSuiteRunner
def run_tests(test_args, verbosity, interactive):
runner = DjangoTestSuiteRunner(
verbosity=verbosity, interactive=interactive, failfast=False)
return runner.run_tests(test_args)
except ImportError:
# for Django versions that don't have DjangoTestSuiteRunner
from django.test.simple import run_tests
failures = run_tests(
test_args, verbosity=1, interactive=True)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
| Support Django 1.7 in test runner.
| #!/usr/bin/env python
from os.path import dirname, abspath
import sys
import django
from django.conf import settings
if not settings.configured:
settings_dict = dict(
INSTALLED_APPS=(
'localeurl',
'localeurl.tests',
'django.contrib.sites', # for sitemap test
'django.contrib.sessions', # for USE_SESSION
),
ROOT_URLCONF='localeurl.tests.test_urls',
SITE_ID=1,
)
if django.VERSION >= (1, 2):
settings_dict["DATABASES"] = {
"default": {
"ENGINE": "django.db.backends.sqlite3"
}}
else:
settings_dict["DATABASE_ENGINE"] = "sqlite3"
settings.configure(**settings_dict)
if django.VERSION >= (1, 7):
django.setup()
def runtests(*test_args):
if not test_args:
test_args = ['tests']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
try:
from django.test.simple import DjangoTestSuiteRunner
def run_tests(test_args, verbosity, interactive):
runner = DjangoTestSuiteRunner(
verbosity=verbosity, interactive=interactive, failfast=False)
return runner.run_tests(test_args)
except ImportError:
# for Django versions that don't have DjangoTestSuiteRunner
from django.test.simple import run_tests
failures = run_tests(
test_args, verbosity=1, interactive=True)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
"""
DSUB_VERSION = '0.1.1'
| Set dsub version to 0.1.2-dev.
PiperOrigin-RevId: 172923102
| # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
"""
DSUB_VERSION = '0.1.2-dev'
|
import rx
from rx import operators as ops
from rx.core import Observable
def _merge(*args) -> Observable:
"""Merges all the observable sequences into a single observable
sequence.
1 - merged = rx.merge(xs, ys, zs)
2 - merged = rx.merge([xs, ys, zs])
Returns:
The observable sequence that merges the elements of the
observable sequences.
"""
sources = args[:]
if isinstance(sources[0], list):
sources = sources[0]
return rx.from_iterable(sources).pipe(ops.merge_all())
| Fix typing and accept iterable instead of list
| from typing import Iterable, Union
import rx
from rx import operators as ops
from rx.core import Observable
def _merge(*args: Union[Observable, Iterable[Observable]]) -> Observable:
"""Merges all the observable sequences into a single observable
sequence.
1 - merged = rx.merge(xs, ys, zs)
2 - merged = rx.merge([xs, ys, zs])
Returns:
The observable sequence that merges the elements of the
observable sequences.
"""
sources = args[:]
if isinstance(sources[0], Iterable):
sources = sources[0]
return rx.from_iterable(sources).pipe(ops.merge_all())
|
import datetime
import numpy as np
class Epoch(datetime.datetime):
def get_jd(self, epoch=2000):
jd = (367.0 * self.year
- np.floor( (7 * (self.year + np.floor( (self.month + 9) / 12.0) ) ) * 0.25 )
+ np.floor( 275 * self.month / 9.0 )
+ self.day + 1721013.5
+ ( (self.second/60.0 + self.minute ) / 60.0 + self.hour ) / 24.0)
if epoch == 2000:
return jd - 2451544.5
elif epoch == 1950:
return jd - 2433282.5
elif epoch == "mjd":
return jd - 2400000.5
elif epoch == 0:
return jd
class State:
def __init__(self, x, y, z, vx, vy, vz, epoch=Epoch(2000,1,1,0,0,0)):
self.r = np.array([x, y, z])
self.v = np.array([vx, vy, vz])
self.t = epoch | Change julian date to be a property.
| import datetime
import numpy as np
class Epoch(datetime.datetime):
@property
def jd(self):
jd = (367.0 * self.year
- np.floor( (7 * (self.year + np.floor( (self.month + 9) / 12.0) ) ) * 0.25 )
+ np.floor( 275 * self.month / 9.0 )
+ self.day + 1721013.5
+ ( (self.second/60.0 + self.minute ) / 60.0 + self.hour ) / 24.0)
return jd
@property
def jd2000(self):
return self.jd - 2451544.5
@property
def jd1950(self):
return self.jd - 2433282.5
@property
def mjd(self):
return self.jd - 2400000.5
class State:
def __init__(self, x, y, z, vx, vy, vz, epoch=Epoch(2000,1,1,0,0,0)):
self.r = np.array([x, y, z])
self.v = np.array([vx, vy, vz])
self.t = epoch |
from distutils.core import setup
long_description = open("README.rst").read()
long_description += "\n.. code-block:: python\n\n "
long_description += "\n ".join(open("output_example.py").read().split("\n"))
setup(
name="radiance",
version="0.1dev5",
packages=["radiance",],
license="MIT",
description="Python tools for the Radiance video art system",
long_description_content_type="text/x-rst",
long_description=long_description,
url="https://radiance.video",
author="Eric Van Albert",
author_email="eric@van.al",
)
| Change version to 0.1 for the pull request
| from distutils.core import setup
long_description = open("README.rst").read()
long_description += "\n.. code-block:: python\n\n "
long_description += "\n ".join(open("output_example.py").read().split("\n"))
setup(
name="radiance",
version="0.1",
packages=["radiance",],
license="MIT",
description="Python tools for the Radiance video art system",
long_description_content_type="text/x-rst",
long_description=long_description,
url="https://radiance.video",
author="Eric Van Albert",
author_email="eric@van.al",
)
|
from __future__ import absolute_import
import datetime
from time import mktime
try:
from dateutil.tz import tzutc, tzlocal
except ImportError:
raise ImportError(
'Using the datetime fields requires the dateutil library. '
'You can obtain dateutil from http://labix.org/python-dateutil'
)
from .base import DateTimeType
class TimeStampType(DateTimeType):
"""Variant of a datetime field that saves itself as a unix timestamp (int)
instead of a ISO-8601 string.
"""
def convert(self, value):
"""Will try to parse the value as a timestamp. If that fails it
will fallback to DateTimeType's value parsing.
A datetime may be used (and is encouraged).
"""
if not value:
return
try:
value = float(value)
return TimeStampType.timestamp_to_date(value)
except (TypeError, ValueError):
pass
return super(TimeStampType, self).convert(value)
@classmethod
def timestamp_to_date(cls, value):
return datetime.datetime.fromtimestamp(value, tz=tzutc())
@classmethod
def date_to_timestamp(cls, value):
if value.tzinfo is None:
value = value.replace(tzinfo=tzlocal())
return int(round(mktime(value.astimezone(tzutc()).timetuple())))
def to_primitive(self, value):
v = TimeStampType.date_to_timestamp(value)
return v
| Change TimeStampType to not accept negative values
This is to work around a Python bug (http://bugs.python.org/issue1777412)
| from __future__ import absolute_import
import datetime
from time import mktime
try:
from dateutil.tz import tzutc, tzlocal
except ImportError:
raise ImportError(
'Using the datetime fields requires the dateutil library. '
'You can obtain dateutil from http://labix.org/python-dateutil'
)
from .base import DateTimeType, ConversionError
class TimeStampType(DateTimeType):
"""Variant of a datetime field that saves itself as a unix timestamp (int)
instead of a ISO-8601 string.
"""
MESSAGES = {
'negative': u'Timestamp cannot be negative.',
}
def convert(self, value):
"""Will try to parse the value as a timestamp. If that fails it
will fallback to DateTimeType's value parsing.
A datetime may be used (and is encouraged).
"""
if not value:
return
try:
value = float(value)
if value < 0:
raise ConversionError(self.messages['negative'])
return TimeStampType.timestamp_to_date(value)
except ConversionError as e:
raise e
except (TypeError, ValueError):
pass
return super(TimeStampType, self).convert(value)
@classmethod
def timestamp_to_date(cls, value):
return datetime.datetime.fromtimestamp(value, tz=tzutc())
@classmethod
def date_to_timestamp(cls, value):
if value.tzinfo is None:
value = value.replace(tzinfo=tzlocal())
return int(round(mktime(value.astimezone(tzutc()).timetuple())))
def to_primitive(self, value):
v = TimeStampType.date_to_timestamp(value)
return v
|
import json
from changes.api.serializer import Serializer, register
from changes.models import Plan, Step
@register(Plan)
class PlanSerializer(Serializer):
def serialize(self, instance, attrs):
return {
'id': instance.id.hex,
'name': instance.label,
'steps': list(instance.steps),
'dateCreated': instance.date_created,
'dateModified': instance.date_modified,
}
@register(Step)
class StepSerializer(Serializer):
def serialize(self, instance, attrs):
implementation = instance.get_implementation()
return {
'id': instance.id.hex,
'implementation': instance.implementation,
'order': instance.order,
'name': implementation.get_label() if implementation else '',
'data': json.dumps(dict(instance.data)),
'dateCreated': instance.date_created,
}
| Handle optional value in step.data
| import json
from changes.api.serializer import Serializer, register
from changes.models import Plan, Step
@register(Plan)
class PlanSerializer(Serializer):
def serialize(self, instance, attrs):
return {
'id': instance.id.hex,
'name': instance.label,
'steps': list(instance.steps),
'dateCreated': instance.date_created,
'dateModified': instance.date_modified,
}
@register(Step)
class StepSerializer(Serializer):
def serialize(self, instance, attrs):
implementation = instance.get_implementation()
return {
'id': instance.id.hex,
'implementation': instance.implementation,
'order': instance.order,
'name': implementation.get_label() if implementation else '',
'data': json.dumps(dict(instance.data or {})),
'dateCreated': instance.date_created,
}
|
#!/usr/bin/python
import json
import utils
def component_reviews(component, reviewer=None):
cmd = ('ssh review.openstack.org gerrit query --format json '
'--current-patch-set project:%s status:open '
'limit:10000'
% component)
if reviewer:
cmd += ' reviewer:%s' % reviewer
else:
cmd += ' --all-approvals'
stdout = utils.runcmd(cmd)
reviews = []
for line in stdout.split('\n'):
if not line:
continue
try:
packet = json.loads(line)
if packet.get('project') == component:
reviews.append(packet)
except ValueError as e:
print 'Could not decode:'
print ' %s' % line
print ' Error: %s' % e
return reviews
if __name__ == '__main__':
reviews = component_reviews('openstack/nova', reviewer='mikal@stillhq.com')
print '%s reviews found' % len(reviews)
for review in reviews:
print
for key in sorted(review.keys()):
if key == 'patchSets':
print '%s:' % key
for ps in review[key]:
print ' %s' % ps
else:
print '%s: %s' %(key, review[key])
| Handle args in the review helper.
| #!/usr/bin/python
import argparse
import json
import utils
def component_reviews(component, reviewer=None):
cmd = ('ssh review.openstack.org gerrit query --format json '
'--current-patch-set project:%s status:open '
'limit:10000'
% component)
if reviewer:
cmd += ' reviewer:%s' % reviewer
else:
cmd += ' --all-approvals'
stdout = utils.runcmd(cmd)
reviews = []
for line in stdout.split('\n'):
if not line:
continue
try:
packet = json.loads(line)
if packet.get('project') == component:
reviews.append(packet)
except ValueError as e:
print 'Could not decode:'
print ' %s' % line
print ' Error: %s' % e
return reviews
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--username', default='mikalstill',
help='The username (if any) to filter by')
ARGS = parser.parse_args()
reviews = component_reviews('openstack/nova', reviewer=ARGS.username)
print '%s reviews found' % len(reviews)
for review in reviews:
print
for key in sorted(review.keys()):
if key == 'patchSets':
print '%s:' % key
for ps in review[key]:
print ' %s' % ps
else:
print '%s: %s' %(key, review[key])
|
from django import forms
from django.core.mail import mail_managers
from django.utils.translation import ugettext as _
from django.core.urlresolvers import reverse
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from .models import Contact
class ContactForm(forms.Form):
personsList = forms.ModelChoiceField(required=True, label=_("Contact person"),
queryset=Contact.objects.all())
topic = forms.CharField(required=True, max_length=150,
label=_("Topic of messages"))
body = forms.CharField(required=True, widget=forms.Textarea(), label=_("Content"))
email = forms.EmailField(required=True, label=_("E-mail"))
def __init__(self, *args, **kwargs):
super(ContactForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_action = reverse('contact:form')
self.helper.form_method = 'post'
self.helper.add_input(Submit('submit', _('Send'), css_class="btn-lg btn-block"))
def get_text(self):
return "%(body)s \n\nE-mail: %(email)s" % self.cleaned_data
def send(self):
mail_managers(self.cleaned_data['topic'], self.get_text())
| Fix contact form - send to recipient, not managers
| from django import forms
from django.core.mail import send_mail
from django.utils.translation import ugettext as _
from django.core.urlresolvers import reverse
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from django.conf import settings
from .models import Contact
def my_mail_send(subject, recipient, message):
subject = '%s%s' % (settings.EMAIL_SUBJECT_PREFIX, subject)
from_email = settings.SERVER_EMAIL
return send_mail(subject, message, from_email, [recipient])
class ContactForm(forms.Form):
recipient = forms.ModelChoiceField(required=True, label=_("Contact person"),
queryset=Contact.objects.all())
topic = forms.CharField(required=True, max_length=150,
label=_("Topic of messages"))
body = forms.CharField(required=True, widget=forms.Textarea(), label=_("Content"))
email = forms.EmailField(required=True, label=_("E-mail"))
def __init__(self, *args, **kwargs):
super(ContactForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_action = reverse('contact:form')
self.helper.form_method = 'post'
self.helper.add_input(Submit('submit', _('Send'), css_class="btn-lg btn-block"))
def get_text(self):
return "%(body)s \n\nE-mail: %(email)s" % self.cleaned_data
def send(self):
my_mail_send(subject=self.cleaned_data['topic'],
recipient=self.cleaned_data['recipient'].email,
message=self.get_text())
|
from pathlib import Path
SETTINGS_PATH = 'conf/settings.py'
PARTIALS_PATH = 'conf/partials.txt'
HOME_DIR = 'HOME'
def load_config(path=SETTINGS_PATH):
settings = eval(open(path).read())
return settings
def root():
# this file is under HOME_DIR, which is directly under the repo root
path = Path(__file__).resolve() # resolve symlinks (~/bin=setup/HOME/bin)
return path.parents[path.parts[::-1].index(HOME_DIR)]
def home():
return root() / HOME_DIR
def home_path(path):
"""Get the path within setup's HOME for the given path
Note: no valid setup path for anything outside of $HOME, so throws exception
"""
return home() / Path(path).resolve().relative_to(Path.home())
| Improve comment in setup module
| from pathlib import Path
SETTINGS_PATH = 'conf/settings.py'
PARTIALS_PATH = 'conf/partials.txt'
HOME_DIR = 'HOME'
def load_config(path=SETTINGS_PATH):
settings = eval(open(path).read())
return settings
def root():
"""Return the path of the root of this setup repository."""
# this file is under HOME_DIR. HOME_DIR's parent is the root.
# So search backwards for HOME_DIR and get its parent.
path = Path(__file__).resolve() # resolve symlinks (~/bin=setup/HOME/bin)
return path.parents[path.parts[::-1].index(HOME_DIR)]
def home():
return root() / HOME_DIR
def home_path(path):
"""Get the path within setup's HOME for the given path
Note: no valid setup path for anything outside of $HOME, so throws exception
"""
return home() / Path(path).resolve().relative_to(Path.home())
|
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo.tests.common import TransactionCase
class TestResPartner(TransactionCase):
def setUp(self):
self.template_id = self.env['base.comment.template'].create({
'name': 'Comment before lines',
'position': 'before_lines',
'text': 'Text before lines',
})
def test_commercial_partner_fields(self):
# Azure Interior
partner_id = self.env.ref('base.res_partner_12')
partner_id.property_comment_template_id = self.template_id.id
# Test childs propagation of commercial partner field
for child_id in partner_id.child_ids:
self.assertEqual(
child_id.property_comment_template_id == self.template_id)
| [IMP] account_invoice_comment_template: Move comment_template_id field to the Invoicing tab
[IMP] account_invoice_comment_template: rename partner field name from comment_template_id to invoice_comment_template_id
[IMP] account_invoice_comment_template: Make partner field company_dependant and move domain definition of invoice fields from the view to the model
[MOV] account_invoice_comment_template: comment_template_id to base_comment_template
[IMP] account_invoice_comment_template: Translate templates when partner changes
| # License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo.tests.common import TransactionCase
class TestResPartner(TransactionCase):
def setUp(self):
super(TestResPartner, self).setUp()
self.template_id = self.env['base.comment.template'].create({
'name': 'Comment before lines',
'position': 'before_lines',
'text': 'Text before lines',
})
def test_commercial_partner_fields(self):
# Azure Interior
partner_id = self.env.ref('base.res_partner_12')
partner_id.property_comment_template_id = self.template_id.id
# Test childs propagation of commercial partner field
for child_id in partner_id.child_ids:
self.assertEqual(
child_id.property_comment_template_id, self.template_id)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def forwards(apps, schema_editor):
"""Create Onboarding group."""
Group = apps.get_model('auth', 'Group')
Group.objects.create(name='Onboarding')
def backwards(apps, schema_editor):
"""Delete Onboarding group."""
Group = apps.get_model('auth', 'Group')
Group.objects.filter(name='Onboarding').delete()
class Migration(migrations.Migration):
dependencies = [
('profiles', '0010_auto_20171221_0112'),
]
operations = [
migrations.RunPython(forwards, backwards)
]
| Check if Onboarding exists before creating.
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def forwards(apps, schema_editor):
"""Create Onboarding group."""
Group = apps.get_model('auth', 'Group')
if not Group.objects.filter(name='Onboarding').exists():
Group.objects.create(name='Onboarding')
def backwards(apps, schema_editor):
"""Delete Onboarding group."""
Group = apps.get_model('auth', 'Group')
if Group.objects.filter(name='Onboarding').exists():
Group.objects.filter(name='Onboarding').delete()
class Migration(migrations.Migration):
dependencies = [
('profiles', '0010_auto_20171221_0112'),
]
operations = [
migrations.RunPython(forwards, backwards)
]
|
from elasticsearch_dsl import String, Nested, Boolean, DocType, tokenizer, analyzer
# Required for case sensitivity
metadata_analyzer = analyzer("metadata_analyzer", tokenizer=tokenizer("keyword"))
class Metadata(DocType):
property_list = Nested(
properties={
"name": String(),
"value": String(analyzer=metadata_analyzer),
"immutable": Boolean()
}
)
def update_all(self, metadata):
"""
Updates all metadata related to an artifact.
Args
metadata(dict): collection of metadata for document.
"""
self.property_list = metadata.values()
| Add case sensitivity to field and clarify analyzer.
| from elasticsearch_dsl import String, Nested, Boolean, DocType, tokenizer, analyzer
# Required for case sensitivity
# To add an analyzer to an existing mapping requires mapping to be "closed"
case_sensitive_analyzer = analyzer("case_sensitive_analyzer", tokenizer=tokenizer("keyword"))
class Metadata(DocType):
property_list = Nested(
properties={
"name": String(analyzer=case_sensitive_analyzer),
"value": String(analyzer=case_sensitive_analyzer),
"immutable": Boolean()
}
)
def update_all(self, metadata):
"""
Updates all metadata related to an artifact.
Args
metadata(dict): collection of metadata for document.
"""
self.property_list = metadata.values()
|
# -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from io import TextIOWrapper
from rest_framework.parsers import BaseParser
class PlainTextParser(BaseParser):
media_type = "text/plain"
def parse(self, stream, media_type=None, parser_context=None):
print "Running PlainTextParser"
charset = self.get_charset(media_type)
if charset:
stream = TextIOWrapper(stream, encoding=charset)
return stream
def get_charset(self, media_type):
if not media_type:
return None
charset = None
msplit = media_type.split(" ");
for m in msplit:
m = m.strip()
if "charset" in m:
csplit = m.split("=")
if len(csplit) > 1:
charset = csplit[1]
return charset.strip().lower()
return None
| Fix parsing data from request
The object passed to the parser method is not a real IOBase stream. It may only
be a Request object which has read, etc. methods. Therefore the real data must
be encapsulated in a BytesIO stream before changing the content type.
| # -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from io import TextIOWrapper, BytesIO
from django.core.handlers.wsgi import WSGIRequest
from rest_framework.parsers import BaseParser
class PlainTextParser(BaseParser):
media_type = "text/plain"
def parse(self, stream, media_type=None, parser_context=None):
print "Running PlainTextParser"
charset = self.get_charset(media_type)
if charset:
if isinstance(stream, WSGIRequest):
stream = BytesIO(stream.read())
stream = TextIOWrapper(stream, encoding=charset)
return stream
def get_charset(self, media_type):
if not media_type:
return None
charset = None
msplit = media_type.split(" ");
for m in msplit:
m = m.strip()
if "charset" in m:
csplit = m.split("=")
if len(csplit) > 1:
charset = csplit[1]
return charset.strip().lower()
return None
|
from django.urls import path
from django_cradmin import viewhelpers
from .views import overview
urlpatterns = [
path('simple/<str:mockname>',
viewhelpers.uimock.UiMock.as_view(template_directory='uimock_demo/simple/'),
name='cradmin_uimock_demo_simple'),
path('',
overview.Overview.as_view(),
name='cradmin_uimock_demo'),
]
| Fix url that was wrongly converted to django3.
| from django.urls import path, re_path
from django_cradmin import viewhelpers
from .views import overview
urlpatterns = [
re_path(r'^simple/(?P<mockname>.+)?$',
viewhelpers.uimock.UiMock.as_view(template_directory='uimock_demo/simple/'),
name='cradmin_uimock_demo_simple'),
path('',
overview.Overview.as_view(),
name='cradmin_uimock_demo'),
]
|
from django.core.urlresolvers import reverse
import pytest
from saleor.userprofile.impersonate import can_impersonate
from saleor.userprofile.models import User
def test_staff_with_permission_can_impersonate(
staff_client, customer_user, staff_user, permission_impersonate_user,
staff_group):
staff_group.permissions.add(permission_impersonate_user)
staff_user.groups.add(staff_group)
staff_user = User.objects.get(pk=staff_user.pk)
response = staff_client.get('/impersonate/{}'.format(customer_user.pk),
follow=True)
assert response.context['user'] == customer_user
assert response.context['user'].is_impersonate
assert response.context['request'].impersonator == staff_user
| Use reverse function in tests
| from django.core.urlresolvers import reverse
import pytest
from saleor.userprofile.impersonate import can_impersonate
from saleor.userprofile.models import User
def test_staff_with_permission_can_impersonate(
staff_client, customer_user, staff_user, permission_impersonate_user,
staff_group):
staff_group.permissions.add(permission_impersonate_user)
staff_user.groups.add(staff_group)
staff_user = User.objects.get(pk=staff_user.pk)
response = staff_client.get(reverse('impersonate-start',
args=[customer_user.pk]), follow=True)
assert response.context['user'] == customer_user
assert response.context['user'].is_impersonate
assert response.context['request'].impersonator == staff_user
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import subprocess
from ..utils import DEVNULL, change_working_directory
from .base import DOWNLOAD_CONTAINER
def download(source_info):
url = source_info['git']
subprocess.call(
('git', 'clone', url, DOWNLOAD_CONTAINER),
stdout=DEVNULL, stderr=subprocess.STDOUT,
)
if 'tag' in source_info:
with change_working_directory(DOWNLOAD_CONTAINER):
subprocess.call(
('git', 'checkout', source_info['tag']),
stdout=DEVNULL, stderr=subprocess.STDOUT,
)
| Add auto branch checkout functionality
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import subprocess
from ..utils import DEVNULL, change_working_directory
from .base import DOWNLOAD_CONTAINER
def _checkout(name):
with change_working_directory(DOWNLOAD_CONTAINER):
subprocess.call(
('git', 'checkout', name),
stdout=DEVNULL, stderr=subprocess.STDOUT,
)
def download(source_info):
url = source_info['git']
subprocess.call(
('git', 'clone', url, DOWNLOAD_CONTAINER),
stdout=DEVNULL, stderr=subprocess.STDOUT,
)
if 'tag' in source_info:
_checkout(source_info['tag'])
elif 'branch' in source_info:
_checkout(source_info['branch'])
|
"""Django integration for codejail"""
from django.core.exceptions import MiddlewareNotUsed
from django.conf import settings
import codejail.jail_code
class ConfigureCodeJailMiddleware(object):
"""Middleware to configure codejail on startup."""
def __init__(self):
python_bin = settings.CODE_JAIL.get('python_bin')
if python_bin:
user = settings.CODE_JAIL['user']
codejail.jail_code.configure("python", python_bin, user=user)
raise MiddlewareNotUsed
| Add more detail in docstring
| """Django integration for codejail.
Code to glue codejail into a Django environment.
"""
from django.core.exceptions import MiddlewareNotUsed
from django.conf import settings
import codejail.jail_code
class ConfigureCodeJailMiddleware(object):
"""
Middleware to configure codejail on startup.
This is a Django idiom to have code run once on server startup: put the
code in the `__init__` of some middleware, and have it do the work, then
raise `MiddlewareNotUsed` to disable the middleware.
"""
def __init__(self):
python_bin = settings.CODE_JAIL.get('python_bin')
if python_bin:
user = settings.CODE_JAIL['user']
codejail.jail_code.configure("python", python_bin, user=user)
raise MiddlewareNotUsed
|
from UM.View.View import View
from UM.View.Renderer import Renderer
from UM.Scene.Iterator.DepthFirstIterator import DepthFirstIterator
from UM.Resources import Resources
class LayerView(View):
def __init__(self):
super().__init__()
self._material = None
def beginRendering(self):
scene = self.getController().getScene()
renderer = self.getRenderer()
if not self._material:
self._material = renderer.createMaterial(Resources.getPath(Resources.ShadersLocation, 'basic.vert'), Resources.getPath(Resources.ShadersLocation, 'color.frag'))
self._material.setUniformValue("u_color", [1.0, 0.0, 0.0, 1.0])
for node in DepthFirstIterator(scene.getRoot()):
if not node.render(renderer):
if node.getMeshData() and node.isVisible():
try:
layerData = node.getMeshData().layerData
except AttributeError:
continue
renderer.queueNode(node, mesh = layerData, material = self._material, mode = Renderer.RenderLineLoop)
def endRendering(self):
pass
| Support colours for rendering the layer view
| from UM.View.View import View
from UM.View.Renderer import Renderer
from UM.Scene.Iterator.DepthFirstIterator import DepthFirstIterator
from UM.Resources import Resources
class LayerView(View):
def __init__(self):
super().__init__()
self._material = None
def beginRendering(self):
scene = self.getController().getScene()
renderer = self.getRenderer()
if not self._material:
self._material = renderer.createMaterial(Resources.getPath(Resources.ShadersLocation, 'basic.vert'), Resources.getPath(Resources.ShadersLocation, 'vertexcolor.frag'))
self._material.setUniformValue("u_color", [1.0, 0.0, 0.0, 1.0])
for node in DepthFirstIterator(scene.getRoot()):
if not node.render(renderer):
if node.getMeshData() and node.isVisible():
try:
layerData = node.getMeshData().layerData
except AttributeError:
continue
renderer.queueNode(node, mesh = layerData, material = self._material, mode = Renderer.RenderLines)
def endRendering(self):
pass
|
# Filename: test_plot.py
# pylint: disable=locally-disabled,C0111,R0904,C0103
import numpy as np
from km3pipe.testing import TestCase
from km3pipe.plot import bincenters
__author__ = "Moritz Lotze"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = []
__license__ = "MIT"
__maintainer__ = "Moritz Lotze"
__email__ = "mlotze@km3net.de"
__status__ = "Development"
class TestBins(TestCase):
def test_binlims(self):
bins = np.linspace(0, 20, 21)
assert bincenters(bins).shape[0] == bins.shape[0] - 1
| Add tests for plot functions
| # Filename: test_plot.py
# pylint: disable=locally-disabled,C0111,R0904,C0103
import numpy as np
from km3pipe.testing import TestCase, patch
from km3pipe.plot import bincenters, meshgrid, automeshgrid, diag
__author__ = "Moritz Lotze"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = []
__license__ = "MIT"
__maintainer__ = "Moritz Lotze"
__email__ = "mlotze@km3net.de"
__status__ = "Development"
class TestBins(TestCase):
def test_binlims(self):
bins = np.linspace(0, 20, 21)
assert bincenters(bins).shape[0] == bins.shape[0] - 1
class TestMeshStuff(TestCase):
def test_meshgrid(self):
xx, yy = meshgrid(-1, 1, 0.8)
assert np.allclose([[-1.0, -0.2, 0.6],
[-1.0, -0.2, 0.6],
[-1.0, -0.2, 0.6]], xx)
assert np.allclose([[-1.0, -1.0, -1.0],
[-0.2, -0.2, -0.2],
[0.6, 0.6, 0.6]], yy)
def test_meshgrid_with_y_specs(self):
xx, yy = meshgrid(-1, 1, 0.8, -10, 10, 8)
assert np.allclose([[-1.0, -0.2, 0.6],
[-1.0, -0.2, 0.6],
[-1.0, -0.2, 0.6]], xx)
assert np.allclose([[-10, -10, -10],
[-2, -2, -2],
[6, 6, 6]], yy)
class TestDiag(TestCase):
def test_call(self):
diag()
|
# Copyright (c) 2012-2019, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 14.1.0
from troposphere import Tags
from . import AWSObject
class Discoverer(AWSObject):
resource_type = "AWS::EventSchemas::Discoverer"
props = {
"Description": (str, False),
"SourceArn": (str, True),
"Tags": (Tags, False),
}
class Registry(AWSObject):
resource_type = "AWS::EventSchemas::Registry"
props = {
"Description": (str, False),
"RegistryName": (str, False),
"Tags": (Tags, False),
}
class RegistryPolicy(AWSObject):
resource_type = "AWS::EventSchemas::RegistryPolicy"
props = {
"Policy": (dict, True),
"RegistryName": (str, True),
"RevisionId": (str, False),
}
class Schema(AWSObject):
resource_type = "AWS::EventSchemas::Schema"
props = {
"Content": (str, True),
"Description": (str, False),
"RegistryName": (str, True),
"SchemaName": (str, False),
"Tags": (Tags, False),
"Type": (str, True),
}
| Update EventSchemas per 2021-09-02 changes
| # Copyright (c) 2012-2021, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 41.0.0
from troposphere import Tags
from . import AWSObject
from .validators import boolean
class Discoverer(AWSObject):
resource_type = "AWS::EventSchemas::Discoverer"
props = {
"CrossAccount": (boolean, False),
"Description": (str, False),
"SourceArn": (str, True),
"Tags": (Tags, False),
}
class Registry(AWSObject):
resource_type = "AWS::EventSchemas::Registry"
props = {
"Description": (str, False),
"RegistryName": (str, False),
"Tags": (Tags, False),
}
class RegistryPolicy(AWSObject):
resource_type = "AWS::EventSchemas::RegistryPolicy"
props = {
"Policy": (dict, True),
"RegistryName": (str, True),
"RevisionId": (str, False),
}
class Schema(AWSObject):
resource_type = "AWS::EventSchemas::Schema"
props = {
"Content": (str, True),
"Description": (str, False),
"RegistryName": (str, True),
"SchemaName": (str, False),
"Tags": (Tags, False),
"Type": (str, True),
}
|
class PID:
def __init__(self, Kp=1, Ki=0.1, Kd=1, maxIntegralCorrection=0, minIntegralCorrection=-0):
self.Kp = Kp # Proporiional gain
self.Ki = Ki # Integral gain
self.Kd = Kd # Derivative gain
self.integral = 0
def incrementTime(self, error, dt):
self.integral = self.integral + error*self.Ki*dt
def computeCorrection(self, error, derror):
correction = self.Kp*(error) + self.Kd*derror + self.integral
return correction
| Correct typing error and arrange indentation
| class PID:
def __init__(self, Kp=1, Ki=0.1, Kd=1, maxIntegralCorrection=0, minIntegralCorrection=-0):
self.Kp = Kp # Proportional gain
self.Ki = Ki # Integral gain
self.Kd = Kd # Derivative gain
self.integral = 0
def incrementTime(self, error, dt):
self.integral = self.integral + error*self.Ki*dt
def computeCorrection(self, error, derror):
correction = self.Kp*(error) + self.Kd*derror + self.integral
return correction
|
from django.core.management.base import BaseCommand
from django.template.loader import render_to_string
class Command(BaseCommand):
help = 'Generate redux boilerplate'
def add_arguments(self, parser):
parser.add_argument('action_name', type=str)
parser.add_argument('--thunk',
action='store_true',
dest='thunk',
default=False,
help='Generate a redux thunk')
def handle(self, *args, **options):
if options['thunk']:
template_name = 'django_redux_generator/thunk_fetch.js'
else:
template_name = 'django_redux_generator/action_creator.js'
print(render_to_string(template_name, {
'action_name': options['action_name'],
}))
| Return the output rather than print
| from django.core.management.base import BaseCommand
from django.template.loader import render_to_string
class Command(BaseCommand):
help = 'Generate redux boilerplate'
def add_arguments(self, parser):
parser.add_argument('action_name', type=str)
parser.add_argument('--thunk',
action='store_true',
dest='thunk',
default=False,
help='Generate a redux thunk')
def handle(self, *args, **options):
if options['thunk']:
template_name = 'django_redux_generator/thunk_fetch.js'
else:
template_name = 'django_redux_generator/action_creator.js'
return render_to_string(template_name, {
'action_name': options['action_name'],
})
|
"""
chdir2
~~~~~~
An alternative implementation of :func:`chdir.chdir`.
:copyright: © 2014 by Petr Zemek <s3rvac@gmail.com>
:license: BSD, see LICENSE for more details
"""
import os
class chdir2():
"""An alternative implementation of :func:`chdir.chdir`."""
def __init__(self, dir):
self.dir = dir
def __enter__(self):
self.orig_cwd = os.getcwd()
os.chdir(self.dir)
def __exit__(self, *exc_info):
os.chdir(self.orig_cwd)
| blog/en-2014-06-21: Add missing parentheses after 'chdir'.
It is a function, so we better add parentheses to make this clearer.
| """
chdir2
~~~~~~
An alternative implementation of :func:`chdir.chdir()`.
:copyright: © 2014 by Petr Zemek <s3rvac@gmail.com>
:license: BSD, see LICENSE for more details
"""
import os
class chdir2():
"""An alternative implementation of :func:`chdir.chdir()`."""
def __init__(self, dir):
self.dir = dir
def __enter__(self):
self.orig_cwd = os.getcwd()
os.chdir(self.dir)
def __exit__(self, *exc_info):
os.chdir(self.orig_cwd)
|
from cartodb_services.here.geocoder import HereMapsGeocoder
from cartodb_services.here.bulk_geocoder import HereMapsBulkGeocoder
from cartodb_services.here.routing import HereMapsRoutingIsoline
| Add new imports for apikey parameter support
| from cartodb_services.here.geocoder import HereMapsGeocoder, HereMapsGeocoderV7
from cartodb_services.here.bulk_geocoder import HereMapsBulkGeocoder, HereMapsBulkGeocoderV7
from cartodb_services.here.service_factory import get_geocoder, get_bulk_geocoder, get_routing_isoline
from cartodb_services.here.routing import HereMapsRoutingIsoline |
# ome - Object Message Expressions
# Copyright (c) 2015-2016 Luke McCarthy <luke@iogopro.co.uk>. All rights reserved.
import sys
from .command import command_args
from .error import OmeError
from .terminal import stderr
def main():
stderr.reset()
try:
from . import compiler
target = compiler.get_target(command_args.target)
build_options = compiler.BuildOptions(target)
backend = compiler.get_backend(target, command_args.backend)
if command_args.verbose:
print('ome: using target {}'.format(target.name))
print('ome: using backend {} {}'.format(backend.name, backend.version))
for filename in command_args.filename:
if command_args.verbose:
print('ome: compiling {}'.format(filename))
if command_args.print_code:
print(compiler.compile_file(filename, target).decode(target.encoding))
else:
build_options.make_executable(filename, backend)
except OmeError as error:
error.write_ansi(stderr)
stderr.reset()
sys.exit(1)
if __name__ == '__main__':
if sys.version_info[0] < 3:
sys.exit('ome: error: please use python 3.x')
main()
| Use print_verbose for conditional printing.
| # ome - Object Message Expressions
# Copyright (c) 2015-2016 Luke McCarthy <luke@iogopro.co.uk>. All rights reserved.
import sys
from .command import command_args
from .error import OmeError
from .terminal import stderr
def print_verbose(*args, **kwargs):
if command_args.verbose:
print(*args, **kwargs)
def main():
stderr.reset()
try:
from . import compiler
target = compiler.get_target(command_args.target)
build_options = compiler.BuildOptions(target)
backend = compiler.get_backend(target, command_args.backend)
print_verbose('ome: using target {}'.format(target.name))
print_verbose('ome: using backend {} {}'.format(backend.name, backend.version))
for filename in command_args.filename:
print_verbose('ome: compiling {}'.format(filename))
if command_args.print_code:
print(compiler.compile_file(filename, target).decode(target.encoding))
else:
build_options.make_executable(filename, backend)
except OmeError as error:
error.write_ansi(stderr)
stderr.reset()
sys.exit(1)
if __name__ == '__main__':
if sys.version_info[0] < 3:
sys.exit('ome: error: please use python 3.x')
main()
|
import logging
import requests
from .exceptions import APIError
from .version import __version__
logger = logging.getLogger(__name__)
class BaseService(object):
def __init__(self, api, **config):
self.api = api
self.config = {
'base_url': 'http://data.police.uk/api/',
'user_agent': 'police-api-client-python/%s' % __version__,
}
self.config.update(config)
def raise_for_status(self, request):
try:
request.raise_for_status()
except requests.models.HTTPError as e:
raise APIError(e)
def request(self, verb, method, **kwargs):
verb = verb.upper()
request_kwargs = {
'headers': {
'User-Agent': self.config['user_agent'],
},
'timeout': self.config.get('timeout', 30),
}
if 'username' in self.config:
request_kwargs['auth'] = (self.config.get('username', ''),
self.config.get('password', ''))
if verb == 'GET':
request_kwargs['params'] = kwargs
else:
request_kwargs['data'] = kwargs
url = self.config['base_url'] + method
logger.debug('%s %s' % (verb, url))
r = requests.request(verb, url, **request_kwargs)
self.raise_for_status(r)
return r.json()
| Refactor request mechanics into an internal method on BaseService
| import logging
import requests
from .exceptions import APIError
from .version import __version__
logger = logging.getLogger(__name__)
class BaseService(object):
def __init__(self, api, **config):
self.api = api
self.config = {
'base_url': 'http://data.police.uk/api/',
'user_agent': 'police-api-client-python/%s' % __version__,
}
self.config.update(config)
def raise_for_status(self, request):
try:
request.raise_for_status()
except requests.models.HTTPError as e:
raise APIError(e)
def _make_request(self, verb, url, params={}):
request_kwargs = {
'headers': {
'User-Agent': self.config['user_agent'],
},
'timeout': self.config.get('timeout', 30),
}
if 'username' in self.config:
request_kwargs['auth'] = (self.config.get('username', ''),
self.config.get('password', ''))
if verb == 'GET':
request_kwargs['params'] = params
else:
request_kwargs['data'] = params
logger.debug('%s %s' % (verb, url))
r = requests.request(verb, url, **request_kwargs)
self.raise_for_status(r)
return r.json()
def request(self, verb, method, **kwargs):
url = self.config['base_url'] + method
return self._make_request(verb.upper(), url, kwargs)
|
import unittest
from enigma import Enigma, Steckerbrett, Umkehrwalze, Walzen
class RotorTestCase(unittest.TestCase):
def test_rotor_encoding(self):
rotor = Walzen(wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q')
self.assertEqual('E', rotor.encode('A'))
def run_tests():
runner = unittest.TextTestRunner()
suite = unittest.TestLoader().loadTestsFromTestCase(RotorTestCase)
runner.run(suite)
if __name__ == '__main__': # pragma: no cover
run_tests() | Test if default rotor encodes backwards properly
| import unittest
from enigma import Enigma, Steckerbrett, Umkehrwalze, Walzen
class RotorTestCase(unittest.TestCase):
def test_rotor_encoding(self):
rotor = Walzen(wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q')
self.assertEqual('E', rotor.encode('A'))
def test_rotor_reverse_encoding(self):
rotor = Walzen(wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q')
self.assertEqual('U', rotor.encode_reverse('A'))
def run_tests():
runner = unittest.TextTestRunner()
suite = unittest.TestLoader().loadTestsFromTestCase(RotorTestCase)
runner.run(suite)
if __name__ == '__main__': # pragma: no cover
run_tests() |
Make the test data accessible to the tests.
| import os
from functools import partial
TESTS_DIR = os.path.dirname(os.path.abspath(__file__))
TEST_DATA_DIR = os.path.join(TESTS_DIR, 'test_data')
AppEventTestXml = partial(open, os.path.join(TEST_DATA_DIR, 'app_event.xml'))
|
|
import numpy as np
def arctan_transform(array, thresh):
return np.arctan(array/thresh)
| Convert a condensed distance matrix (pdist) into an upper triangular matrix
|
import numpy as np
from functools import partial
def arctan_transform(array, thresh):
return np.arctan(array/thresh)
def dist_uppertri(cond_arr, shape):
dist_arr = np.zeros((shape, ) * 2, dtype=cond_arr.dtype)
def unrav_ind(i, j, n):
return n*j - j*(j+1)/2 + i - 1 - j
arr_ind = partial(unrav_ind, n=shape)
for i in xrange(shape):
for j in xrange(i):
dist_arr[i, j] = cond_arr[arr_ind(i, j)]
return dist_arr
|
from django.shortcuts import render
from django.http import HttpResponse
from .models import Greeting
# Create your views here.
def index(request):
return HttpResponse('Hello from Python!')
def db(request):
greeting = Greeting()
greeting.save()
greetings = Greeting.objects.all()
return render(request, 'db.html', {'greetings': greetings})
| Add the view serializer for the account model.
| from rest_framework import permissions, viewsets
from authentication_app.models import Account
from authentication_app.permissions import IsAccountOwner
from authentication_app.serializers import AccountSerializer
'''
@name : AccountViewSerializer
@desc : Defines the serializer for the account view.
'''
class AccountViewSerializer(viewsets.ModelViewSet):
lookup_field = 'username'
queryset = Account.objects.all()
serializer_class = AccountSerializer
def get_permissions(self):
if self.reqiest.method in permissions.SAFE_METHODS:
return (permissions.AllowAny(),)
if self.request.method == 'POST':
return (permissions.AllowAny(),)
return (permissions.IsAuthenticated(), IsAccountOwner(),)
def create(self, request):
serializer = self.serializer_class(data=reqiest.data)
if serializer.is_valid():
Account.objects.create_user(**serializer.validated_data)
return Response(serializer.validated_data, status=status.HTTP_201_CREATED)
return Response({
'status' : 'Bad Request',
'message' : 'Account could not be created with the received data.'
}, status=status.HTTP_400_BAD_REQUEST)
|
__version__ = '1.0.0'
from story.story import BaseStory
from . import print_method
class Story(BaseStory):
name = 'hellopython'
adventures = [
print_method
]
| Add a title to the story the story
| __version__ = '1.0.0'
from story.story import BaseStory
from . import print_method
class Story(BaseStory):
name = 'hellopython'
title = 'Introuction to python'
adventures = [
print_method
]
|
from django.conf.urls import url, include
from rest_framework import routers
import service.authors.views
import service.friendrequest.views
import service.users.views
import service.posts.views
router = routers.DefaultRouter()
router.register(r'users', service.users.views.UserViewSet)
router.register(r'nodes', service.nodes.views.NodeViewSet)
router.register(r'author', service.authors.views.AuthorViewSet, base_name="author")
router.register(r'posts', service.posts.views.PublicPostsViewSet, base_name="post")
# Wire up our API using automatic URL routing.
# Additionally, we include login URLs for the browseable API.
urlpatterns = [
url(r'^', include(router.urls)),
url(r'^auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^friendrequest/', service.friendrequest.views.friendrequest, name='friend-request'),
]
| Add missing Node view import
| from django.conf.urls import url, include
from rest_framework import routers
import service.authors.views
import service.friendrequest.views
import service.users.views
import service.nodes.views
import service.posts.views
router = routers.DefaultRouter()
router.register(r'users', service.users.views.UserViewSet)
router.register(r'nodes', service.nodes.views.NodeViewSet)
router.register(r'author', service.authors.views.AuthorViewSet, base_name="author")
router.register(r'posts', service.posts.views.PublicPostsViewSet, base_name="post")
# Wire up our API using automatic URL routing.
# Additionally, we include login URLs for the browseable API.
urlpatterns = [
url(r'^', include(router.urls)),
url(r'^auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^friendrequest/', service.friendrequest.views.friendrequest, name='friend-request'),
]
|
import os
import uuid
from mischief.actors.pipe import get_local_ip
import yaml
def read_serfnode_yml():
with open('/serfnode.yml') as input:
conf = yaml.load(input) or {}
return conf.get('serfnode') or {}
yml = read_serfnode_yml()
role = os.environ.get('ROLE') or yml.get('ROLE') or 'no_role'
peer = os.environ.get('PEER') or yml.get('PEER')
ip = (os.environ.get('SERF_IP') or yml.get('SERF_IP') or
get_local_ip('8.8.8.8'))
bind_port = os.environ.get('SERF_PORT') or yml.get('SERF_PORT') or 7946
node = os.environ.get('NODE_NAME') or uuid.uuid4().hex
rpc_port = os.environ.get('RPC_PORT') or 7373
service = os.environ.get('SERVICE_IP') or yml.get('SERVICE_IP')
service_port = os.environ.get('SERVICE_PORT') or yml.get('SERVICE_PORT') or 0
| Make yaml fields lowercase in serfnode section | import os
import uuid
from mischief.actors.pipe import get_local_ip
import yaml
def read_serfnode_yml():
with open('/serfnode.yml') as input:
conf = yaml.load(input) or {}
return conf.get('serfnode') or {}
yml = read_serfnode_yml()
role = os.environ.get('ROLE') or yml.get('role') or 'no_role'
peer = os.environ.get('PEER') or yml.get('peer')
ip = (os.environ.get('SERF_IP') or yml.get('serf_ip') or
get_local_ip('8.8.8.8'))
bind_port = os.environ.get('SERF_PORT') or yml.get('serf_port') or 7946
node = os.environ.get('NODE_NAME') or uuid.uuid4().hex
rpc_port = os.environ.get('RPC_PORT') or 7373
service = os.environ.get('SERVICE_IP') or yml.get('service_ip')
service_port = os.environ.get('SERVICE_PORT') or yml.get('service_port') or 0
|
Add utility view for static pages
| import os
from django.views.generic import TemplateView
class PageView(TemplateView):
def get_template_names(self):
return [os.path.join('pages', self.args[0] + '.html')]
def get_context_data(self, **kwargs):
context = super(PageView, self).get_context_data(**kwargs)
context['nav'] = [self.args[0]]
return context
|
|
from django.shortcuts import render
from ord_hackday.search.models import Portal
import requests
import json
def search(request):
c = {}
if 'query' in request.GET:
query = request.GET['query']
if len(query) > 0:
portals = Portal.objects.all()
c['portals'] = portals
c['results'] = []
for portal in portals:
url = portal.url + '/api/3/action/package_search?q=' + query
r = requests.get(url)
json_result = json.loads(r.text)
if json_result['success']:
for r in json_result['result']['results']:
c['results'].append(r)
return render(request, 'search.html', c) | Return constructed URL for each result.
| from django.shortcuts import render
from ord_hackday.search.models import Portal
import requests
import json
def search(request):
c = {}
if 'query' in request.GET:
query = request.GET['query']
if len(query) > 0:
portals = Portal.objects.all()
c['portals'] = portals
c['results'] = []
for portal in portals:
url = portal.url + '/api/3/action/package_search?q=' + query
r = requests.get(url)
json_result = json.loads(r.text)
if json_result['success']:
for r in json_result['result']['results']:
r['result_url'] = portal.url + '/dataset/' + r['name']
c['results'].append(r)
return render(request, 'search.html', c) |
import pytest
import functools
from itertools import product
import numpy as np
from numba import cuda
from libgdf_cffi import libgdf
from librmm_cffi import ffi, librmm
from .utils import new_column, unwrap_devary, get_dtype, gen_rand, fix_zeros
from .utils import buffer_as_bits
_dtypes = [np.int32]
_nelems = [128]
@pytest.fixture(scope="module")
def rmm():
print("initialize librmm")
assert librmm.initialize() == librmm.RMM_SUCCESS
yield librmm
print("finalize librmm")
assert librmm.finalize() == librmm.RMM_SUCCESS
@pytest.mark.parametrize('dtype,nelem', list(product(_dtypes, _nelems)))
def test_rmm_alloc(dtype, nelem, rmm):
expect_fn = np.add
test_fn = libgdf.gdf_add_generic
#import cffi
#ffi = cffi.FFI()
# data
h_in = gen_rand(dtype, nelem)
h_result = gen_rand(dtype, nelem)
d_in = rmm.to_device(h_in)
d_result = rmm.device_array_like(d_in)
d_result.copy_to_device(d_in)
h_result = d_result.copy_to_host()
print('expect')
print(h_in)
print('got')
print(h_result)
np.testing.assert_array_equal(h_result, h_in)
assert rmm.free_device_array_memory(d_in) == rmm.RMM_SUCCESS
assert rmm.free_device_array_memory(d_result) == rmm.RMM_SUCCESS
| Improve librmm python API and convert all pytests to use RMM to create device_arrays.
| import pytest
import functools
from itertools import product
import numpy as np
from numba import cuda
from librmm_cffi import librmm as rmm
from .utils import gen_rand
_dtypes = [np.int32]
_nelems = [1, 2, 7, 8, 9, 32, 128]
@pytest.mark.parametrize('dtype,nelem', list(product(_dtypes, _nelems)))
def test_rmm_alloc(dtype, nelem):
# data
h_in = gen_rand(dtype, nelem)
h_result = gen_rand(dtype, nelem)
d_in = rmm.to_device(h_in)
d_result = rmm.device_array_like(d_in)
d_result.copy_to_device(d_in)
h_result = d_result.copy_to_host()
print('expect')
print(h_in)
print('got')
print(h_result)
np.testing.assert_array_equal(h_result, h_in)
|
#!/usr/bin/env python
"""Basic Model Interface for the Dakota iterative systems analysis toolkit."""
from basic_modeling_interface import Bmi
from .core import Dakota
class BmiDakota(Bmi):
"""Perform a Dakota experiment on a component."""
_name = 'Dakota'
def __init__(self):
"""Create a BmiDakota instance."""
self._model = None
def initialize(self, filename=None):
"""Create a Dakota instance and input file.
Parameters
----------
filename : str, optional
Path to a Dakota configuration file.
"""
if filename is None:
self._model = Dakota()
else:
self._model = Dakota.from_file_like(filename)
self._model.write_input_file()
def update(self):
"""Run Dakota."""
self._model.run()
def finalize(self):
"""Remove the Dakota instance."""
self._model = None
def get_component_name(self):
"""Name of the component."""
return self._name
| Add time methods to BmiDakota
| #!/usr/bin/env python
"""Basic Model Interface for the Dakota iterative systems analysis toolkit."""
from basic_modeling_interface import Bmi
from .core import Dakota
class BmiDakota(Bmi):
"""Perform a Dakota experiment on a component."""
_name = 'Dakota'
def __init__(self):
"""Create a BmiDakota instance."""
self._model = None
self._time = 0.0
def initialize(self, filename=None):
"""Create a Dakota instance and input file.
Parameters
----------
filename : str, optional
Path to a Dakota configuration file.
"""
if filename is None:
self._model = Dakota()
else:
self._model = Dakota.from_file_like(filename)
self._model.write_input_file()
def update(self):
"""Run Dakota."""
self._model.run()
self._time += self.get_time_step()
def finalize(self):
"""Remove the Dakota instance."""
self._model = None
def get_component_name(self):
"""Name of the component."""
return self._name
def get_start_time(self):
"""Start time of model."""
return 0.0
def get_end_time(self):
"""End time of model."""
return 1.0
def get_current_time(self):
"""Current time of model."""
return self._time
def get_time_step(self):
"""Time step of model."""
return 1.0
|
def saddle_points(m):
mt = transpose(m)
if not m == transpose(mt):
raise ValueError
return set((i, j) for i, row in enumerate(m) for j, col in enumerate(mt)
if (row[j] == min(row) and col[i] == max(col))
or (row[j] == max(row) and col[i] == min(col)))
def transpose(m):
return [list(col) for col in zip(*m)]
| Correct it to actually follow the README...
| def saddle_points(m):
mt = transpose(m)
if not m == transpose(mt):
raise ValueError
return set((i, j) for i, row in enumerate(m) for j, col in enumerate(mt)
if (row[j] == max(row) and col[i] == min(col)))
def transpose(m):
return [list(col) for col in zip(*m)]
|
from pathlib import Path
extensions = [
'sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.coverage']
master_doc = 'index'
project = 'cairocffi'
copyright = '2013-2019, Simon Sapin'
release = (Path(__file__).parent / 'cairocffi' / 'VERSION').read_text().strip()
version = '.'.join(release.split('.')[:2])
exclude_patterns = ['_build']
autodoc_member_order = 'bysource'
autodoc_default_flags = ['members']
intersphinx_mapping = {
'http://docs.python.org/': None,
'http://cairographics.org/documentation/pycairo/2/': None}
| Fix the VERSION path for doc
| from pathlib import Path
extensions = [
'sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.coverage']
master_doc = 'index'
project = 'cairocffi'
copyright = '2013-2019, Simon Sapin'
release = (
Path(__file__).parent.parent / 'cairocffi' / 'VERSION').read_text().strip()
version = '.'.join(release.split('.')[:2])
exclude_patterns = ['_build']
autodoc_member_order = 'bysource'
autodoc_default_flags = ['members']
intersphinx_mapping = {
'http://docs.python.org/': None,
'http://cairographics.org/documentation/pycairo/2/': None}
|
import argparse
import simplejson as json
from esridump.dumper import EsriDumper
def main():
parser = argparse.ArgumentParser()
parser.add_argument("url")
parser.add_argument("outfile", type=argparse.FileType('w'))
parser.add_argument("--jsonlines", action='store_true', default=False)
args = parser.parse_args()
dumper = EsriDumper(args.url)
if not args.jsonlines:
args.outfile.write('{"type":"FeatureCollection","features":[\n')
for feature in dumper.iter():
args.outfile.write(json.dumps(feature))
if not args.jsonlines:
args.outfile.write(',')
args.outfile.write('\n')
if not args.jsonlines:
# args.outfile.seek(-2)
args.outfile.write(']}')
if __name__ == '__main__':
main()
| Remove the extra comma at the end.
Fixes #7
| import argparse
import simplejson as json
from esridump.dumper import EsriDumper
def main():
parser = argparse.ArgumentParser()
parser.add_argument("url")
parser.add_argument("outfile", type=argparse.FileType('w'))
parser.add_argument("--jsonlines", action='store_true', default=False)
args = parser.parse_args()
dumper = EsriDumper(args.url)
if args.jsonlines:
for feature in dumper.iter():
args.outfile.write(json.dumps(feature))
args.outfile.write('\n')
else:
args.outfile.write('{"type":"FeatureCollection","features":[\n')
feature_iter = dumper.iter()
try:
feature = feature_iter.next()
while True:
args.outfile.write(json.dumps(feature))
feature = feature_iter.next()
args.outfile.write(',\n')
except StopIteration:
args.outfile.write('\n')
args.outfile.write(']}')
if __name__ == '__main__':
main()
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
from typing import Any, Iterable
from iopath.common.file_io import PathHandler
class TabularUriParser:
def parse_uri(self, uri: str) -> None:
pass
class TabularPathHandler(PathHandler):
def _opent(
self, path: str, mode: str = "r", buffering: int = 32, **kwargs: Any
) -> Iterable[Any]:
assert mode == "r"
| Update type signature of AIRStorePathHandler.opent()
Summary:
The previous diff updated the type signature of the
`PathHandler.opent()` method to return a custom context manager. Here,
we update the return type of the overriden `AIRStorePathHandler.opent()`
method to return an implementation of the `PathHandlerContext` protocol,
namely the `AIRStoreRowDataLoader` instead of `Iterable[Any]` to allow
Pyre to carry out static type checking.
Reviewed By: mackorone
Differential Revision: D33833561
fbshipit-source-id: f642110645b147a955f4375fc24d4c29cdca6f26
| # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
from typing import Any
from iopath.common.file_io import PathHandler, TabularIO
class TabularUriParser:
def parse_uri(self, uri: str) -> None:
pass
class TabularPathHandler(PathHandler):
def _opent(
self, path: str, mode: str = "r", buffering: int = 32, **kwargs: Any
) -> TabularIO:
assert mode == "r"
|
from setuptools import setup
requirements = [
'pyqt5'
]
test_requirements = [
'pytest',
'pytest-cov',
'pytest-faulthandler',
'pytest-mock',
'pytest-qt',
'pytest-xvfb',
]
setup(
name='EasyEdit',
version='0.0.1',
description="A PyQt5 cross-platform text editor",
author="Matthew S. Klosak",
author_email='msklosak@gmail.com',
url='https://github.com/msklosak/EasyEdit',
packages=['easyedit', 'tests'],
entry_points={
'console_scripts': [
'EasyEdit=easyedit.editor:main'
]
},
install_requires=requirements,
zip_safe=False,
keywords='EasyEdit',
classifiers=[
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
| Add 'qscintilla' to the program requirements
| from setuptools import setup
requirements = [
'pyqt5',
'qscintilla'
]
test_requirements = [
'pytest',
'pytest-cov',
'pytest-faulthandler',
'pytest-mock',
'pytest-qt',
'pytest-xvfb',
]
setup(
name='EasyEdit',
version='0.0.1',
description="A PyQt5 cross-platform text editor",
author="Matthew S. Klosak",
author_email='msklosak@gmail.com',
url='https://github.com/msklosak/EasyEdit',
packages=['easyedit', 'tests'],
entry_points={
'console_scripts': [
'EasyEdit=easyedit.editor:main'
]
},
install_requires=requirements,
zip_safe=False,
keywords='EasyEdit',
classifiers=[
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
|
#!/usr/bin/env python
from distutils.core import setup
setup(name="Neighborhoodize",
version='0.9',
description='Utility for translating lat, long coordinates into '
'neighborhoods in various cities',
author='Brian Lange',
author_email='brian.lange@datascopeanalytics.com',
url='https://github.com/bjlange/neighborhoodize',
packages=['neighborhoodize', ],
package_data={'neighborhoodize': ['data/*']},
install_requires=[
"Shapely >= 1.5.7",
"beautifulsoup4 >= 4.3.2",
"fastkml >= 0.9"],
)
| Add download url for pypi
| #!/usr/bin/env python
from distutils.core import setup
setup(name="Neighborhoodize",
version='0.9',
description='Utility for translating lat, long coordinates into '
'neighborhoods in various cities',
author='Brian Lange',
author_email='brian.lange@datascopeanalytics.com',
url='https://github.com/bjlange/neighborhoodize',
packages=['neighborhoodize', ],
package_data={'neighborhoodize': ['data/*']},
download_url = 'https://github.com/bjlange/neighborhoodize/tarball/0.9',
install_requires=[
"Shapely >= 1.5.7",
"beautifulsoup4 >= 4.3.2",
"fastkml >= 0.9"],
)
|
from setuptools import setup, find_packages
setup(
name='gu-django-filebrowser-no-grappelli',
version='3.1.32',
description='Media-Management with the Django Admin-Interface. Without django-grappelli requirement.',
author='Patrick Kranzlmueller',
author_email='patrick@vonautomatisch.at',
url='https://github.com/agushuley/gu-django-filebrowser-no-grappelli',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
]
)
| Migrate to python 3, django 3.2
| from setuptools import setup, find_packages
setup(
name='gu-django-filebrowser-no-grappelli',
version='3.1.33',
description='Media-Management with the Django Admin-Interface. Without django-grappelli requirement.',
author='Patrick Kranzlmueller',
author_email='patrick@vonautomatisch.at',
url='https://github.com/hu-django/filebrowser-no-grappelli',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
]
)
|
from setuptools import setup, find_packages
setup(
name='panoptes_client',
url='https://github.com/zooniverse/panoptes-python-client',
author='Adam McMaster',
author_email='adam@zooniverse.org',
version='1.4.0',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.4.2,<2.26',
'future>=0.16,<0.19',
'python-magic>=0.4,<0.5',
'redo>=1.7',
'six>=1.9',
],
extras_require={
'testing': [
'mock>=2.0,<4.1',
],
'docs': [
'sphinx',
],
':python_version == "2.7"': ['futures'],
}
)
| Update requests requirement from <2.26,>=2.4.2 to >=2.4.2,<2.27
Updates the requirements on [requests](https://github.com/psf/requests) to permit the latest version.
- [Release notes](https://github.com/psf/requests/releases)
- [Changelog](https://github.com/psf/requests/blob/master/HISTORY.md)
- [Commits](https://github.com/psf/requests/compare/v2.4.2...v2.26.0)
---
updated-dependencies:
- dependency-name: requests
dependency-type: direct:production
...
Signed-off-by: dependabot[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@github.com> | from setuptools import setup, find_packages
setup(
name='panoptes_client',
url='https://github.com/zooniverse/panoptes-python-client',
author='Adam McMaster',
author_email='adam@zooniverse.org',
version='1.4.0',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.4.2,<2.27',
'future>=0.16,<0.19',
'python-magic>=0.4,<0.5',
'redo>=1.7',
'six>=1.9',
],
extras_require={
'testing': [
'mock>=2.0,<4.1',
],
'docs': [
'sphinx',
],
':python_version == "2.7"': ['futures'],
}
)
|
#!/usr/bin/env python
from setuptools import find_packages, Command
setup_params = dict(
name='bugimporters',
version=0.1,
author='Various contributers to the OpenHatch project, Berry Phillips',
author_email='all@openhatch.org, berryphillips@gmail.com',
packages=find_packages(),
description='Bug importers for the OpenHatch project',
install_requires=[
'gdata',
'lxml',
'pyopenssl',
'unicodecsv',
'feedparser',
'twisted',
'python-dateutil',
'decorator',
'scrapy>0.9',
'argparse',
'mock',
'PyYAML',
'autoresponse>=0.2',
],
)
### Python 2.7 already has importlib. Because of that,
### we can't put it in install_requires. We test for
### that here; if needed, we add it.
try:
import importlib
except ImportError:
install_requires.append('importlib')
if __name__ == '__main__':
from setuptools import setup
setup(**setup_params)
| Fix NameError on Python 2.6
| #!/usr/bin/env python
from setuptools import find_packages, Command
setup_params = dict(
name='bugimporters',
version=0.1,
author='Various contributers to the OpenHatch project, Berry Phillips',
author_email='all@openhatch.org, berryphillips@gmail.com',
packages=find_packages(),
description='Bug importers for the OpenHatch project',
install_requires=[
'gdata',
'lxml',
'pyopenssl',
'unicodecsv',
'feedparser',
'twisted',
'python-dateutil',
'decorator',
'scrapy>0.9',
'argparse',
'mock',
'PyYAML',
'autoresponse>=0.2',
],
)
### Python 2.7 already has importlib. Because of that,
### we can't put it in install_requires. We test for
### that here; if needed, we add it.
try:
import importlib
except ImportError:
setup_params['install_requires'].append('importlib')
if __name__ == '__main__':
from setuptools import setup
setup(**setup_params)
|
from setuptools import setup
setup(
name='vprof',
version='0.1',
packages=['vprof'],
description="Visual profiler for Python",
url='http//github.com/nvdv/vprof',
license='BSD',
author='nvdv',
include_package_data=True,
entry_points={
'console_scripts': [
'vprof = vprof.__main__:main'
]
},
)
| Prepare for submission to PyPI.
| from setuptools import setup
setup(
name='vprof',
version='0.1',
packages=['vprof'],
description="Visual profiler for Python",
url='http://github.com/nvdv/vprof',
license='BSD',
author='nvdv',
include_package_data=True,
keywords = ['debugging', 'profiling'],
entry_points = {
'console_scripts': [
'vprof = vprof.__main__:main'
]
},
classifiers = [
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
],
)
|
import setuptools
from icekit import __version__
setuptools.setup(
name='icekit',
version=__version__,
packages=setuptools.find_packages(),
install_requires=[
'coverage',
'django-bootstrap3',
'django-brightcove',
'django-dynamic-fixture',
'django-fluent-pages[flatpage,fluentpage,redirectnode]',
'django-fluent-contents',
'django-nose',
'django-webtest',
'mkdocs',
'nose-progressive',
'Pillow',
'tox',
'WebTest',
],
extras_require={
'search': ['django-haystack', ]
}
)
| Move `django-brightcove` dependency into `brightcove` optional extra.
| import setuptools
from icekit import __version__
setuptools.setup(
name='icekit',
version=__version__,
packages=setuptools.find_packages(),
install_requires=[
'coverage',
'django-bootstrap3',
'django-dynamic-fixture',
'django-fluent-pages[flatpage,fluentpage,redirectnode]',
'django-fluent-contents',
'django-nose',
'django-webtest',
'mkdocs',
'nose-progressive',
'Pillow',
'tox',
'WebTest',
],
extras_require={
'brightcove': ['django-brightcove'],
'search': ['django-haystack', ]
}
)
|
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.txt')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
requires = [
'pyramid',
'pyramid_chameleon',
'pyramid_debugtoolbar',
'waitress',
]
setup(name='s3authbasic',
version='0.0',
description='s3authbasic',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Antonio Perez-Aranda Alcaide',
author_email='ant30tx@gmail.com',
url='http://www.ant30.es/',
keywords='web pyramid pylons s3',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="s3authbasic",
entry_points="""\
[paste.app_factory]
main = s3authbasic:main
""",
)
| Add the boto as requirement
| import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.txt')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
requires = [
'pyramid',
'pyramid_chameleon',
'pyramid_debugtoolbar',
'waitress',
'boto',
]
setup(name='s3authbasic',
version='0.0',
description='s3authbasic',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Antonio Perez-Aranda Alcaide',
author_email='ant30tx@gmail.com',
url='http://www.ant30.es/',
keywords='web pyramid pylons s3',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="s3authbasic",
entry_points="""\
[paste.app_factory]
main = s3authbasic:main
""",
)
|
#! /usr/bin/env python
from setuptools import setup, find_packages
from basic_modeling_interface import __version__
setup(name='basic-modeling-interface',
version=__version__,
author='Eric Hutton',
author_email='eric.hutton@colorado.edu',
url='https://github.com/bmi-forum/bmi-python',
# download_url='https://github.com/bmi-forum/bmi-python/tarball/0.1.0',
license='MIT',
description='Python bindings for the Basic Modeling Interface',
long_description=open('README.md').read(),
keywords='BMI model coupling',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
packages=find_packages(),
)
| Update classifiers to show Python 3 support
| #! /usr/bin/env python
from setuptools import setup, find_packages
from basic_modeling_interface import __version__
setup(name='basic-modeling-interface',
version=__version__,
author='Eric Hutton',
author_email='eric.hutton@colorado.edu',
url='https://github.com/bmi-forum/bmi-python',
license='MIT',
description='Python bindings for the Basic Modeling Interface',
long_description=open('README.md').read(),
keywords='BMI model coupling',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
packages=find_packages(),
)
|
from django.http import HttpResponse
from rest_framework.response import Response
from rest_framework.views import APIView
from .models import Product
from .serializers import ProductSerializer
class ProductList(APIView):
def get(self, request, format=None):
products = Product.objects.all()
serializer = ProductSerializer(products, many=True)
return Response(serializer.data)
| Add POST method to ProductList class
| from django.http import HttpResponse
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework import status
from .models import Product
from .serializers import ProductSerializer
class ProductList(APIView):
def get(self, request, format=None):
products = Product.objects.all()
serializer = ProductSerializer(products, many=True)
return Response(serializer.data)
def post(self, request, format=None):
serializer = ProductSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
def test_ensure_authority(manager_transaction):
authority = manager_transaction.ensure_authority(
name='Test Authority',
rank=0,
cardinality=1234
)
assert authority.name == 'Test Authority'
assert authority.rank == 0
assert authority.cardinality == 1234
| Test ensure_authority for both nonexistent and already existing Authority records.
| def test_ensure_authority(manager_transaction):
authority1 = manager_transaction.ensure_authority(
name='Test Authority',
rank=0,
cardinality=1234
)
assert authority1.name == 'Test Authority'
assert authority1.rank == 0
assert authority1.cardinality == 1234
authority2 = manager_transaction.ensure_authority(
name='Test Authority',
rank=1,
cardinality=2345
)
assert authority1 is authority2
assert authority2.name == 'Test Authority'
assert authority2.rank == 1
assert authority2.cardinality == 2345
|
Test female breakdown median of organization with 2 employees
| from django.core import management
from django.test import TestCase
from tx_salaries.factories import (OrganizationFactory, PostFactory,
MembershipFactory, EmployeeFactory)
class EvenEmployeeBreakdownMedianTest(TestCase):
def test_update_cohort(self):
parent_org = OrganizationFactory(name="Test Parent Organization")
department = OrganizationFactory(name="Test Organization",
parent=parent_org)
post = PostFactory(organization=department)
# POST MUST HAVE UNICODE VALUE
membership_one = MembershipFactory(post=post, organization=department,
person__gender='F')
membership_two = MembershipFactory(post=post, organization=department,
person__gender='F')
# create two employees
employee_one = EmployeeFactory(compensation=135000,
position=membership_one)
employee_two = EmployeeFactory(compensation=62217,
position=membership_two)
management.call_command('denormalize_salary_data')
# assert median salary of the organization is 98608.5
self.assertEqual(
float(department.stats.female['median_paid']), 98608.5)
# and the total number of female employees is 2
self.assertEqual(department.stats.female['total_number'], 2)
|
|
"""Command-line utilities for HTTP service subsystem."""
import werkzeug.debug
import werkzeug.serving
from jacquard.commands import BaseCommand
from jacquard.service import get_wsgi_app
class RunServer(BaseCommand):
"""
Run a debug server.
**This is for debug, local use only, not production.**
This command is named to mirror its equivalent in Django. It configures
the WSGI app and serves it through Werkzeug's simple serving mechanism,
with a debugger attached, and auto-reloading.
"""
help = "run a (local, debug) server"
def add_arguments(self, parser):
"""Add argparse arguments."""
parser.add_argument(
'-p',
'--port',
type=int,
default=1212,
help="port to bind to",
)
parser.add_argument(
'-b',
'--bind',
type=str,
default='::1',
help="address to bind to",
)
def handle(self, config, options):
"""Run command."""
app = get_wsgi_app(config)
werkzeug.serving.run_simple(
options.bind,
options.port,
app,
use_reloader=True,
use_debugger=True,
use_evalex=True,
threaded=False,
processes=1,
)
| Mark runserver as a plumbing command
| """Command-line utilities for HTTP service subsystem."""
import werkzeug.debug
import werkzeug.serving
from jacquard.commands import BaseCommand
from jacquard.service import get_wsgi_app
class RunServer(BaseCommand):
"""
Run a debug server.
**This is for debug, local use only, not production.**
This command is named to mirror its equivalent in Django. It configures
the WSGI app and serves it through Werkzeug's simple serving mechanism,
with a debugger attached, and auto-reloading.
"""
plumbing = True
help = "run a (local, debug) server"
def add_arguments(self, parser):
"""Add argparse arguments."""
parser.add_argument(
'-p',
'--port',
type=int,
default=1212,
help="port to bind to",
)
parser.add_argument(
'-b',
'--bind',
type=str,
default='::1',
help="address to bind to",
)
def handle(self, config, options):
"""Run command."""
app = get_wsgi_app(config)
werkzeug.serving.run_simple(
options.bind,
options.port,
app,
use_reloader=True,
use_debugger=True,
use_evalex=True,
threaded=False,
processes=1,
)
|
from django.contrib import admin
from imagerprofile.models import ImagerProfile
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
# admin.site.unregister(User)
class ImagerUserInline(admin.StackedInline):
model = ImagerProfile
can_delete = False
verbose_name_plural = 'imager user'
extra = 0
class UserAdmin(UserAdmin):
inlines = (ImagerUserInline, )
# def get_inline_formsets(self, request, formsets, inline_instances,
# obj=None):
# if obj is None:
# inline_instances.remove(ImagerUserInline)
# else:
# inline_instances.add(ImagerUserInline)
# super(UserAdmin, self).get_inline_formsets(request, formsets,
# inline_instances, obj)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
| Remove Imager Profile form when creating new user, only displays when editing existing user to prevent Integrity Error
| from django.contrib import admin
from imagerprofile.models import ImagerProfile
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
class ImagerUserInline(admin.StackedInline):
model = ImagerProfile
can_delete = False
verbose_name_plural = 'imager user'
extra = 0
class UserAdmin(UserAdmin):
inlines = (ImagerUserInline, )
def get_inline_instances(self, request, obj=None):
if obj is None:
return []
else:
return [inline(self.model, self.admin_site)
for inline in self.inlines]
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
|
"""Function to fetch words."""
import random
WORDLIST = 'wordlist.txt'
def get_random_word(min_word_length):
"""Get a random word from the wordlist using no extra memory."""
num_words_processed = 0
curr_word = None
with open(WORDLIST, 'r') as f:
for word in f:
if len(word) < min_word_length:
continue
num_words_processed += 1
if random.randint(1, num_words_processed) == 1:
curr_word = word
return curr_word
| Enforce lowercase on word selection
| """Function to fetch words."""
import random
WORDLIST = 'wordlist.txt'
def get_random_word(min_word_length):
"""Get a random word from the wordlist using no extra memory."""
num_words_processed = 0
curr_word = None
with open(WORDLIST, 'r') as f:
for word in f:
word = word.strip().lower()
if len(word) < min_word_length:
continue
num_words_processed += 1
if random.randint(1, num_words_processed) == 1:
curr_word = word
return curr_word
|
from haystack.indexes import CharField, DateTimeField, RealTimeSearchIndex, Indexable
from models import Bookmark
class BookmarkIndex(RealTimeSearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
pub_date = DateTimeField(model_attr='added')
summary = CharField(model_attr='note')
sites = CharField(model_attr='site_slugs')
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
def get_model(self):
return Bookmark
| Use `SearchIndex` instead of deprecated `RealTimeSearchIndex`.
| from haystack.indexes import CharField, DateTimeField, SearchIndex, Indexable
from models import Bookmark
class BookmarkIndex(SearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
pub_date = DateTimeField(model_attr='added')
summary = CharField(model_attr='note')
sites = CharField(model_attr='site_slugs')
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
def get_model(self):
return Bookmark
|
# Download the helper library from https://www.twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/console
account_sid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
auth_token = 'your_auth_token'
client = Client(account_sid, auth_token)
assistant_sid = 'UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
# Provide actions for your assistant: say something and listen for a repsonse.
update_action = {
'actions': [
{'say': 'Hi there, I\'m your virtual assistant! How can I help you?'},
{'listen': True}
]
}
# Update the default intent to use your new actions.
client.preview.understand \
.assistants(assistant_sid) \
.intents('hello-world') \
.intent_actions().update(update_action)
print("Intent actions updated")
| Update intent actions to use assistant SID inline
Maintaining consistency with the auto-generated code samples for Understand, which
don't allow for our variable-named placeholder values | # Download the helper library from https://www.twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/console
account_sid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
auth_token = 'your_auth_token'
client = Client(account_sid, auth_token)
# Provide actions for your assistant: say something and listen for a repsonse.
update_action = {
'actions': [
{'say': 'Hi there, I\'m your virtual assistant! How can I help you?'},
{'listen': True}
]
}
# Update the default intent to use your new actions.
# Replace 'UAXXX...' with your Assistant's unique SID https://www.twilio.com/console/assistant/list
client.preview.understand \
.assistants('UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.intents('hello-world') \
.intent_actions().update(update_action)
print("Intent actions updated")
|
import numpy as np
from pyquante2 import basisset,rhf,h2
from pyquante2.graphics.vtk import vtk_orbital
from pyquante.graphics.lineplot import test_plot_orbs,test_plot_bfs
def lineplot_orbs(): return test_plot_orbs()
def lineplot_bfs(): return test_plot_bfs()
def plot_h2():
bfs = basisset(h2,'sto3g')
solver = rhf(h2,bfs)
ens = solver.converge()
# Note: these orbitals are not coming out symmetric. Why not??
print solver
print solver.orbs
vtk_orbital(h2,solver.orbs,bfs)
def plot_orbs():
bfs = basisset(h2,'sto3g')
orbs = np.array([[1.0,1.0],
[1.0,-1.0]],'d')
vtk_orbital(h2,orbs,bfs)
return
if __name__ == '__main__':
plot_h2()
| Test routine for contour plotting
| import numpy as np
from pyquante2 import basisset,rhf,h2
from pyquante2.graphics.vtk import vtk_orbital
from pyquante.graphics.lineplot import test_plot_orbs,test_plot_bfs
from pyquante.graphics.contourplot import test_contour
def lineplot_orbs(): return test_plot_orbs()
def lineplot_bfs(): return test_plot_bfs()
def contour_orb(): return test_contour(True)
def plot_h2():
bfs = basisset(h2,'sto3g')
solver = rhf(h2,bfs)
ens = solver.converge()
# Note: these orbitals are not coming out symmetric. Why not??
print solver
print solver.orbs
vtk_orbital(h2,solver.orbs,bfs)
def plot_orbs():
bfs = basisset(h2,'sto3g')
orbs = np.array([[1.0,1.0],
[1.0,-1.0]],'d')
vtk_orbital(h2,orbs,bfs)
return
if __name__ == '__main__':
plot_h2()
|
#!/usr/bin/env python
def convert_filetime_to_epoch(filetime):
return (filetime / 10000000) - 11644473600
| Add function to test network connectivity
| #!/usr/bin/env python
import socket
def convert_filetime_to_epoch(filetime):
return (filetime / 10000000) - 11644473600
# Can be used to test connectivity if telnet isn't installed (https://stackoverflow.com/a/33117579/399105)
def test_connectivity(host, port, timeout=3):
try:
socket.setdefaulttimeout(timeout)
socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect((host, port))
return True
except Exception as ex:
print(ex.message)
return False
|
# -*- coding: utf-8 -*-
import pytest
def test_app_title(app):
"""Simply tests if the default app title meets the expectations.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert app.title == '{{cookiecutter.app_title}}'
def test_carousel(app):
"""Test for the carousel widget of the app checking the slides' names.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the names of the slides do not match the expectations
"""
names = [slide.name for slide in app.carousel.slides]
expected = ['hello', 'kivy', 'cookiecutterdozer', 'license', 'github']
assert names == expected
| Fix flake8 in app test
| # -*- coding: utf-8 -*-
def test_app_title(app):
"""Simply tests if the default app title meets the expectations.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert app.title == '{{cookiecutter.app_title}}'
def test_carousel(app):
"""Test for the carousel widget of the app checking the slides' names.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the names of the slides do not match the expectations
"""
names = [slide.name for slide in app.carousel.slides]
expected = ['hello', 'kivy', 'cookiecutterdozer', 'license', 'github']
assert names == expected
|
import pam
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth.backends import ModelBackend
class PAMBackend(ModelBackend):
def authenticate(self, username=None, password=None):
if pam.authenticate(username, password):
try:
user = User.objects.get(username=username)
except:
user = User(username=username, password='not stored here')
user.set_unusable_password()
if getattr(settings, 'PAM_IS_SUPERUSER', False):
user.is_superuser = True
if getattr(settings, 'PAM_IS_STAFF', user.is_superuser):
user.is_staff = True
user.save()
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
| Add the PAM_SERVICE setting to select a custom pam service for authentication
import pam
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth.backends import ModelBackend
class PAMBackend(ModelBackend):
    # PAM service to authenticate against; configurable via the
    # PAM_SERVICE Django setting, defaulting to the standard 'login' stack.
    SERVICE = getattr(settings, 'PAM_SERVICE', 'login')
    def authenticate(self, username=None, password=None):
        """Authenticate *username*/*password* via PAM.

        Returns the matching (or newly created) ``User`` on success,
        ``None`` on failure. Superuser/staff flags for auto-created
        users are controlled by PAM_IS_SUPERUSER / PAM_IS_STAFF.
        """
        # Fix: the original referenced an undefined name `service`;
        # use the class-level SERVICE setting instead.
        if pam.authenticate(username, password, service=self.SERVICE):
            try:
                user = User.objects.get(username=username)
            except:
                # First login: create a shadow user; the real credential
                # lives in PAM, so mark the Django password unusable.
                user = User(username=username, password='not stored here')
                user.set_unusable_password()
                if getattr(settings, 'PAM_IS_SUPERUSER', False):
                    user.is_superuser = True
                if getattr(settings, 'PAM_IS_STAFF', user.is_superuser):
                    user.is_staff = True
                user.save()
            return user
        return None
    def get_user(self, user_id):
        """Return the User for *user_id*, or None if it no longer exists."""
        try:
            return User.objects.get(pk=user_id)
        except User.DoesNotExist:
            return None
import os
# *****************************
# Environment specific settings
# *****************************
# The settings below can (and should) be over-ruled by OS environment variable settings
# Flask settings # Generated with: import os; os.urandom(24)
SECRET_KEY = '\xb9\x8d\xb5\xc2\xc4Q\xe7\x8ej\xe0\x05\xf3\xa3kp\x99l\xe7\xf2i\x00\xb1-\xcd'
# PLEASE USE A DIFFERENT KEY FOR PRODUCTION ENVIRONMENTS!
# SQLAlchemy settings
SQLALCHEMY_DATABASE_URI = 'sqlite:///../app.sqlite'
# Flask-Mail settings
MAIL_USERNAME = 'email@example.com'
MAIL_PASSWORD = 'password'
MAIL_DEFAULT_SENDER = '"AppName" <noreply@example.com>'
MAIL_SERVER = 'MAIL_SERVER', 'smtp.gmail.com'
MAIL_PORT = 465
MAIL_USE_SSL = True
MAIL_USE_TLS = False
ADMINS = [
'"Admin One" <admin1@gmail.com>',
]
| Set environment settings to suit localhost
| import os
# *****************************
# Environment specific settings
# *****************************
# The settings below can (and should) be over-ruled by OS environment variable settings
# Flask settings # Generated with: import os; os.urandom(24)
SECRET_KEY = '\x9d|*\xbb\x82T\x83\xeb\xf52\xd1\xdfl\x87\xb4\x9e\x10f\xdf\x9e\xea\xf8_\x99'
# PLEASE USE A DIFFERENT KEY FOR PRODUCTION ENVIRONMENTS!
# SQLAlchemy settings
SQLALCHEMY_DATABASE_URI = 'sqlite:///../app.sqlite'
# Flask-Mail settings
MAIL_USERNAME = ''
MAIL_PASSWORD = ''
MAIL_DEFAULT_SENDER = ''
MAIL_SERVER = 'localhost'
MAIL_PORT = 25
MAIL_USE_SSL = False
MAIL_USE_TLS = False
ADMINS = [
'"Admin One" <admin1@gmail.com>',
]
|
from flask import Flask, render_template, request, redirect
import requests
import pandas as pd
from datetime import datetime
from bokeh.plotting import figure, output_notebook, output_file, save
app = Flask(__name__)
# @app.route('/')
# def main():
# return redirect('/index')
@app.route('/', methods=['GET', 'POST'])
def index():
if request.method == 'GET':
return render_template('index.html')
else:
pitcher = request.form['pitcher']
image_file = pitcher.lower()
image_file = image_file.split()
image_file = '_'.join(image_file) + '.png'
return render_template('results.html', image_file = image_file)
if __name__ == '__main__':
app.run(port=33508) | Revert "Remove redirect to avoid Chrome privacy error"
This reverts commit e5322958f14b2428b74de726476fd98adae8c454.
| from flask import Flask, render_template, request, redirect
import requests
import pandas as pd
from datetime import datetime
from bokeh.plotting import figure, output_notebook, output_file, save
app = Flask(__name__)
@app.route('/')
def main():
return redirect('/index')
@app.route('/index', methods=['GET', 'POST'])
def index():
if request.method == 'GET':
return render_template('index.html')
else:
pitcher = request.form['pitcher']
image_file = pitcher.lower()
image_file = image_file.split()
image_file = '_'.join(image_file) + '.png'
return render_template('results.html', image_file = image_file)
if __name__ == '__main__':
app.run(port=33508) |
# Webhooks for external integrations.
from typing import Any, Dict
from django.http import HttpRequest, HttpResponse
from zerver.decorator import api_key_only_webhook_view
from zerver.lib.actions import check_send_private_message
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.models import UserProfile, get_user_profile_by_email
@api_key_only_webhook_view("dialogflow")
@has_request_variables
def api_dialogflow_webhook(request: HttpRequest, user_profile: UserProfile,
payload: Dict[str, Any]=REQ(argument_type='body'),
email: str=REQ(default='foo')) -> HttpResponse:
status = payload["status"]["code"]
if status == 200:
result = payload["result"]["fulfillment"]["speech"]
if not result:
alternate_result = payload["alternateResult"]["fulfillment"]["speech"]
if not alternate_result:
body = "DialogFlow couldn't process your query."
else:
body = alternate_result
else:
body = result
else:
error_status = payload["status"]["errorDetails"]
body = "{} - {}".format(status, error_status)
profile = get_user_profile_by_email(email)
check_send_private_message(user_profile, request.client, profile, body)
return json_success()
| webhooks/dialogflow: Remove default value for email parameter.
The webhook view used a default value for the email, which gave
non-informative errors when the webhook is incorrectly configured without
the email parameter.
| # Webhooks for external integrations.
from typing import Any, Dict
from django.http import HttpRequest, HttpResponse
from zerver.decorator import api_key_only_webhook_view
from zerver.lib.actions import check_send_private_message
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.models import UserProfile, get_user_profile_by_email
@api_key_only_webhook_view("dialogflow")
@has_request_variables
def api_dialogflow_webhook(request: HttpRequest, user_profile: UserProfile,
payload: Dict[str, Any]=REQ(argument_type='body'),
email: str=REQ()) -> HttpResponse:
status = payload["status"]["code"]
if status == 200:
result = payload["result"]["fulfillment"]["speech"]
if not result:
alternate_result = payload["alternateResult"]["fulfillment"]["speech"]
if not alternate_result:
body = "DialogFlow couldn't process your query."
else:
body = alternate_result
else:
body = result
else:
error_status = payload["status"]["errorDetails"]
body = "{} - {}".format(status, error_status)
profile = get_user_profile_by_email(email)
check_send_private_message(user_profile, request.client, profile, body)
return json_success()
|
"""
usage: azure-cli container list
azure-cli container content <name>
commands:
list list available containers
content list content of given container
"""
# project
from cli_task import CliTask
from storage_account import StorageAccount
from data_collector import DataCollector
from logger import Logger
from exceptions import *
from container import Container
class ContainerTask(CliTask):
def process(self):
account = StorageAccount(self.account_name, self.config_file)
self.container = Container(account)
if self.command_args['list']:
self.__list()
elif self.command_args['content']:
self.__content()
else:
raise AzureUnknownContainerCommand(self.command_args)
def __list(self):
result = DataCollector()
result.add('containers', self.container.list())
Logger.info(result.get())
def __content(self):
result = DataCollector()
result.add('container_content', self.container.content(
self.command_args['<name>'])
)
Logger.info(result.get())
| Add used storage account name in container info
When calling e.g. 'container list' you get all container names,
but you don't know which storage account name was used.
One would need to look at the config file to check which
storage account name was configured, which can be avoided
by simply adding this information to the output
| """
usage: azure-cli container list
azure-cli container content <name>
commands:
list list available containers
content list content of given container
"""
# project
from cli_task import CliTask
from storage_account import StorageAccount
from data_collector import DataCollector
from logger import Logger
from exceptions import *
from container import Container
class ContainerTask(CliTask):
def process(self):
self.account = StorageAccount(self.account_name, self.config_file)
self.container = Container(self.account)
if self.command_args['list']:
self.__list()
elif self.command_args['content']:
self.__content()
else:
raise AzureUnknownContainerCommand(self.command_args)
def __list(self):
result = DataCollector()
result.add(
'containers:' + self.account.get_name(),
self.container.list()
)
Logger.info(result.get())
def __content(self):
result = DataCollector()
result.add(
'container_content:' + self.account.get_name(),
self.container.content(self.command_args['<name>'])
)
Logger.info(result.get())
|
import os.path
from .compiler import kompile
PATHS = []
class TemplateNotFound(Exception):
pass
def add_path(path):
path = os.path.abspath(path)
if path not in PATHS:
PATHS.append(path)
def load_template(name, paths=None, raw=False):
if paths is None:
paths = PATHS[:]
for path in paths:
full_name = os.path.abspath(os.path.join(path, name))
if not full_name.startswith(path):
continue
try:
with open(full_name, encoding='utf-8') as fin:
src = fin.read()
return kompile(src, raw=raw, filename=name)
except FileNotFoundError:
pass
else:
raise TemplateNotFound(name)
| Use full path of template as filename when compiling
|
import os.path
from .compiler import kompile
PATHS = []
class TemplateNotFound(Exception):
pass
def add_path(path):
path = os.path.abspath(path)
if path not in PATHS:
PATHS.append(path)
def load_template(name, paths=None, raw=False):
if paths is None:
paths = PATHS[:]
for path in paths:
full_name = os.path.abspath(os.path.join(path, name))
if not full_name.startswith(path):
continue
try:
with open(full_name, encoding='utf-8') as fin:
src = fin.read()
return kompile(src, raw=raw, filename=full_name)
except FileNotFoundError:
pass
else:
raise TemplateNotFound(name)
|
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import requests
def fetch_html_document(url, user_agent='python_requests.cli-ws'):
"""Request html document from url
Positional Arguments:
url (str): a web address (http://example.com/)
Keyword Arguments:
user_agent (str): the user agent that will be sent with the
request (default: python_requests.cli-ws)
Return:
tuple: the status code of the response and the html document
"""
response = requests.get(url, headers={'User-Agent': user_agent})
return response.status_code, response.text
| Change default user_agent to match mozilla standard
| import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import requests
def fetch_html_document(url, user_agent='cli-ws/1.0'):
"""Request html document from url
Positional Arguments:
url (str): a web address (http://example.com/)
Keyword Arguments:
user_agent (str): the user agent that will be sent with the
request (default: python_requests.cli-ws)
Return:
tuple: the status code of the response and the html document
"""
response = requests.get(url, headers={'User-Agent': user_agent})
return response.status_code, response.text
|
from flask_wtf import Form
from wtforms import StringField, IntegerField, PasswordField, SubmitField, SelectField
from wtforms.validators import Required, Length, Regexp
from ..core.db import db
from ..core.security import User
from .. import app
class ApplicationForm(Form):
start = SubmitField('Start')
stop = SubmitField('Stop')
class ApplicationSettingsForm(Form):
domain = StringField('Domain', description="If you change it then make sure you also configure it with your DNS provider")
submit = SubmitField('Update')
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if self.domain.data:
server_name = app.config["SERVER_NAME_FULL"]
if server_name != "localhost" and self.domain.data.endswith(server_name):
self.domain.errors.append('Invalid domain, cannot end with ' + server_name)
return False
return True
class ProfileForm(Form):
login = StringField('Login')
email = StringField('Email')
name = StringField('Name', validators=[Required(), Length(1, 64),
Regexp(r'^[A-Za-z0-9_\- ]+$', 0, 'Name must have only letters, numbers, spaces, dots, dashes or underscores')])
submit = SubmitField('Update')
| Allow changing to own domain name
| from flask_wtf import Form
from wtforms import StringField, IntegerField, PasswordField, SubmitField, SelectField
from wtforms.validators import Required, Length, Regexp
from ..core.db import db
from ..core.security import User
from .. import app
class ApplicationForm(Form):
start = SubmitField('Start')
stop = SubmitField('Stop')
class ApplicationSettingsForm(Form):
domain = StringField('Domain', description="If you change it then make sure you also configure it with your DNS provider")
submit = SubmitField('Update')
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if self.domain.data:
server_name = app.config["SERVER_NAME_FULL"]
if (server_name != "localhost"
and not self.domain.data.endswith(current_user.login + "." + server_name)
and self.domain.data.endswith(server_name)):
self.domain.errors.append('Invalid domain, cannot end with ' + server_name)
return False
return True
class ProfileForm(Form):
login = StringField('Login')
email = StringField('Email')
name = StringField('Name', validators=[Required(), Length(1, 64),
Regexp(r'^[A-Za-z0-9_\- ]+$', 0, 'Name must have only letters, numbers, spaces, dots, dashes or underscores')])
submit = SubmitField('Update')
|
"""Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
def generate_phrase(phrases, cache):
seed_phrase = []
while len(seed_phrase) < 3:
seed_phrase = random.choice(phrases).split()
w1, w2 = seed_phrase[:2]
chosen = [w1, w2]
while "{}|{}".format(w1, w2) in cache:
choice = random.choice(cache["{}|{}".format(w1, w2)])
w1, w2 = w2, choice
chosen.append(choice)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return '> {} ~brian'.format(generate_phrase(phrases, cache))
| Use bigrams in Markov chain generator
| """Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
attribution = [
"salad master",
"esquire",
"the one and only",
"startup enthusiast",
"boba king",
"not-dictator",
"normal citizen",
"ping-pong expert"
]
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
def generate_phrase(phrases, cache, max_length=40):
seed_phrase = []
while len(seed_phrase) < 2:
seed_phrase = random.choice(phrases).split()
w1, = seed_phrase[:1]
chosen = [w1]
while w1 in cache and len(chosen)<max_length:
w1 = random.choice(cache[w1])
chosen.append(w1)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return '> {} ~ Brian Chu, {}'.format(generate_phrase(phrases, cache),
random.choice(attribution))
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Primary setup for PyTips."""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
import os
from flask import Flask
from flask_heroku import Heroku
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('pytips.default_settings')
heroku = Heroku(app)
# Flask-Heroku is looking at an env var that I don't have, so overwrite
# it with one that I found by dumping os.environ in a log statement.
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get(
'HEROKU_POSTGRESQL_CRIMSON_URL',
app.config['SQLALCHEMY_DATABASE_URI'])
db = SQLAlchemy(app)
# I'm about to import a module that I won't use explicitly; when it loads, the
# model definitions created, so you *must* leave the import in place. Also, it
# relies on `db` being already configured, so don't import it before everything
# is all set up.
from pytips import models
# I'm about to import a module that I won't use explicitly; when it loads, the
# routes for the app will be defined, so you *must* leave the import in place.
# Also, it relies on `app` being already configured, so don't import it before
# everything is all set up.
from pytips import views
if __name__ == '__main__':
app.run()
| Undo my 'work-around' for flask-heroku.
| #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Primary setup for PyTips."""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from flask import Flask
from flask_heroku import Heroku
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('pytips.default_settings')
heroku = Heroku(app)
db = SQLAlchemy(app)
# I'm about to import a module that I won't use explicitly; when it loads, the
# model definitions created, so you *must* leave the import in place. Also, it
# relies on `db` being already configured, so don't import it before everything
# is all set up.
from pytips import models
# I'm about to import a module that I won't use explicitly; when it loads, the
# routes for the app will be defined, so you *must* leave the import in place.
# Also, it relies on `app` being already configured, so don't import it before
# everything is all set up.
from pytips import views
if __name__ == '__main__':
app.run()
|
import logging
import azure.functions as func
def main(msgIn: func.ServiceBusMessage, msgOut: func.Out[str]):
body = msgIn.get_body().decode('utf-8')
logging.info(f'Processed Service Bus Queue message: {body}')
msgOut.set(msgbody)
| Fix var name in service bus function
...i am a horrible programmer | import logging
import azure.functions as func
def main(msgIn: func.ServiceBusMessage, msgOut: func.Out[str]):
body = msgIn.get_body().decode('utf-8')
logging.info(f'Processed Service Bus Queue message: {body}')
msgOut.set(body)
|
from nameko.web.handlers import http
from pokebattle.scores import ScoreService
class GameService(object):
score_service = RpcProxy('score_service')
@http('POST', '/signup')
def signup(self):
pass
@http('POST', '/login')
def login(self):
pass
@http('POST', '/battle')
def new_game(self):
pass
@http('GET', '/leaderboard')
def leaderboard(self):
pass
@http('GET', '/user/<int:id>')
def user(self):
pass
@http('GET', '/user/<int:id>/pokemons')
def user_pokemons(self):
pass
| Add leaderbord rpc call and add request arg to all methods
| import json
from nameko.web.handlers import http
from nameko.rpc import RpcProxy
from pokebattle.scores import ScoreService
class GameService(object):
name = 'game_service'
score_rpc = RpcProxy('score_service')
@http('POST', '/signup')
def signup(self, request):
pass
@http('POST', '/login')
def login(self, request):
pass
@http('POST', '/battle')
def new_game(self, request):
pass
@http('GET', '/leaderboard')
def leaderboard(self, request):
return json.dumps(self.score_rpc.leaderboard())
@http('GET', '/user/<int:id>')
def user(self, request):
pass
@http('GET', '/user/<int:id>/pokemons')
def user_pokemons(self, request):
pass
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import os
from django.conf import settings
from libs.paths import delete_path, create_path
def get_experiment_outputs_path(experiment_name):
values = experiment_name.split('.')
if len(values) == 3:
values.insert(2, 'independents')
return os.path.join(settings.OUTPUTS_ROOT, '/'.join(values))
def get_experiment_logs_path(experiment_name):
values = experiment_name.split('.')
if len(values) == 3:
values.insert(2, 'independents')
return os.path.join(settings.LOGS_ROOT, '/'.join(values))
def delete_experiment_logs(experiment_group_name):
path = get_experiment_logs_path(experiment_group_name)
delete_path(path)
def delete_experiment_outputs(experiment_group_name):
path = get_experiment_outputs_path(experiment_group_name)
delete_path(path)
def create_experiment_logs_path(experiment_name):
values = experiment_name.split('.')
if len(values) == 3:
values.insert(2, 'independents')
path = settings.LOGS_ROOT
for value in values[:-1]:
path = os.path.join(path, value)
if not os.path.isdir(path):
create_path(path)
| Update experiment logs path creation
| # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import os
from django.conf import settings
from libs.paths import delete_path, create_path
def get_experiment_outputs_path(experiment_name):
values = experiment_name.split('.')
if len(values) == 3:
values.insert(2, 'independents')
return os.path.join(settings.OUTPUTS_ROOT, '/'.join(values))
def get_experiment_logs_path(experiment_name):
values = experiment_name.split('.')
if len(values) == 3:
values.insert(2, 'independents')
return os.path.join(settings.LOGS_ROOT, '/'.join(values))
def delete_experiment_logs(experiment_group_name):
path = get_experiment_logs_path(experiment_group_name)
delete_path(path)
def delete_experiment_outputs(experiment_group_name):
path = get_experiment_outputs_path(experiment_group_name)
delete_path(path)
def create_experiment_logs_path(experiment_name):
values = experiment_name.split('.')
if len(values) == 3:
values.insert(2, 'independents')
path = settings.LOGS_ROOT
for value in values[:-1]:
path = os.path.join(path, value)
if not os.path.isdir(path):
create_path(path)
# Create file with permissions
path = os.path.join(path, values[-1])
open(path, 'w+')
os.chmod(path, 0o777)
|
from django import template
from django.template.loader import render_to_string, TemplateDoesNotExist
register = template.Library()
@register.simple_tag(takes_context=True)
def render_with_template_if_exist(context, template, fallback):
text = fallback
try:
text = render_to_string(template, context)
except:
pass
return text
@register.simple_tag(takes_context=True)
def language_selector(context):
""" displays a language selector dropdown in the admin, based on Django "LANGUAGES" context.
requires:
* USE_I18N = True / settings.py
* LANGUAGES specified / settings.py (otherwise all Django locales will be displayed)
* "set_language" url configured (see https://docs.djangoproject.com/en/dev/topics/i18n/translation/#the-set-language-redirect-view)
"""
output = ""
from django.conf import settings
i18 = getattr(settings, 'USE_I18N', False)
if i18:
template = "admin/language_selector.html"
context['i18n_is_set'] = True
try:
output = render_to_string(template, context)
except:
pass
return output
@register.filter(name='column_width')
def column_width(value):
return 12/len(list(value)) | Fix column_width filter in python3
Force integer division otherwise we'll fsck bootstrap classes
As seen here:
https://gist.github.com/ScreenDriver/86a812b7b3f891fe8649#file-broken_fieldsets
| from django import template
from django.template.loader import render_to_string, TemplateDoesNotExist
register = template.Library()
@register.simple_tag(takes_context=True)
def render_with_template_if_exist(context, template, fallback):
text = fallback
try:
text = render_to_string(template, context)
except:
pass
return text
@register.simple_tag(takes_context=True)
def language_selector(context):
""" displays a language selector dropdown in the admin, based on Django "LANGUAGES" context.
requires:
* USE_I18N = True / settings.py
* LANGUAGES specified / settings.py (otherwise all Django locales will be displayed)
* "set_language" url configured (see https://docs.djangoproject.com/en/dev/topics/i18n/translation/#the-set-language-redirect-view)
"""
output = ""
from django.conf import settings
i18 = getattr(settings, 'USE_I18N', False)
if i18:
template = "admin/language_selector.html"
context['i18n_is_set'] = True
try:
output = render_to_string(template, context)
except:
pass
return output
@register.filter(name='column_width')
def column_width(value):
return 12 // len(list(value))
|
import os as _os
from . import LI_V00
from . import BO_V901
from . import SI_V07
from . import TI_V00
from . import TS_V500
from . import TB_V300
with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f:
__version__ = _f.read().strip()
__all__ = ['LI_V00', 'BO_V901', 'SI_V07', 'TI_V00', 'TS_V500', 'TB_V300']
li = LI_V00
tb = TB_V300
bo = BO_V901
ts = TS_V500
si = SI_V07
ti = TI_V00
| Return TS to V400 for release
| import os as _os
from . import LI_V00
from . import BO_V901
from . import SI_V07
from . import TI_V00
from . import TS_V400
from . import TB_V300
with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f:
__version__ = _f.read().strip()
__all__ = ['LI_V00', 'BO_V901', 'SI_V07', 'TI_V00', 'TS_V400', 'TB_V300']
li = LI_V00
tb = TB_V300
bo = BO_V901
ts = TS_V400
si = SI_V07
ti = TI_V00
|
#!/usr/bin/env python
from setuptools import setup, find_packages
from imp import load_source
setup(
name='cmis',
version=load_source('', 'cmis/_version.py').__version__,
description='A server architecture built on top of a solid foundation '
'provided by flask, sqlalchemy, and various extensions.',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
author='Concordus Applications',
author_email='support@concordusapps.com',
url='http://github.com/concordusapps/alchemist',
packages=find_packages('.'),
entry_points={'pytest11': ['alchemist = alchemist.plugin']},
dependency_links=[
'git+git://github.com/concordusapps/python-cmislib.git@master'
'#egg=cmislib-dev',
],
install_requires=[
"cmislib == dev"
],
)
| Fix cmislib branch for py3k
| #!/usr/bin/env python
from setuptools import setup, find_packages
from imp import load_source
setup(
name='cmis',
version=load_source('', 'cmis/_version.py').__version__,
description='A server architecture built on top of a solid foundation '
'provided by flask, sqlalchemy, and various extensions.',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
author='Concordus Applications',
author_email='support@concordusapps.com',
url='http://github.com/concordusapps/alchemist',
packages=find_packages('.'),
entry_points={'pytest11': ['alchemist = alchemist.plugin']},
dependency_links=[
'git+git://github.com/concordusapps/python-cmislib.git@topics/py3k'
'#egg=cmislib-dev',
],
install_requires=[
"cmislib == dev"
],
)
|
from setuptools import setup, find_packages
setup(
name="elasticmagic",
version="0.0.0a0",
author="Alexander Koval",
author_email="kovalidis@gmail.com",
description=("Python orm for elasticsearch."),
license="Apache License 2.0",
keywords="elasticsearch dsl",
url="https://github.com/anti-social/elasticmagic",
packages=find_packages(exclude=["tests"]),
install_requires=[
"elasticsearch>=6.0.0,<8.0",
"python-dateutil",
],
extras_require={
"geo": [
"python-geohash",
],
"async": [
"elasticsearch-py-async",
],
},
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Internet :: WWW/HTTP :: Indexing/Search",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
| Remove version bounds for elasticsearch dependency
| from setuptools import setup, find_packages
setup(
name="elasticmagic",
version="0.0.0a0",
author="Alexander Koval",
author_email="kovalidis@gmail.com",
description=("Python orm for elasticsearch."),
license="Apache License 2.0",
keywords="elasticsearch dsl",
url="https://github.com/anti-social/elasticmagic",
packages=find_packages(exclude=["tests"]),
install_requires=[
"elasticsearch",
"python-dateutil",
],
extras_require={
"geo": [
"python-geohash",
],
"async": [
"elasticsearch-py-async",
],
},
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Internet :: WWW/HTTP :: Indexing/Search",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
VERSION = '3.1.1'
setup(
name='conllu',
packages=["conllu"],
version=VERSION,
description='CoNLL-U Parser parses a CoNLL-U formatted string into a nested python dictionary',
long_description=open(os.path.join(os.path.dirname(__file__), 'README.md')).read(),
long_description_content_type="text/markdown",
author=u'Emil Stenström',
author_email='em@kth.se',
url='https://github.com/EmilStenstrom/conllu/',
install_requires=[],
keywords=['conllu', 'conll', 'conll-u', 'parser', 'nlp'],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Operating System :: OS Independent",
],
)
| Remove old Python pypi classifiers.
| # -*- coding: utf-8 -*-
import os
from setuptools import setup
VERSION = '3.1.1'
setup(
name='conllu',
packages=["conllu"],
version=VERSION,
description='CoNLL-U Parser parses a CoNLL-U formatted string into a nested python dictionary',
long_description=open(os.path.join(os.path.dirname(__file__), 'README.md')).read(),
long_description_content_type="text/markdown",
author=u'Emil Stenström',
author_email='em@kth.se',
url='https://github.com/EmilStenstrom/conllu/',
install_requires=[],
keywords=['conllu', 'conll', 'conll-u', 'parser', 'nlp'],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Operating System :: OS Independent",
],
)
|
from setuptools import setup
from pip.req import parse_requirements
install_reqs = parse_requirements('requirements.txt')
setup(
name = "lrs",
version = "0.0.0",
author = "ADL",
packages=['lrs'],
install_requires=[str(ir.req) for ir in install_reqs],
)
| Handle newer versions of pip.
| from setuptools import setup
from pip.req import parse_requirements
install_reqs = parse_requirements('requirements.txt', session=False)
setup(
name = "lrs",
version = "0.0.0",
author = "ADL",
packages=['lrs'],
install_requires=[str(ir.req) for ir in install_reqs],
)
|