commit
stringlengths 40
40
| old_file
stringlengths 5
117
| new_file
stringlengths 5
117
| old_contents
stringlengths 0
1.93k
| new_contents
stringlengths 19
3.3k
| subject
stringlengths 17
320
| message
stringlengths 18
3.28k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 7
42.4k
| completion
stringlengths 19
3.3k
| prompt
stringlengths 21
3.65k
|
---|---|---|---|---|---|---|---|---|---|---|---|
0748838525cb2c2ee838da3a3e906ebf8dd25a3b | setup.py | setup.py | from setuptools import setup
import curtsies
setup(name='curtsies',
version=curtsies.__version__,
description='Curses-like terminal wrapper, with colored strings!',
url='https://github.com/thomasballinger/curtsies',
author='Thomas Ballinger',
author_email='thomasballinger@gmail.com',
license='MIT',
packages=['curtsies'],
install_requires = [
'blessings==1.5.1'
],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
],
zip_safe=False)
| from setuptools import setup
import ast
import os
def version():
"""Return version string."""
with open(os.path.join('curtsies', '__init__.py')) as input_file:
for line in input_file:
if line.startswith('__version__'):
return ast.parse(line).body[0].value.s
setup(name='curtsies',
version=version(),
description='Curses-like terminal wrapper, with colored strings!',
url='https://github.com/thomasballinger/curtsies',
author='Thomas Ballinger',
author_email='thomasballinger@gmail.com',
license='MIT',
packages=['curtsies'],
install_requires = [
'blessings==1.5.1'
],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
],
zip_safe=False)
| Fix installation, broken since started doing import in __init__ | Fix installation, broken since started doing import in __init__
Thanks @myint for the catch and code suggestion
| Python | mit | sebastinas/curtsies,thomasballinger/curtsies,spthaolt/curtsies | from setuptools import setup
import ast
import os
def version():
"""Return version string."""
with open(os.path.join('curtsies', '__init__.py')) as input_file:
for line in input_file:
if line.startswith('__version__'):
return ast.parse(line).body[0].value.s
setup(name='curtsies',
version=version(),
description='Curses-like terminal wrapper, with colored strings!',
url='https://github.com/thomasballinger/curtsies',
author='Thomas Ballinger',
author_email='thomasballinger@gmail.com',
license='MIT',
packages=['curtsies'],
install_requires = [
'blessings==1.5.1'
],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
],
zip_safe=False)
| Fix installation, broken since started doing import in __init__
Thanks @myint for the catch and code suggestion
from setuptools import setup
import curtsies
setup(name='curtsies',
version=curtsies.__version__,
description='Curses-like terminal wrapper, with colored strings!',
url='https://github.com/thomasballinger/curtsies',
author='Thomas Ballinger',
author_email='thomasballinger@gmail.com',
license='MIT',
packages=['curtsies'],
install_requires = [
'blessings==1.5.1'
],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
],
zip_safe=False)
|
ee85d2fffc0e42022be66bf667005eb44391cb9e | django/similarities/utils.py | django/similarities/utils.py | import echonest
from artists.models import Artist
from echonest.models import SimilarResponse
from users.models import User
from .models import (GeneralArtist, UserSimilarity, Similarity,
update_similarities)
def add_new_similarities(artist, force_update=False):
similarities = []
responses = SimilarResponse.objects.filter(
normalized_name=artist.normalized_name)
if responses.exists() and not force_update:
return # Echo Nest similarities already added
user = User.objects.get(email='echonest')
artist_names = echonest.get_similar(artist.name)
cc_artists = Artist.objects.filter(name__in=artist_names)
for cc_artist in cc_artists:
kwargs = dict(
cc_artist=cc_artist,
other_artist=artist,
)
UserSimilarity.objects.get_or_create(defaults={'weight': 1},
user=user, **kwargs)
similarities.append(Similarity.objects.get_or_create(**kwargs)[0])
update_similarities(similarities)
def get_similar(name):
artist, _ = GeneralArtist.objects.get_or_create(
normalized_name=name.upper(), defaults={'name': name})
add_new_similarities(artist)
return Artist.objects.filter(similarity__other_artist=artist,
similarity__weight__gt=0)
| from django.db.models import Q
import echonest
from artists.models import Artist
from echonest.models import SimilarResponse
from users.models import User
from .models import (GeneralArtist, UserSimilarity, Similarity,
update_similarities)
def add_new_similarities(artist, force_update=False):
similarities = []
responses = SimilarResponse.objects.filter(
normalized_name=artist.normalized_name)
if responses.exists() and not force_update:
return # Echo Nest similarities already added
user = User.objects.get(email='echonest')
artist_names = echonest.get_similar(artist.name)
cc_artists = Artist.objects.filter(name__in=artist_names)
for cc_artist in cc_artists:
kwargs = dict(
cc_artist=cc_artist,
other_artist=artist,
)
UserSimilarity.objects.get_or_create(defaults={'weight': 1},
user=user, **kwargs)
similarities.append(Similarity.objects.get_or_create(**kwargs)[0])
update_similarities(similarities)
def get_similar(name):
artist, _ = GeneralArtist.objects.get_or_create(
normalized_name=name.upper(), defaults={'name': name})
add_new_similarities(artist)
similar = Q(similarity__other_artist=artist, similarity__weight__gt=0)
return Artist.objects.filter(similar).order_by('-similarity__weight')
| Order similar artist results properly | Order similar artist results properly
| Python | bsd-3-clause | FreeMusicNinja/freemusic.ninja,FreeMusicNinja/freemusic.ninja | from django.db.models import Q
import echonest
from artists.models import Artist
from echonest.models import SimilarResponse
from users.models import User
from .models import (GeneralArtist, UserSimilarity, Similarity,
update_similarities)
def add_new_similarities(artist, force_update=False):
similarities = []
responses = SimilarResponse.objects.filter(
normalized_name=artist.normalized_name)
if responses.exists() and not force_update:
return # Echo Nest similarities already added
user = User.objects.get(email='echonest')
artist_names = echonest.get_similar(artist.name)
cc_artists = Artist.objects.filter(name__in=artist_names)
for cc_artist in cc_artists:
kwargs = dict(
cc_artist=cc_artist,
other_artist=artist,
)
UserSimilarity.objects.get_or_create(defaults={'weight': 1},
user=user, **kwargs)
similarities.append(Similarity.objects.get_or_create(**kwargs)[0])
update_similarities(similarities)
def get_similar(name):
artist, _ = GeneralArtist.objects.get_or_create(
normalized_name=name.upper(), defaults={'name': name})
add_new_similarities(artist)
similar = Q(similarity__other_artist=artist, similarity__weight__gt=0)
return Artist.objects.filter(similar).order_by('-similarity__weight')
| Order similar artist results properly
import echonest
from artists.models import Artist
from echonest.models import SimilarResponse
from users.models import User
from .models import (GeneralArtist, UserSimilarity, Similarity,
update_similarities)
def add_new_similarities(artist, force_update=False):
similarities = []
responses = SimilarResponse.objects.filter(
normalized_name=artist.normalized_name)
if responses.exists() and not force_update:
return # Echo Nest similarities already added
user = User.objects.get(email='echonest')
artist_names = echonest.get_similar(artist.name)
cc_artists = Artist.objects.filter(name__in=artist_names)
for cc_artist in cc_artists:
kwargs = dict(
cc_artist=cc_artist,
other_artist=artist,
)
UserSimilarity.objects.get_or_create(defaults={'weight': 1},
user=user, **kwargs)
similarities.append(Similarity.objects.get_or_create(**kwargs)[0])
update_similarities(similarities)
def get_similar(name):
artist, _ = GeneralArtist.objects.get_or_create(
normalized_name=name.upper(), defaults={'name': name})
add_new_similarities(artist)
return Artist.objects.filter(similarity__other_artist=artist,
similarity__weight__gt=0)
|
041123e7348cf05dd1432d8550cc497a1995351d | setup.py | setup.py | try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import os.path
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
README_FILE = os.path.join(ROOT_DIR, "README.rst")
with open(README_FILE) as f:
long_description = f.read()
setup(
name="xutils",
version="0.8.2",
description="A Fragmentary Python Library.",
long_description=long_description,
author="xgfone",
author_email="xgfone@126.com",
maintainer="xgfone",
maintainer_email="xgfone@126.com",
url="https://github.com/xgfone/xutils",
packages=["xutils"],
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
)
| try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import os.path
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
README_FILE = os.path.join(ROOT_DIR, "README.rst")
with open(README_FILE) as f:
long_description = f.read()
setup(
name="xutils",
version="0.9",
description="A Fragmentary Python Library.",
long_description=long_description,
author="xgfone",
author_email="xgfone@126.com",
maintainer="xgfone",
maintainer_email="xgfone@126.com",
url="https://github.com/xgfone/xutils",
packages=["xutils"],
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
)
| Set the version to 0.9 | Set the version to 0.9
| Python | mit | xgfone/xutils,xgfone/pycom | try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import os.path
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
README_FILE = os.path.join(ROOT_DIR, "README.rst")
with open(README_FILE) as f:
long_description = f.read()
setup(
name="xutils",
version="0.9",
description="A Fragmentary Python Library.",
long_description=long_description,
author="xgfone",
author_email="xgfone@126.com",
maintainer="xgfone",
maintainer_email="xgfone@126.com",
url="https://github.com/xgfone/xutils",
packages=["xutils"],
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
)
| Set the version to 0.9
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import os.path
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
README_FILE = os.path.join(ROOT_DIR, "README.rst")
with open(README_FILE) as f:
long_description = f.read()
setup(
name="xutils",
version="0.8.2",
description="A Fragmentary Python Library.",
long_description=long_description,
author="xgfone",
author_email="xgfone@126.com",
maintainer="xgfone",
maintainer_email="xgfone@126.com",
url="https://github.com/xgfone/xutils",
packages=["xutils"],
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
)
|
7be606951b22d77a53274d014cd94aae30af93f5 | samples/oauth2_for_devices.py | samples/oauth2_for_devices.py | # -*- coding: utf-8 -*-
# See: https://developers.google.com/accounts/docs/OAuth2ForDevices
import httplib2
from six.moves import input
from oauth2client.client import OAuth2WebServerFlow
from googleapiclient.discovery import build
CLIENT_ID = "some+client+id"
CLIENT_SECRET = "some+client+secret"
SCOPES = ("https://www.googleapis.com/auth/youtube",)
flow = OAuth2WebServerFlow(CLIENT_ID, CLIENT_SECRET, " ".join(SCOPES))
# Step 1: get user code and verification URL
# https://developers.google.com/accounts/docs/OAuth2ForDevices#obtainingacode
flow_info = flow.step1_get_device_and_user_codes()
print "Enter the following code at %s: %s" % (flow_info.verification_url,
flow_info.user_code)
print "Then press Enter."
input()
# Step 2: get credentials
# https://developers.google.com/accounts/docs/OAuth2ForDevices#obtainingatoken
credentials = flow.step2_exchange(device_flow_info=flow_info)
print "Access token:", credentials.access_token
print "Refresh token:", credentials.refresh_token
# Get YouTube service
# https://developers.google.com/accounts/docs/OAuth2ForDevices#callinganapi
youtube = build("youtube", "v3", http=credentials.authorize(httplib2.Http()))
| # -*- coding: utf-8 -*-
# See: https://developers.google.com/accounts/docs/OAuth2ForDevices
import httplib2
from six.moves import input
from oauth2client.client import OAuth2WebServerFlow
from googleapiclient.discovery import build
CLIENT_ID = "some+client+id"
CLIENT_SECRET = "some+client+secret"
SCOPES = ("https://www.googleapis.com/auth/youtube",)
flow = OAuth2WebServerFlow(CLIENT_ID, CLIENT_SECRET, " ".join(SCOPES))
# Step 1: get user code and verification URL
# https://developers.google.com/accounts/docs/OAuth2ForDevices#obtainingacode
flow_info = flow.step1_get_device_and_user_codes()
print("Enter the following code at {0}: {1}".format(flow_info.verification_url,
flow_info.user_code))
print("Then press Enter.")
input()
# Step 2: get credentials
# https://developers.google.com/accounts/docs/OAuth2ForDevices#obtainingatoken
credentials = flow.step2_exchange(device_flow_info=flow_info)
print("Access token: {0}".format(credentials.access_token))
print("Refresh token: {0}".format(credentials.refresh_token))
# Get YouTube service
# https://developers.google.com/accounts/docs/OAuth2ForDevices#callinganapi
youtube = build("youtube", "v3", http=credentials.authorize(httplib2.Http()))
| Fix example to be Python3 compatible, use format() | Fix example to be Python3 compatible, use format()
Both print() and format() are compatible from 2.6. Also, format() is much nicer to use for internationalization since you can define the location of your substitutions. It works similarly to Java and .net's format() as well. Great stuff!
Should I tackle the other examples as well, or is piece meal all right? | Python | apache-2.0 | googleapis/oauth2client,jonparrott/oauth2client,google/oauth2client,jonparrott/oauth2client,clancychilds/oauth2client,googleapis/oauth2client,google/oauth2client,clancychilds/oauth2client | # -*- coding: utf-8 -*-
# See: https://developers.google.com/accounts/docs/OAuth2ForDevices
import httplib2
from six.moves import input
from oauth2client.client import OAuth2WebServerFlow
from googleapiclient.discovery import build
CLIENT_ID = "some+client+id"
CLIENT_SECRET = "some+client+secret"
SCOPES = ("https://www.googleapis.com/auth/youtube",)
flow = OAuth2WebServerFlow(CLIENT_ID, CLIENT_SECRET, " ".join(SCOPES))
# Step 1: get user code and verification URL
# https://developers.google.com/accounts/docs/OAuth2ForDevices#obtainingacode
flow_info = flow.step1_get_device_and_user_codes()
print("Enter the following code at {0}: {1}".format(flow_info.verification_url,
flow_info.user_code))
print("Then press Enter.")
input()
# Step 2: get credentials
# https://developers.google.com/accounts/docs/OAuth2ForDevices#obtainingatoken
credentials = flow.step2_exchange(device_flow_info=flow_info)
print("Access token: {0}".format(credentials.access_token))
print("Refresh token: {0}".format(credentials.refresh_token))
# Get YouTube service
# https://developers.google.com/accounts/docs/OAuth2ForDevices#callinganapi
youtube = build("youtube", "v3", http=credentials.authorize(httplib2.Http()))
| Fix example to be Python3 compatible, use format()
Both print() and format() are compatible from 2.6. Also, format() is much nicer to use for internationalization since you can define the location of your substitutions. It works similarly to Java and .net's format() as well. Great stuff!
Should I tackle the other examples as well, or is piece meal all right?
# -*- coding: utf-8 -*-
# See: https://developers.google.com/accounts/docs/OAuth2ForDevices
import httplib2
from six.moves import input
from oauth2client.client import OAuth2WebServerFlow
from googleapiclient.discovery import build
CLIENT_ID = "some+client+id"
CLIENT_SECRET = "some+client+secret"
SCOPES = ("https://www.googleapis.com/auth/youtube",)
flow = OAuth2WebServerFlow(CLIENT_ID, CLIENT_SECRET, " ".join(SCOPES))
# Step 1: get user code and verification URL
# https://developers.google.com/accounts/docs/OAuth2ForDevices#obtainingacode
flow_info = flow.step1_get_device_and_user_codes()
print "Enter the following code at %s: %s" % (flow_info.verification_url,
flow_info.user_code)
print "Then press Enter."
input()
# Step 2: get credentials
# https://developers.google.com/accounts/docs/OAuth2ForDevices#obtainingatoken
credentials = flow.step2_exchange(device_flow_info=flow_info)
print "Access token:", credentials.access_token
print "Refresh token:", credentials.refresh_token
# Get YouTube service
# https://developers.google.com/accounts/docs/OAuth2ForDevices#callinganapi
youtube = build("youtube", "v3", http=credentials.authorize(httplib2.Http()))
|
04182bff7a097b8842073f96bac834abb34f7118 | setup.py | setup.py | from setuptools import setup, find_packages
long_description = (
open('README.rst').read()
+ '\n' +
open('CHANGES.txt').read())
setup(
name='more.static',
version='0.10.dev0',
description="BowerStatic integration for Morepath",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
keywords='morepath bowerstatic bower',
license="BSD",
url="http://pypi.python.org/pypi/more.static",
namespace_packages=['more'],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'morepath >= 0.13',
'bowerstatic >= 0.8',
],
extras_require=dict(
test=[
'pytest >= 2.0',
'pytest-cov',
'WebTest >= 2.0.14'
],
),
)
| import io
from setuptools import setup, find_packages
long_description = '\n'.join((
io.open('README.rst', encoding='utf-8').read(),
io.open('CHANGES.txt', encoding='utf-8').read()
))
setup(
name='more.static',
version='0.10.dev0',
description="BowerStatic integration for Morepath",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
keywords='morepath bowerstatic bower',
license="BSD",
url="http://pypi.python.org/pypi/more.static",
namespace_packages=['more'],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'morepath >= 0.13',
'bowerstatic >= 0.8',
],
extras_require=dict(
test=[
'pytest >= 2.0',
'pytest-cov',
'WebTest >= 2.0.14'
],
),
)
| Use io.open with encoding='utf-8' and flake8 compliance | Use io.open with encoding='utf-8' and flake8 compliance
| Python | bsd-3-clause | morepath/more.static | import io
from setuptools import setup, find_packages
long_description = '\n'.join((
io.open('README.rst', encoding='utf-8').read(),
io.open('CHANGES.txt', encoding='utf-8').read()
))
setup(
name='more.static',
version='0.10.dev0',
description="BowerStatic integration for Morepath",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
keywords='morepath bowerstatic bower',
license="BSD",
url="http://pypi.python.org/pypi/more.static",
namespace_packages=['more'],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'morepath >= 0.13',
'bowerstatic >= 0.8',
],
extras_require=dict(
test=[
'pytest >= 2.0',
'pytest-cov',
'WebTest >= 2.0.14'
],
),
)
| Use io.open with encoding='utf-8' and flake8 compliance
from setuptools import setup, find_packages
long_description = (
open('README.rst').read()
+ '\n' +
open('CHANGES.txt').read())
setup(
name='more.static',
version='0.10.dev0',
description="BowerStatic integration for Morepath",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
keywords='morepath bowerstatic bower',
license="BSD",
url="http://pypi.python.org/pypi/more.static",
namespace_packages=['more'],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'morepath >= 0.13',
'bowerstatic >= 0.8',
],
extras_require=dict(
test=[
'pytest >= 2.0',
'pytest-cov',
'WebTest >= 2.0.14'
],
),
)
|
4a817aff14ca6bc9717bd617d5bc49d15e698272 | teuthology/orchestra/test/test_console.py | teuthology/orchestra/test/test_console.py | from teuthology.config import config as teuth_config
from .. import console
class TestConsole(object):
pass
class TestPhysicalConsole(TestConsole):
klass = console.PhysicalConsole
def setup(self):
teuth_config.ipmi_domain = 'ipmi_domain'
teuth_config.ipmi_user = 'ipmi_user'
teuth_config.ipmi_password = 'ipmi_pass'
self.hostname = 'host'
def test_build_command(self):
cmd_templ = 'ipmitool -H {h}.{d} -I lanplus -U {u} -P {p} {c}'
cons = self.klass(
self.hostname,
teuth_config.ipmi_user,
teuth_config.ipmi_password,
teuth_config.ipmi_domain,
)
sol_cmd = cons._build_command('sol activate')
assert sol_cmd == cmd_templ.format(
h=self.hostname,
d=teuth_config.ipmi_domain,
u=teuth_config.ipmi_user,
p=teuth_config.ipmi_password,
c='sol activate',
)
pc_cmd = cons._build_command('power cycle')
assert pc_cmd == sol_cmd.replace('sol activate', 'power cycle')
| Add some tests for the console module | Add some tests for the console module
... better late than never?
Signed-off-by: Zack Cerza <d7cdf09fc0f0426e98c9978ee42da5d61fa54986@redhat.com>
| Python | mit | ceph/teuthology,dmick/teuthology,SUSE/teuthology,dmick/teuthology,SUSE/teuthology,ktdreyer/teuthology,dmick/teuthology,ktdreyer/teuthology,ceph/teuthology,SUSE/teuthology | from teuthology.config import config as teuth_config
from .. import console
class TestConsole(object):
pass
class TestPhysicalConsole(TestConsole):
klass = console.PhysicalConsole
def setup(self):
teuth_config.ipmi_domain = 'ipmi_domain'
teuth_config.ipmi_user = 'ipmi_user'
teuth_config.ipmi_password = 'ipmi_pass'
self.hostname = 'host'
def test_build_command(self):
cmd_templ = 'ipmitool -H {h}.{d} -I lanplus -U {u} -P {p} {c}'
cons = self.klass(
self.hostname,
teuth_config.ipmi_user,
teuth_config.ipmi_password,
teuth_config.ipmi_domain,
)
sol_cmd = cons._build_command('sol activate')
assert sol_cmd == cmd_templ.format(
h=self.hostname,
d=teuth_config.ipmi_domain,
u=teuth_config.ipmi_user,
p=teuth_config.ipmi_password,
c='sol activate',
)
pc_cmd = cons._build_command('power cycle')
assert pc_cmd == sol_cmd.replace('sol activate', 'power cycle')
| Add some tests for the console module
... better late than never?
Signed-off-by: Zack Cerza <d7cdf09fc0f0426e98c9978ee42da5d61fa54986@redhat.com>
|
|
c41115875ce46be3eacc1ec7c539010b430b0374 | kegg_adapter/kegg.py | kegg_adapter/kegg.py | import urllib2
import json
#response = urllib2.urlopen('http://rest.kegg.jp/list/pathway/ath')
#html = response.read()
#lines = html.split('\n');
#data = {};
#for line in lines:
# parts = line.split('\t');
# if len(parts) >= 2:
# data[parts[0]] = parts[1]
#json_data = json.dumps(data)
#print json_data
def search(args):
if not 'operation' in args.keys():
exit(1);
if not 'argument' in args.keys():
exit(1);
url = 'http://rest.kegg.jp/'
operation = args['operation']
argument = args['argument']
url+= operation + '/' + argument
if 'argument2' in args.keys():
url+= '/' + args['argument2']
if 'option' in args.keys():
url+= '/' + args['option']
response = urllib2.urlopen(url)
html = response.read()
data = {}
if operation == 'find' or operation == 'list'\
or operation == 'link' or operation == 'conv':
print "jsonizing"
lines = html.split('\n')
for line in lines:
parts = line.split('\t');
if len(parts) >= 2:
data[parts[0]] = parts[1]
result = {}
result['results'] = data
result['args'] = args
print json.dumps(result);
| import urllib2
import json
#response = urllib2.urlopen('http://rest.kegg.jp/list/pathway/ath')
#html = response.read()
#lines = html.split('\n');
#data = {};
#for line in lines:
# parts = line.split('\t');
# if len(parts) >= 2:
# data[parts[0]] = parts[1]
#json_data = json.dumps(data)
#print json_data
def search(args):
if not 'operation' in args.keys():
exit(0);
if not 'argument' in args.keys():
exit(0);
url = 'http://rest.kegg.jp/'
operation = args['operation']
argument = args['argument']
url+= operation + '/' + argument
if 'argument2' in args.keys():
url+= '/' + args['argument2']
if 'option' in args.keys():
url+= '/' + args['option']
response = urllib2.urlopen(url)
html = response.read()
data = {}
if operation == 'find' or operation == 'list'\
or operation == 'link' or operation == 'conv':
lines = html.split('\n')
for line in lines:
parts = line.split('\t');
if len(parts) >= 2:
data[parts[0]] = parts[1]
result = {}
result['results'] = data
result['args'] = args
print json.dumps(result);
| Remove debugging print statements changed exit status from 1 to 0 | Remove debugging print statements
changed exit status from 1 to 0
| Python | artistic-2.0 | Arabidopsis-Information-Portal/Intern-Hello-World,Arabidopsis-Information-Portal/KEGG-Pathway-API | import urllib2
import json
#response = urllib2.urlopen('http://rest.kegg.jp/list/pathway/ath')
#html = response.read()
#lines = html.split('\n');
#data = {};
#for line in lines:
# parts = line.split('\t');
# if len(parts) >= 2:
# data[parts[0]] = parts[1]
#json_data = json.dumps(data)
#print json_data
def search(args):
if not 'operation' in args.keys():
exit(0);
if not 'argument' in args.keys():
exit(0);
url = 'http://rest.kegg.jp/'
operation = args['operation']
argument = args['argument']
url+= operation + '/' + argument
if 'argument2' in args.keys():
url+= '/' + args['argument2']
if 'option' in args.keys():
url+= '/' + args['option']
response = urllib2.urlopen(url)
html = response.read()
data = {}
if operation == 'find' or operation == 'list'\
or operation == 'link' or operation == 'conv':
lines = html.split('\n')
for line in lines:
parts = line.split('\t');
if len(parts) >= 2:
data[parts[0]] = parts[1]
result = {}
result['results'] = data
result['args'] = args
print json.dumps(result);
| Remove debugging print statements
changed exit status from 1 to 0
import urllib2
import json
#response = urllib2.urlopen('http://rest.kegg.jp/list/pathway/ath')
#html = response.read()
#lines = html.split('\n');
#data = {};
#for line in lines:
# parts = line.split('\t');
# if len(parts) >= 2:
# data[parts[0]] = parts[1]
#json_data = json.dumps(data)
#print json_data
def search(args):
if not 'operation' in args.keys():
exit(1);
if not 'argument' in args.keys():
exit(1);
url = 'http://rest.kegg.jp/'
operation = args['operation']
argument = args['argument']
url+= operation + '/' + argument
if 'argument2' in args.keys():
url+= '/' + args['argument2']
if 'option' in args.keys():
url+= '/' + args['option']
response = urllib2.urlopen(url)
html = response.read()
data = {}
if operation == 'find' or operation == 'list'\
or operation == 'link' or operation == 'conv':
print "jsonizing"
lines = html.split('\n')
for line in lines:
parts = line.split('\t');
if len(parts) >= 2:
data[parts[0]] = parts[1]
result = {}
result['results'] = data
result['args'] = args
print json.dumps(result);
|
6358f3fb8a3ece53adeb71f9b59f96a5a3a9ca70 | examples/system/ulp_adc/example_test.py | examples/system/ulp_adc/example_test.py | from __future__ import unicode_literals
from tiny_test_fw import Utility
import re
import ttfw_idf
@ttfw_idf.idf_example_test(env_tag='Example_GENERIC')
def test_examples_ulp_adc(env, extra_data):
dut = env.get_dut('ulp_adc', 'examples/system/ulp_adc')
dut.start_app()
dut.expect_all('Not ULP wakeup',
'Entering deep sleep',
timeout=30)
for _ in range(5):
dut.expect('Deep sleep wakeup', timeout=60)
measurements = int(dut.expect(re.compile(r'ULP did (\d+) measurements'), timeout=5)[0], 10)
Utility.console_log('ULP did {} measurements'.format(measurements))
dut.expect('Thresholds: low=1500 high=2000', timeout=5)
value = int(dut.expect(re.compile(r'Value=(\d+) was (?:below)|(?:above) threshold'), timeout=5)[0], 10)
Utility.console_log('Value {} was outside the boundaries'.format(value))
dut.expect('Entering deep sleep', timeout=60)
if __name__ == '__main__':
test_examples_ulp_adc()
| from __future__ import unicode_literals
from tiny_test_fw import Utility
import re
import ttfw_idf
@ttfw_idf.idf_example_test(env_tag='Example_GENERIC')
def test_examples_ulp_adc(env, extra_data):
dut = env.get_dut('ulp_adc', 'examples/system/ulp_adc')
dut.start_app()
dut.expect_all('Not ULP wakeup',
'Entering deep sleep',
timeout=30)
for _ in range(5):
dut.expect('Deep sleep wakeup', timeout=60)
measurements_str = dut.expect(re.compile(r'ULP did (\d+) measurements'), timeout=5)[0]
assert measurements_str is not None
measurements = int(measurements_str)
Utility.console_log('ULP did {} measurements'.format(measurements))
dut.expect('Thresholds: low=1500 high=2000', timeout=5)
value_str = dut.expect(re.compile(r'Value=(\d+) was (above|below) threshold'), timeout=5)[0]
assert value_str is not None
value = int(value_str)
Utility.console_log('Value {} was outside the boundaries'.format(value))
dut.expect('Entering deep sleep', timeout=60)
if __name__ == '__main__':
test_examples_ulp_adc()
| Fix regex in ulp_adc example test | CI: Fix regex in ulp_adc example test
| Python | apache-2.0 | espressif/esp-idf,espressif/esp-idf,espressif/esp-idf,espressif/esp-idf | from __future__ import unicode_literals
from tiny_test_fw import Utility
import re
import ttfw_idf
@ttfw_idf.idf_example_test(env_tag='Example_GENERIC')
def test_examples_ulp_adc(env, extra_data):
dut = env.get_dut('ulp_adc', 'examples/system/ulp_adc')
dut.start_app()
dut.expect_all('Not ULP wakeup',
'Entering deep sleep',
timeout=30)
for _ in range(5):
dut.expect('Deep sleep wakeup', timeout=60)
measurements_str = dut.expect(re.compile(r'ULP did (\d+) measurements'), timeout=5)[0]
assert measurements_str is not None
measurements = int(measurements_str)
Utility.console_log('ULP did {} measurements'.format(measurements))
dut.expect('Thresholds: low=1500 high=2000', timeout=5)
value_str = dut.expect(re.compile(r'Value=(\d+) was (above|below) threshold'), timeout=5)[0]
assert value_str is not None
value = int(value_str)
Utility.console_log('Value {} was outside the boundaries'.format(value))
dut.expect('Entering deep sleep', timeout=60)
if __name__ == '__main__':
test_examples_ulp_adc()
| CI: Fix regex in ulp_adc example test
from __future__ import unicode_literals
from tiny_test_fw import Utility
import re
import ttfw_idf
@ttfw_idf.idf_example_test(env_tag='Example_GENERIC')
def test_examples_ulp_adc(env, extra_data):
dut = env.get_dut('ulp_adc', 'examples/system/ulp_adc')
dut.start_app()
dut.expect_all('Not ULP wakeup',
'Entering deep sleep',
timeout=30)
for _ in range(5):
dut.expect('Deep sleep wakeup', timeout=60)
measurements = int(dut.expect(re.compile(r'ULP did (\d+) measurements'), timeout=5)[0], 10)
Utility.console_log('ULP did {} measurements'.format(measurements))
dut.expect('Thresholds: low=1500 high=2000', timeout=5)
value = int(dut.expect(re.compile(r'Value=(\d+) was (?:below)|(?:above) threshold'), timeout=5)[0], 10)
Utility.console_log('Value {} was outside the boundaries'.format(value))
dut.expect('Entering deep sleep', timeout=60)
if __name__ == '__main__':
test_examples_ulp_adc()
|
a565235303e1f2572ed34490e25c7e0f31aba74c | turngeneration/serializers.py | turngeneration/serializers.py | from django.contrib.contenttypes.models import ContentType
from rest_framework import serializers
from . import models
class ContentTypeField(serializers.Field):
def to_representation(self, obj):
ct = ContentType.objects.get_for_model(obj)
return u'{ct.app_label}.{ct.model}'.format(ct=ct)
def get_attribute(self, obj):
return obj
class RealmSerializer(serializers.Serializer):
content_type = ContentTypeField()
object_id = serializers.IntegerField(source='pk')
repr = serializers.CharField(source='__repr__')
| from django.contrib.contenttypes.models import ContentType
from rest_framework import serializers
from . import models
class ContentTypeField(serializers.Field):
def to_representation(self, value):
return u'{value.app_label}.{value.model}'.format(value=value)
def to_internal_value(self, data):
app_label, model = data.split('.')
return ContentType.objects.get_by_natural_key(app_label, model)
class ReadOnlyDefault(object):
def set_context(self, serializer_field):
self.current_value = getattr(serializer_field.parent.instance,
serializer_field.name, None)
def __call__(self):
return self.current_value
def __repr__(self):
return '%s()' % (self.__class__.__name__,)
class GeneratorSerializer(serializers.ModelSerializer):
content_type = ContentTypeField(read_only=True, default=ReadOnlyDefault())
object_id = serializers.IntegerField(read_only=True,
default=ReadOnlyDefault())
class Meta(object):
model = models.Generator
fields = ('content_type', 'object_id',
'generating', 'generation_time', 'autogenerate',
'allow_pauses', 'minimum_between_generations')
read_only_fields = ('generating', 'generation_time')
class RealmSerializer(serializers.Serializer):
content_type = serializers.SerializerMethodField()
object_id = serializers.IntegerField(source='pk')
repr = serializers.CharField(source='__repr__')
generator = serializers.SerializerMethodField(required=False)
def get_content_type(self, obj):
ct = ContentType.objects.get_for_model(obj)
return u'{ct.app_label}.{ct.model}'.format(ct=ct)
def get_generator(self, obj):
ct = ContentType.objects.get_for_model(obj)
try:
generator = models.Generator.objects.get(
content_type=ct, object_id=obj.pk)
except models.Generator.DoesNotExist:
return None
return GeneratorSerializer(generator).data
| Support nested generator inside the realm. | Support nested generator inside the realm.
| Python | mit | jbradberry/django-turn-generation,jbradberry/django-turn-generation | from django.contrib.contenttypes.models import ContentType
from rest_framework import serializers
from . import models
class ContentTypeField(serializers.Field):
def to_representation(self, value):
return u'{value.app_label}.{value.model}'.format(value=value)
def to_internal_value(self, data):
app_label, model = data.split('.')
return ContentType.objects.get_by_natural_key(app_label, model)
class ReadOnlyDefault(object):
def set_context(self, serializer_field):
self.current_value = getattr(serializer_field.parent.instance,
serializer_field.name, None)
def __call__(self):
return self.current_value
def __repr__(self):
return '%s()' % (self.__class__.__name__,)
class GeneratorSerializer(serializers.ModelSerializer):
content_type = ContentTypeField(read_only=True, default=ReadOnlyDefault())
object_id = serializers.IntegerField(read_only=True,
default=ReadOnlyDefault())
class Meta(object):
model = models.Generator
fields = ('content_type', 'object_id',
'generating', 'generation_time', 'autogenerate',
'allow_pauses', 'minimum_between_generations')
read_only_fields = ('generating', 'generation_time')
class RealmSerializer(serializers.Serializer):
content_type = serializers.SerializerMethodField()
object_id = serializers.IntegerField(source='pk')
repr = serializers.CharField(source='__repr__')
generator = serializers.SerializerMethodField(required=False)
def get_content_type(self, obj):
ct = ContentType.objects.get_for_model(obj)
return u'{ct.app_label}.{ct.model}'.format(ct=ct)
def get_generator(self, obj):
ct = ContentType.objects.get_for_model(obj)
try:
generator = models.Generator.objects.get(
content_type=ct, object_id=obj.pk)
except models.Generator.DoesNotExist:
return None
return GeneratorSerializer(generator).data
| Support nested generator inside the realm.
from django.contrib.contenttypes.models import ContentType
from rest_framework import serializers
from . import models
class ContentTypeField(serializers.Field):
def to_representation(self, obj):
ct = ContentType.objects.get_for_model(obj)
return u'{ct.app_label}.{ct.model}'.format(ct=ct)
def get_attribute(self, obj):
return obj
class RealmSerializer(serializers.Serializer):
content_type = ContentTypeField()
object_id = serializers.IntegerField(source='pk')
repr = serializers.CharField(source='__repr__')
|
7531ed0c9ae25f04884250c84b39a630ae7ef34b | raiden/storage/migrations/v20_to_v21.py | raiden/storage/migrations/v20_to_v21.py | import json
from raiden.storage.sqlite import SQLiteStorage
SOURCE_VERSION = 20
TARGET_VERSION = 21
def _transform_snapshot(raw_snapshot: str) -> str:
snapshot = json.loads(raw_snapshot)
for task in snapshot['payment_mapping']['secrethashes_to_task'].values():
if 'raiden.transfer.state.InitiatorTask' in task['_type']:
for initiator in task['manager_task']['initiator_transfers'].values():
initiator['transfer_description']['allocated_fee'] = 0
ids_to_addrs = dict()
for payment_network in snapshot['identifiers_to_paymentnetworks'].values():
for token_network in payment_network['tokenidentifiers_to_tokennetworks'].values():
ids_to_addrs[payment_network['address']] = token_network['token_address']
snapshot['tokennetworkaddresses_to_paymentnetworkaddresses'] = ids_to_addrs
for payment_network in snapshot['identifiers_to_paymentnetworks'].values():
for token_network in payment_network['tokenidentifiers_to_tokennetworks'].values():
for channel_state in token_network['channelidentifiers_to_channels'].values():
channel_state['mediation_fee'] = 0
return json.dumps(snapshot)
def _update_snapshots(storage: SQLiteStorage):
updated_snapshots_data = []
for snapshot in storage.get_snapshots():
new_snapshot = _transform_snapshot(snapshot.data)
updated_snapshots_data.append((new_snapshot, snapshot.identifier))
storage.update_snapshots(updated_snapshots_data)
def _update_statechanges(storage: SQLiteStorage):
batch_size = 50
batch_query = storage.batch_query_state_changes(
batch_size=batch_size,
filters=[
('_type', 'raiden.transfer.state_change.ContractReceiveChannelNew'),
],
)
for state_changes_batch in batch_query:
for state_change in state_changes_batch:
state_change['channel_state']['mediation_fee'] = 0
storage.update_state_changes(state_changes_batch)
batch_query = storage.batch_query_state_changes(
batch_size=batch_size,
filters=[
('_type', 'raiden.transfer.mediated_transfer.state_change.ActionInitInitiator'),
],
)
for state_changes_batch in batch_query:
for state_change in state_changes_batch:
state_change['transfer']['allocated_fee'] = 0
storage.update_state_changes(state_changes_batch)
def upgrade_v19_to_v20(
storage: SQLiteStorage,
old_version: int,
**kwargs, # pylint: disable=unused-argument
) -> int:
if old_version == SOURCE_VERSION:
_update_snapshots(storage)
_update_statechanges(storage)
return TARGET_VERSION
| Move migration 21 to it's proper file | Move migration 21 to it's proper file
| Python | mit | hackaugusto/raiden,hackaugusto/raiden | import json
from raiden.storage.sqlite import SQLiteStorage
SOURCE_VERSION = 20
TARGET_VERSION = 21
def _transform_snapshot(raw_snapshot: str) -> str:
snapshot = json.loads(raw_snapshot)
for task in snapshot['payment_mapping']['secrethashes_to_task'].values():
if 'raiden.transfer.state.InitiatorTask' in task['_type']:
for initiator in task['manager_task']['initiator_transfers'].values():
initiator['transfer_description']['allocated_fee'] = 0
ids_to_addrs = dict()
for payment_network in snapshot['identifiers_to_paymentnetworks'].values():
for token_network in payment_network['tokenidentifiers_to_tokennetworks'].values():
ids_to_addrs[payment_network['address']] = token_network['token_address']
snapshot['tokennetworkaddresses_to_paymentnetworkaddresses'] = ids_to_addrs
for payment_network in snapshot['identifiers_to_paymentnetworks'].values():
for token_network in payment_network['tokenidentifiers_to_tokennetworks'].values():
for channel_state in token_network['channelidentifiers_to_channels'].values():
channel_state['mediation_fee'] = 0
return json.dumps(snapshot)
def _update_snapshots(storage: SQLiteStorage):
updated_snapshots_data = []
for snapshot in storage.get_snapshots():
new_snapshot = _transform_snapshot(snapshot.data)
updated_snapshots_data.append((new_snapshot, snapshot.identifier))
storage.update_snapshots(updated_snapshots_data)
def _update_statechanges(storage: SQLiteStorage):
batch_size = 50
batch_query = storage.batch_query_state_changes(
batch_size=batch_size,
filters=[
('_type', 'raiden.transfer.state_change.ContractReceiveChannelNew'),
],
)
for state_changes_batch in batch_query:
for state_change in state_changes_batch:
state_change['channel_state']['mediation_fee'] = 0
storage.update_state_changes(state_changes_batch)
batch_query = storage.batch_query_state_changes(
batch_size=batch_size,
filters=[
('_type', 'raiden.transfer.mediated_transfer.state_change.ActionInitInitiator'),
],
)
for state_changes_batch in batch_query:
for state_change in state_changes_batch:
state_change['transfer']['allocated_fee'] = 0
storage.update_state_changes(state_changes_batch)
def upgrade_v19_to_v20(
storage: SQLiteStorage,
old_version: int,
**kwargs, # pylint: disable=unused-argument
) -> int:
if old_version == SOURCE_VERSION:
_update_snapshots(storage)
_update_statechanges(storage)
return TARGET_VERSION
| Move migration 21 to it's proper file
|
|
5545bd1df34e6d3bb600b78b92d757ea12e3861b | printer/PlatformPhysicsOperation.py | printer/PlatformPhysicsOperation.py | from UM.Operations.Operation import Operation
from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation
from UM.Operations.TranslateOperation import TranslateOperation
## A specialised operation designed specifically to modify the previous operation.
class PlatformPhysicsOperation(Operation):
def __init__(self, node, translation):
super().__init__()
self._node = node
self._translation = translation
def undo(self):
pass
def redo(self):
pass
def mergeWith(self, other):
if type(other) is AddSceneNodeOperation:
other._node.translate(self._translation)
return other
elif type(other) is TranslateOperation:
other._translation += self._translation
return other
else:
return False
| from UM.Operations.Operation import Operation
from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation
from UM.Operations.TranslateOperation import TranslateOperation
from UM.Operations.GroupedOperation import GroupedOperation
## A specialised operation designed specifically to modify the previous operation.
class PlatformPhysicsOperation(Operation):
def __init__(self, node, translation):
super().__init__()
self._node = node
self._transform = node.getLocalTransformation()
self._position = node.getPosition() + translation
self._always_merge = True
def undo(self):
self._node.setLocalTransformation(self._transform)
def redo(self):
self._node.setPosition(self._position)
def mergeWith(self, other):
group = GroupedOperation()
group.addOperation(self)
group.addOperation(other)
return group
def __repr__(self):
return 'PlatformPhysicsOperation(t = {0})'.format(self._position)
| Use GroupedOperation for merging PlatformPhyisicsOperation | Use GroupedOperation for merging PlatformPhyisicsOperation
| Python | agpl-3.0 | onitake/Uranium,onitake/Uranium | from UM.Operations.Operation import Operation
from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation
from UM.Operations.TranslateOperation import TranslateOperation
from UM.Operations.GroupedOperation import GroupedOperation
## A specialised operation designed specifically to modify the previous operation.
class PlatformPhysicsOperation(Operation):
def __init__(self, node, translation):
super().__init__()
self._node = node
self._transform = node.getLocalTransformation()
self._position = node.getPosition() + translation
self._always_merge = True
def undo(self):
self._node.setLocalTransformation(self._transform)
def redo(self):
self._node.setPosition(self._position)
def mergeWith(self, other):
group = GroupedOperation()
group.addOperation(self)
group.addOperation(other)
return group
def __repr__(self):
return 'PlatformPhysicsOperation(t = {0})'.format(self._position)
| Use GroupedOperation for merging PlatformPhyisicsOperation
from UM.Operations.Operation import Operation
from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation
from UM.Operations.TranslateOperation import TranslateOperation
## A specialised operation designed specifically to modify the previous operation.
class PlatformPhysicsOperation(Operation):
def __init__(self, node, translation):
super().__init__()
self._node = node
self._translation = translation
def undo(self):
pass
def redo(self):
pass
def mergeWith(self, other):
if type(other) is AddSceneNodeOperation:
other._node.translate(self._translation)
return other
elif type(other) is TranslateOperation:
other._translation += self._translation
return other
else:
return False
|
b1963f00e5290c11654eefbd24fbce185bbcd8b4 | packages/Preferences/define.py | packages/Preferences/define.py | import os
_CURRENTPATH = os.path.dirname(os.path.realpath(__file__))
preferencesIconPath = os.path.join(_CURRENTPATH, 'static', 'gear.svg')
preferencesUIPath = os.path.join(_CURRENTPATH, 'ui', 'preferences.ui')
version = '0.1.0'
| import os
_CURRENTPATH = os.path.dirname(os.path.realpath(__file__))
config_name = 'mantle_config.ini'
preferencesIconPath = os.path.join(_CURRENTPATH, 'static', 'gear.svg')
preferencesUIPath = os.path.join(_CURRENTPATH, 'ui', 'preferences.ui')
version = '0.1.0'
| Add config ini file name. | Add config ini file name.
| Python | mit | takavfx/Mantle | import os
_CURRENTPATH = os.path.dirname(os.path.realpath(__file__))
config_name = 'mantle_config.ini'
preferencesIconPath = os.path.join(_CURRENTPATH, 'static', 'gear.svg')
preferencesUIPath = os.path.join(_CURRENTPATH, 'ui', 'preferences.ui')
version = '0.1.0'
| Add config ini file name.
import os
_CURRENTPATH = os.path.dirname(os.path.realpath(__file__))
preferencesIconPath = os.path.join(_CURRENTPATH, 'static', 'gear.svg')
preferencesUIPath = os.path.join(_CURRENTPATH, 'ui', 'preferences.ui')
version = '0.1.0'
|
567e12bfb8d0f4e2a4f6fddf0fab9ffbcbf6d49f | requests/_bug.py | requests/_bug.py | """Module containing bug report helper(s)."""
from __future__ import print_function
import json
import platform
import sys
import ssl
from . import __version__ as requests_version
try:
from .packages.urllib3.contrib import pyopenssl
except ImportError:
pyopenssl = None
OpenSSL = None
cryptography = None
else:
import OpenSSL
import cryptography
def _implementation():
"""Return a dict with the Python implementation and verison.
Provide both the name and the version of the Python implementation
currently running. For example, on CPython 2.7.5 it will return
{'name': 'CPython', 'version': '2.7.5'}.
This function works best on CPython and PyPy: in particular, it probably
doesn't work for Jython or IronPython. Future investigation should be done
to work out the correct shape of the code for those platforms.
"""
implementation = platform.python_implementation()
if implementation == 'CPython':
implementation_version = platform.python_version()
elif implementation == 'PyPy':
implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major,
sys.pypy_version_info.minor,
sys.pypy_version_info.micro)
if sys.pypy_version_info.releaselevel != 'final':
implementation_version = ''.join([
implementation_version, sys.pypy_version_info.releaselevel
])
elif implementation == 'Jython':
implementation_version = platform.python_version() # Complete Guess
elif implementation == 'IronPython':
implementation_version = platform.python_version() # Complete Guess
else:
implementation_version = 'Unknown'
return {'name': implementation, 'version': implementation_version}
def information():
"""Generate information for a bug report."""
try:
platform_info = {
'system': platform.system(),
'release': platform.release(),
}
except IOError:
platform_info = {
'system': 'Unknown',
'release': 'Unknown',
}
implementation_info = _implementation()
pyopenssl_info = {
'version': None,
'openssl_version': '',
}
if OpenSSL:
pyopenssl_info = {
'version': OpenSSL.__version__,
'openssl_version': '%x' % OpenSSL.SSL.OPENSSL_VERSION_NUMBER,
}
cryptography_info = {
'version': getattr(cryptography, '__version__', ''),
}
return {
'platform': platform_info,
'implementation': implementation_info,
'system_ssl': {
'version': '%x' % ssl.OPENSSL_VERSION_NUMBER,
},
'using_pyopenssl': pyopenssl is not None,
'pyOpenSSL': pyopenssl_info,
'cryptography': cryptography_info,
'requests': {
'version': requests_version,
},
}
def print_information():
"""Pretty-print the bug information as JSON."""
print(json.dumps(information(), sort_keys=True, indent=2))
| Add debugging submodule for bug reporters | Add debugging submodule for bug reporters
The suggested usage in a bug report would be
python -c 'from requests import _bug; _bug.print_information()'
This should generate most of the information we tend to ask for
repeatedly from bug reporters.
| Python | apache-2.0 | psf/requests | """Module containing bug report helper(s)."""
from __future__ import print_function
import json
import platform
import sys
import ssl
from . import __version__ as requests_version
try:
from .packages.urllib3.contrib import pyopenssl
except ImportError:
pyopenssl = None
OpenSSL = None
cryptography = None
else:
import OpenSSL
import cryptography
def _implementation():
"""Return a dict with the Python implementation and verison.
Provide both the name and the version of the Python implementation
currently running. For example, on CPython 2.7.5 it will return
{'name': 'CPython', 'version': '2.7.5'}.
This function works best on CPython and PyPy: in particular, it probably
doesn't work for Jython or IronPython. Future investigation should be done
to work out the correct shape of the code for those platforms.
"""
implementation = platform.python_implementation()
if implementation == 'CPython':
implementation_version = platform.python_version()
elif implementation == 'PyPy':
implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major,
sys.pypy_version_info.minor,
sys.pypy_version_info.micro)
if sys.pypy_version_info.releaselevel != 'final':
implementation_version = ''.join([
implementation_version, sys.pypy_version_info.releaselevel
])
elif implementation == 'Jython':
implementation_version = platform.python_version() # Complete Guess
elif implementation == 'IronPython':
implementation_version = platform.python_version() # Complete Guess
else:
implementation_version = 'Unknown'
return {'name': implementation, 'version': implementation_version}
def information():
"""Generate information for a bug report."""
try:
platform_info = {
'system': platform.system(),
'release': platform.release(),
}
except IOError:
platform_info = {
'system': 'Unknown',
'release': 'Unknown',
}
implementation_info = _implementation()
pyopenssl_info = {
'version': None,
'openssl_version': '',
}
if OpenSSL:
pyopenssl_info = {
'version': OpenSSL.__version__,
'openssl_version': '%x' % OpenSSL.SSL.OPENSSL_VERSION_NUMBER,
}
cryptography_info = {
'version': getattr(cryptography, '__version__', ''),
}
return {
'platform': platform_info,
'implementation': implementation_info,
'system_ssl': {
'version': '%x' % ssl.OPENSSL_VERSION_NUMBER,
},
'using_pyopenssl': pyopenssl is not None,
'pyOpenSSL': pyopenssl_info,
'cryptography': cryptography_info,
'requests': {
'version': requests_version,
},
}
def print_information():
"""Pretty-print the bug information as JSON."""
print(json.dumps(information(), sort_keys=True, indent=2))
| Add debugging submodule for bug reporters
The suggested usage in a bug report would be
python -c 'from requests import _bug; _bug.print_information()'
This should generate most of the information we tend to ask for
repeatedly from bug reporters.
|
|
e561c1354d2f9a550f2b27bb88d8e4d0f3f76203 | common/djangoapps/student/management/commands/recover_truncated_anonymous_ids.py | common/djangoapps/student/management/commands/recover_truncated_anonymous_ids.py | """
Generate sql commands to fix truncated anonymous student ids in the ORA database
"""
import sys
from django.core.management.base import NoArgsCommand
from student.models import AnonymousUserId, anonymous_id_for_user
class Command(NoArgsCommand):
help = __doc__
def handle_noargs(self, **options):
"""
Reads a list of ids (newline separated) from stdin, and
dumps sql queries to run on the ORA database to fix those ids
from their truncated form to the full 32 character change.
The following query will generate the list of ids needed to be fixed
from the ORA database:
SELECT student_id FROM peer_grading_calibrationhistory WHERE LENGTH(student_id) = 16
UNION SELECT student_id FROM controller_submission WHERE LENGTH(student_id) = 16
UNION SELECT student_id FROM metrics_timing WHERE LENGTH(student_id) = 16
UNION SELECT student_id FROM metrics_studentcourseprofile WHERE LENGTH(student_id) = 16
UNION SELECT student_id FROM metrics_studentprofile WHERE LENGTH(student_id) = 16;
"""
ids = [line.strip() for line in sys.stdin]
old_ids = AnonymousUserId.objects.raw(
"""
SELECT *
FROM student_anonymoususerid_temp_archive
WHERE anonymous_user_id IN ({})
""".format(','.join(['%s']*len(ids))),
ids
)
for old_id in old_ids:
new_id = anonymous_id_for_user(old_id.user, old_id.course_id)
for table in ('peer_grading_calibrationhistory', 'controller_submission', 'metrics_timing'):
self.stdout.write(
"UPDATE {} "
"SET student_id = '{}' "
"WHERE student_id = '{}';\n".format(
table,
new_id,
old_id.anonymous_user_id,
)
)
self.stdout.write(
"DELETE FROM metrics_studentcourseprofile "
"WHERE student_id = '{}' "
"AND problems_attempted = 0;\n".format(old_id.anonymous_user_id)
)
self.stdout.write(
"DELETE FROM metrics_studentprofile "
"WHERE student_id = '{}' "
"AND messages_sent = 0 "
"AND messages_received = 0 "
"AND average_message_feedback_length = 0 "
"AND student_is_staff_banned = 0 "
"AND student_cannot_submit_more_for_peer_grading = 0;\n".format(old_id.anonymous_user_id)
)
| Add managemant command to generate sql to clean up tp truncated student ids in ORA db | Add managemant command to generate sql to clean up tp truncated student ids in ORA db
| Python | agpl-3.0 | openfun/edx-platform,synergeticsedx/deployment-wipro,shashank971/edx-platform,bigdatauniversity/edx-platform,shabab12/edx-platform,philanthropy-u/edx-platform,openfun/edx-platform,motion2015/edx-platform,ubc/edx-platform,jolyonb/edx-platform,cognitiveclass/edx-platform,ferabra/edx-platform,jswope00/griffinx,proversity-org/edx-platform,ampax/edx-platform,proversity-org/edx-platform,jazztpt/edx-platform,yokose-ks/edx-platform,UXE/local-edx,pabloborrego93/edx-platform,ahmadiga/min_edx,eemirtekin/edx-platform,hkawasaki/kawasaki-aio8-1,arifsetiawan/edx-platform,alu042/edx-platform,alexthered/kienhoc-platform,appliedx/edx-platform,IndonesiaX/edx-platform,4eek/edx-platform,ak2703/edx-platform,mahendra-r/edx-platform,LearnEra/LearnEraPlaftform,jswope00/griffinx,zerobatu/edx-platform,doganov/edx-platform,xuxiao19910803/edx-platform,jbassen/edx-platform,nanolearning/edx-platform,mitocw/edx-platform,WatanabeYasumasa/edx-platform,jbassen/edx-platform,abdoosh00/edraak,alexthered/kienhoc-platform,antonve/s4-project-mooc,Edraak/edraak-platform,longmen21/edx-platform,edx/edx-platform,chrisndodge/edx-platform,sameetb-cuelogic/edx-platform-test,cselis86/edx-platform,jamesblunt/edx-platform,hkawasaki/kawasaki-aio8-2,jonathan-beard/edx-platform,hamzehd/edx-platform,jonathan-beard/edx-platform,shubhdev/edx-platform,mushtaqak/edx-platform,romain-li/edx-platform,Stanford-Online/edx-platform,nagyistoce/edx-platform,shubhdev/edx-platform,longmen21/edx-platform,motion2015/edx-platform,AkA84/edx-platform,analyseuc3m/ANALYSE-v1,shurihell/testasia,Stanford-Online/edx-platform,torchingloom/edx-platform,chauhanhardik/populo,bdero/edx-platform,jamesblunt/edx-platform,BehavioralInsightsTeam/edx-platform,cyanna/edx-platform,peterm-itr/edx-platform,Unow/edx-platform,franosincic/edx-platform,Edraak/circleci-edx-platform,rhndg/openedx,ahmadiga/min_edx,proversity-org/edx-platform,beni55/edx-platform,etzhou/edx-platform,jswope00/griffinx,olexiim/edx-platform,kamalx/edx-platform,JioE
ducation/edx-platform,auferack08/edx-platform,doismellburning/edx-platform,utecuy/edx-platform,kmoocdev2/edx-platform,jruiperezv/ANALYSE,synergeticsedx/deployment-wipro,chudaol/edx-platform,edx-solutions/edx-platform,JioEducation/edx-platform,10clouds/edx-platform,4eek/edx-platform,alu042/edx-platform,jruiperezv/ANALYSE,vikas1885/test1,chauhanhardik/populo_2,nanolearningllc/edx-platform-cypress,eemirtekin/edx-platform,benpatterson/edx-platform,iivic/BoiseStateX,shubhdev/openedx,etzhou/edx-platform,louyihua/edx-platform,dsajkl/123,shubhdev/openedx,itsjeyd/edx-platform,10clouds/edx-platform,chand3040/cloud_that,CourseTalk/edx-platform,jzoldak/edx-platform,romain-li/edx-platform,sameetb-cuelogic/edx-platform-test,unicri/edx-platform,IONISx/edx-platform,motion2015/a3,Semi-global/edx-platform,zadgroup/edx-platform,halvertoluke/edx-platform,motion2015/edx-platform,ovnicraft/edx-platform,J861449197/edx-platform,benpatterson/edx-platform,vasyarv/edx-platform,arifsetiawan/edx-platform,philanthropy-u/edx-platform,romain-li/edx-platform,valtech-mooc/edx-platform,ampax/edx-platform-backup,synergeticsedx/deployment-wipro,zadgroup/edx-platform,ferabra/edx-platform,J861449197/edx-platform,dsajkl/reqiop,mbareta/edx-platform-ft,chrisndodge/edx-platform,morenopc/edx-platform,JioEducation/edx-platform,mjg2203/edx-platform-seas,DNFcode/edx-platform,LICEF/edx-platform,playm2mboy/edx-platform,shurihell/testasia,Shrhawk/edx-platform,unicri/edx-platform,mushtaqak/edx-platform,nikolas/edx-platform,4eek/edx-platform,mushtaqak/edx-platform,Softmotions/edx-platform,don-github/edx-platform,DefyVentures/edx-platform,shubhdev/edx-platform,WatanabeYasumasa/edx-platform,pelikanchik/edx-platform,nttks/jenkins-test,defance/edx-platform,IndonesiaX/edx-platform,edry/edx-platform,arifsetiawan/edx-platform,raccoongang/edx-platform,doismellburning/edx-platform,nanolearning/edx-platform,etzhou/edx-platform,olexiim/edx-platform,motion2015/a3,bdero/edx-platform,JCBarahona/edX,xuxiao19910803/edx,procangroup/e
dx-platform,torchingloom/edx-platform,hkawasaki/kawasaki-aio8-1,dkarakats/edx-platform,eduNEXT/edunext-platform,ampax/edx-platform-backup,Livit/Livit.Learn.EdX,simbs/edx-platform,zerobatu/edx-platform,ak2703/edx-platform,ubc/edx-platform,jonathan-beard/edx-platform,miptliot/edx-platform,a-parhom/edx-platform,dsajkl/123,carsongee/edx-platform,J861449197/edx-platform,openfun/edx-platform,knehez/edx-platform,jbzdak/edx-platform,hkawasaki/kawasaki-aio8-0,beacloudgenius/edx-platform,Softmotions/edx-platform,unicri/edx-platform,ovnicraft/edx-platform,Semi-global/edx-platform,B-MOOC/edx-platform,louyihua/edx-platform,jswope00/GAI,doganov/edx-platform,beni55/edx-platform,IONISx/edx-platform,IndonesiaX/edx-platform,deepsrijit1105/edx-platform,Edraak/edx-platform,shubhdev/edxOnBaadal,chauhanhardik/populo_2,nttks/edx-platform,SravanthiSinha/edx-platform,romain-li/edx-platform,devs1991/test_edx_docmode,chauhanhardik/populo,Kalyzee/edx-platform,hastexo/edx-platform,mjg2203/edx-platform-seas,Stanford-Online/edx-platform,beni55/edx-platform,kmoocdev2/edx-platform,eduNEXT/edx-platform,simbs/edx-platform,playm2mboy/edx-platform,ESOedX/edx-platform,longmen21/edx-platform,hamzehd/edx-platform,kamalx/edx-platform,jazkarta/edx-platform,pomegranited/edx-platform,SravanthiSinha/edx-platform,naresh21/synergetics-edx-platform,procangroup/edx-platform,zofuthan/edx-platform,chand3040/cloud_that,torchingloom/edx-platform,benpatterson/edx-platform,adoosii/edx-platform,lduarte1991/edx-platform,MakeHer/edx-platform,OmarIthawi/edx-platform,jamiefolsom/edx-platform,bigdatauniversity/edx-platform,miptliot/edx-platform,Shrhawk/edx-platform,ferabra/edx-platform,apigee/edx-platform,mjirayu/sit_academy,nanolearningllc/edx-platform-cypress,xinjiguaike/edx-platform,nanolearningllc/edx-platform-cypress-2,olexiim/edx-platform,vikas1885/test1,caesar2164/edx-platform,pku9104038/edx-platform,appsembler/edx-platform,leansoft/edx-platform,ESOedX/edx-platform,longmen21/edx-platform,edx-solutions/edx-platform,eduN
EXT/edx-platform,B-MOOC/edx-platform,EDUlib/edx-platform,B-MOOC/edx-platform,don-github/edx-platform,jazkarta/edx-platform-for-isc,eemirtekin/edx-platform,openfun/edx-platform,msegado/edx-platform,motion2015/a3,arbrandes/edx-platform,appliedx/edx-platform,gymnasium/edx-platform,JCBarahona/edX,zhenzhai/edx-platform,shubhdev/edxOnBaadal,JCBarahona/edX,DNFcode/edx-platform,LICEF/edx-platform,xingyepei/edx-platform,polimediaupv/edx-platform,franosincic/edx-platform,pomegranited/edx-platform,EDUlib/edx-platform,eduNEXT/edunext-platform,UOMx/edx-platform,doganov/edx-platform,abdoosh00/edx-rtl-final,polimediaupv/edx-platform,jazkarta/edx-platform,itsjeyd/edx-platform,cselis86/edx-platform,edx/edx-platform,edry/edx-platform,hastexo/edx-platform,sameetb-cuelogic/edx-platform-test,kursitet/edx-platform,jazkarta/edx-platform,morenopc/edx-platform,pku9104038/edx-platform,jjmiranda/edx-platform,doismellburning/edx-platform,LearnEra/LearnEraPlaftform,pomegranited/edx-platform,pepeportela/edx-platform,kamalx/edx-platform,chauhanhardik/populo,xingyepei/edx-platform,nttks/edx-platform,sameetb-cuelogic/edx-platform-test,lduarte1991/edx-platform,miptliot/edx-platform,chand3040/cloud_that,tanmaykm/edx-platform,teltek/edx-platform,mbareta/edx-platform-ft,ahmadio/edx-platform,iivic/BoiseStateX,cecep-edu/edx-platform,TeachAtTUM/edx-platform,knehez/edx-platform,RPI-OPENEDX/edx-platform,SivilTaram/edx-platform,Endika/edx-platform,xuxiao19910803/edx-platform,gsehub/edx-platform,beni55/edx-platform,MSOpenTech/edx-platform,antoviaque/edx-platform,10clouds/edx-platform,a-parhom/edx-platform,4eek/edx-platform,MSOpenTech/edx-platform,fintech-circle/edx-platform,arbrandes/edx-platform,yokose-ks/edx-platform,jswope00/GAI,angelapper/edx-platform,vismartltd/edx-platform,franosincic/edx-platform,martynovp/edx-platform,jamiefolsom/edx-platform,nikolas/edx-platform,antonve/s4-project-mooc,stvstnfrd/edx-platform,nttks/edx-platform,carsongee/edx-platform,prarthitm/edxplatform,nagyistoce/edx-platform,Edraa
k/circleci-edx-platform,vasyarv/edx-platform,angelapper/edx-platform,rue89-tech/edx-platform,waheedahmed/edx-platform,alu042/edx-platform,vismartltd/edx-platform,mitocw/edx-platform,Edraak/circleci-edx-platform,jbassen/edx-platform,Edraak/edraak-platform,atsolakid/edx-platform,zhenzhai/edx-platform,sudheerchintala/LearnEraPlatForm,ovnicraft/edx-platform,teltek/edx-platform,torchingloom/edx-platform,kursitet/edx-platform,jbassen/edx-platform,jazkarta/edx-platform-for-isc,dcosentino/edx-platform,valtech-mooc/edx-platform,devs1991/test_edx_docmode,mbareta/edx-platform-ft,LearnEra/LearnEraPlaftform,alexthered/kienhoc-platform,cpennington/edx-platform,fly19890211/edx-platform,doganov/edx-platform,CredoReference/edx-platform,shubhdev/openedx,chudaol/edx-platform,atsolakid/edx-platform,mahendra-r/edx-platform,deepsrijit1105/edx-platform,alu042/edx-platform,simbs/edx-platform,dkarakats/edx-platform,zubair-arbi/edx-platform,DefyVentures/edx-platform,DNFcode/edx-platform,knehez/edx-platform,andyzsf/edx,antonve/s4-project-mooc,J861449197/edx-platform,simbs/edx-platform,Livit/Livit.Learn.EdX,jelugbo/tundex,utecuy/edx-platform,rue89-tech/edx-platform,appliedx/edx-platform,edry/edx-platform,ahmedaljazzar/edx-platform,stvstnfrd/edx-platform,SravanthiSinha/edx-platform,peterm-itr/edx-platform,J861449197/edx-platform,apigee/edx-platform,wwj718/ANALYSE,pomegranited/edx-platform,procangroup/edx-platform,rhndg/openedx,mjg2203/edx-platform-seas,LICEF/edx-platform,beacloudgenius/edx-platform,eduNEXT/edunext-platform,ahmadio/edx-platform,ferabra/edx-platform,dsajkl/reqiop,TeachAtTUM/edx-platform,inares/edx-platform,wwj718/edx-platform,TeachAtTUM/edx-platform,Ayub-Khan/edx-platform,LICEF/edx-platform,antoviaque/edx-platform,longmen21/edx-platform,defance/edx-platform,fintech-circle/edx-platform,SivilTaram/edx-platform,leansoft/edx-platform,martynovp/edx-platform,hamzehd/edx-platform,zerobatu/edx-platform,ahmedaljazzar/edx-platform,franosincic/edx-platform,xingyepei/edx-platform,ampax/edx-p
latform,kmoocdev2/edx-platform,DefyVentures/edx-platform,prarthitm/edxplatform,cognitiveclass/edx-platform,adoosii/edx-platform,Edraak/edx-platform,carsongee/edx-platform,msegado/edx-platform,jazkarta/edx-platform,jswope00/GAI,inares/edx-platform,motion2015/a3,eestay/edx-platform,Endika/edx-platform,pabloborrego93/edx-platform,kursitet/edx-platform,abdoosh00/edx-rtl-final,peterm-itr/edx-platform,jonathan-beard/edx-platform,itsjeyd/edx-platform,peterm-itr/edx-platform,wwj718/ANALYSE,wwj718/edx-platform,jelugbo/tundex,amir-qayyum-khan/edx-platform,eduNEXT/edunext-platform,jbzdak/edx-platform,prarthitm/edxplatform,y12uc231/edx-platform,xuxiao19910803/edx,appliedx/edx-platform,kamalx/edx-platform,jamesblunt/edx-platform,bigdatauniversity/edx-platform,MakeHer/edx-platform,polimediaupv/edx-platform,rhndg/openedx,jamiefolsom/edx-platform,MakeHer/edx-platform,AkA84/edx-platform,atsolakid/edx-platform,Kalyzee/edx-platform,Lektorium-LLC/edx-platform,xuxiao19910803/edx-platform,IONISx/edx-platform,mcgachey/edx-platform,mjirayu/sit_academy,jazztpt/edx-platform,wwj718/ANALYSE,ahmadio/edx-platform,jzoldak/edx-platform,lduarte1991/edx-platform,procangroup/edx-platform,shubhdev/openedx,motion2015/edx-platform,cselis86/edx-platform,chand3040/cloud_that,hamzehd/edx-platform,tiagochiavericosta/edx-platform,zubair-arbi/edx-platform,teltek/edx-platform,y12uc231/edx-platform,nttks/edx-platform,ahmadiga/min_edx,cognitiveclass/edx-platform,nikolas/edx-platform,shubhdev/edxOnBaadal,utecuy/edx-platform,jazztpt/edx-platform,zubair-arbi/edx-platform,ahmadio/edx-platform,antonve/s4-project-mooc,shashank971/edx-platform,jelugbo/tundex,lduarte1991/edx-platform,waheedahmed/edx-platform,UOMx/edx-platform,dcosentino/edx-platform,benpatterson/edx-platform,sudheerchintala/LearnEraPlatForm,SivilTaram/edx-platform,inares/edx-platform,wwj718/edx-platform,pelikanchik/edx-platform,mbareta/edx-platform-ft,kmoocdev/edx-platform,kxliugang/edx-platform,benpatterson/edx-platform,eduNEXT/edx-platform,pelikanchik
/edx-platform,pku9104038/edx-platform,synergeticsedx/deployment-wipro,IONISx/edx-platform,motion2015/edx-platform,JCBarahona/edX,kmoocdev2/edx-platform,nanolearningllc/edx-platform-cypress-2,jazztpt/edx-platform,UOMx/edx-platform,DNFcode/edx-platform,kxliugang/edx-platform,iivic/BoiseStateX,apigee/edx-platform,Endika/edx-platform,JCBarahona/edX,EDUlib/edx-platform,chudaol/edx-platform,beni55/edx-platform,rismalrv/edx-platform,xinjiguaike/edx-platform,mcgachey/edx-platform,MSOpenTech/edx-platform,dkarakats/edx-platform,nttks/jenkins-test,analyseuc3m/ANALYSE-v1,nikolas/edx-platform,dsajkl/reqiop,jolyonb/edx-platform,solashirai/edx-platform,nikolas/edx-platform,y12uc231/edx-platform,zofuthan/edx-platform,shashank971/edx-platform,edry/edx-platform,zubair-arbi/edx-platform,ESOedX/edx-platform,cpennington/edx-platform,halvertoluke/edx-platform,itsjeyd/edx-platform,Unow/edx-platform,kxliugang/edx-platform,antoviaque/edx-platform,bitifirefly/edx-platform,CredoReference/edx-platform,shurihell/testasia,amir-qayyum-khan/edx-platform,ZLLab-Mooc/edx-platform,SravanthiSinha/edx-platform,olexiim/edx-platform,ovnicraft/edx-platform,jelugbo/tundex,bdero/edx-platform,kamalx/edx-platform,abdoosh00/edx-rtl-final,mjg2203/edx-platform-seas,proversity-org/edx-platform,polimediaupv/edx-platform,morenopc/edx-platform,jbzdak/edx-platform,SravanthiSinha/edx-platform,leansoft/edx-platform,hamzehd/edx-platform,DefyVentures/edx-platform,pku9104038/edx-platform,caesar2164/edx-platform,TeachAtTUM/edx-platform,halvertoluke/edx-platform,edx-solutions/edx-platform,zofuthan/edx-platform,andyzsf/edx,fly19890211/edx-platform,cyanna/edx-platform,jruiperezv/ANALYSE,jbzdak/edx-platform,cognitiveclass/edx-platform,jelugbo/tundex,angelapper/edx-platform,SivilTaram/edx-platform,defance/edx-platform,nanolearningllc/edx-platform-cypress,zofuthan/edx-platform,xuxiao19910803/edx,angelapper/edx-platform,naresh21/synergetics-edx-platform,pepeportela/edx-platform,nanolearningllc/edx-platform-cypress-2,Semi-global/ed
x-platform,iivic/BoiseStateX,y12uc231/edx-platform,a-parhom/edx-platform,ampax/edx-platform-backup,hkawasaki/kawasaki-aio8-0,Ayub-Khan/edx-platform,msegado/edx-platform,wwj718/edx-platform,jswope00/GAI,shubhdev/edxOnBaadal,CredoReference/edx-platform,waheedahmed/edx-platform,edx/edx-platform,ampax/edx-platform,rue89-tech/edx-platform,cecep-edu/edx-platform,jbzdak/edx-platform,shabab12/edx-platform,simbs/edx-platform,Shrhawk/edx-platform,andyzsf/edx,playm2mboy/edx-platform,halvertoluke/edx-platform,Unow/edx-platform,leansoft/edx-platform,inares/edx-platform,arifsetiawan/edx-platform,jazkarta/edx-platform-for-isc,jolyonb/edx-platform,pabloborrego93/edx-platform,playm2mboy/edx-platform,edx/edx-platform,abdoosh00/edraak,mushtaqak/edx-platform,chand3040/cloud_that,msegado/edx-platform,hkawasaki/kawasaki-aio8-0,Kalyzee/edx-platform,utecuy/edx-platform,hmcmooc/muddx-platform,chauhanhardik/populo_2,ESOedX/edx-platform,zerobatu/edx-platform,shurihell/testasia,rue89-tech/edx-platform,MSOpenTech/edx-platform,don-github/edx-platform,UOMx/edx-platform,devs1991/test_edx_docmode,vasyarv/edx-platform,jazztpt/edx-platform,tiagochiavericosta/edx-platform,louyihua/edx-platform,dcosentino/edx-platform,hmcmooc/muddx-platform,IONISx/edx-platform,rismalrv/edx-platform,RPI-OPENEDX/edx-platform,fintech-circle/edx-platform,mitocw/edx-platform,raccoongang/edx-platform,shubhdev/edx-platform,vikas1885/test1,RPI-OPENEDX/edx-platform,DefyVentures/edx-platform,shubhdev/edxOnBaadal,etzhou/edx-platform,antonve/s4-project-mooc,nttks/jenkins-test,jruiperezv/ANALYSE,cecep-edu/edx-platform,cecep-edu/edx-platform,amir-qayyum-khan/edx-platform,dkarakats/edx-platform,jswope00/griffinx,UXE/local-edx,CredoReference/edx-platform,cyanna/edx-platform,martynovp/edx-platform,mjirayu/sit_academy,fly19890211/edx-platform,Edraak/edx-platform,Shrhawk/edx-platform,ahmedaljazzar/edx-platform,bdero/edx-platform,zadgroup/edx-platform,dsajkl/123,rismalrv/edx-platform,hkawasaki/kawasaki-aio8-2,gsehub/edx-platform,JioEducat
ion/edx-platform,philanthropy-u/edx-platform,mtlchun/edx,jzoldak/edx-platform,ahmedaljazzar/edx-platform,jbassen/edx-platform,jamesblunt/edx-platform,ahmadiga/min_edx,analyseuc3m/ANALYSE-v1,AkA84/edx-platform,gymnasium/edx-platform,nttks/edx-platform,stvstnfrd/edx-platform,10clouds/edx-platform,dcosentino/edx-platform,dsajkl/123,beacloudgenius/edx-platform,AkA84/edx-platform,openfun/edx-platform,raccoongang/edx-platform,kmoocdev/edx-platform,nanolearning/edx-platform,motion2015/a3,Semi-global/edx-platform,vikas1885/test1,hmcmooc/muddx-platform,nttks/jenkins-test,rue89-tech/edx-platform,ak2703/edx-platform,eestay/edx-platform,xuxiao19910803/edx,nanolearningllc/edx-platform-cypress,tanmaykm/edx-platform,4eek/edx-platform,yokose-ks/edx-platform,romain-li/edx-platform,rhndg/openedx,ampax/edx-platform-backup,ubc/edx-platform,bigdatauniversity/edx-platform,fly19890211/edx-platform,bitifirefly/edx-platform,nanolearning/edx-platform,zerobatu/edx-platform,ubc/edx-platform,a-parhom/edx-platform,alexthered/kienhoc-platform,marcore/edx-platform,RPI-OPENEDX/edx-platform,msegado/edx-platform,edx-solutions/edx-platform,hkawasaki/kawasaki-aio8-1,WatanabeYasumasa/edx-platform,BehavioralInsightsTeam/edx-platform,ahmadio/edx-platform,bigdatauniversity/edx-platform,gymnasium/edx-platform,pepeportela/edx-platform,carsongee/edx-platform,yokose-ks/edx-platform,cpennington/edx-platform,polimediaupv/edx-platform,kxliugang/edx-platform,jazkarta/edx-platform-for-isc,rhndg/openedx,zadgroup/edx-platform,auferack08/edx-platform,vikas1885/test1,defance/edx-platform,edry/edx-platform,appliedx/edx-platform,atsolakid/edx-platform,naresh21/synergetics-edx-platform,Ayub-Khan/edx-platform,alexthered/kienhoc-platform,playm2mboy/edx-platform,etzhou/edx-platform,zhenzhai/edx-platform,nanolearningllc/edx-platform-cypress,marcore/edx-platform,pabloborrego93/edx-platform,wwj718/edx-platform,miptliot/edx-platform,rismalrv/edx-platform,sameetb-cuelogic/edx-platform-test,caesar2164/edx-platform,shubhdev/openedx
,halvertoluke/edx-platform,mtlchun/edx,tanmaykm/edx-platform,doismellburning/edx-platform,appsembler/edx-platform,marcore/edx-platform,mcgachey/edx-platform,DNFcode/edx-platform,cecep-edu/edx-platform,bitifirefly/edx-platform,BehavioralInsightsTeam/edx-platform,kmoocdev/edx-platform,vismartltd/edx-platform,kmoocdev2/edx-platform,valtech-mooc/edx-platform,fly19890211/edx-platform,kursitet/edx-platform,fintech-circle/edx-platform,pepeportela/edx-platform,IndonesiaX/edx-platform,atsolakid/edx-platform,UXE/local-edx,Softmotions/edx-platform,cyanna/edx-platform,gymnasium/edx-platform,xuxiao19910803/edx-platform,CourseTalk/edx-platform,caesar2164/edx-platform,Livit/Livit.Learn.EdX,Kalyzee/edx-platform,jonathan-beard/edx-platform,eemirtekin/edx-platform,Lektorium-LLC/edx-platform,xinjiguaike/edx-platform,cselis86/edx-platform,Edraak/edx-platform,Edraak/edraak-platform,ZLLab-Mooc/edx-platform,beacloudgenius/edx-platform,sudheerchintala/LearnEraPlatForm,vasyarv/edx-platform,Edraak/edraak-platform,morenopc/edx-platform,solashirai/edx-platform,antoviaque/edx-platform,cognitiveclass/edx-platform,valtech-mooc/edx-platform,y12uc231/edx-platform,shubhdev/edx-platform,MakeHer/edx-platform,zubair-arbi/edx-platform,hkawasaki/kawasaki-aio8-1,chauhanhardik/populo,mitocw/edx-platform,deepsrijit1105/edx-platform,IndonesiaX/edx-platform,kmoocdev/edx-platform,Unow/edx-platform,jamesblunt/edx-platform,gsehub/edx-platform,zhenzhai/edx-platform,devs1991/test_edx_docmode,tiagochiavericosta/edx-platform,Stanford-Online/edx-platform,eduNEXT/edx-platform,xuxiao19910803/edx,jamiefolsom/edx-platform,devs1991/test_edx_docmode,arbrandes/edx-platform,xingyepei/edx-platform,jzoldak/edx-platform,martynovp/edx-platform,chrisndodge/edx-platform,nanolearning/edx-platform,philanthropy-u/edx-platform,jjmiranda/edx-platform,mtlchun/edx,nanolearningllc/edx-platform-cypress-2,hastexo/edx-platform,olexiim/edx-platform,zofuthan/edx-platform,mtlchun/edx,mcgachey/edx-platform,ak2703/edx-platform,MSOpenTech/edx-plat
form,Shrhawk/edx-platform,appsembler/edx-platform,ak2703/edx-platform,shashank971/edx-platform,B-MOOC/edx-platform,raccoongang/edx-platform,Lektorium-LLC/edx-platform,Edraak/circleci-edx-platform,adoosii/edx-platform,hkawasaki/kawasaki-aio8-2,nanolearningllc/edx-platform-cypress-2,Ayub-Khan/edx-platform,xinjiguaike/edx-platform,CourseTalk/edx-platform,mahendra-r/edx-platform,nttks/jenkins-test,teltek/edx-platform,ferabra/edx-platform,auferack08/edx-platform,ampax/edx-platform-backup,andyzsf/edx,eemirtekin/edx-platform,Endika/edx-platform,Softmotions/edx-platform,bitifirefly/edx-platform,kursitet/edx-platform,jruiperezv/ANALYSE,marcore/edx-platform,jjmiranda/edx-platform,ubc/edx-platform,abdoosh00/edraak,knehez/edx-platform,vismartltd/edx-platform,sudheerchintala/LearnEraPlatForm,zadgroup/edx-platform,chudaol/edx-platform,Edraak/edx-platform,hastexo/edx-platform,CourseTalk/edx-platform,B-MOOC/edx-platform,shabab12/edx-platform,mjirayu/sit_academy,xinjiguaike/edx-platform,eestay/edx-platform,arifsetiawan/edx-platform,UXE/local-edx,jazkarta/edx-platform,torchingloom/edx-platform,morenopc/edx-platform,jazkarta/edx-platform-for-isc,OmarIthawi/edx-platform,cyanna/edx-platform,iivic/BoiseStateX,jolyonb/edx-platform,solashirai/edx-platform,unicri/edx-platform,waheedahmed/edx-platform,shurihell/testasia,devs1991/test_edx_docmode,louyihua/edx-platform,mushtaqak/edx-platform,OmarIthawi/edx-platform,wwj718/ANALYSE,dsajkl/123,yokose-ks/edx-platform,auferack08/edx-platform,gsehub/edx-platform,ovnicraft/edx-platform,analyseuc3m/ANALYSE-v1,solashirai/edx-platform,doismellburning/edx-platform,hmcmooc/muddx-platform,chudaol/edx-platform,adoosii/edx-platform,Edraak/circleci-edx-platform,AkA84/edx-platform,kxliugang/edx-platform,chauhanhardik/populo,nagyistoce/edx-platform,tiagochiavericosta/edx-platform,Semi-global/edx-platform,martynovp/edx-platform,prarthitm/edxplatform,chauhanhardik/populo_2,dkarakats/edx-platform,Kalyzee/edx-platform,jamiefolsom/edx-platform,ZLLab-Mooc/edx-platfor
m,EDUlib/edx-platform,adoosii/edx-platform,mahendra-r/edx-platform,mcgachey/edx-platform,OmarIthawi/edx-platform,unicri/edx-platform,pelikanchik/edx-platform,MakeHer/edx-platform,doganov/edx-platform,leansoft/edx-platform,stvstnfrd/edx-platform,ZLLab-Mooc/edx-platform,eestay/edx-platform,xingyepei/edx-platform,chauhanhardik/populo_2,Lektorium-LLC/edx-platform,deepsrijit1105/edx-platform,ZLLab-Mooc/edx-platform,zhenzhai/edx-platform,inares/edx-platform,jjmiranda/edx-platform,kmoocdev/edx-platform,BehavioralInsightsTeam/edx-platform,cpennington/edx-platform,bitifirefly/edx-platform,vismartltd/edx-platform,pomegranited/edx-platform,utecuy/edx-platform,mahendra-r/edx-platform,abdoosh00/edx-rtl-final,WatanabeYasumasa/edx-platform,nagyistoce/edx-platform,Ayub-Khan/edx-platform,vasyarv/edx-platform,mtlchun/edx,LearnEra/LearnEraPlaftform,hkawasaki/kawasaki-aio8-0,nagyistoce/edx-platform,devs1991/test_edx_docmode,mjirayu/sit_academy,abdoosh00/edraak,Livit/Livit.Learn.EdX,franosincic/edx-platform,don-github/edx-platform,LICEF/edx-platform,waheedahmed/edx-platform,arbrandes/edx-platform,naresh21/synergetics-edx-platform,tiagochiavericosta/edx-platform,ahmadiga/min_edx,solashirai/edx-platform,tanmaykm/edx-platform,valtech-mooc/edx-platform,appsembler/edx-platform,Softmotions/edx-platform,dsajkl/reqiop,chrisndodge/edx-platform,rismalrv/edx-platform,amir-qayyum-khan/edx-platform,ampax/edx-platform,RPI-OPENEDX/edx-platform,jswope00/griffinx,cselis86/edx-platform,wwj718/ANALYSE,shabab12/edx-platform,dcosentino/edx-platform,SivilTaram/edx-platform,xuxiao19910803/edx-platform,hkawasaki/kawasaki-aio8-2,shashank971/edx-platform,apigee/edx-platform,devs1991/test_edx_docmode,don-github/edx-platform,knehez/edx-platform,beacloudgenius/edx-platform,eestay/edx-platform | """
Generate sql commands to fix truncated anonymous student ids in the ORA database
"""
import sys
from django.core.management.base import NoArgsCommand
from student.models import AnonymousUserId, anonymous_id_for_user
class Command(NoArgsCommand):
help = __doc__
def handle_noargs(self, **options):
"""
Reads a list of ids (newline separated) from stdin, and
dumps sql queries to run on the ORA database to fix those ids
from their truncated form to the full 32 character change.
The following query will generate the list of ids needed to be fixed
from the ORA database:
SELECT student_id FROM peer_grading_calibrationhistory WHERE LENGTH(student_id) = 16
UNION SELECT student_id FROM controller_submission WHERE LENGTH(student_id) = 16
UNION SELECT student_id FROM metrics_timing WHERE LENGTH(student_id) = 16
UNION SELECT student_id FROM metrics_studentcourseprofile WHERE LENGTH(student_id) = 16
UNION SELECT student_id FROM metrics_studentprofile WHERE LENGTH(student_id) = 16;
"""
ids = [line.strip() for line in sys.stdin]
old_ids = AnonymousUserId.objects.raw(
"""
SELECT *
FROM student_anonymoususerid_temp_archive
WHERE anonymous_user_id IN ({})
""".format(','.join(['%s']*len(ids))),
ids
)
for old_id in old_ids:
new_id = anonymous_id_for_user(old_id.user, old_id.course_id)
for table in ('peer_grading_calibrationhistory', 'controller_submission', 'metrics_timing'):
self.stdout.write(
"UPDATE {} "
"SET student_id = '{}' "
"WHERE student_id = '{}';\n".format(
table,
new_id,
old_id.anonymous_user_id,
)
)
self.stdout.write(
"DELETE FROM metrics_studentcourseprofile "
"WHERE student_id = '{}' "
"AND problems_attempted = 0;\n".format(old_id.anonymous_user_id)
)
self.stdout.write(
"DELETE FROM metrics_studentprofile "
"WHERE student_id = '{}' "
"AND messages_sent = 0 "
"AND messages_received = 0 "
"AND average_message_feedback_length = 0 "
"AND student_is_staff_banned = 0 "
"AND student_cannot_submit_more_for_peer_grading = 0;\n".format(old_id.anonymous_user_id)
)
| Add managemant command to generate sql to clean up tp truncated student ids in ORA db
|
|
52189e2161e92b36df47a04c2150dff38f81f5e9 | tests/unit/tests/test_activations.py | tests/unit/tests/test_activations.py | from unittest import mock
from django.test import TestCase
from viewflow import activation, flow
from viewflow.models import Task
class TestActivations(TestCase):
def test_start_activation_lifecycle(self):
flow_task_mock = mock.Mock(spec=flow.Start())
act = activation.StartActivation()
act.initialize(flow_task_mock)
act.prepare()
act.done()
act.task.prepare.assert_called_once_with()
act.task.done.assert_called_once_with()
act.process.start.assert_called_once_with()
flow_task_mock.activate_next.assert_any_call(act)
def test_view_activation_activate(self):
flow_task_mock = mock.Mock(spec=flow.View(lambda *args, **kwargs: None))
prev_activation_mock = mock.Mock(spec=activation.StartActivation())
act = activation.ViewActivation.activate(flow_task_mock, prev_activation_mock)
act.task.save.assert_has_calls(())
def test_view_activation_lifecycle(self):
flow_task_mock = mock.Mock(spec=flow.View(lambda *args, **kwargs: None))
task_mock = mock.Mock(spec=Task())
act = activation.ViewActivation()
act.initialize(flow_task_mock, task_mock)
act.prepare()
act.done()
act.task.prepare.assert_called_once_with()
act.task.done.assert_called_once_with()
flow_task_mock.activate_next.assert_any_call(act)
| Add mocked tests for activation | Add mocked tests for activation
| Python | agpl-3.0 | pombredanne/viewflow,ribeiro-ucl/viewflow,codingjoe/viewflow,codingjoe/viewflow,pombredanne/viewflow,viewflow/viewflow,viewflow/viewflow,viewflow/viewflow,ribeiro-ucl/viewflow,codingjoe/viewflow,ribeiro-ucl/viewflow | from unittest import mock
from django.test import TestCase
from viewflow import activation, flow
from viewflow.models import Task
class TestActivations(TestCase):
def test_start_activation_lifecycle(self):
flow_task_mock = mock.Mock(spec=flow.Start())
act = activation.StartActivation()
act.initialize(flow_task_mock)
act.prepare()
act.done()
act.task.prepare.assert_called_once_with()
act.task.done.assert_called_once_with()
act.process.start.assert_called_once_with()
flow_task_mock.activate_next.assert_any_call(act)
def test_view_activation_activate(self):
flow_task_mock = mock.Mock(spec=flow.View(lambda *args, **kwargs: None))
prev_activation_mock = mock.Mock(spec=activation.StartActivation())
act = activation.ViewActivation.activate(flow_task_mock, prev_activation_mock)
act.task.save.assert_has_calls(())
def test_view_activation_lifecycle(self):
flow_task_mock = mock.Mock(spec=flow.View(lambda *args, **kwargs: None))
task_mock = mock.Mock(spec=Task())
act = activation.ViewActivation()
act.initialize(flow_task_mock, task_mock)
act.prepare()
act.done()
act.task.prepare.assert_called_once_with()
act.task.done.assert_called_once_with()
flow_task_mock.activate_next.assert_any_call(act)
| Add mocked tests for activation
|
|
c78c4b4bd56453fe1f3a7db71222c12336c2dcf5 | future/tests/test_str_is_unicode.py | future/tests/test_str_is_unicode.py | from __future__ import absolute_import
from future import str_is_unicode
import unittest
class TestIterators(unittest.TestCase):
def test_str(self):
self.assertIsNot(str, bytes) # Py2: assertIsNot only in 2.7
self.assertEqual(str('blah'), u'blah') # Py3.3 and Py2 only
unittest.main()
| Add tests for str_is_unicode module | Add tests for str_is_unicode module
| Python | mit | michaelpacer/python-future,michaelpacer/python-future,krischer/python-future,QuLogic/python-future,QuLogic/python-future,PythonCharmers/python-future,PythonCharmers/python-future,krischer/python-future | from __future__ import absolute_import
from future import str_is_unicode
import unittest
class TestIterators(unittest.TestCase):
def test_str(self):
self.assertIsNot(str, bytes) # Py2: assertIsNot only in 2.7
self.assertEqual(str('blah'), u'blah') # Py3.3 and Py2 only
unittest.main()
| Add tests for str_is_unicode module
|
|
83e0394dc837e55a3ed544e54f6e84954f9311b0 | onepercentclub/settings/travis.py | onepercentclub/settings/travis.py | # TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'firefox'
ROOT_URLCONF = 'onepercentclub.urls'
| # TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'remote'
SELENIUM_TESTS = False
ROOT_URLCONF = 'onepercentclub.urls'
| Disable front end tests on Travis for now. | Disable front end tests on Travis for now.
| Python | bsd-3-clause | onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site | # TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'remote'
SELENIUM_TESTS = False
ROOT_URLCONF = 'onepercentclub.urls'
| Disable front end tests on Travis for now.
# TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'firefox'
ROOT_URLCONF = 'onepercentclub.urls'
|
1c397202b6df7b62cbd22509ee7cc366c2c09d6c | setup.py | setup.py | try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='debexpo',
version="",
#description='',
#author='',
#author_email='',
#url='',
install_requires=[
"Pylons>=1.0",
"SQLAlchemy>=0.6",
"Webhelpers>=0.6.1",
"Babel>=0.9.6",
"ZSI",
"python-debian==0.1.16",
"soaplib==0.8.1"],
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
test_suite='nose.collector',
package_data={'debexpo': ['i18n/*/LC_MESSAGES/*.mo']},
message_extractors = {'debexpo': [
('**.py', 'python', None),
('templates/**.mako', 'mako', None),
('public/**', 'ignore', None)]},
entry_points="""
[paste.app_factory]
main = debexpo.config.middleware:make_app
[paste.app_install]
main = pylons.util:PylonsInstaller
[console_scripts]
debexpo-importer = debexpo.scripts.debexpo_importer:main
debexpo-user-importer = debexpo.scripts.user_importer:main
""",
)
| try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='debexpo',
version="",
#description='',
#author='',
#author_email='',
#url='',
install_requires=[
"Pylons>=1.0",
"SQLAlchemy>=0.6",
"Webhelpers>=0.6.1",
"Babel>=0.9.6",
"ZSI",
"python-debian>=0.1.16",
"soaplib==0.8.1"],
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
test_suite='nose.collector',
package_data={'debexpo': ['i18n/*/LC_MESSAGES/*.mo']},
message_extractors = {'debexpo': [
('**.py', 'python', None),
('templates/**.mako', 'mako', None),
('public/**', 'ignore', None)]},
entry_points="""
[paste.app_factory]
main = debexpo.config.middleware:make_app
[paste.app_install]
main = pylons.util:PylonsInstaller
[console_scripts]
debexpo-importer = debexpo.scripts.debexpo_importer:main
debexpo-user-importer = debexpo.scripts.user_importer:main
""",
)
| Make library dependencies python-debian a bit more sane | Make library dependencies python-debian a bit more sane
| Python | mit | jadonk/debexpo,jonnylamb/debexpo,jadonk/debexpo,jonnylamb/debexpo,swvist/Debexpo,jadonk/debexpo,swvist/Debexpo,swvist/Debexpo,jonnylamb/debexpo | try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='debexpo',
version="",
#description='',
#author='',
#author_email='',
#url='',
install_requires=[
"Pylons>=1.0",
"SQLAlchemy>=0.6",
"Webhelpers>=0.6.1",
"Babel>=0.9.6",
"ZSI",
"python-debian>=0.1.16",
"soaplib==0.8.1"],
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
test_suite='nose.collector',
package_data={'debexpo': ['i18n/*/LC_MESSAGES/*.mo']},
message_extractors = {'debexpo': [
('**.py', 'python', None),
('templates/**.mako', 'mako', None),
('public/**', 'ignore', None)]},
entry_points="""
[paste.app_factory]
main = debexpo.config.middleware:make_app
[paste.app_install]
main = pylons.util:PylonsInstaller
[console_scripts]
debexpo-importer = debexpo.scripts.debexpo_importer:main
debexpo-user-importer = debexpo.scripts.user_importer:main
""",
)
| Make library dependencies python-debian a bit more sane
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='debexpo',
version="",
#description='',
#author='',
#author_email='',
#url='',
install_requires=[
"Pylons>=1.0",
"SQLAlchemy>=0.6",
"Webhelpers>=0.6.1",
"Babel>=0.9.6",
"ZSI",
"python-debian==0.1.16",
"soaplib==0.8.1"],
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
test_suite='nose.collector',
package_data={'debexpo': ['i18n/*/LC_MESSAGES/*.mo']},
message_extractors = {'debexpo': [
('**.py', 'python', None),
('templates/**.mako', 'mako', None),
('public/**', 'ignore', None)]},
entry_points="""
[paste.app_factory]
main = debexpo.config.middleware:make_app
[paste.app_install]
main = pylons.util:PylonsInstaller
[console_scripts]
debexpo-importer = debexpo.scripts.debexpo_importer:main
debexpo-user-importer = debexpo.scripts.user_importer:main
""",
)
|
78821f2df84bbb822e076fb1591dfccc09bcb43c | cpm_data/migrations/0004_add_seasons_data.py | cpm_data/migrations/0004_add_seasons_data.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-08-27 22:21
from __future__ import unicode_literals
from django.db import migrations
def _get_seasons():
return '2012 2013 2014 2015 2016 2017'.split()
def add_seasons(apps, schema_editor):
Season = apps.get_model('cpm_data.Season')
Season.objects.bulk_create(
[Season(name_en=s, name_be=s, name_ru=s) for s in _get_seasons()]
)
def remove_seasons(apps, schema_editor):
Season = apps.get_model('cpm_data.Season')
Season.objects.delete(name_en__in=_get_seasons())
class Migration(migrations.Migration):
dependencies = [
('cpm_data', '0003_seasonrelatedjurymember_seasonrelatedpartner'),
]
operations = [
migrations.RunPython(add_seasons, remove_seasons),
]
| Add migrations for adding seasons | Add migrations for adding seasons
| Python | unlicense | kinaklub/next.filmfest.by,nott/next.filmfest.by,nott/next.filmfest.by,nott/next.filmfest.by,kinaklub/next.filmfest.by,kinaklub/next.filmfest.by,kinaklub/next.filmfest.by,nott/next.filmfest.by | # -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-08-27 22:21
from __future__ import unicode_literals
from django.db import migrations
def _get_seasons():
return '2012 2013 2014 2015 2016 2017'.split()
def add_seasons(apps, schema_editor):
Season = apps.get_model('cpm_data.Season')
Season.objects.bulk_create(
[Season(name_en=s, name_be=s, name_ru=s) for s in _get_seasons()]
)
def remove_seasons(apps, schema_editor):
Season = apps.get_model('cpm_data.Season')
Season.objects.delete(name_en__in=_get_seasons())
class Migration(migrations.Migration):
dependencies = [
('cpm_data', '0003_seasonrelatedjurymember_seasonrelatedpartner'),
]
operations = [
migrations.RunPython(add_seasons, remove_seasons),
]
| Add migrations for adding seasons
|
|
5e9c6c527902fd8361391f111a88a8f4b4ce71df | aospy/proj.py | aospy/proj.py | """proj.py: aospy.Proj class for organizing work in single project."""
import time
from .utils import dict_name_keys
class Proj(object):
"""Project parameters: models, regions, directories, etc."""
def __init__(self, name, vars={}, models={}, default_models={}, regions={},
direc_out='', nc_dir_struc=False, verbose=True):
self.verbose = verbose
if self.verbose:
print ("Initializing Project instance: %s (%s)"
% (name, time.ctime()))
self.name = name
self.direc_out = direc_out
self.nc_dir_struc = nc_dir_struc
self.vars = dict_name_keys(vars)
if models:
self.models = dict_name_keys(models)
else:
self.models = {}
if default_models == 'all':
self.default_models = self.models
elif default_models:
self.default_models = dict_name_keys(default_models)
else:
self.default_models = {}
if regions:
self.regions = dict_name_keys(regions)
else:
self.regions = {}
for obj_dict in (self.vars, self.models, self.regions):
for obj in obj_dict.values():
setattr(obj, 'proj', self)
def __str__(self):
return 'Project instance "' + self.name + '"'
__repr__ = __str__
| """proj.py: aospy.Proj class for organizing work in single project."""
import time
from .utils import dict_name_keys
class Proj(object):
"""Project parameters: models, regions, directories, etc."""
def __init__(self, name, vars={}, models={}, default_models={}, regions={},
direc_out='', nc_dir_struc=False, verbose=True):
self.verbose = verbose
if self.verbose:
print ("Initializing Project instance: %s (%s)"
% (name, time.ctime()))
self.name = name
self.direc_out = direc_out
self.nc_dir_struc = nc_dir_struc
if models:
self.models = dict_name_keys(models)
else:
self.models = {}
if default_models == 'all':
self.default_models = self.models
elif default_models:
self.default_models = dict_name_keys(default_models)
else:
self.default_models = {}
if regions:
self.regions = dict_name_keys(regions)
else:
self.regions = {}
for obj_dict in (self.models, self.regions):
for obj in obj_dict.values():
setattr(obj, 'proj', self)
def __str__(self):
return 'Project instance "' + self.name + '"'
__repr__ = __str__
| Delete unnecessary vars attr of Proj | Delete unnecessary vars attr of Proj
| Python | apache-2.0 | spencerkclark/aospy,spencerahill/aospy | """proj.py: aospy.Proj class for organizing work in single project."""
import time
from .utils import dict_name_keys
class Proj(object):
"""Project parameters: models, regions, directories, etc."""
def __init__(self, name, vars={}, models={}, default_models={}, regions={},
direc_out='', nc_dir_struc=False, verbose=True):
self.verbose = verbose
if self.verbose:
print ("Initializing Project instance: %s (%s)"
% (name, time.ctime()))
self.name = name
self.direc_out = direc_out
self.nc_dir_struc = nc_dir_struc
if models:
self.models = dict_name_keys(models)
else:
self.models = {}
if default_models == 'all':
self.default_models = self.models
elif default_models:
self.default_models = dict_name_keys(default_models)
else:
self.default_models = {}
if regions:
self.regions = dict_name_keys(regions)
else:
self.regions = {}
for obj_dict in (self.models, self.regions):
for obj in obj_dict.values():
setattr(obj, 'proj', self)
def __str__(self):
return 'Project instance "' + self.name + '"'
__repr__ = __str__
| Delete unnecessary vars attr of Proj
"""proj.py: aospy.Proj class for organizing work in single project."""
import time
from .utils import dict_name_keys
class Proj(object):
"""Project parameters: models, regions, directories, etc."""
def __init__(self, name, vars={}, models={}, default_models={}, regions={},
direc_out='', nc_dir_struc=False, verbose=True):
self.verbose = verbose
if self.verbose:
print ("Initializing Project instance: %s (%s)"
% (name, time.ctime()))
self.name = name
self.direc_out = direc_out
self.nc_dir_struc = nc_dir_struc
self.vars = dict_name_keys(vars)
if models:
self.models = dict_name_keys(models)
else:
self.models = {}
if default_models == 'all':
self.default_models = self.models
elif default_models:
self.default_models = dict_name_keys(default_models)
else:
self.default_models = {}
if regions:
self.regions = dict_name_keys(regions)
else:
self.regions = {}
for obj_dict in (self.vars, self.models, self.regions):
for obj in obj_dict.values():
setattr(obj, 'proj', self)
def __str__(self):
return 'Project instance "' + self.name + '"'
__repr__ = __str__
|
cb08d632fac453403bc8b91391b14669dbe932cc | circonus/__init__.py | circonus/__init__.py | from __future__ import absolute_import
__title__ = "circonus"
__version__ = "0.0.0"
from logging import NullHandler
import logging
from circonus.client import CirconusClient
logging.getLogger(__name__).addHandler(NullHandler())
| __title__ = "circonus"
__version__ = "0.0.0"
from logging import NullHandler
import logging
from circonus.client import CirconusClient
logging.getLogger(__name__).addHandler(NullHandler())
| Remove unnecessary absolute import statement. | Remove unnecessary absolute import statement.
| Python | mit | monetate/circonus,monetate/circonus | __title__ = "circonus"
__version__ = "0.0.0"
from logging import NullHandler
import logging
from circonus.client import CirconusClient
logging.getLogger(__name__).addHandler(NullHandler())
| Remove unnecessary absolute import statement.
from __future__ import absolute_import
__title__ = "circonus"
__version__ = "0.0.0"
from logging import NullHandler
import logging
from circonus.client import CirconusClient
logging.getLogger(__name__).addHandler(NullHandler())
|
14d223068e2d8963dfe1f4e71854e9ea9c194bc5 | Datasnakes/Tools/sge/qsubber.py | Datasnakes/Tools/sge/qsubber.py | import argparse
import textwrap
from qstat import Qstat
__author__ = 'Datasnakes'
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description=textwrap.dedent('''\
This is a command line wrapper for the SGE module.
' '''))
parser.add_argument("-o", "--output", help="Qstat info output type",
required=True)
q = Qstat()
args = parser.parse_args(namespace=q)
| Set up shell argparser for sge module | Set up shell argparser for sge module
| Python | mit | datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts | import argparse
import textwrap
from qstat import Qstat
__author__ = 'Datasnakes'
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description=textwrap.dedent('''\
This is a command line wrapper for the SGE module.
' '''))
parser.add_argument("-o", "--output", help="Qstat info output type",
required=True)
q = Qstat()
args = parser.parse_args(namespace=q)
| Set up shell argparser for sge module
|
|
59927047347b7db3f46ab99152d2d99f60039043 | trac/versioncontrol/web_ui/__init__.py | trac/versioncontrol/web_ui/__init__.py | from trac.versioncontrol.web_ui.browser import *
from trac.versioncontrol.web_ui.changeset import *
from trac.versioncontrol.web_ui.log import *
| from trac.versioncontrol.web_ui.browser import *
from trac.versioncontrol.web_ui.changeset import *
from trac.versioncontrol.web_ui.log import *
| Add missing `svn:eol-style : native` prop, which prevented making clean patches against the early 0.9b1 archives (now both the .zip and the .tar.gz have CRLFs for this file) | Add missing `svn:eol-style : native` prop, which prevented making clean patches against the early 0.9b1 archives (now both the .zip and the .tar.gz have CRLFs for this file)
git-svn-id: eda3d06fcef731589ace1b284159cead3416df9b@2214 af82e41b-90c4-0310-8c96-b1721e28e2e2
| Python | bsd-3-clause | jun66j5/trac-ja,walty8/trac,netjunki/trac-Pygit2,jun66j5/trac-ja,jun66j5/trac-ja,walty8/trac,walty8/trac,jun66j5/trac-ja,walty8/trac,netjunki/trac-Pygit2,netjunki/trac-Pygit2 | from trac.versioncontrol.web_ui.browser import *
from trac.versioncontrol.web_ui.changeset import *
from trac.versioncontrol.web_ui.log import *
| Add missing `svn:eol-style : native` prop, which prevented making clean patches against the early 0.9b1 archives (now both the .zip and the .tar.gz have CRLFs for this file)
git-svn-id: eda3d06fcef731589ace1b284159cead3416df9b@2214 af82e41b-90c4-0310-8c96-b1721e28e2e2
from trac.versioncontrol.web_ui.browser import *
from trac.versioncontrol.web_ui.changeset import *
from trac.versioncontrol.web_ui.log import *
|
f2506c07caf66b3ad42f6f1c09325097edd2e169 | src/django_healthchecks/contrib.py | src/django_healthchecks/contrib.py | import uuid
from django.core.cache import cache
from django.db import connection
def check_database():
"""Check if the application can perform a dummy sql query"""
cursor = connection.cursor()
cursor.execute('SELECT 1; -- Healthcheck')
row = cursor.fetchone()
return row[0] == 1
def check_cache_default():
"""Check if the application can connect to the default cached and
read/write some dummy data.
"""
dummy = str(uuid.uuid4())
key = 'healthcheck:%s' % dummy
cache.set(key, dummy, timeout=5)
cached_value = cache.get(key)
return cached_value == dummy
def check_dummy_true():
return True
def check_dummy_false():
return False
def check_remote_addr(request):
return request.META['REMOTE_ADDR']
| import uuid
from django.core.cache import cache
from django.db import connection
def check_database():
"""Check if the application can perform a dummy sql query"""
with connection.cursor() as cursor:
cursor.execute('SELECT 1; -- Healthcheck')
row = cursor.fetchone()
return row[0] == 1
def check_cache_default():
"""Check if the application can connect to the default cached and
read/write some dummy data.
"""
dummy = str(uuid.uuid4())
key = 'healthcheck:%s' % dummy
cache.set(key, dummy, timeout=5)
cached_value = cache.get(key)
return cached_value == dummy
def check_dummy_true():
return True
def check_dummy_false():
return False
def check_remote_addr(request):
return request.META['REMOTE_ADDR']
| Make sure the cursor is properly closed after usage | Make sure the cursor is properly closed after usage
| Python | mit | mvantellingen/django-healthchecks | import uuid
from django.core.cache import cache
from django.db import connection
def check_database():
"""Check if the application can perform a dummy sql query"""
with connection.cursor() as cursor:
cursor.execute('SELECT 1; -- Healthcheck')
row = cursor.fetchone()
return row[0] == 1
def check_cache_default():
"""Check if the application can connect to the default cached and
read/write some dummy data.
"""
dummy = str(uuid.uuid4())
key = 'healthcheck:%s' % dummy
cache.set(key, dummy, timeout=5)
cached_value = cache.get(key)
return cached_value == dummy
def check_dummy_true():
return True
def check_dummy_false():
return False
def check_remote_addr(request):
return request.META['REMOTE_ADDR']
| Make sure the cursor is properly closed after usage
import uuid
from django.core.cache import cache
from django.db import connection
def check_database():
"""Check if the application can perform a dummy sql query"""
cursor = connection.cursor()
cursor.execute('SELECT 1; -- Healthcheck')
row = cursor.fetchone()
return row[0] == 1
def check_cache_default():
"""Check if the application can connect to the default cached and
read/write some dummy data.
"""
dummy = str(uuid.uuid4())
key = 'healthcheck:%s' % dummy
cache.set(key, dummy, timeout=5)
cached_value = cache.get(key)
return cached_value == dummy
def check_dummy_true():
return True
def check_dummy_false():
return False
def check_remote_addr(request):
return request.META['REMOTE_ADDR']
|
54a345eb96bce8c3035b402ce009b1e3fda46a42 | quran_text/serializers.py | quran_text/serializers.py | from rest_framework import serializers
from .models import Sura, Ayah
class SuraSerializer(serializers.ModelSerializer):
class Meta:
model = Sura
fields = ['index', 'name']
class AyahSerializer(serializers.ModelSerializer):
class Meta:
model = Ayah
fields = ['sura', 'number', 'text']
| from rest_framework import serializers
from .models import Sura, Ayah
class SuraSerializer(serializers.ModelSerializer):
class Meta:
model = Sura
fields = ['index', 'name']
class AyahSerializer(serializers.ModelSerializer):
sura_id = serializers.IntegerField(source='sura.pk')
sura_name = serializers.CharField(source='sura.name')
ayah_number = serializers.IntegerField(source='number')
class Meta:
model = Ayah
fields = ['sura_id', 'sura_name', 'ayah_number', 'text']
| Change label and add Sura name to Ayah Serlialzer | Change label and add Sura name to Ayah Serlialzer
| Python | mit | EmadMokhtar/tafseer_api | from rest_framework import serializers
from .models import Sura, Ayah
class SuraSerializer(serializers.ModelSerializer):
class Meta:
model = Sura
fields = ['index', 'name']
class AyahSerializer(serializers.ModelSerializer):
sura_id = serializers.IntegerField(source='sura.pk')
sura_name = serializers.CharField(source='sura.name')
ayah_number = serializers.IntegerField(source='number')
class Meta:
model = Ayah
fields = ['sura_id', 'sura_name', 'ayah_number', 'text']
| Change label and add Sura name to Ayah Serlialzer
from rest_framework import serializers
from .models import Sura, Ayah
class SuraSerializer(serializers.ModelSerializer):
class Meta:
model = Sura
fields = ['index', 'name']
class AyahSerializer(serializers.ModelSerializer):
class Meta:
model = Ayah
fields = ['sura', 'number', 'text']
|
e68b8146c6ae509489fde97faf10d5748904a20c | sentrylogs/helpers.py | sentrylogs/helpers.py | """
Helper functions for Sentry Logs
"""
from sentry_sdk import capture_message, configure_scope
from .conf.settings import SENTRY_LOG_LEVEL, SENTRY_LOG_LEVELS
def send_message(message, level, data):
"""Send a message to the Sentry server"""
# Only send messages for desired log level
if (SENTRY_LOG_LEVELS.index(level)
< SENTRY_LOG_LEVELS.index(SENTRY_LOG_LEVEL)):
return
with configure_scope() as scope:
for key, value in data.items():
scope.set_extra(key, value)
capture_message(message, level)
| """
Helper functions for Sentry Logs
"""
from sentry_sdk import capture_message, configure_scope
from .conf.settings import SENTRY_LOG_LEVEL, SENTRY_LOG_LEVELS
def send_message(message, level, data):
"""Send a message to the Sentry server"""
# Only send messages for desired log level
if (SENTRY_LOG_LEVELS.index(level)
< SENTRY_LOG_LEVELS.index(SENTRY_LOG_LEVEL)):
return
with configure_scope() as scope:
for key, value in data.items():
scope.set_context(key, value)
capture_message(message, level)
| Use structured context instead of additional data | Use structured context instead of additional data
Additional Data is deprecated https://docs.sentry.io/platforms/python/enriching-events/context/#additional-data
| Python | bsd-3-clause | mdgart/sentrylogs | """
Helper functions for Sentry Logs
"""
from sentry_sdk import capture_message, configure_scope
from .conf.settings import SENTRY_LOG_LEVEL, SENTRY_LOG_LEVELS
def send_message(message, level, data):
"""Send a message to the Sentry server"""
# Only send messages for desired log level
if (SENTRY_LOG_LEVELS.index(level)
< SENTRY_LOG_LEVELS.index(SENTRY_LOG_LEVEL)):
return
with configure_scope() as scope:
for key, value in data.items():
scope.set_context(key, value)
capture_message(message, level)
| Use structured context instead of additional data
Additional Data is deprecated https://docs.sentry.io/platforms/python/enriching-events/context/#additional-data
"""
Helper functions for Sentry Logs
"""
from sentry_sdk import capture_message, configure_scope
from .conf.settings import SENTRY_LOG_LEVEL, SENTRY_LOG_LEVELS
def send_message(message, level, data):
"""Send a message to the Sentry server"""
# Only send messages for desired log level
if (SENTRY_LOG_LEVELS.index(level)
< SENTRY_LOG_LEVELS.index(SENTRY_LOG_LEVEL)):
return
with configure_scope() as scope:
for key, value in data.items():
scope.set_extra(key, value)
capture_message(message, level)
|
cbe773d051168e05118774708ff7a0ce881617f4 | ganglia/settings.py | ganglia/settings.py | DEBUG = True
GANGLIA_PATH = '/usr/local/etc' # where gmetad.conf is located
API_SERVER = 'http://ganglia-api.example.com:8080' # where ganglia-api.py is hosted
BASE_URL = '/ganglia/api/v2'
LOGFILE = '/var/log/ganglia-api.log'
PIDFILE = '/var/run/ganglia-api.pid'
| DEBUG = True
GANGLIA_PATH = '/etc/ganglia' # where gmetad.conf is located
API_SERVER = 'http://ganglia-api.example.com:8080' # where ganglia-api.py is hosted
BASE_URL = '/ganglia/api/v2'
LOGFILE = '/var/log/ganglia-api.log'
PIDFILE = '/var/run/ganglia-api.pid'
| Make GANGLIA_PATH default to /etc/ganglia | Make GANGLIA_PATH default to /etc/ganglia
| Python | apache-2.0 | guardian/ganglia-api | DEBUG = True
GANGLIA_PATH = '/etc/ganglia' # where gmetad.conf is located
API_SERVER = 'http://ganglia-api.example.com:8080' # where ganglia-api.py is hosted
BASE_URL = '/ganglia/api/v2'
LOGFILE = '/var/log/ganglia-api.log'
PIDFILE = '/var/run/ganglia-api.pid'
| Make GANGLIA_PATH default to /etc/ganglia
DEBUG = True
GANGLIA_PATH = '/usr/local/etc' # where gmetad.conf is located
API_SERVER = 'http://ganglia-api.example.com:8080' # where ganglia-api.py is hosted
BASE_URL = '/ganglia/api/v2'
LOGFILE = '/var/log/ganglia-api.log'
PIDFILE = '/var/run/ganglia-api.pid'
|
df89f96113d73017a9e18964bfd456b06a2e2a6d | jsk_apc2015_common/scripts/create_mask_applied_dataset.py | jsk_apc2015_common/scripts/create_mask_applied_dataset.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import os
import re
import cv2
from jsk_recognition_utils import bounding_rect_of_mask
parser = argparse.ArgumentParser()
parser.add_argument('container_path')
args = parser.parse_args()
container_path = args.container_path
output_dir = os.path.abspath(container_path + '_mask_applied')
if not os.path.exists(output_dir):
print('creating output directory: {}'.format(output_dir))
os.mkdir(output_dir)
categs = os.listdir(container_path)
os.chdir(container_path)
for categ in categs:
os.chdir(categ)
print('processing category: {}'.format(categ))
files = os.listdir('.')
img_files = filter(lambda x: re.match('^N\d*?_\d*?.jpg', x), files)
print('found {} images'.format(len(img_files)))
categ_output_dir = os.path.join(output_dir, categ)
if not os.path.exists(categ_output_dir):
os.mkdir(categ_output_dir)
for img_file in img_files:
base, _ = os.path.splitext(img_file)
mask_file = os.path.join('masks', base + '_mask.pbm')
img = cv2.imread(img_file)
mask = cv2.imread(mask_file, 0)
applied = bounding_rect_of_mask(img, ~mask)
cv2.imwrite(os.path.join(output_dir, categ, img_file), applied)
os.chdir('..')
os.chdir('..')
| Add script to create mask applied dataset | Add script to create mask applied dataset
| Python | bsd-3-clause | pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import os
import re
import cv2
from jsk_recognition_utils import bounding_rect_of_mask
parser = argparse.ArgumentParser()
parser.add_argument('container_path')
args = parser.parse_args()
container_path = args.container_path
output_dir = os.path.abspath(container_path + '_mask_applied')
if not os.path.exists(output_dir):
print('creating output directory: {}'.format(output_dir))
os.mkdir(output_dir)
categs = os.listdir(container_path)
os.chdir(container_path)
for categ in categs:
os.chdir(categ)
print('processing category: {}'.format(categ))
files = os.listdir('.')
img_files = filter(lambda x: re.match('^N\d*?_\d*?.jpg', x), files)
print('found {} images'.format(len(img_files)))
categ_output_dir = os.path.join(output_dir, categ)
if not os.path.exists(categ_output_dir):
os.mkdir(categ_output_dir)
for img_file in img_files:
base, _ = os.path.splitext(img_file)
mask_file = os.path.join('masks', base + '_mask.pbm')
img = cv2.imread(img_file)
mask = cv2.imread(mask_file, 0)
applied = bounding_rect_of_mask(img, ~mask)
cv2.imwrite(os.path.join(output_dir, categ, img_file), applied)
os.chdir('..')
os.chdir('..')
| Add script to create mask applied dataset
|
|
bd2f5a6c62e446fc8b720b94e75313b5117767cb | trac/upgrades/db11.py | trac/upgrades/db11.py | import os.path
import shutil
sql = """
-- Remove empty values from the milestone list
DELETE FROM milestone WHERE COALESCE(name,'')='';
-- Add a description column to the version table, and remove unnamed versions
CREATE TEMP TABLE version_old AS SELECT * FROM version;
DROP TABLE version;
CREATE TABLE version (
name text PRIMARY KEY,
time integer,
description text
);
INSERT INTO version(name,time,description)
SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>'';
-- Add a description column to the component table, and remove unnamed components
CREATE TEMP TABLE component_old AS SELECT * FROM component;
DROP TABLE component;
CREATE TABLE component (
name text PRIMARY KEY,
owner text,
description text
);
INSERT INTO component(name,owner,description)
SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>'';
"""
def do_upgrade(env, ver, cursor):
cursor.execute(sql)
# Copy the new default wiki macros over to the environment
from trac.siteconfig import __default_macro_dir__ as macro_dir
for f in os.listdir(macro_dir):
if not f.endswith('.py'):
continue
src = os.path.join(macro_dir, f)
dst = os.path.join(env.path, 'wiki-macros', f)
if not os.path.isfile(dst):
shutil.copy2(src, dst)
| import os.path
import shutil
sql = """
-- Remove empty values from the milestone list
DELETE FROM milestone WHERE COALESCE(name,'')='';
-- Add a description column to the version table, and remove unnamed versions
CREATE TEMP TABLE version_old AS SELECT * FROM version;
DROP TABLE version;
CREATE TABLE version (
name text PRIMARY KEY,
time integer,
description text
);
INSERT INTO version(name,time,description)
SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>'';
-- Add a description column to the component table, and remove unnamed components
CREATE TEMP TABLE component_old AS SELECT * FROM component;
DROP TABLE component;
CREATE TABLE component (
name text PRIMARY KEY,
owner text,
description text
);
INSERT INTO component(name,owner,description)
SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>'';
"""
def do_upgrade(env, ver, cursor):
cursor.execute(sql)
# Copy the new default wiki macros over to the environment
from trac.siteconfig import __default_macros_dir__ as macros_dir
for f in os.listdir(macros_dir):
if not f.endswith('.py'):
continue
src = os.path.join(macros_dir, f)
dst = os.path.join(env.path, 'wiki-macros', f)
if not os.path.isfile(dst):
shutil.copy2(src, dst)
| Fix typo in upgrade script | Fix typo in upgrade script
git-svn-id: 0d96b0c1a6983ccc08b3732614f4d6bfcf9cbb42@1647 af82e41b-90c4-0310-8c96-b1721e28e2e2
| Python | bsd-3-clause | rbaumg/trac,rbaumg/trac,rbaumg/trac,rbaumg/trac | import os.path
import shutil
sql = """
-- Remove empty values from the milestone list
DELETE FROM milestone WHERE COALESCE(name,'')='';
-- Add a description column to the version table, and remove unnamed versions
CREATE TEMP TABLE version_old AS SELECT * FROM version;
DROP TABLE version;
CREATE TABLE version (
name text PRIMARY KEY,
time integer,
description text
);
INSERT INTO version(name,time,description)
SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>'';
-- Add a description column to the component table, and remove unnamed components
CREATE TEMP TABLE component_old AS SELECT * FROM component;
DROP TABLE component;
CREATE TABLE component (
name text PRIMARY KEY,
owner text,
description text
);
INSERT INTO component(name,owner,description)
SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>'';
"""
def do_upgrade(env, ver, cursor):
cursor.execute(sql)
# Copy the new default wiki macros over to the environment
from trac.siteconfig import __default_macros_dir__ as macros_dir
for f in os.listdir(macros_dir):
if not f.endswith('.py'):
continue
src = os.path.join(macros_dir, f)
dst = os.path.join(env.path, 'wiki-macros', f)
if not os.path.isfile(dst):
shutil.copy2(src, dst)
| Fix typo in upgrade script
git-svn-id: 0d96b0c1a6983ccc08b3732614f4d6bfcf9cbb42@1647 af82e41b-90c4-0310-8c96-b1721e28e2e2
import os.path
import shutil
sql = """
-- Remove empty values from the milestone list
DELETE FROM milestone WHERE COALESCE(name,'')='';
-- Add a description column to the version table, and remove unnamed versions
CREATE TEMP TABLE version_old AS SELECT * FROM version;
DROP TABLE version;
CREATE TABLE version (
name text PRIMARY KEY,
time integer,
description text
);
INSERT INTO version(name,time,description)
SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>'';
-- Add a description column to the component table, and remove unnamed components
CREATE TEMP TABLE component_old AS SELECT * FROM component;
DROP TABLE component;
CREATE TABLE component (
name text PRIMARY KEY,
owner text,
description text
);
INSERT INTO component(name,owner,description)
SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>'';
"""
def do_upgrade(env, ver, cursor):
cursor.execute(sql)
# Copy the new default wiki macros over to the environment
from trac.siteconfig import __default_macro_dir__ as macro_dir
for f in os.listdir(macro_dir):
if not f.endswith('.py'):
continue
src = os.path.join(macro_dir, f)
dst = os.path.join(env.path, 'wiki-macros', f)
if not os.path.isfile(dst):
shutil.copy2(src, dst)
|
6037d11a8da5ea15c8de468dd730670ba10a44c6 | setup.py | setup.py | try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import toml
with open("README.rst") as readme_file:
readme_string = readme_file.read()
setup(
name="toml",
version=toml.__version__,
description="Python Library for Tom's Obvious, Minimal Language",
author="Uiri Noyb",
author_email="uiri@xqz.ca",
url="https://github.com/uiri/toml",
packages=['toml'],
license="License :: OSI Approved :: MIT License",
long_description=readme_string,
classifiers=[
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6']
)
| try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import toml
with open("README.rst") as readme_file:
readme_string = readme_file.read()
setup(
name="toml",
version=toml.__version__,
description="Python Library for Tom's Obvious, Minimal Language",
author="Uiri Noyb",
author_email="uiri@xqz.ca",
url="https://github.com/uiri/toml",
packages=['toml'],
license="MIT",
long_description=readme_string,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6']
)
| Add trove classifier for license | Add trove classifier for license
The trove classifiers are listed on PyPI to help users know -- at a
glance -- what license the project uses. Helps users decide if the
library is appropriate for integration. A full list of available trove
classifiers can be found at:
https://pypi.org/pypi?%3Aaction=list_classifiers
The setuptools "license" argument is not intended to use trove
classifier notation. Simplify it to "MIT". Details can be found:
https://docs.python.org/3/distutils/setupscript.html#additional-meta-data
| Python | mit | uiri/toml,uiri/toml | try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import toml
with open("README.rst") as readme_file:
readme_string = readme_file.read()
setup(
name="toml",
version=toml.__version__,
description="Python Library for Tom's Obvious, Minimal Language",
author="Uiri Noyb",
author_email="uiri@xqz.ca",
url="https://github.com/uiri/toml",
packages=['toml'],
license="MIT",
long_description=readme_string,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6']
)
| Add trove classifier for license
The trove classifiers are listed on PyPI to help users know -- at a
glance -- what license the project uses. Helps users decide if the
library is appropriate for integration. A full list of available trove
classifiers can be found at:
https://pypi.org/pypi?%3Aaction=list_classifiers
The setuptools "license" argument is not intended to use trove
classifier notation. Simplify it to "MIT". Details can be found:
https://docs.python.org/3/distutils/setupscript.html#additional-meta-data
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import toml
with open("README.rst") as readme_file:
readme_string = readme_file.read()
setup(
name="toml",
version=toml.__version__,
description="Python Library for Tom's Obvious, Minimal Language",
author="Uiri Noyb",
author_email="uiri@xqz.ca",
url="https://github.com/uiri/toml",
packages=['toml'],
license="License :: OSI Approved :: MIT License",
long_description=readme_string,
classifiers=[
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6']
)
|
1619c955c75f91b9d61c3195704f17fc88ef9e04 | aybu/manager/utils/pshell.py | aybu/manager/utils/pshell.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Copyright 2010 Asidev s.r.l.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from sqlalchemy import engine_from_config
import aybu.manager.models
def setup(env):
settings = env['request'].registry.settings
env['models'] = aybu.manager.models
env['engine'] = engine_from_config(settings, 'sqlalchemy.')
env['request'].set_db_engine = env['engine']
aybu.core.models.Base.metadata.bind = env['engine']
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Copyright 2010 Asidev s.r.l.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from sqlalchemy import engine_from_config
import aybu.manager.models
def setup(env):
settings = env['request'].registry.settings
env['models'] = aybu.manager.models
env['engine'] = engine_from_config(settings, 'sqlalchemy.')
env['request'].set_db_engine = env['engine']
aybu.manager.models.Base.metadata.bind = env['engine']
aybu.manager.models.Environment.initialize(settings)
env['session'] = env['request'].db_session
| Initialize session and environment in shell | Initialize session and environment in shell
| Python | apache-2.0 | asidev/aybu-manager | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Copyright 2010 Asidev s.r.l.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from sqlalchemy import engine_from_config
import aybu.manager.models
def setup(env):
settings = env['request'].registry.settings
env['models'] = aybu.manager.models
env['engine'] = engine_from_config(settings, 'sqlalchemy.')
env['request'].set_db_engine = env['engine']
aybu.manager.models.Base.metadata.bind = env['engine']
aybu.manager.models.Environment.initialize(settings)
env['session'] = env['request'].db_session
| Initialize session and environment in shell
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Copyright 2010 Asidev s.r.l.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from sqlalchemy import engine_from_config
import aybu.manager.models
def setup(env):
settings = env['request'].registry.settings
env['models'] = aybu.manager.models
env['engine'] = engine_from_config(settings, 'sqlalchemy.')
env['request'].set_db_engine = env['engine']
aybu.core.models.Base.metadata.bind = env['engine']
|
a25e6fb5f9e63ffa30a6c655a6775eead4206bcb | setup.py | setup.py | from distutils.core import setup
import os, glob, string, shutil
# Packages
packages = ['neuroimaging', 'neuroimaging.statistics', 'neuroimaging.image', 'neuroimaging.reference', 'neuroimaging.data', 'neuroimaging.image.formats', 'neuroimaging.image.formats.analyze', 'neuroimaging.fmri', 'neuroimaging.fmri.fmristat', 'neuroimaging.visualization', 'neuroimaging.visualization.cmap']
def main():
setup (name = 'neuroimaging',
version = '0.01a',
description = 'This is a neuroimaging python package',
author = 'Various, one of whom is Jonathan Taylor',
author_email = 'jonathan.taylor@stanford.edu',
ext_package = 'neuroimaging',
packages=packages,
package_dir = {'neuroimaging': 'lib'},
url = 'http://neuroimaging.scipy.org',
long_description =
'''
''')
if __name__ == "__main__":
main()
| import os, glob, string, shutil
from distutils.core import setup
# Packages
packages = ['neuroimaging', 'neuroimaging.statistics', 'neuroimaging.image', 'neuroimaging.reference', 'neuroimaging.data', 'neuroimaging.image.formats', 'neuroimaging.image.formats.analyze', 'neuroimaging.fmri', 'neuroimaging.fmri.fmristat', 'neuroimaging.visualization', 'neuroimaging.visualization.cmap']
def main():
setup (name = 'neuroimaging',
version = '0.01a',
description = 'This is a neuroimaging python package',
author = 'Various, one of whom is Jonathan Taylor',
author_email = 'jonathan.taylor@stanford.edu',
ext_package = 'neuroimaging',
packages=packages,
package_dir = {'neuroimaging': 'lib'},
url = 'http://neuroimaging.scipy.org',
long_description =
'''
''')
if __name__ == "__main__":
main()
| Test edit - to check svn email hook | Test edit - to check svn email hook | Python | bsd-3-clause | gef756/statsmodels,kiyoto/statsmodels,hainm/statsmodels,wdurhamh/statsmodels,detrout/debian-statsmodels,kiyoto/statsmodels,cbmoore/statsmodels,edhuckle/statsmodels,alekz112/statsmodels,hainm/statsmodels,bsipocz/statsmodels,phobson/statsmodels,huongttlan/statsmodels,ChadFulton/statsmodels,wkfwkf/statsmodels,josef-pkt/statsmodels,nguyentu1602/statsmodels,ChadFulton/statsmodels,bashtage/statsmodels,statsmodels/statsmodels,yl565/statsmodels,nguyentu1602/statsmodels,waynenilsen/statsmodels,bashtage/statsmodels,nguyentu1602/statsmodels,DonBeo/statsmodels,detrout/debian-statsmodels,bsipocz/statsmodels,bsipocz/statsmodels,bavardage/statsmodels,musically-ut/statsmodels,pprett/statsmodels,yarikoptic/pystatsmodels,wzbozon/statsmodels,cbmoore/statsmodels,YihaoLu/statsmodels,bert9bert/statsmodels,saketkc/statsmodels,astocko/statsmodels,bert9bert/statsmodels,wwf5067/statsmodels,astocko/statsmodels,pprett/statsmodels,bavardage/statsmodels,nvoron23/statsmodels,ChadFulton/statsmodels,jseabold/statsmodels,jstoxrocky/statsmodels,ChadFulton/statsmodels,wesm/statsmodels,wdurhamh/statsmodels,waynenilsen/statsmodels,wkfwkf/statsmodels,Averroes/statsmodels,bsipocz/statsmodels,jseabold/statsmodels,wwf5067/statsmodels,kiyoto/statsmodels,wzbozon/statsmodels,jstoxrocky/statsmodels,huongttlan/statsmodels,wesm/statsmodels,wkfwkf/statsmodels,bzero/statsmodels,wdurhamh/statsmodels,adammenges/statsmodels,bashtage/statsmodels,rgommers/statsmodels,nvoron23/statsmodels,DonBeo/statsmodels,bert9bert/statsmodels,bzero/statsmodels,saketkc/statsmodels,wkfwkf/statsmodels,astocko/statsmodels,alekz112/statsmodels,wdurhamh/statsmodels,kiyoto/statsmodels,rgommers/statsmodels,saketkc/statsmodels,detrout/debian-statsmodels,wwf5067/statsmodels,jstoxrocky/statsmodels,YihaoLu/statsmodels,yarikoptic/pystatsmodels,pprett/statsmodels,nvoron23/statsmodels,statsmodels/statsmodels,yl565/statsmodels,kiyoto/statsmodels,hain
m/statsmodels,bzero/statsmodels,ChadFulton/statsmodels,statsmodels/statsmodels,edhuckle/statsmodels,wzbozon/statsmodels,josef-pkt/statsmodels,musically-ut/statsmodels,bashtage/statsmodels,musically-ut/statsmodels,hlin117/statsmodels,saketkc/statsmodels,wkfwkf/statsmodels,gef756/statsmodels,YihaoLu/statsmodels,adammenges/statsmodels,statsmodels/statsmodels,josef-pkt/statsmodels,bzero/statsmodels,cbmoore/statsmodels,pprett/statsmodels,hlin117/statsmodels,bavardage/statsmodels,huongttlan/statsmodels,wwf5067/statsmodels,phobson/statsmodels,alekz112/statsmodels,hlin117/statsmodels,Averroes/statsmodels,wzbozon/statsmodels,statsmodels/statsmodels,astocko/statsmodels,nguyentu1602/statsmodels,adammenges/statsmodels,jseabold/statsmodels,edhuckle/statsmodels,YihaoLu/statsmodels,alekz112/statsmodels,hainm/statsmodels,bashtage/statsmodels,bzero/statsmodels,yl565/statsmodels,phobson/statsmodels,DonBeo/statsmodels,wesm/statsmodels,adammenges/statsmodels,phobson/statsmodels,yarikoptic/pystatsmodels,yl565/statsmodels,josef-pkt/statsmodels,YihaoLu/statsmodels,bert9bert/statsmodels,nvoron23/statsmodels,waynenilsen/statsmodels,gef756/statsmodels,statsmodels/statsmodels,bert9bert/statsmodels,saketkc/statsmodels,gef756/statsmodels,DonBeo/statsmodels,Averroes/statsmodels,josef-pkt/statsmodels,yl565/statsmodels,rgommers/statsmodels,gef756/statsmodels,cbmoore/statsmodels,edhuckle/statsmodels,wzbozon/statsmodels,jseabold/statsmodels,bashtage/statsmodels,ChadFulton/statsmodels,jseabold/statsmodels,rgommers/statsmodels,wdurhamh/statsmodels,waynenilsen/statsmodels,detrout/debian-statsmodels,huongttlan/statsmodels,cbmoore/statsmodels,edhuckle/statsmodels,bavardage/statsmodels,bavardage/statsmodels,josef-pkt/statsmodels,jstoxrocky/statsmodels,musically-ut/statsmodels,Averroes/statsmodels,DonBeo/statsmodels,rgommers/statsmodels,phobson/statsmodels,hlin117/statsmodels,nvoron23/statsmodels | import os, glob, string, shutil
from distutils.core import setup
# Packages
packages = ['neuroimaging', 'neuroimaging.statistics', 'neuroimaging.image', 'neuroimaging.reference', 'neuroimaging.data', 'neuroimaging.image.formats', 'neuroimaging.image.formats.analyze', 'neuroimaging.fmri', 'neuroimaging.fmri.fmristat', 'neuroimaging.visualization', 'neuroimaging.visualization.cmap']
def main():
setup (name = 'neuroimaging',
version = '0.01a',
description = 'This is a neuroimaging python package',
author = 'Various, one of whom is Jonathan Taylor',
author_email = 'jonathan.taylor@stanford.edu',
ext_package = 'neuroimaging',
packages=packages,
package_dir = {'neuroimaging': 'lib'},
url = 'http://neuroimaging.scipy.org',
long_description =
'''
''')
if __name__ == "__main__":
main()
| Test edit - to check svn email hook
from distutils.core import setup
import os, glob, string, shutil
# Packages
packages = ['neuroimaging', 'neuroimaging.statistics', 'neuroimaging.image', 'neuroimaging.reference', 'neuroimaging.data', 'neuroimaging.image.formats', 'neuroimaging.image.formats.analyze', 'neuroimaging.fmri', 'neuroimaging.fmri.fmristat', 'neuroimaging.visualization', 'neuroimaging.visualization.cmap']
def main():
setup (name = 'neuroimaging',
version = '0.01a',
description = 'This is a neuroimaging python package',
author = 'Various, one of whom is Jonathan Taylor',
author_email = 'jonathan.taylor@stanford.edu',
ext_package = 'neuroimaging',
packages=packages,
package_dir = {'neuroimaging': 'lib'},
url = 'http://neuroimaging.scipy.org',
long_description =
'''
''')
if __name__ == "__main__":
main()
|
47dedd31b9ee0f768ca3f9f781133458ddc99f4f | setup.py | setup.py | from setuptools import setup
name = 'turbasen'
VERSION = '2.5.0'
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
long_description='Documentation: https://turbasenpy.readthedocs.io/',
author='Ali Kaafarani',
author_email='ali.kaafarani@dnt.no',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=['requests>=2.10.0,<3'],
extras_require={
'dev': ['ipython', 'flake8'],
}
)
| from setuptools import setup
name = 'turbasen'
VERSION = '2.5.0'
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
long_description='Documentation: https://turbasenpy.readthedocs.io/',
author='Ali Kaafarani',
author_email='ali.kaafarani@dnt.no',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=['requests>=2.10.0,<3'],
extras_require={
'dev': ['sphinx', 'ipython', 'flake8'],
}
)
| Add sphinx to dev requirements | Add sphinx to dev requirements
| Python | mit | Turbasen/turbasen.py | from setuptools import setup
name = 'turbasen'
VERSION = '2.5.0'
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
long_description='Documentation: https://turbasenpy.readthedocs.io/',
author='Ali Kaafarani',
author_email='ali.kaafarani@dnt.no',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=['requests>=2.10.0,<3'],
extras_require={
'dev': ['sphinx', 'ipython', 'flake8'],
}
)
| Add sphinx to dev requirements
from setuptools import setup
name = 'turbasen'
VERSION = '2.5.0'
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
long_description='Documentation: https://turbasenpy.readthedocs.io/',
author='Ali Kaafarani',
author_email='ali.kaafarani@dnt.no',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=['requests>=2.10.0,<3'],
extras_require={
'dev': ['ipython', 'flake8'],
}
)
|
01d3027e568bcd191e7e25337c6597eb75b82789 | setup.py | setup.py | #!/usr/bin/env python3
from setuptools import setup
setup(
name='todoman',
description='A simple CalDav-based todo manager.',
author='Hugo Osvaldo Barrera',
author_email='hugo@barrera.io',
url='https://github.com/pimutils/todoman',
license='MIT',
packages=['todoman'],
entry_points={
'console_scripts': [
'todo = todoman.cli:cli',
]
},
install_requires=[
open('requirements.txt').readlines()
],
long_description=open('README.rst').read(),
use_scm_version={
'version_scheme': 'post-release',
'write_to': 'todoman/version.py',
},
setup_requires=['setuptools_scm != 1.12.0', 'pytest-runner'],
tests_require=['pytest'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Console :: Curses',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Topic :: Office/Business :: Scheduling',
'Topic :: Utilities',
]
)
| #!/usr/bin/env python3
from setuptools import setup
setup(
name='todoman',
description='A simple CalDav-based todo manager.',
author='Hugo Osvaldo Barrera',
author_email='hugo@barrera.io',
url='https://github.com/pimutils/todoman',
license='MIT',
packages=['todoman'],
entry_points={
'console_scripts': [
'todo = todoman.cli:cli',
]
},
install_requires=[
open('requirements.txt').readlines()
],
long_description=open('README.rst').read(),
use_scm_version={
'version_scheme': 'post-release',
'write_to': 'todoman/version.py',
},
setup_requires=['setuptools_scm != 1.12.0', 'pytest-runner'],
tests_require=['pytest'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Console :: Curses',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Office/Business :: Scheduling',
'Topic :: Utilities',
]
)
| Add classifiers for supported python versions | Add classifiers for supported python versions
| Python | isc | Sakshisaraswat/todoman,AnubhaAgrawal/todoman,hobarrera/todoman,pimutils/todoman,asalminen/todoman,rimshaakhan/todoman | #!/usr/bin/env python3
from setuptools import setup
setup(
name='todoman',
description='A simple CalDav-based todo manager.',
author='Hugo Osvaldo Barrera',
author_email='hugo@barrera.io',
url='https://github.com/pimutils/todoman',
license='MIT',
packages=['todoman'],
entry_points={
'console_scripts': [
'todo = todoman.cli:cli',
]
},
install_requires=[
open('requirements.txt').readlines()
],
long_description=open('README.rst').read(),
use_scm_version={
'version_scheme': 'post-release',
'write_to': 'todoman/version.py',
},
setup_requires=['setuptools_scm != 1.12.0', 'pytest-runner'],
tests_require=['pytest'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Console :: Curses',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Office/Business :: Scheduling',
'Topic :: Utilities',
]
)
| Add classifiers for supported python versions
#!/usr/bin/env python3
from setuptools import setup
setup(
name='todoman',
description='A simple CalDav-based todo manager.',
author='Hugo Osvaldo Barrera',
author_email='hugo@barrera.io',
url='https://github.com/pimutils/todoman',
license='MIT',
packages=['todoman'],
entry_points={
'console_scripts': [
'todo = todoman.cli:cli',
]
},
install_requires=[
open('requirements.txt').readlines()
],
long_description=open('README.rst').read(),
use_scm_version={
'version_scheme': 'post-release',
'write_to': 'todoman/version.py',
},
setup_requires=['setuptools_scm != 1.12.0', 'pytest-runner'],
tests_require=['pytest'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Console :: Curses',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Topic :: Office/Business :: Scheduling',
'Topic :: Utilities',
]
)
|
8147dab8fffb8d9d9753009f43b27afc1729febc | setup.py | setup.py | from setuptools import setup, find_packages
import os
setup(
name="cpgintegrate",
version="0.2.17-SNAPSHOT",
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.18.4',
'pandas>=0.23.0',
'xlrd',
'sqlalchemy>=1.0',
'beautifulsoup4',
'lxml<4.0',
'numpy',
'scipy',
],
extras_require={'dev': [
'pytest>=3.2.2',
'apache-airflow>=1.10.0',
],
'win_auto': [
'pywinauto',
'patool',
],
},
data_files=[
(os.path.join(os.environ.get('AIRFLOW_HOME', 'airflow'), 'plugins'),
['cpgintegrate/airflow/cpg_airflow_plugin.py'])
],
)
| from setuptools import setup, find_packages
import os
setup(
name="cpgintegrate",
version="0.2.17",
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.18.4',
'pandas>=0.23.0',
'xlrd',
'sqlalchemy>=1.0',
'beautifulsoup4',
'lxml<5.0',
'numpy',
'scipy',
],
extras_require={'dev': [
'pytest>=3.2.2',
'apache-airflow>=1.10.0',
],
'win_auto': [
'pywinauto',
'patool',
],
},
data_files=[
(os.path.join(os.environ.get('AIRFLOW_HOME', 'airflow'), 'plugins'),
['cpgintegrate/airflow/cpg_airflow_plugin.py'])
],
)
| Bump version, allow newer lxml | Bump version, allow newer lxml
| Python | agpl-3.0 | PointyShinyBurning/cpgintegrate | from setuptools import setup, find_packages
import os
setup(
name="cpgintegrate",
version="0.2.17",
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.18.4',
'pandas>=0.23.0',
'xlrd',
'sqlalchemy>=1.0',
'beautifulsoup4',
'lxml<5.0',
'numpy',
'scipy',
],
extras_require={'dev': [
'pytest>=3.2.2',
'apache-airflow>=1.10.0',
],
'win_auto': [
'pywinauto',
'patool',
],
},
data_files=[
(os.path.join(os.environ.get('AIRFLOW_HOME', 'airflow'), 'plugins'),
['cpgintegrate/airflow/cpg_airflow_plugin.py'])
],
)
| Bump version, allow newer lxml
from setuptools import setup, find_packages
import os
setup(
name="cpgintegrate",
version="0.2.17-SNAPSHOT",
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.18.4',
'pandas>=0.23.0',
'xlrd',
'sqlalchemy>=1.0',
'beautifulsoup4',
'lxml<4.0',
'numpy',
'scipy',
],
extras_require={'dev': [
'pytest>=3.2.2',
'apache-airflow>=1.10.0',
],
'win_auto': [
'pywinauto',
'patool',
],
},
data_files=[
(os.path.join(os.environ.get('AIRFLOW_HOME', 'airflow'), 'plugins'),
['cpgintegrate/airflow/cpg_airflow_plugin.py'])
],
)
|
ab63395c1d8c9ec6bce13811965c8335463b0b78 | setup.py | setup.py | from distutils.core import setup, Extension
setup(name = "Indexer", version = "0.1", ext_modules = [Extension("rabin", ["src/rabin.c", ])])
| from distutils.core import setup, Extension
import os
os.environ['CFLAGS'] = "-Qunused-arguments"
setup(name = "Indexer", version = "0.1", ext_modules = [Extension("rabin", ["src/rabin.c", ])])
| Fix compile error on OS X 10.9 | Fix compile error on OS X 10.9
| Python | apache-2.0 | pombredanne/python-rabin-fingerprint,pombredanne/python-rabin-fingerprint,cschwede/python-rabin-fingerprint,cschwede/python-rabin-fingerprint | from distutils.core import setup, Extension
import os
os.environ['CFLAGS'] = "-Qunused-arguments"
setup(name = "Indexer", version = "0.1", ext_modules = [Extension("rabin", ["src/rabin.c", ])])
| Fix compile error on OS X 10.9
from distutils.core import setup, Extension
setup(name = "Indexer", version = "0.1", ext_modules = [Extension("rabin", ["src/rabin.c", ])])
|
638b8be8a07262803c087e796e40a51858c08983 | __init__.py | __init__.py | from . import LayerView
def getMetaData():
return { "name": "LayerView", "type": "View" }
def register(app):
return LayerView.LayerView()
| from . import LayerView
def getMetaData():
return {
'type': 'view',
'plugin': {
"name": "Layer View"
},
'view': {
'name': 'Layers'
}
}
def register(app):
return LayerView.LayerView()
| Update plugin metadata to the new format | Update plugin metadata to the new format
| Python | agpl-3.0 | totalretribution/Cura,markwal/Cura,quillford/Cura,DeskboxBrazil/Cura,lo0ol/Ultimaker-Cura,senttech/Cura,bq/Ultimaker-Cura,ad1217/Cura,fieldOfView/Cura,fieldOfView/Cura,DeskboxBrazil/Cura,Curahelper/Cura,Curahelper/Cura,hmflash/Cura,bq/Ultimaker-Cura,hmflash/Cura,markwal/Cura,quillford/Cura,derekhe/Cura,totalretribution/Cura,lo0ol/Ultimaker-Cura,ynotstartups/Wanhao,fxtentacle/Cura,fxtentacle/Cura,senttech/Cura,ynotstartups/Wanhao,derekhe/Cura,ad1217/Cura | from . import LayerView
def getMetaData():
return {
'type': 'view',
'plugin': {
"name": "Layer View"
},
'view': {
'name': 'Layers'
}
}
def register(app):
return LayerView.LayerView()
| Update plugin metadata to the new format
from . import LayerView
def getMetaData():
return { "name": "LayerView", "type": "View" }
def register(app):
return LayerView.LayerView()
|
ca6891f3b867fd691c0b682566ffec1fd7f0ac2a | pryvate/blueprints/simple/simple.py | pryvate/blueprints/simple/simple.py | """Simple blueprint."""
import os
from flask import Blueprint, current_app, make_response, render_template
blueprint = Blueprint('simple', __name__, url_prefix='/simple',
template_folder='templates')
@blueprint.route('', methods=['POST'])
def search_simple():
"""Handling pip search."""
return make_response('Not implemented', 501)
@blueprint.route('', methods=['GET'])
def get_simple():
"""List all packages."""
packages = os.listdir(current_app.config['BASEDIR'])
return render_template('simple.html', packages=packages)
@blueprint.route('/<package>', methods=['GET'])
@blueprint.route('/<package>/', methods=['GET'])
def get_package(package):
"""List versions of a package."""
package_path = os.path.join(current_app.config['BASEDIR'],
package.lower())
files = os.listdir(package_path)
packages = []
for filename in files:
if filename.endswith('md5'):
with open(os.path.join(package_path, filename), 'r') as md5_digest:
item = {
'name': package,
'version': filename.replace('.md5', ''),
'digest': md5_digest.read()
}
packages.append(item)
return render_template('simple_package.html', packages=packages,
letter=package[:1].lower())
| """Simple blueprint."""
import os
from flask import Blueprint, current_app, make_response, render_template
blueprint = Blueprint('simple', __name__, url_prefix='/simple',
template_folder='templates')
@blueprint.route('', methods=['POST'])
def search_simple():
"""Handling pip search."""
return make_response('Not implemented', 501)
@blueprint.route('', methods=['GET'])
def get_simple():
"""List all packages."""
packages = os.listdir(current_app.config['BASEDIR'])
return render_template('simple.html', packages=packages)
@blueprint.route('/<package>', methods=['GET'])
@blueprint.route('/<package>/', methods=['GET'])
def get_package(package):
"""List versions of a package."""
package_path = os.path.join(current_app.config['BASEDIR'],
package.lower())
if os.path.isdir(package_path):
files = os.listdir(package_path)
packages = []
for filename in files:
if filename.endswith('md5'):
digest_file = os.path.join(package_path, filename)
with open(digest_file, 'r') as md5_digest:
item = {
'name': package,
'version': filename.replace('.md5', ''),
'digest': md5_digest.read()
}
packages.append(item)
return render_template('simple_package.html', packages=packages,
letter=package[:1].lower())
else:
return make_response('404', 404)
| Return 404 if package was not found instead of raising an exception | Return 404 if package was not found instead of raising an exception
| Python | mit | Dinoshauer/pryvate,Dinoshauer/pryvate | """Simple blueprint."""
import os
from flask import Blueprint, current_app, make_response, render_template
blueprint = Blueprint('simple', __name__, url_prefix='/simple',
template_folder='templates')
@blueprint.route('', methods=['POST'])
def search_simple():
"""Handling pip search."""
return make_response('Not implemented', 501)
@blueprint.route('', methods=['GET'])
def get_simple():
"""List all packages."""
packages = os.listdir(current_app.config['BASEDIR'])
return render_template('simple.html', packages=packages)
@blueprint.route('/<package>', methods=['GET'])
@blueprint.route('/<package>/', methods=['GET'])
def get_package(package):
"""List versions of a package."""
package_path = os.path.join(current_app.config['BASEDIR'],
package.lower())
if os.path.isdir(package_path):
files = os.listdir(package_path)
packages = []
for filename in files:
if filename.endswith('md5'):
digest_file = os.path.join(package_path, filename)
with open(digest_file, 'r') as md5_digest:
item = {
'name': package,
'version': filename.replace('.md5', ''),
'digest': md5_digest.read()
}
packages.append(item)
return render_template('simple_package.html', packages=packages,
letter=package[:1].lower())
else:
return make_response('404', 404)
| Return 404 if package was not found instead of raising an exception
"""Simple blueprint."""
import os
from flask import Blueprint, current_app, make_response, render_template
blueprint = Blueprint('simple', __name__, url_prefix='/simple',
template_folder='templates')
@blueprint.route('', methods=['POST'])
def search_simple():
"""Handling pip search."""
return make_response('Not implemented', 501)
@blueprint.route('', methods=['GET'])
def get_simple():
"""List all packages."""
packages = os.listdir(current_app.config['BASEDIR'])
return render_template('simple.html', packages=packages)
@blueprint.route('/<package>', methods=['GET'])
@blueprint.route('/<package>/', methods=['GET'])
def get_package(package):
"""List versions of a package."""
package_path = os.path.join(current_app.config['BASEDIR'],
package.lower())
files = os.listdir(package_path)
packages = []
for filename in files:
if filename.endswith('md5'):
with open(os.path.join(package_path, filename), 'r') as md5_digest:
item = {
'name': package,
'version': filename.replace('.md5', ''),
'digest': md5_digest.read()
}
packages.append(item)
return render_template('simple_package.html', packages=packages,
letter=package[:1].lower())
|
995f06a33bf92dcff185a50f84743323170a8b7a | setup.py | setup.py | from setuptools import setup, find_packages
long_description = (
open('README.rst').read()
+ '\n' +
open('CHANGES.txt').read())
tests_require = [
'pytest >= 2.0',
'pytest-cov',
'WebTest >= 2.0.14',
'mock',
]
setup(
name='bowerstatic',
version='0.10.dev0',
description="A Bower-centric static file server for WSGI",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
license="BSD",
url='http://bowerstatic.readthedocs.org',
keywords='wsgi bower',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'WebOb',
],
tests_require=tests_require,
extras_require=dict(
test=tests_require,
)
)
| import io
from setuptools import setup, find_packages
long_description = '\n'.join((
io.open('README.rst', encoding='utf-8').read(),
io.open('CHANGES.txt', encoding='utf-8').read()
))
tests_require = [
'pytest >= 2.0',
'pytest-cov',
'WebTest >= 2.0.14',
'mock',
]
setup(
name='bowerstatic',
version='0.10.dev0',
description="A Bower-centric static file server for WSGI",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
license="BSD",
url='http://bowerstatic.readthedocs.org',
keywords='wsgi bower',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'WebOb',
],
tests_require=tests_require,
extras_require=dict(
test=tests_require,
)
)
| Use io.open with encoding='utf-8' and flake8 compliance | Use io.open with encoding='utf-8' and flake8 compliance
| Python | bsd-3-clause | faassen/bowerstatic,faassen/bowerstatic | import io
from setuptools import setup, find_packages
long_description = '\n'.join((
io.open('README.rst', encoding='utf-8').read(),
io.open('CHANGES.txt', encoding='utf-8').read()
))
tests_require = [
'pytest >= 2.0',
'pytest-cov',
'WebTest >= 2.0.14',
'mock',
]
setup(
name='bowerstatic',
version='0.10.dev0',
description="A Bower-centric static file server for WSGI",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
license="BSD",
url='http://bowerstatic.readthedocs.org',
keywords='wsgi bower',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'WebOb',
],
tests_require=tests_require,
extras_require=dict(
test=tests_require,
)
)
| Use io.open with encoding='utf-8' and flake8 compliance
from setuptools import setup, find_packages
long_description = (
open('README.rst').read()
+ '\n' +
open('CHANGES.txt').read())
tests_require = [
'pytest >= 2.0',
'pytest-cov',
'WebTest >= 2.0.14',
'mock',
]
setup(
name='bowerstatic',
version='0.10.dev0',
description="A Bower-centric static file server for WSGI",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
license="BSD",
url='http://bowerstatic.readthedocs.org',
keywords='wsgi bower',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'WebOb',
],
tests_require=tests_require,
extras_require=dict(
test=tests_require,
)
)
|
3520217e38849ad18b11245c6cac51d79db8422d | pytablereader/loadermanager/_base.py | pytablereader/loadermanager/_base.py | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import
from ..interface import TableLoaderInterface
class TableLoaderManager(TableLoaderInterface):
def __init__(self, loader):
self.__loader = loader
@property
def loader(self):
return self.__loader
@property
def format_name(self):
return self.__loader.format_name
@property
def source_type(self):
return self.__loader.source_type
@property
def encoding(self):
try:
return self.__loader.encoding
except AttributeError:
return None
@encoding.setter
def encoding(self, codec_name):
self.__loader.encoding = codec_name
def load(self):
return self.__loader.load()
def inc_table_count(self):
self.__loader.inc_table_count()
| # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import
from ..interface import TableLoaderInterface
class TableLoaderManager(TableLoaderInterface):
def __init__(self, loader):
self.__loader = loader
@property
def loader(self):
return self.__loader
@property
def format_name(self):
return self.__loader.format_name
@property
def source_type(self):
return self.__loader.source_type
@property
def table_name(self):
return self.__loader.table_name
@table_name.setter
def table_name(self, value):
self.__loader.table_name = value
@property
def encoding(self):
try:
return self.__loader.encoding
except AttributeError:
return None
@encoding.setter
def encoding(self, codec_name):
self.__loader.encoding = codec_name
def load(self):
return self.__loader.load()
def inc_table_count(self):
self.__loader.inc_table_count()
| Add an interface to change table_name | Add an interface to change table_name
| Python | mit | thombashi/pytablereader,thombashi/pytablereader,thombashi/pytablereader | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import
from ..interface import TableLoaderInterface
class TableLoaderManager(TableLoaderInterface):
def __init__(self, loader):
self.__loader = loader
@property
def loader(self):
return self.__loader
@property
def format_name(self):
return self.__loader.format_name
@property
def source_type(self):
return self.__loader.source_type
@property
def table_name(self):
return self.__loader.table_name
@table_name.setter
def table_name(self, value):
self.__loader.table_name = value
@property
def encoding(self):
try:
return self.__loader.encoding
except AttributeError:
return None
@encoding.setter
def encoding(self, codec_name):
self.__loader.encoding = codec_name
def load(self):
return self.__loader.load()
def inc_table_count(self):
self.__loader.inc_table_count()
| Add an interface to change table_name
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import
from ..interface import TableLoaderInterface
class TableLoaderManager(TableLoaderInterface):
def __init__(self, loader):
self.__loader = loader
@property
def loader(self):
return self.__loader
@property
def format_name(self):
return self.__loader.format_name
@property
def source_type(self):
return self.__loader.source_type
@property
def encoding(self):
try:
return self.__loader.encoding
except AttributeError:
return None
@encoding.setter
def encoding(self, codec_name):
self.__loader.encoding = codec_name
def load(self):
return self.__loader.load()
def inc_table_count(self):
self.__loader.inc_table_count()
|
72a573c24d5234003b9eeb9e0cc487d174908a2e | typeahead_search/trie.py | typeahead_search/trie.py | """A Trie (prefix tree) class for use in typeahead search.
Every node in the TypeaheadSearchTrie is another TypeaheadSearchTrie instance.
"""
from weakref import WeakSet
class TypeaheadSearchTrie(object):
def __init__(self):
# The children of this node. Because ordered traversals are not
# important, these are stored in a dictionary.
self.children = {}
# Data entries associated with the word stored in the path to
# this node. Stored in a WeakSet so that entries disappear
# automatically when data entries are deleted.
self.entries = WeakSet()
def add(self, word, entry):
"""Adds the given data entry to the given Trie word.
The word is created in the Trie if it doesn't already exist.
"""
if word:
self.children.setdefault(
word[0],
TypeaheadSearchTrie()
).add(word[1:], entry)
else:
self.entries.add(entry)
def search(self, word):
"""Return a set of all data entries represented by prefix `word`.
Returns an empty set if this prefix is not in the Trie.
"""
if word:
try:
return self.children[word[0]].search(word[1:])
except KeyError:
return set()
else:
return self.gather_entries()
def gather_entries(self):
"""Gather all data entries stored in this node and its children."""
return set(self.entries).update(
child.gather_entries() for child in self.children.itervalues()
)
| Add a Trie for storage of data string tokens. | [typeahead_search] Add a Trie for storage of data string tokens.
| Python | mit | geekofalltrades/quora-coding-challenges | """A Trie (prefix tree) class for use in typeahead search.
Every node in the TypeaheadSearchTrie is another TypeaheadSearchTrie instance.
"""
from weakref import WeakSet
class TypeaheadSearchTrie(object):
def __init__(self):
# The children of this node. Because ordered traversals are not
# important, these are stored in a dictionary.
self.children = {}
# Data entries associated with the word stored in the path to
# this node. Stored in a WeakSet so that entries disappear
# automatically when data entries are deleted.
self.entries = WeakSet()
def add(self, word, entry):
"""Adds the given data entry to the given Trie word.
The word is created in the Trie if it doesn't already exist.
"""
if word:
self.children.setdefault(
word[0],
TypeaheadSearchTrie()
).add(word[1:], entry)
else:
self.entries.add(entry)
def search(self, word):
"""Return a set of all data entries represented by prefix `word`.
Returns an empty set if this prefix is not in the Trie.
"""
if word:
try:
return self.children[word[0]].search(word[1:])
except KeyError:
return set()
else:
return self.gather_entries()
def gather_entries(self):
"""Gather all data entries stored in this node and its children."""
return set(self.entries).update(
child.gather_entries() for child in self.children.itervalues()
)
| [typeahead_search] Add a Trie for storage of data string tokens.
|
|
a36d3a621cde4a2d19bb0f1169ba707304c5caaf | setup.py | setup.py | try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='fabtools',
version='0.1',
description='Tools for writing awesome Fabric files',
author='Ronan Amicel',
author_email='ronan.amicel@gmail.com',
url='http://github.com/ronnix/fabtools',
install_requires=[
"fabric>=1.2.0",
],
setup_requires=[],
tests_require=[
"unittest2",
"mock",
],
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Software Distribution',
'Topic :: System :: Systems Administration',
],
)
| try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='fabtools',
version='0.1',
description='Tools for writing awesome Fabric files',
author='Ronan Amicel',
author_email='ronan.amicel@gmail.com',
url='http://github.com/ronnix/fabtools',
install_requires=[
"fabric>=1.2.0",
],
setup_requires=[],
tests_require=[
"unittest2",
"mock",
],
packages=find_packages(exclude=['ez_setup', 'tests']),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Software Distribution',
'Topic :: System :: Systems Administration',
],
)
| Exclude tests directory from install | Exclude tests directory from install
| Python | bsd-2-clause | AMOSoft/fabtools,n0n0x/fabtools-python,pombredanne/fabtools,fabtools/fabtools,ahnjungho/fabtools,bitmonk/fabtools,wagigi/fabtools-python,badele/fabtools,hagai26/fabtools,pahaz/fabtools,sociateru/fabtools,davidcaste/fabtools,prologic/fabtools,ronnix/fabtools | try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='fabtools',
version='0.1',
description='Tools for writing awesome Fabric files',
author='Ronan Amicel',
author_email='ronan.amicel@gmail.com',
url='http://github.com/ronnix/fabtools',
install_requires=[
"fabric>=1.2.0",
],
setup_requires=[],
tests_require=[
"unittest2",
"mock",
],
packages=find_packages(exclude=['ez_setup', 'tests']),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Software Distribution',
'Topic :: System :: Systems Administration',
],
)
| Exclude tests directory from install
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='fabtools',
version='0.1',
description='Tools for writing awesome Fabric files',
author='Ronan Amicel',
author_email='ronan.amicel@gmail.com',
url='http://github.com/ronnix/fabtools',
install_requires=[
"fabric>=1.2.0",
],
setup_requires=[],
tests_require=[
"unittest2",
"mock",
],
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Software Distribution',
'Topic :: System :: Systems Administration',
],
)
|
9037c6c67add92304b6cfdbfb3a79ac1b3e9e64e | test/checker/test_checker_binary.py | test/checker/test_checker_binary.py | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import unicode_literals
import itertools
import pytest
import six
from six import MAXSIZE
from typepy import Binary, StrictLevel, Typecode
nan = float("nan")
inf = float("inf")
class Test_Binary_is_type(object):
@pytest.mark.parametrize(
["value", "strict_level", "expected"],
list(itertools.product([], [StrictLevel.MIN, StrictLevel.MAX], [False]))
+ list(
itertools.product(
[six.b("abc"), "いろは".encode("utf_8")], [StrictLevel.MIN, StrictLevel.MAX], [True]
)
)
+ list(itertools.product([six.b(""), six.b(" "), six.b("\n")], [StrictLevel.MIN], [True]))
+ list(
itertools.product(["", " ", "\n", MAXSIZE, inf, nan, None], [StrictLevel.MAX], [False])
),
)
def test_normal(self, value, strict_level, expected):
type_checker = Binary(value, strict_level=strict_level)
assert type_checker.is_type() == expected
assert type_checker.typecode == Typecode.STRING
| # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import unicode_literals
import itertools
import pytest
from six import MAXSIZE
from typepy import Binary, StrictLevel, Typecode
nan = float("nan")
inf = float("inf")
class Test_Binary_is_type(object):
@pytest.mark.parametrize(
["value", "strict_level", "expected"],
list(itertools.product([], [StrictLevel.MIN, StrictLevel.MAX], [False]))
+ list(
itertools.product(
["abc".encode("utf_8"), "いろは".encode("utf_8")],
[StrictLevel.MIN, StrictLevel.MAX],
[True],
)
)
+ list(
itertools.product(
[" ".encode("utf_8"), "\n".encode("utf_8")], [StrictLevel.MIN], [True]
)
)
+ list(
itertools.product(["", " ", "\n", MAXSIZE, inf, nan, None], [StrictLevel.MAX], [False])
),
)
def test_normal(self, value, strict_level, expected):
type_checker = Binary(value, strict_level=strict_level)
assert type_checker.is_type() == expected
assert type_checker.typecode == Typecode.STRING
| Fix test cases for Python2 | Fix test cases for Python2
| Python | mit | thombashi/typepy | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import unicode_literals
import itertools
import pytest
from six import MAXSIZE
from typepy import Binary, StrictLevel, Typecode
nan = float("nan")
inf = float("inf")
class Test_Binary_is_type(object):
@pytest.mark.parametrize(
["value", "strict_level", "expected"],
list(itertools.product([], [StrictLevel.MIN, StrictLevel.MAX], [False]))
+ list(
itertools.product(
["abc".encode("utf_8"), "いろは".encode("utf_8")],
[StrictLevel.MIN, StrictLevel.MAX],
[True],
)
)
+ list(
itertools.product(
[" ".encode("utf_8"), "\n".encode("utf_8")], [StrictLevel.MIN], [True]
)
)
+ list(
itertools.product(["", " ", "\n", MAXSIZE, inf, nan, None], [StrictLevel.MAX], [False])
),
)
def test_normal(self, value, strict_level, expected):
type_checker = Binary(value, strict_level=strict_level)
assert type_checker.is_type() == expected
assert type_checker.typecode == Typecode.STRING
| Fix test cases for Python2
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import unicode_literals
import itertools
import pytest
import six
from six import MAXSIZE
from typepy import Binary, StrictLevel, Typecode
nan = float("nan")
inf = float("inf")
class Test_Binary_is_type(object):
@pytest.mark.parametrize(
["value", "strict_level", "expected"],
list(itertools.product([], [StrictLevel.MIN, StrictLevel.MAX], [False]))
+ list(
itertools.product(
[six.b("abc"), "いろは".encode("utf_8")], [StrictLevel.MIN, StrictLevel.MAX], [True]
)
)
+ list(itertools.product([six.b(""), six.b(" "), six.b("\n")], [StrictLevel.MIN], [True]))
+ list(
itertools.product(["", " ", "\n", MAXSIZE, inf, nan, None], [StrictLevel.MAX], [False])
),
)
def test_normal(self, value, strict_level, expected):
type_checker = Binary(value, strict_level=strict_level)
assert type_checker.is_type() == expected
assert type_checker.typecode == Typecode.STRING
|
9f6b12b2579f228fd9d04151771a22474a2744a3 | tabula/wrapper.py | tabula/wrapper.py | import subprocess, io, shlex, os
import pandas as pd
def read_pdf_table(input_path, options=""):
jar_path = os.path.abspath(os.path.dirname(__file__))
JAR_NAME = "tabula-0.9.1-jar-with-dependencies.jar"
args = ["java", "-jar"] + [jar_path + "/" + JAR_NAME] + shlex.split(options) + [input_path]
result = subprocess.run(args, stdout=subprocess.PIPE)
if len(result.stdout) == 0:
return
return pd.read_csv(io.BytesIO(result.stdout))
| import subprocess, io, shlex, os
import pandas as pd
def read_pdf_table(input_path, options=""):
JAR_NAME = "tabula-0.9.1-jar-with-dependencies.jar"
jar_dir = os.path.abspath(os.path.dirname(__file__))
jar_path = os.path.join(jar_dir, JAR_NAME)
args = ["java", "-jar", jar_path] + shlex.split(options) + [input_path]
result = subprocess.run(args, stdout=subprocess.PIPE)
if len(result.stdout) == 0:
return
return pd.read_csv(io.BytesIO(result.stdout))
| Use os.path.join for Jar path to make it OS independent | Use os.path.join for Jar path to make it OS independent
| Python | mit | chezou/tabula-py | import subprocess, io, shlex, os
import pandas as pd
def read_pdf_table(input_path, options=""):
JAR_NAME = "tabula-0.9.1-jar-with-dependencies.jar"
jar_dir = os.path.abspath(os.path.dirname(__file__))
jar_path = os.path.join(jar_dir, JAR_NAME)
args = ["java", "-jar", jar_path] + shlex.split(options) + [input_path]
result = subprocess.run(args, stdout=subprocess.PIPE)
if len(result.stdout) == 0:
return
return pd.read_csv(io.BytesIO(result.stdout))
| Use os.path.join for Jar path to make it OS independent
import subprocess, io, shlex, os
import pandas as pd
def read_pdf_table(input_path, options=""):
jar_path = os.path.abspath(os.path.dirname(__file__))
JAR_NAME = "tabula-0.9.1-jar-with-dependencies.jar"
args = ["java", "-jar"] + [jar_path + "/" + JAR_NAME] + shlex.split(options) + [input_path]
result = subprocess.run(args, stdout=subprocess.PIPE)
if len(result.stdout) == 0:
return
return pd.read_csv(io.BytesIO(result.stdout))
|
e056dc3581785fe34123189cccd9901e1e9afe71 | pylatex/__init__.py | pylatex/__init__.py | # flake8: noqa
"""
A library for creating Latex files.
.. :copyright: (c) 2014 by Jelte Fennema.
:license: MIT, see License for more details.
"""
from .document import Document
from .math import Math, VectorName, Matrix
from .package import Package
from .section import Section, Subsection, Subsubsection
from .table import Table, MultiColumn, MultiRow, Tabular
from .pgfplots import TikZ, Axis, Plot
from .graphics import Figure, SubFigure, MatplotlibFigure
from .lists import Enumerate, Itemize, Description
from .quantities import Quantity
from .base_classes import Command
| # flake8: noqa
"""
A library for creating Latex files.
.. :copyright: (c) 2014 by Jelte Fennema.
:license: MIT, see License for more details.
"""
from .document import Document
from .math import Math, VectorName, Matrix
from .package import Package
from .section import Section, Subsection, Subsubsection
from .table import Table, MultiColumn, MultiRow, Tabular, Tabu, LongTable, \
LongTabu
from .pgfplots import TikZ, Axis, Plot
from .graphics import Figure, SubFigure, MatplotlibFigure
from .lists import Enumerate, Itemize, Description
from .quantities import Quantity
from .base_classes import Command
| Add Tabu, LongTable and LongTabu global import | Add Tabu, LongTable and LongTabu global import
| Python | mit | sebastianhaas/PyLaTeX,sebastianhaas/PyLaTeX,votti/PyLaTeX,ovaskevich/PyLaTeX,JelteF/PyLaTeX,bjodah/PyLaTeX,votti/PyLaTeX,jendas1/PyLaTeX,bjodah/PyLaTeX,jendas1/PyLaTeX,JelteF/PyLaTeX,ovaskevich/PyLaTeX | # flake8: noqa
"""
A library for creating Latex files.
.. :copyright: (c) 2014 by Jelte Fennema.
:license: MIT, see License for more details.
"""
from .document import Document
from .math import Math, VectorName, Matrix
from .package import Package
from .section import Section, Subsection, Subsubsection
from .table import Table, MultiColumn, MultiRow, Tabular, Tabu, LongTable, \
LongTabu
from .pgfplots import TikZ, Axis, Plot
from .graphics import Figure, SubFigure, MatplotlibFigure
from .lists import Enumerate, Itemize, Description
from .quantities import Quantity
from .base_classes import Command
| Add Tabu, LongTable and LongTabu global import
# flake8: noqa
"""
A library for creating Latex files.
.. :copyright: (c) 2014 by Jelte Fennema.
:license: MIT, see License for more details.
"""
from .document import Document
from .math import Math, VectorName, Matrix
from .package import Package
from .section import Section, Subsection, Subsubsection
from .table import Table, MultiColumn, MultiRow, Tabular
from .pgfplots import TikZ, Axis, Plot
from .graphics import Figure, SubFigure, MatplotlibFigure
from .lists import Enumerate, Itemize, Description
from .quantities import Quantity
from .base_classes import Command
|
41021030afe45c61d8551128515d7d17ebdd09b8 | setup.py | setup.py | import sys
from setuptools import find_packages, setup
with open('VERSION') as version_fp:
VERSION = version_fp.read().strip()
install_requires = [
'django-local-settings>=1.0a13',
'stashward',
]
if sys.version_info[:2] < (3, 4):
install_requires.append('enum34')
setup(
name='django-arcutils',
version=VERSION,
url='https://github.com/PSU-OIT-ARC/django-arcutils',
author='PSU - OIT - ARC',
author_email='consultants@pdx.edu',
description='Common utilities used in ARC Django projects',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
extras_require={
'ldap': [
'certifi>=2015.11.20.1',
'ldap3>=1.0.3',
],
'dev': [
'django>=1.7,<1.9',
'djangorestframework>3.3',
'flake8',
'ldap3',
],
},
entry_points="""
[console_scripts]
arcutils = arcutils.__main__:main
""",
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
| import sys
from setuptools import find_packages, setup
with open('VERSION') as version_fp:
VERSION = version_fp.read().strip()
install_requires = [
'django-local-settings>=1.0a13',
'stashward',
]
if sys.version_info[:2] < (3, 4):
install_requires.append('enum34')
setup(
name='django-arcutils',
version=VERSION,
url='https://github.com/PSU-OIT-ARC/django-arcutils',
author='PSU - OIT - ARC',
author_email='consultants@pdx.edu',
description='Common utilities used in ARC Django projects',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
extras_require={
'ldap': [
'certifi>=2015.11.20.1',
'ldap3>=1.0.4',
],
'dev': [
'django>=1.7,<1.9',
'djangorestframework>3.3',
'flake8',
'ldap3',
],
},
entry_points="""
[console_scripts]
arcutils = arcutils.__main__:main
""",
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
| Update ldap3 1.0.3 => 1.0.4 | Update ldap3 1.0.3 => 1.0.4
| Python | mit | wylee/django-arcutils,PSU-OIT-ARC/django-arcutils,wylee/django-arcutils,PSU-OIT-ARC/django-arcutils | import sys
from setuptools import find_packages, setup
with open('VERSION') as version_fp:
VERSION = version_fp.read().strip()
install_requires = [
'django-local-settings>=1.0a13',
'stashward',
]
if sys.version_info[:2] < (3, 4):
install_requires.append('enum34')
setup(
name='django-arcutils',
version=VERSION,
url='https://github.com/PSU-OIT-ARC/django-arcutils',
author='PSU - OIT - ARC',
author_email='consultants@pdx.edu',
description='Common utilities used in ARC Django projects',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
extras_require={
'ldap': [
'certifi>=2015.11.20.1',
'ldap3>=1.0.4',
],
'dev': [
'django>=1.7,<1.9',
'djangorestframework>3.3',
'flake8',
'ldap3',
],
},
entry_points="""
[console_scripts]
arcutils = arcutils.__main__:main
""",
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
| Update ldap3 1.0.3 => 1.0.4
import sys
from setuptools import find_packages, setup
with open('VERSION') as version_fp:
VERSION = version_fp.read().strip()
install_requires = [
'django-local-settings>=1.0a13',
'stashward',
]
if sys.version_info[:2] < (3, 4):
install_requires.append('enum34')
setup(
name='django-arcutils',
version=VERSION,
url='https://github.com/PSU-OIT-ARC/django-arcutils',
author='PSU - OIT - ARC',
author_email='consultants@pdx.edu',
description='Common utilities used in ARC Django projects',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
extras_require={
'ldap': [
'certifi>=2015.11.20.1',
'ldap3>=1.0.3',
],
'dev': [
'django>=1.7,<1.9',
'djangorestframework>3.3',
'flake8',
'ldap3',
],
},
entry_points="""
[console_scripts]
arcutils = arcutils.__main__:main
""",
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
bf006aa3dc8ee331eccb4abd8244a134949c8cc0 | bawebauth/apps/bawebauth/fields.py | bawebauth/apps/bawebauth/fields.py | # -*- coding: utf-8 -*-
from django.db import models
class PositiveBigIntegerField(models.PositiveIntegerField):
"""Represents MySQL's unsigned BIGINT data type (works with MySQL only!)"""
empty_strings_allowed = False
def get_internal_type(self):
return "PositiveBigIntegerField"
def db_type(self, connection):
if connection.settings_dict['ENGINE'] == 'django.db.backends.mysql':
# This is how MySQL defines 64 bit unsigned integer data types
return "BIGINT UNSIGNED"
return super(PositiveBigIntegerField, self).db_type(connection)
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ['bawebauth\.fields\.PositiveBigIntegerField'])
except ImportError:
pass
| # -*- coding: utf-8 -*-
from django.db import models
class PositiveBigIntegerField(models.PositiveIntegerField):
"""Represents MySQL's unsigned BIGINT data type (works with MySQL only!)"""
empty_strings_allowed = False
def db_type(self, connection):
if connection.settings_dict['ENGINE'] == 'django.db.backends.mysql':
# This is how MySQL defines 64 bit unsigned integer data types
return "BIGINT UNSIGNED"
return super(PositiveBigIntegerField, self).db_type(connection)
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ['bawebauth\.fields\.PositiveBigIntegerField'])
except ImportError:
pass
| Fix tests by removing obsolete internal field type declaration | Fix tests by removing obsolete internal field type declaration
| Python | mit | mback2k/django-bawebauth,mback2k/django-bawebauth,mback2k/django-bawebauth,mback2k/django-bawebauth | # -*- coding: utf-8 -*-
from django.db import models
class PositiveBigIntegerField(models.PositiveIntegerField):
"""Represents MySQL's unsigned BIGINT data type (works with MySQL only!)"""
empty_strings_allowed = False
def db_type(self, connection):
if connection.settings_dict['ENGINE'] == 'django.db.backends.mysql':
# This is how MySQL defines 64 bit unsigned integer data types
return "BIGINT UNSIGNED"
return super(PositiveBigIntegerField, self).db_type(connection)
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ['bawebauth\.fields\.PositiveBigIntegerField'])
except ImportError:
pass
| Fix tests by removing obsolete internal field type declaration
# -*- coding: utf-8 -*-
from django.db import models
class PositiveBigIntegerField(models.PositiveIntegerField):
"""Represents MySQL's unsigned BIGINT data type (works with MySQL only!)"""
empty_strings_allowed = False
def get_internal_type(self):
return "PositiveBigIntegerField"
def db_type(self, connection):
if connection.settings_dict['ENGINE'] == 'django.db.backends.mysql':
# This is how MySQL defines 64 bit unsigned integer data types
return "BIGINT UNSIGNED"
return super(PositiveBigIntegerField, self).db_type(connection)
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ['bawebauth\.fields\.PositiveBigIntegerField'])
except ImportError:
pass
|
60a5104f0138af7bbfc5056fae01898c148b10a0 | benchmarks/serialization.py | benchmarks/serialization.py | """
Benchmark of message serialization.
The goal here is to mostly focus on performance of serialization, in a vaguely
realistic manner. That is, mesages are logged in context of a message with a
small number of fields.
"""
from __future__ import unicode_literals
import time
from eliot import Message, start_action, to_file
# Ensure JSON serialization is part of benchmark:
to_file(open("/dev/null"))
N = 10000
def run():
start = time.time()
for i in range(N):
with start_action(action_type="my_action"):
with start_action(action_type="my_action2"):
Message.log(
message_type="my_message",
integer=3, string=b"abcdeft", string2="dgsjdlkgjdsl",
list=[1, 2, 3, 4])
end = time.time()
# Each iteration has 5 messages: start/end of my_action, start/end of
# my_action2, and my_message.
print("%.6f per message" % ((end - start) / (N * 5),))
print("%s messages/sec" % (int(N / (end-start)),))
if __name__ == '__main__':
run()
| """
Benchmark of message serialization.
The goal here is to mostly focus on performance of serialization, in a vaguely
realistic manner. That is, mesages are logged in context of a message with a
small number of fields.
"""
from __future__ import unicode_literals
import time
from eliot import Message, start_action, to_file
# Ensure JSON serialization is part of benchmark:
to_file(open("/dev/null", "w"))
N = 10000
def run():
start = time.time()
for i in range(N):
with start_action(action_type="my_action"):
with start_action(action_type="my_action2"):
Message.log(
message_type="my_message",
integer=3, string=b"abcdeft", string2="dgsjdlkgjdsl",
list=[1, 2, 3, 4])
end = time.time()
# Each iteration has 5 messages: start/end of my_action, start/end of
# my_action2, and my_message.
print("%.6f per message" % ((end - start) / (N * 5),))
print("%s messages/sec" % (int(N / (end-start)),))
if __name__ == '__main__':
run()
| Fix the benchmark so it's not throwing exceptions every time a message is written | Fix the benchmark so it's not throwing exceptions every time a message is written | Python | apache-2.0 | ScatterHQ/eliot,ScatterHQ/eliot,ClusterHQ/eliot,ScatterHQ/eliot | """
Benchmark of message serialization.
The goal here is to mostly focus on performance of serialization, in a vaguely
realistic manner. That is, mesages are logged in context of a message with a
small number of fields.
"""
from __future__ import unicode_literals
import time
from eliot import Message, start_action, to_file
# Ensure JSON serialization is part of benchmark:
to_file(open("/dev/null", "w"))
N = 10000
def run():
start = time.time()
for i in range(N):
with start_action(action_type="my_action"):
with start_action(action_type="my_action2"):
Message.log(
message_type="my_message",
integer=3, string=b"abcdeft", string2="dgsjdlkgjdsl",
list=[1, 2, 3, 4])
end = time.time()
# Each iteration has 5 messages: start/end of my_action, start/end of
# my_action2, and my_message.
print("%.6f per message" % ((end - start) / (N * 5),))
print("%s messages/sec" % (int(N / (end-start)),))
if __name__ == '__main__':
run()
| Fix the benchmark so it's not throwing exceptions every time a message is written
"""
Benchmark of message serialization.
The goal here is to mostly focus on performance of serialization, in a vaguely
realistic manner. That is, mesages are logged in context of a message with a
small number of fields.
"""
from __future__ import unicode_literals
import time
from eliot import Message, start_action, to_file
# Ensure JSON serialization is part of benchmark:
to_file(open("/dev/null"))
N = 10000
def run():
start = time.time()
for i in range(N):
with start_action(action_type="my_action"):
with start_action(action_type="my_action2"):
Message.log(
message_type="my_message",
integer=3, string=b"abcdeft", string2="dgsjdlkgjdsl",
list=[1, 2, 3, 4])
end = time.time()
# Each iteration has 5 messages: start/end of my_action, start/end of
# my_action2, and my_message.
print("%.6f per message" % ((end - start) / (N * 5),))
print("%s messages/sec" % (int(N / (end-start)),))
if __name__ == '__main__':
run()
|
039c552b3674531a746c14d1c34bd2f13fd078e5 | Cura/util/removableStorage.py | Cura/util/removableStorage.py | import platform
import string
import glob
import os
import stat
def getPossibleSDcardDrives():
drives = []
if platform.system() == "Windows":
from ctypes import windll
bitmask = windll.kernel32.GetLogicalDrives()
for letter in string.uppercase:
if bitmask & 1 and windll.kernel32.GetDriveTypeA(letter + ':/') == 2:
drives.append(letter + ':/')
bitmask >>= 1
elif platform.system() == "Darwin":
for volume in glob.glob('/Volumes/*'):
if stat.S_ISLNK(os.lstat(volume).st_mode):
continue
#'Ejectable: Yes' in os.system('diskutil info \'%s\'' % (volume))
drives.append(volume)
else:
for volume in glob.glob('/media/*'):
drives.append(volume)
return drives
| import platform
import string
import glob
import os
import stat
def getPossibleSDcardDrives():
drives = []
if platform.system() == "Windows":
from ctypes import windll
import ctypes
bitmask = windll.kernel32.GetLogicalDrives()
for letter in string.uppercase:
if bitmask & 1 and windll.kernel32.GetDriveTypeA(letter + ':/') == 2:
volumeName = ''
nameBuffer = ctypes.create_unicode_buffer(1024)
if windll.kernel32.GetVolumeInformationW(ctypes.c_wchar_p(letter + ':/'), nameBuffer, ctypes.sizeof(nameBuffer), None, None, None, None, 0) == 0:
volumeName = nameBuffer.value
if volumeName == '':
volumeName = 'NO NAME'
drives.append(('%s (%s:)' % (volumeName, letter), letter + ':/', volumeName))
bitmask >>= 1
elif platform.system() == "Darwin":
for volume in glob.glob('/Volumes/*'):
if stat.S_ISLNK(os.lstat(volume).st_mode):
continue
#'Ejectable: Yes' in os.system('diskutil info \'%s\'' % (volume))
drives.append((os.path.basename(volume), os.path.basename(volume), volume))
else:
for volume in glob.glob('/media/*'):
drives.append((os.path.basename(volume), os.path.basename(volume), volume))
return drives
| Enhance the SD card list with more info. | Enhance the SD card list with more info.
| Python | agpl-3.0 | alephobjects/Cura,alephobjects/Cura,alephobjects/Cura | import platform
import string
import glob
import os
import stat
def getPossibleSDcardDrives():
drives = []
if platform.system() == "Windows":
from ctypes import windll
import ctypes
bitmask = windll.kernel32.GetLogicalDrives()
for letter in string.uppercase:
if bitmask & 1 and windll.kernel32.GetDriveTypeA(letter + ':/') == 2:
volumeName = ''
nameBuffer = ctypes.create_unicode_buffer(1024)
if windll.kernel32.GetVolumeInformationW(ctypes.c_wchar_p(letter + ':/'), nameBuffer, ctypes.sizeof(nameBuffer), None, None, None, None, 0) == 0:
volumeName = nameBuffer.value
if volumeName == '':
volumeName = 'NO NAME'
drives.append(('%s (%s:)' % (volumeName, letter), letter + ':/', volumeName))
bitmask >>= 1
elif platform.system() == "Darwin":
for volume in glob.glob('/Volumes/*'):
if stat.S_ISLNK(os.lstat(volume).st_mode):
continue
#'Ejectable: Yes' in os.system('diskutil info \'%s\'' % (volume))
drives.append((os.path.basename(volume), os.path.basename(volume), volume))
else:
for volume in glob.glob('/media/*'):
drives.append((os.path.basename(volume), os.path.basename(volume), volume))
return drives
| Enhance the SD card list with more info.
import platform
import string
import glob
import os
import stat
def getPossibleSDcardDrives():
drives = []
if platform.system() == "Windows":
from ctypes import windll
bitmask = windll.kernel32.GetLogicalDrives()
for letter in string.uppercase:
if bitmask & 1 and windll.kernel32.GetDriveTypeA(letter + ':/') == 2:
drives.append(letter + ':/')
bitmask >>= 1
elif platform.system() == "Darwin":
for volume in glob.glob('/Volumes/*'):
if stat.S_ISLNK(os.lstat(volume).st_mode):
continue
#'Ejectable: Yes' in os.system('diskutil info \'%s\'' % (volume))
drives.append(volume)
else:
for volume in glob.glob('/media/*'):
drives.append(volume)
return drives
|
a4dd889a44cf7b4ea4e2e85880343ede234ec60c | geotrek/core/migrations/0017_remove_path_from_factories.py | geotrek/core/migrations/0017_remove_path_from_factories.py | # Generated by Django 2.0.13 on 2020-04-06 13:40
from django.conf import settings
from django.contrib.gis.geos import Point, LineString
from django.db import migrations
def remove_generated_paths_factories(apps, schema_editor):
PathModel = apps.get_model('core', 'Path')
PathModel.objects.filter(geom=LineString(Point(700000, 6600000), Point(700100, 6600100), srid=settings.SRID)).delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0016_auto_20200406_1340'),
]
operations = [
migrations.RunPython(remove_generated_paths_factories)
]
| Add migration remove generated paths | Add migration remove generated paths
| Python | bsd-2-clause | GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek | # Generated by Django 2.0.13 on 2020-04-06 13:40
from django.conf import settings
from django.contrib.gis.geos import Point, LineString
from django.db import migrations
def remove_generated_paths_factories(apps, schema_editor):
PathModel = apps.get_model('core', 'Path')
PathModel.objects.filter(geom=LineString(Point(700000, 6600000), Point(700100, 6600100), srid=settings.SRID)).delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0016_auto_20200406_1340'),
]
operations = [
migrations.RunPython(remove_generated_paths_factories)
]
| Add migration remove generated paths
|
|
575f4678b2528bfcfb5d48fdacebd59a2abd9581 | tests/slaves_expectations.py | tests/slaves_expectations.py | #!/usr/bin/env python
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Dumps a list of known slaves, along with their OS and master."""
import argparse
import collections
import json
import logging
import os
import subprocess
import sys
# This file is located inside tests. Update this path if that changes.
BUILD = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SCRIPTS = os.path.join(BUILD, 'scripts')
LIST_SLAVES = os.path.join(SCRIPTS, 'tools', 'list_slaves.py')
sys.path.append(SCRIPTS)
from common import chromium_utils
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
'-g',
'--gen',
'--generate',
action='store_true',
dest='generate',
help='Generate slaves.expected for all masters.',
)
args = parser.parse_args()
masters = chromium_utils.ListMastersWithSlaves()
master_map = {}
for master_path in masters:
# Convert ~/<somewhere>/master.<whatever> to just whatever.
master = os.path.basename(master_path).split('.', 1)[-1]
botmap = json.loads(subprocess.check_output([
LIST_SLAVES, '--json', '--master', master]))
slave_map = collections.defaultdict(set)
for entry in botmap:
assert entry['mastername'] == 'master.%s' % master
for builder in entry['builder']:
slave_map[builder].add(entry['hostname'])
master_map[master_path] = {}
for buildername in sorted(slave_map.keys()):
master_map[master_path][buildername] = sorted(slave_map[buildername])
retcode = 0
for master_path, slaves_expectation in master_map.iteritems():
if os.path.exists(master_path):
slaves_expectation_file = os.path.join(master_path, 'slaves.expected')
if args.generate:
with open(slaves_expectation_file, 'w') as fp:
json.dump(slaves_expectation, fp, indent=2, sort_keys=True)
print 'Wrote expectation: %s.' % slaves_expectation_file
else:
if os.path.exists(slaves_expectation_file):
with open(slaves_expectation_file) as fp:
if json.load(fp) != slaves_expectation:
logging.error(
'Mismatched expectation: %s.', slaves_expectation_file)
retcode = 1
else:
logging.error('File not found: %s.', slaves_expectation_file)
retcode = 1
return retcode
if __name__ == '__main__':
sys.exit(main())
| Add script for generating slave expectations | Add script for generating slave expectations
BUG=489880
R=friedman@chromium.org
Review URL: https://codereview.chromium.org/1178383002.
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@295683 0039d316-1c4b-4281-b951-d872f2087c98
| Python | bsd-3-clause | eunchong/build,eunchong/build,eunchong/build,eunchong/build | #!/usr/bin/env python
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Dumps a list of known slaves, along with their OS and master."""
import argparse
import collections
import json
import logging
import os
import subprocess
import sys
# This file is located inside tests. Update this path if that changes.
BUILD = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SCRIPTS = os.path.join(BUILD, 'scripts')
LIST_SLAVES = os.path.join(SCRIPTS, 'tools', 'list_slaves.py')
sys.path.append(SCRIPTS)
from common import chromium_utils
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
'-g',
'--gen',
'--generate',
action='store_true',
dest='generate',
help='Generate slaves.expected for all masters.',
)
args = parser.parse_args()
masters = chromium_utils.ListMastersWithSlaves()
master_map = {}
for master_path in masters:
# Convert ~/<somewhere>/master.<whatever> to just whatever.
master = os.path.basename(master_path).split('.', 1)[-1]
botmap = json.loads(subprocess.check_output([
LIST_SLAVES, '--json', '--master', master]))
slave_map = collections.defaultdict(set)
for entry in botmap:
assert entry['mastername'] == 'master.%s' % master
for builder in entry['builder']:
slave_map[builder].add(entry['hostname'])
master_map[master_path] = {}
for buildername in sorted(slave_map.keys()):
master_map[master_path][buildername] = sorted(slave_map[buildername])
retcode = 0
for master_path, slaves_expectation in master_map.iteritems():
if os.path.exists(master_path):
slaves_expectation_file = os.path.join(master_path, 'slaves.expected')
if args.generate:
with open(slaves_expectation_file, 'w') as fp:
json.dump(slaves_expectation, fp, indent=2, sort_keys=True)
print 'Wrote expectation: %s.' % slaves_expectation_file
else:
if os.path.exists(slaves_expectation_file):
with open(slaves_expectation_file) as fp:
if json.load(fp) != slaves_expectation:
logging.error(
'Mismatched expectation: %s.', slaves_expectation_file)
retcode = 1
else:
logging.error('File not found: %s.', slaves_expectation_file)
retcode = 1
return retcode
if __name__ == '__main__':
sys.exit(main())
| Add script for generating slave expectations
BUG=489880
R=friedman@chromium.org
Review URL: https://codereview.chromium.org/1178383002.
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@295683 0039d316-1c4b-4281-b951-d872f2087c98
|
|
5ee94e9a74bc4128ed8e7e10a2106ea422f22757 | sandbox/sandbox/polls/serialiser.py | sandbox/sandbox/polls/serialiser.py |
from nap import models, fields, api, serialiser, publisher
from .models import Choice, Poll
class ChoiceSerialiser(models.ModelSerialiser):
class Meta:
model = Choice
exclude = ('poll,')
class PollSerialiser(serialiser.Serialiser):
api_name = 'poll'
question = fields.Field()
published = fields.DateTimeField('pub_date')
choices = fields.ManySerialiserField(serialiser=ChoiceSerialiser())
class PollPublisher(publisher.Publisher):
serialiser = PollSerialiser()
api_name = 'polls'
def get_object_list(self):
return Poll.objects.all()
api.register('api', PollPublisher)
|
from nap import models, fields, api, serialiser, publisher
from .models import Choice, Poll
class ChoiceSerialiser(models.ModelSerialiser):
class Meta:
model = Choice
exclude = ('poll,')
class PollSerialiser(serialiser.Serialiser):
api_name = 'poll'
question = fields.Field()
published = fields.DateTimeField('pub_date')
choices = fields.ManySerialiserField('choices_set.all', serialiser=ChoiceSerialiser())
class PollPublisher(publisher.Publisher):
serialiser = PollSerialiser()
api_name = 'polls'
def get_object_list(self):
return Poll.objects.all()
api.register('api', PollPublisher)
| Add attribute to choices field declaration | Add attribute to choices field declaration
| Python | bsd-3-clause | MarkusH/django-nap,limbera/django-nap |
from nap import models, fields, api, serialiser, publisher
from .models import Choice, Poll
class ChoiceSerialiser(models.ModelSerialiser):
class Meta:
model = Choice
exclude = ('poll,')
class PollSerialiser(serialiser.Serialiser):
api_name = 'poll'
question = fields.Field()
published = fields.DateTimeField('pub_date')
choices = fields.ManySerialiserField('choices_set.all', serialiser=ChoiceSerialiser())
class PollPublisher(publisher.Publisher):
serialiser = PollSerialiser()
api_name = 'polls'
def get_object_list(self):
return Poll.objects.all()
api.register('api', PollPublisher)
| Add attribute to choices field declaration
from nap import models, fields, api, serialiser, publisher
from .models import Choice, Poll
class ChoiceSerialiser(models.ModelSerialiser):
class Meta:
model = Choice
exclude = ('poll,')
class PollSerialiser(serialiser.Serialiser):
api_name = 'poll'
question = fields.Field()
published = fields.DateTimeField('pub_date')
choices = fields.ManySerialiserField(serialiser=ChoiceSerialiser())
class PollPublisher(publisher.Publisher):
serialiser = PollSerialiser()
api_name = 'polls'
def get_object_list(self):
return Poll.objects.all()
api.register('api', PollPublisher)
|
bcc5a9a68f0b97b7e170cf34f9ffea00fb5441f4 | version.py | version.py | major = 0
minor=0
patch=25
branch="master"
timestamp=1376610207.69 | major = 0
minor=0
patch=26
branch="master"
timestamp=1376610243.26 | Tag commit for v0.0.26-master generated by gitmake.py | Tag commit for v0.0.26-master generated by gitmake.py
| Python | mit | ryansturmer/gitmake | major = 0
minor=0
patch=26
branch="master"
timestamp=1376610243.26 | Tag commit for v0.0.26-master generated by gitmake.py
major = 0
minor=0
patch=25
branch="master"
timestamp=1376610207.69 |
57e66ae6cd833b1b0da5b71e1c4b6e223c8ca062 | test/test_data.py | test/test_data.py | """Tests for coverage.data"""
import unittest
from coverage.data import CoverageData
class DataTest(unittest.TestCase):
def test_reading(self):
covdata = CoverageData()
covdata.read()
self.assertEqual(covdata.summary(), {})
| """Tests for coverage.data"""
from coverage.data import CoverageData
from coveragetest import CoverageTest
class DataTest(CoverageTest):
def test_reading(self):
covdata = CoverageData()
covdata.read()
self.assertEqual(covdata.summary(), {})
| Use our CoverageTest base class to get isolation (in a new directory) for the data tests. | Use our CoverageTest base class to get isolation (in a new directory) for the data tests.
| Python | apache-2.0 | 7WebPages/coveragepy,larsbutler/coveragepy,7WebPages/coveragepy,jayhetee/coveragepy,nedbat/coveragepy,7WebPages/coveragepy,blueyed/coveragepy,jayhetee/coveragepy,blueyed/coveragepy,larsbutler/coveragepy,larsbutler/coveragepy,blueyed/coveragepy,larsbutler/coveragepy,7WebPages/coveragepy,hugovk/coveragepy,hugovk/coveragepy,nedbat/coveragepy,blueyed/coveragepy,larsbutler/coveragepy,jayhetee/coveragepy,nedbat/coveragepy,blueyed/coveragepy,nedbat/coveragepy,hugovk/coveragepy,nedbat/coveragepy,hugovk/coveragepy,jayhetee/coveragepy,jayhetee/coveragepy,hugovk/coveragepy | """Tests for coverage.data"""
from coverage.data import CoverageData
from coveragetest import CoverageTest
class DataTest(CoverageTest):
def test_reading(self):
covdata = CoverageData()
covdata.read()
self.assertEqual(covdata.summary(), {})
| Use our CoverageTest base class to get isolation (in a new directory) for the data tests.
"""Tests for coverage.data"""
import unittest
from coverage.data import CoverageData
class DataTest(unittest.TestCase):
def test_reading(self):
covdata = CoverageData()
covdata.read()
self.assertEqual(covdata.summary(), {})
|
3e03d66c5351ac5e71f82a56aa01ba06865e1c25 | conda_verify/cli.py | conda_verify/cli.py | import os
import sys
from optparse import OptionParser
from conda_verify.errors import RecipeError
from conda_verify.verify import Verify
from conda_verify.utilities import render_metadata, iter_cfgs
def cli():
p = OptionParser(
usage="usage: %prog [options] <path to recipes or packages>",
description="tool for (passively) verifying conda recipes and conda "
"packages for the Anaconda distribution")
p.add_option('-v', '--version',
help="display the version being used and exit",
action="store_true")
opts, args = p.parse_args()
if opts.version:
from conda_verify import __version__
sys.exit('conda-verify {}' .format(__version__))
verifier = Verify()
for path in args:
if os.path.isfile(os.path.join(path, 'meta.yaml')):
print("==> %s <==" % path)
for cfg in iter_cfgs():
meta = render_metadata(path, cfg)
try:
verifier.verify_recipe(rendered_meta=meta, recipe_dir=path)
except RecipeError as e:
sys.stderr.write("RecipeError: %s\n" % e)
elif path.endswith(('.tar.bz2', '.tar')):
print('Verifying {}...' .format(path))
verifier.verify_package(path_to_package=path)
| import os
import sys
from optparse import OptionParser
from conda_verify.errors import RecipeError
from conda_verify.verify import Verify
from conda_verify.utilities import render_metadata, iter_cfgs
def cli():
p = OptionParser(
usage="usage: %prog [options] <path to recipes or packages>",
description="tool for (passively) verifying conda recipes and conda "
"packages for the Anaconda distribution")
p.add_option('-v', '--version',
help="display the version being used and exit",
action="store_true")
opts, args = p.parse_args()
if opts.version:
from conda_verify import __version__
sys.exit('conda-verify {}' .format(__version__))
verifier = Verify()
for path in args:
meta_file = os.path.join(path, 'meta.yaml')
if os.path.isfile(meta_file):
print('Verifying {}...' .format(meta_file))
for cfg in iter_cfgs():
meta = render_metadata(path, cfg)
try:
verifier.verify_recipe(rendered_meta=meta, recipe_dir=path)
except RecipeError as e:
sys.stderr.write("RecipeError: %s\n" % e)
elif path.endswith(('.tar.bz2', '.tar')):
print('Verifying {}...' .format(path))
verifier.verify_package(path_to_package=path)
| Change script run message output | Change script run message output
| Python | bsd-3-clause | mandeep/conda-verify | import os
import sys
from optparse import OptionParser
from conda_verify.errors import RecipeError
from conda_verify.verify import Verify
from conda_verify.utilities import render_metadata, iter_cfgs
def cli():
p = OptionParser(
usage="usage: %prog [options] <path to recipes or packages>",
description="tool for (passively) verifying conda recipes and conda "
"packages for the Anaconda distribution")
p.add_option('-v', '--version',
help="display the version being used and exit",
action="store_true")
opts, args = p.parse_args()
if opts.version:
from conda_verify import __version__
sys.exit('conda-verify {}' .format(__version__))
verifier = Verify()
for path in args:
meta_file = os.path.join(path, 'meta.yaml')
if os.path.isfile(meta_file):
print('Verifying {}...' .format(meta_file))
for cfg in iter_cfgs():
meta = render_metadata(path, cfg)
try:
verifier.verify_recipe(rendered_meta=meta, recipe_dir=path)
except RecipeError as e:
sys.stderr.write("RecipeError: %s\n" % e)
elif path.endswith(('.tar.bz2', '.tar')):
print('Verifying {}...' .format(path))
verifier.verify_package(path_to_package=path)
| Change script run message output
import os
import sys
from optparse import OptionParser
from conda_verify.errors import RecipeError
from conda_verify.verify import Verify
from conda_verify.utilities import render_metadata, iter_cfgs
def cli():
p = OptionParser(
usage="usage: %prog [options] <path to recipes or packages>",
description="tool for (passively) verifying conda recipes and conda "
"packages for the Anaconda distribution")
p.add_option('-v', '--version',
help="display the version being used and exit",
action="store_true")
opts, args = p.parse_args()
if opts.version:
from conda_verify import __version__
sys.exit('conda-verify {}' .format(__version__))
verifier = Verify()
for path in args:
if os.path.isfile(os.path.join(path, 'meta.yaml')):
print("==> %s <==" % path)
for cfg in iter_cfgs():
meta = render_metadata(path, cfg)
try:
verifier.verify_recipe(rendered_meta=meta, recipe_dir=path)
except RecipeError as e:
sys.stderr.write("RecipeError: %s\n" % e)
elif path.endswith(('.tar.bz2', '.tar')):
print('Verifying {}...' .format(path))
verifier.verify_package(path_to_package=path)
|
6016b6531822615f7c697b0ac380150662d41ba0 | setup.py | setup.py | import os
import sys
from setuptools import setup, find_packages, Command
SEP='<><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><>'
class Doctest(Command):
if sys.argv[-1] == 'test':
print(SEP)
print("Running docs make and make doctest")
os.system("make doctest -C docs/")
print(SEP)
class Pep8Test(Command):
if sys.argv[-1] == 'test':
print("Running pep8 under source code folder")
os.system("python setup.py pep8 --exclude '.eggs*'")
print(SEP)
setup(name='Kytos OpenFlow Parser library',
version='0.1',
description='Library to parse and generate OpenFlow messages',
url='http://github.com/kytos/python-openflow',
author='Kytos Team',
author_email='of-ng-dev@ncc.unesp.br',
license='MIT',
test_suite='tests',
packages=find_packages(exclude=["tests", "*v0x02*"]),
setup_requires=['setuptools-pep8'],
cmdclass={
'doctests': Doctest
},
zip_safe=False)
| import os
import sys
from setuptools import setup, find_packages, Command
class Doctest(Command):
if sys.argv[-1] == 'test':
print("Running docs make and make doctest")
os.system("make doctest -C docs/")
class Pep8Test(Command):
if sys.argv[-1] == 'test':
print("Running pep8 under source code folder")
os.system("python3 setup.py pep8 --exclude '.eggs*'")
setup(name='Kytos OpenFlow Parser library',
version='0.1',
description='Library to parse and generate OpenFlow messages',
url='http://github.com/kytos/python-openflow',
author='Kytos Team',
author_email='of-ng-dev@ncc.unesp.br',
license='MIT',
test_suite='tests',
packages=find_packages(exclude=["tests", "*v0x02*"]),
setup_requires=['setuptools-pep8'],
cmdclass={
'doctests': Doctest
},
zip_safe=False)
| Enforce python3 on pep8 test (and remove print markers) | Enforce python3 on pep8 test (and remove print markers)
| Python | mit | cemsbr/python-openflow,kytos/python-openflow | import os
import sys
from setuptools import setup, find_packages, Command
class Doctest(Command):
if sys.argv[-1] == 'test':
print("Running docs make and make doctest")
os.system("make doctest -C docs/")
class Pep8Test(Command):
if sys.argv[-1] == 'test':
print("Running pep8 under source code folder")
os.system("python3 setup.py pep8 --exclude '.eggs*'")
setup(name='Kytos OpenFlow Parser library',
version='0.1',
description='Library to parse and generate OpenFlow messages',
url='http://github.com/kytos/python-openflow',
author='Kytos Team',
author_email='of-ng-dev@ncc.unesp.br',
license='MIT',
test_suite='tests',
packages=find_packages(exclude=["tests", "*v0x02*"]),
setup_requires=['setuptools-pep8'],
cmdclass={
'doctests': Doctest
},
zip_safe=False)
| Enforce python3 on pep8 test (and remove print markers)
import os
import sys
from setuptools import setup, find_packages, Command
SEP='<><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><>'
class Doctest(Command):
if sys.argv[-1] == 'test':
print(SEP)
print("Running docs make and make doctest")
os.system("make doctest -C docs/")
print(SEP)
class Pep8Test(Command):
if sys.argv[-1] == 'test':
print("Running pep8 under source code folder")
os.system("python setup.py pep8 --exclude '.eggs*'")
print(SEP)
setup(name='Kytos OpenFlow Parser library',
version='0.1',
description='Library to parse and generate OpenFlow messages',
url='http://github.com/kytos/python-openflow',
author='Kytos Team',
author_email='of-ng-dev@ncc.unesp.br',
license='MIT',
test_suite='tests',
packages=find_packages(exclude=["tests", "*v0x02*"]),
setup_requires=['setuptools-pep8'],
cmdclass={
'doctests': Doctest
},
zip_safe=False)
|
4d4a639ba46cf72454497bc100b3e811e66af4b2 | tests/test_deprecations.py | tests/test_deprecations.py | # -*- coding: utf-8 -*-
"""
tests.deprecations
~~~~~~~~~~~~~~~~~~
Tests deprecation support. Not used currently.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
| # -*- coding: utf-8 -*-
"""
tests.deprecations
~~~~~~~~~~~~~~~~~~
Tests deprecation support. Not used currently.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import pytest
import flask
class TestRequestDeprecation(object):
def test_request_json(self, catch_deprecation_warnings):
"""Request.json is deprecated"""
app = flask.Flask(__name__)
app.testing = True
@app.route('/', methods=['POST'])
def index():
assert flask.request.json == {'spam': 42}
print(flask.request.json)
return 'OK'
with catch_deprecation_warnings() as captured:
c = app.test_client()
c.post('/', data='{"spam": 42}', content_type='application/json')
assert len(captured) == 1
def test_request_module(self, catch_deprecation_warnings):
"""Request.module is deprecated"""
app = flask.Flask(__name__)
app.testing = True
@app.route('/')
def index():
assert flask.request.module is None
return 'OK'
with catch_deprecation_warnings() as captured:
c = app.test_client()
c.get('/')
assert len(captured) == 1
| Add test for deprecated flask.Request properties. | Add test for deprecated flask.Request properties.
| Python | bsd-3-clause | moluzhang/flask,karen-wang/flask,happyspace/flask,alanhamlett/flask,kuhli/flask,auready/flask,margguo/flask,tcnoviembre2013/flask,rollingstone/flask,wudafucode/flask,mysweet/flask,drewja/flask,tcnoviembre2013/flask,karen-wang/flask,horica-ionescu/flask,nwags/flask,cgvarela/flask,sam-tsai/flask,postelin/flask,jiimaho/flask,jenalgit/flask,bluedazzle/flask,ghhong1986/flask,ffsdmad/flask,raven47git/flask,kuhli/flask,lynget/flask,jawed123/flask,pallets/flask,kuku940/flask,alexwidener/flask,lord63-forks/flask,iamantee/flask,edcomstock/flask,Endika/flask,felipefuka/flask,flamelizard/flask,s7v7nislands/flask,postelin/flask,algoStruct/flask,WSDC-NITWarangal/flask,zailushangde/flask,gbolahanalade/flask,SsangYoon/Flask,arvinls/flask,WSDC-NITWarangal/flask,theo-l/flask,warm200/flask,rawrgulmuffins/flask,liuyi1112/flask,antsar/flask,Freebird2014/flask,wtorcasoGB/flask,GavinCruise/flask,szymonm/flask,Faiz7412/flask,vivekanand1101/flask,AvivC/flask,godfreyy/flask,JingZhou0404/flask,suppandi/flask,zailushangde/flask,mitsuhiko/flask,Faiz7412/flask,luzzyzhang/flask,ffsdmad/flask,s7v7nislands/flask,anujk3/flask,blankme/flask,dominjune/flask,grayswand1r/flask,skmezanul/flask,artleaf/flask,mAzurkovic/flask,Eagles2F/flask,oculardexterity/flask,moluzhang/flask,onetinyr/flask,drewja/flask,TylerB24890/flask,anujk3/flask,hyunchel/flask,dawran6/flask,nZac/flask,szymonm/flask,dhruvsrivastava/flask,gbolahanalade/flask,nju520/flask,Papa2k15/flask,dawran6/flask,treedledee/flask,szymonm/flask,mujiatong/flask,abdulbaqi/flask,happyspace/flask,ryandvill/flask,jonieli/flask,tjxiter/flask,ankurpandey/flask,ghhong1986/flask,bodefuwa/flask,Ricardo666666/flask,wangjun/flask,garaden/flask,SsangYoon/Flask,chenke91/flask,SsangYoon/Flask,rollingstone/flask,hoatle/flask,tripathi62674/flask,AndreaEdwards/flask,fevxie/flask,tangfeng1/flask,xiaoshaozi52/flask,luzzyzhang/flask,jeezybrick/flask,chenke91/flask,treedledee/flask,mAzurkovic/flask,lyn233/flask,grayswand1r/flask,gangeshwark/flas
k,q1ang/flask,mitchfriedman/flask,artleaf/flask,DesQi/flask,jorpramo/flask,wtorcasoGB/flask,oculardexterity/flask,brianbeale/flask,LonglyCode/flask,mujiatong/flask,homeworkprod/flask,s7v7nislands/flask,rawrgulmuffins/flask,aniketshukla/flask,suppandi/flask,nZac/flask,cgvarela/flask,theo-l/flask,gangeshwark/flask,sage417/flask,jonieli/flask,beni55/flask,ezequielo/flask,EasonYi/flask,ezequielo/flask,JamesTFarrington/flask,sam-tsai/flask,blankme/flask,kyle-sorensen/flask,justanr/flask,liaoqingwei/flask,EasonYi/flask,Faiz7412/flask,mitsuhiko/flask,huwenhui/flask,jorpramo/flask,liuyi1112/flask,fevxie/flask,jeezybrick/flask,lalinsky/flask,pledo/flask,iamantee/flask,brianbeale/flask,jstacoder/flask,lyn233/flask,dominjune/flask,Parkayun/flask,cheesecake4392/flask,adrianmoisey/cptdevops,flabe81/flask,JingZhou0404/flask,tangfeng1/flask,fanshaohua-fan/flask,stephaniemertz/flask,GavinCruise/flask,liukaijv/flask,HeyItsAlan/flask,xujianhai/flask,homeworkprod/flask,liukaijv/flask,tjxiter/flask,nju520/flask,TylerB24890/flask,edcomstock/flask,pxzhenren/flask,tristanfisher/flask,elelianghh/flask,tripathi62674/flask,hyunchel/flask,raven47git/flask,RohithKP/flask,liaoqingwei/flask,vishnugonela/flask,cliu-aa/flask,Papa2k15/flask,VShangxiao/flask,arvinls/flask,zoyanhui/flask,TylerB24890/flask,antsar/flask,godfreyy/flask,mysweet/flask,tony/flask,felipefuka/flask,beni55/flask,jstacoder/flask,kyle-sorensen/flask,aniketshukla/flask,nju520/flask,sonnyhu/flask,lalinsky/flask,wtl-zju/flask,chenrenyi/flask,bodefuwa/flask,alexwidener/flask,Parkayun/flask,gangeshwark/flask,alanhamlett/flask,artleaf/flask,gaomingnudt/gm-flask2.0,robertglen/flask,AndreaEdwards/flask,sixpi/flask,cliu-aa/flask,horica-ionescu/flask,raven47git/flask,karen-wang/flask,ankravch/flask,ZacariasBendeck/flask,robertglen/flask,adrianmoisey/cptdevops,kyle-sorensen/flask,huwenhui/flask,LonglyCode/flask,xiaoshaozi52/flask,anujk3/flask,sonnyhu/flask,niima/flask,godfreyy/flask,luvten/flask,karlw00t/flask,XGiton/flask,sixpi/flask,dhr
uvsrivastava/flask,alanhamlett/flask,xujianhai/flask,ankurpandey/flask,mitchfriedman/flask,ankravch/flask,pxzhenren/flask,kuku940/flask,margguo/flask,bluedazzle/flask,skmezanul/flask,flamelizard/flask,qinfengsuiyu/flask,VShangxiao/flask,ABaldwinHunter/flask-clone-classic,auready/flask,fkazimierczak/flask,fkazimierczak/flask,AvivC/flask,tangfeng1/flask,wangjun/flask,lord63-forks/flask,jstacoder/flask,wilbert-abreu/flask,ZacariasBendeck/flask,RohithKP/flask,VShangxiao/flask,luvten/flask,Ricardo666666/flask,sage417/flask,gaomingnudt/gm-flask2.0,wtl-zju/flask,EasonYi/flask,XGiton/flask,pinkpet/flask,wldtyp/flask,tony/flask,oculardexterity/flask,justanr/flask,hoatle/flask,cheesecake4392/flask,visaxin/flask,Endika/flask,gilesburnfield/flask,onetinyr/flask,ryandvill/flask,warm200/flask,liaoqingwei/flask,jenalgit/flask,ezequielo/flask,wilbert-abreu/flask,zoyanhui/flask,vishnugonela/flask,jiimaho/flask,HeyItsAlan/flask,Ricardo666666/flask,AndreaEdwards/flask,jonatanblue/flask,fanshaohua-fan/flask,chuijiaolianying/flask,pledo/flask,qinfengsuiyu/flask,JamesTFarrington/flask,jenalgit/flask,ryandvill/flask,liuyi1112/flask,happyspace/flask,algoStruct/flask,vishnugonela/flask,flabe81/flask,wldtyp/flask,warm200/flask,jonieli/flask,chuijiaolianying/flask,vivekanand1101/flask,visaxin/flask,pallets/flask,fengsp/flask,ABaldwinHunter/flask-clone-classic,jonatanblue/flask,GavinCruise/flask,gilesburnfield/flask,niima/flask,jorpramo/flask,Freebird2014/flask,Eagles2F/flask,ankurpandey/flask,suppandi/flask,lynget/flask,fengsp/flask,garaden/flask,visaxin/flask,jawed123/flask,chenrenyi/flask,DesQi/flask,wudafucode/flask,onetinyr/flask,pinkpet/flask,abdulbaqi/flask,elelianghh/flask,stephaniemertz/flask,pallets/flask,nwags/flask,justanr/flask,chenke91/flask,tristanfisher/flask,postelin/flask,chuijiaolianying/flask,karlw00t/flask,algoStruct/flask,q1ang/flask,fkazimierczak/flask,drewja/flask | # -*- coding: utf-8 -*-
"""
tests.deprecations
~~~~~~~~~~~~~~~~~~
Tests deprecation support. Not used currently.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import pytest
import flask
class TestRequestDeprecation(object):
def test_request_json(self, catch_deprecation_warnings):
"""Request.json is deprecated"""
app = flask.Flask(__name__)
app.testing = True
@app.route('/', methods=['POST'])
def index():
assert flask.request.json == {'spam': 42}
print(flask.request.json)
return 'OK'
with catch_deprecation_warnings() as captured:
c = app.test_client()
c.post('/', data='{"spam": 42}', content_type='application/json')
assert len(captured) == 1
def test_request_module(self, catch_deprecation_warnings):
"""Request.module is deprecated"""
app = flask.Flask(__name__)
app.testing = True
@app.route('/')
def index():
assert flask.request.module is None
return 'OK'
with catch_deprecation_warnings() as captured:
c = app.test_client()
c.get('/')
assert len(captured) == 1
| Add test for deprecated flask.Request properties.
# -*- coding: utf-8 -*-
"""
tests.deprecations
~~~~~~~~~~~~~~~~~~
Tests deprecation support. Not used currently.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
|
4a7b0fb482011400da0b3e760cde2d6f294d168f | sysrev/models.py | sysrev/models.py | from django.db import models
from django.contrib.auth.models import User
class Review(models.Model):
user = models.ForeignKey(User, default=None)
title = models.CharField(max_length=128)
description = models.TextField()
date_created = models.DateTimeField(auto_now_add=True)
last_modified = models.DateTimeField(auto_now=True)
query = models.TextField()
abstract_pool_size = models.IntegerField()
document_pool_size = models.IntegerField()
final_pool_size = models.IntegerField()
class Paper(models.Model):
review = models.ForeignKey(Review)
title = models.CharField(max_length=128)
authors = models.CharField(max_length=128)
abstract = models.TextField()
publish_date = models.DateField()
url = models.URLField()
notes = models.TextField()
ABSTRACT_POOL = 'A'
DOCUMENT_POOL = 'D'
FINAL_POOL = 'F'
REJECTED = 'R'
POOLS = ((ABSTRACT_POOL, 'Abstract pool'),
(DOCUMENT_POOL, 'Document pool'),
(FINAL_POOL, 'Final pool'),
(REJECTED, 'Rejected'))
pool = models.CharField(max_length=1, choices=POOLS, default=ABSTRACT_POOL)
| from django.db import models
from django.contrib.auth.models import User
class Review(models.Model):
user = models.ForeignKey(User, default=None)
title = models.CharField(max_length=128)
description = models.TextField()
date_created = models.DateTimeField(auto_now_add=True)
last_modified = models.DateTimeField(auto_now=True)
completed = models.BooleanField(default=False)
query = models.TextField()
abstract_pool_size = models.IntegerField()
document_pool_size = models.IntegerField()
final_pool_size = models.IntegerField()
class Paper(models.Model):
review = models.ForeignKey(Review)
title = models.CharField(max_length=128)
authors = models.CharField(max_length=128)
abstract = models.TextField()
publish_date = models.DateField()
url = models.URLField()
notes = models.TextField()
ABSTRACT_POOL = 'A'
DOCUMENT_POOL = 'D'
FINAL_POOL = 'F'
REJECTED = 'R'
POOLS = ((ABSTRACT_POOL, 'Abstract pool'),
(DOCUMENT_POOL, 'Document pool'),
(FINAL_POOL, 'Final pool'),
(REJECTED, 'Rejected'))
pool = models.CharField(max_length=1, choices=POOLS, default=ABSTRACT_POOL)
| Add completed field to review | Add completed field to review
| Python | mit | iliawnek/SystematicReview,iliawnek/SystematicReview,iliawnek/SystematicReview,iliawnek/SystematicReview | from django.db import models
from django.contrib.auth.models import User
class Review(models.Model):
user = models.ForeignKey(User, default=None)
title = models.CharField(max_length=128)
description = models.TextField()
date_created = models.DateTimeField(auto_now_add=True)
last_modified = models.DateTimeField(auto_now=True)
completed = models.BooleanField(default=False)
query = models.TextField()
abstract_pool_size = models.IntegerField()
document_pool_size = models.IntegerField()
final_pool_size = models.IntegerField()
class Paper(models.Model):
review = models.ForeignKey(Review)
title = models.CharField(max_length=128)
authors = models.CharField(max_length=128)
abstract = models.TextField()
publish_date = models.DateField()
url = models.URLField()
notes = models.TextField()
ABSTRACT_POOL = 'A'
DOCUMENT_POOL = 'D'
FINAL_POOL = 'F'
REJECTED = 'R'
POOLS = ((ABSTRACT_POOL, 'Abstract pool'),
(DOCUMENT_POOL, 'Document pool'),
(FINAL_POOL, 'Final pool'),
(REJECTED, 'Rejected'))
pool = models.CharField(max_length=1, choices=POOLS, default=ABSTRACT_POOL)
| Add completed field to review
from django.db import models
from django.contrib.auth.models import User
class Review(models.Model):
user = models.ForeignKey(User, default=None)
title = models.CharField(max_length=128)
description = models.TextField()
date_created = models.DateTimeField(auto_now_add=True)
last_modified = models.DateTimeField(auto_now=True)
query = models.TextField()
abstract_pool_size = models.IntegerField()
document_pool_size = models.IntegerField()
final_pool_size = models.IntegerField()
class Paper(models.Model):
review = models.ForeignKey(Review)
title = models.CharField(max_length=128)
authors = models.CharField(max_length=128)
abstract = models.TextField()
publish_date = models.DateField()
url = models.URLField()
notes = models.TextField()
ABSTRACT_POOL = 'A'
DOCUMENT_POOL = 'D'
FINAL_POOL = 'F'
REJECTED = 'R'
POOLS = ((ABSTRACT_POOL, 'Abstract pool'),
(DOCUMENT_POOL, 'Document pool'),
(FINAL_POOL, 'Final pool'),
(REJECTED, 'Rejected'))
pool = models.CharField(max_length=1, choices=POOLS, default=ABSTRACT_POOL)
|
bce11d469177eb4287d9d926b9880e7528bd53c0 | thumbnails/cache_backends.py | thumbnails/cache_backends.py | # -*- coding: utf-8 -*-
class BaseCacheBackend(object):
def get(self, thumbnail_name):
if isinstance(thumbnail_name, list):
thumbnail_name = ''.join(thumbnail_name)
return self._get(thumbnail_name)
def set(self, thumbnail):
thumbnail_name = thumbnail.name
if isinstance(thumbnail_name, list):
thumbnail_name = ''.join(thumbnail_name)
return self._set(thumbnail_name, thumbnail)
def _get(self, thumbnail_name):
raise NotImplementedError
def _set(self, thumbnail_name, thumbnail):
raise NotImplementedError
class SimpleCacheBackend(BaseCacheBackend):
thumbnails = {}
def _get(self, thumbnail_name):
if thumbnail_name in self.thumbnails:
return self.thumbnails[thumbnail_name]
def _set(self, thumbnail_name, thumbnail):
self.thumbnails[thumbnail_name] = thumbnail
class DjangoCacheBackend(BaseCacheBackend):
def __init__(self):
from django.core.cache import cache # noqa isort:skip
self.cache = cache
def _get(self, thumbnail_name):
return self.cache.get(thumbnail_name.replace('/', ''))
def _set(self, thumbnail_name, thumbnail):
self.cache.set(thumbnail_name.replace('/', ''), thumbnail)
| # -*- coding: utf-8 -*-
class BaseCacheBackend(object):
def get(self, thumbnail_name):
if isinstance(thumbnail_name, list):
thumbnail_name = ''.join(thumbnail_name)
return self._get(thumbnail_name)
def set(self, thumbnail):
return self._set(thumbnail.name, thumbnail)
def _get(self, thumbnail_name):
raise NotImplementedError
def _set(self, thumbnail_name, thumbnail):
raise NotImplementedError
class SimpleCacheBackend(BaseCacheBackend):
thumbnails = {}
def _get(self, thumbnail_name):
if thumbnail_name in self.thumbnails:
return self.thumbnails[thumbnail_name]
def _set(self, thumbnail_name, thumbnail):
self.thumbnails[thumbnail_name] = thumbnail
class DjangoCacheBackend(BaseCacheBackend):
def __init__(self):
from django.core.cache import cache # noqa isort:skip
self.cache = cache
def _get(self, thumbnail_name):
return self.cache.get(thumbnail_name.replace('/', ''))
def _set(self, thumbnail_name, thumbnail):
self.cache.set(thumbnail_name.replace('/', ''), thumbnail)
| Remove unecessary code in cache backend _set | Remove unecessary code in cache backend _set
| Python | mit | python-thumbnails/python-thumbnails,relekang/python-thumbnails | # -*- coding: utf-8 -*-
class BaseCacheBackend(object):
def get(self, thumbnail_name):
if isinstance(thumbnail_name, list):
thumbnail_name = ''.join(thumbnail_name)
return self._get(thumbnail_name)
def set(self, thumbnail):
return self._set(thumbnail.name, thumbnail)
def _get(self, thumbnail_name):
raise NotImplementedError
def _set(self, thumbnail_name, thumbnail):
raise NotImplementedError
class SimpleCacheBackend(BaseCacheBackend):
thumbnails = {}
def _get(self, thumbnail_name):
if thumbnail_name in self.thumbnails:
return self.thumbnails[thumbnail_name]
def _set(self, thumbnail_name, thumbnail):
self.thumbnails[thumbnail_name] = thumbnail
class DjangoCacheBackend(BaseCacheBackend):
def __init__(self):
from django.core.cache import cache # noqa isort:skip
self.cache = cache
def _get(self, thumbnail_name):
return self.cache.get(thumbnail_name.replace('/', ''))
def _set(self, thumbnail_name, thumbnail):
self.cache.set(thumbnail_name.replace('/', ''), thumbnail)
| Remove unecessary code in cache backend _set
# -*- coding: utf-8 -*-
class BaseCacheBackend(object):
def get(self, thumbnail_name):
if isinstance(thumbnail_name, list):
thumbnail_name = ''.join(thumbnail_name)
return self._get(thumbnail_name)
def set(self, thumbnail):
thumbnail_name = thumbnail.name
if isinstance(thumbnail_name, list):
thumbnail_name = ''.join(thumbnail_name)
return self._set(thumbnail_name, thumbnail)
def _get(self, thumbnail_name):
raise NotImplementedError
def _set(self, thumbnail_name, thumbnail):
raise NotImplementedError
class SimpleCacheBackend(BaseCacheBackend):
thumbnails = {}
def _get(self, thumbnail_name):
if thumbnail_name in self.thumbnails:
return self.thumbnails[thumbnail_name]
def _set(self, thumbnail_name, thumbnail):
self.thumbnails[thumbnail_name] = thumbnail
class DjangoCacheBackend(BaseCacheBackend):
def __init__(self):
from django.core.cache import cache # noqa isort:skip
self.cache = cache
def _get(self, thumbnail_name):
return self.cache.get(thumbnail_name.replace('/', ''))
def _set(self, thumbnail_name, thumbnail):
self.cache.set(thumbnail_name.replace('/', ''), thumbnail)
|
a20c88da5eb0b763072cc7bcba138983fe63ae31 | django_fsm_log/apps.py | django_fsm_log/apps.py | from __future__ import unicode_literals
from django.apps import AppConfig
from django.conf import settings
from django.utils.module_loading import import_string
from django_fsm.signals import pre_transition, post_transition
class DjangoFSMLogAppConfig(AppConfig):
name = 'django_fsm_log'
verbose_name = "Django FSM Log"
def ready(self):
backend = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD)
StateLog = self.get_model('StateLog')
backend.setup_model(StateLog)
pre_transition.connect(backend.pre_transition_callback)
post_transition.connect(backend.post_transition_callback)
| from __future__ import unicode_literals
from django.apps import AppConfig
from django.conf import settings
from django.utils.module_loading import import_string
from django_fsm.signals import pre_transition, post_transition
class DjangoFSMLogAppConfig(AppConfig):
name = 'django_fsm_log'
verbose_name = "Django FSM Log"
default_auto_field = 'django.db.models.BigAutoField'
def ready(self):
backend = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD)
StateLog = self.get_model('StateLog')
backend.setup_model(StateLog)
pre_transition.connect(backend.pre_transition_callback)
post_transition.connect(backend.post_transition_callback)
| Solve warning coming from django 4.0 | Solve warning coming from django 4.0
| Python | mit | ticosax/django-fsm-log,gizmag/django-fsm-log | from __future__ import unicode_literals
from django.apps import AppConfig
from django.conf import settings
from django.utils.module_loading import import_string
from django_fsm.signals import pre_transition, post_transition
class DjangoFSMLogAppConfig(AppConfig):
name = 'django_fsm_log'
verbose_name = "Django FSM Log"
default_auto_field = 'django.db.models.BigAutoField'
def ready(self):
backend = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD)
StateLog = self.get_model('StateLog')
backend.setup_model(StateLog)
pre_transition.connect(backend.pre_transition_callback)
post_transition.connect(backend.post_transition_callback)
| Solve warning coming from django 4.0
from __future__ import unicode_literals
from django.apps import AppConfig
from django.conf import settings
from django.utils.module_loading import import_string
from django_fsm.signals import pre_transition, post_transition
class DjangoFSMLogAppConfig(AppConfig):
name = 'django_fsm_log'
verbose_name = "Django FSM Log"
def ready(self):
backend = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD)
StateLog = self.get_model('StateLog')
backend.setup_model(StateLog)
pre_transition.connect(backend.pre_transition_callback)
post_transition.connect(backend.post_transition_callback)
|
2d8ddb4ab59bc7198b637bcc9e51914379ff408b | tests/test_i18n.py | tests/test_i18n.py | import datetime as dt
import humanize
def test_i18n():
three_seconds = dt.timedelta(seconds=3)
assert humanize.naturaltime(three_seconds) == "3 seconds ago"
humanize.i18n.activate("ru_RU")
assert humanize.naturaltime(three_seconds) == "3 секунды назад"
humanize.i18n.deactivate()
assert humanize.naturaltime(three_seconds) == "3 seconds ago"
| import datetime as dt
import humanize
def test_i18n():
three_seconds = dt.timedelta(seconds=3)
assert humanize.naturaltime(three_seconds) == "3 seconds ago"
assert humanize.ordinal(5) == "5th"
try:
humanize.i18n.activate("ru_RU")
assert humanize.naturaltime(three_seconds) == "3 секунды назад"
assert humanize.ordinal(5) == "5ый"
finally:
humanize.i18n.deactivate()
assert humanize.naturaltime(three_seconds) == "3 seconds ago"
assert humanize.ordinal(5) == "5th"
| Add i18n test for humanize.ordinal | Add i18n test for humanize.ordinal
| Python | mit | jmoiron/humanize,jmoiron/humanize | import datetime as dt
import humanize
def test_i18n():
three_seconds = dt.timedelta(seconds=3)
assert humanize.naturaltime(three_seconds) == "3 seconds ago"
assert humanize.ordinal(5) == "5th"
try:
humanize.i18n.activate("ru_RU")
assert humanize.naturaltime(three_seconds) == "3 секунды назад"
assert humanize.ordinal(5) == "5ый"
finally:
humanize.i18n.deactivate()
assert humanize.naturaltime(three_seconds) == "3 seconds ago"
assert humanize.ordinal(5) == "5th"
| Add i18n test for humanize.ordinal
import datetime as dt
import humanize
def test_i18n():
three_seconds = dt.timedelta(seconds=3)
assert humanize.naturaltime(three_seconds) == "3 seconds ago"
humanize.i18n.activate("ru_RU")
assert humanize.naturaltime(three_seconds) == "3 секунды назад"
humanize.i18n.deactivate()
assert humanize.naturaltime(three_seconds) == "3 seconds ago"
|
2585b44484b175bb116c228496069cc4269440c0 | hoomd/md/test-py/test_angle_cosinesq.py | hoomd/md/test-py/test_angle_cosinesq.py | # -*- coding: iso-8859-1 -*-
# Maintainer: joaander
from hoomd import *
from hoomd import md
context.initialize()
import unittest
import os
import numpy
# tests md.angle.cosinesq
class angle_cosinesq_tests (unittest.TestCase):
def setUp(self):
print
snap = data.make_snapshot(N=40,
box=data.boxdim(L=100),
particle_types = ['A'],
bond_types = [],
angle_types = ['angleA'],
dihedral_types = [],
improper_types = [])
if comm.get_rank() == 0:
snap.angles.resize(10);
for i in range(10):
x = numpy.array([i, 0, 0], dtype=numpy.float32)
snap.particles.position[4*i+0,:] = x;
x += numpy.random.random(3)
snap.particles.position[4*i+1,:] = x;
x += numpy.random.random(3)
snap.particles.position[4*i+2,:] = x;
x += numpy.random.random(3)
snap.particles.position[4*i+3,:] = x;
snap.angles.group[i,:] = [4*i+0, 4*i+1, 4*i+2];
init.read_snapshot(snap)
context.current.sorter.set_params(grid=8)
# test to see that se can create an md.angle.cosinesq
def test_create(self):
md.angle.cosinesq();
# test setting coefficients
def test_set_coeff(self):
cosinesq = md.angle.cosinesq();
cosinesq.angle_coeff.set('angleA', k=1.0, t0=0.78125)
all = group.all();
md.integrate.mode_standard(dt=0.005);
md.integrate.nve(all);
run(100);
# test coefficient not set checking
def test_set_coeff_fail(self):
cosinesq = md.angle.harmonic();
all = group.all();
md.integrate.mode_standard(dt=0.005);
md.integrate.nve(all);
self.assertRaises(RuntimeError, run, 100);
def tearDown(self):
context.initialize();
if __name__ == '__main__':
unittest.main(argv = ['test.py', '-v'])
| Add python tests for cosine squared angles | Add python tests for cosine squared angles
| Python | bsd-3-clause | joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue | # -*- coding: iso-8859-1 -*-
# Maintainer: joaander
from hoomd import *
from hoomd import md
context.initialize()
import unittest
import os
import numpy
# tests md.angle.cosinesq
class angle_cosinesq_tests (unittest.TestCase):
def setUp(self):
print
snap = data.make_snapshot(N=40,
box=data.boxdim(L=100),
particle_types = ['A'],
bond_types = [],
angle_types = ['angleA'],
dihedral_types = [],
improper_types = [])
if comm.get_rank() == 0:
snap.angles.resize(10);
for i in range(10):
x = numpy.array([i, 0, 0], dtype=numpy.float32)
snap.particles.position[4*i+0,:] = x;
x += numpy.random.random(3)
snap.particles.position[4*i+1,:] = x;
x += numpy.random.random(3)
snap.particles.position[4*i+2,:] = x;
x += numpy.random.random(3)
snap.particles.position[4*i+3,:] = x;
snap.angles.group[i,:] = [4*i+0, 4*i+1, 4*i+2];
init.read_snapshot(snap)
context.current.sorter.set_params(grid=8)
# test to see that se can create an md.angle.cosinesq
def test_create(self):
md.angle.cosinesq();
# test setting coefficients
def test_set_coeff(self):
cosinesq = md.angle.cosinesq();
cosinesq.angle_coeff.set('angleA', k=1.0, t0=0.78125)
all = group.all();
md.integrate.mode_standard(dt=0.005);
md.integrate.nve(all);
run(100);
# test coefficient not set checking
def test_set_coeff_fail(self):
cosinesq = md.angle.harmonic();
all = group.all();
md.integrate.mode_standard(dt=0.005);
md.integrate.nve(all);
self.assertRaises(RuntimeError, run, 100);
def tearDown(self):
context.initialize();
if __name__ == '__main__':
unittest.main(argv = ['test.py', '-v'])
| Add python tests for cosine squared angles
|
|
ed97a1f811f04693203f6d1c0e9b64649a3da152 | coney/exceptions.py | coney/exceptions.py |
class ConeyException(Exception):
def __repr__(self):
return 'An unspecified error has occurred'
class CallTimeoutException(ConeyException):
def __repr__(self):
return 'An RPC call did not return before the time out period'
class MalformedRequestException(ConeyException):
def __init__(self, serializer_name, request):
self._serializer_name = serializer_name
self._request = request
def __repr__(self):
return '{} failed to create a Request from string: {}'.format(self._serialier_name, self._request)
class RemoteExecErrorException(ConeyException):
def __init__(self, value, details):
self._value = value
self._details = details
def __repr__(self):
return 'An error occurred during remote execution: ({}) {}'.format(self._value, self._details)
@property
def value(self):
return self._value
@property
def details(self):
return self._details
class RemoteUnhandledExceptionException(ConeyException):
def __init__(self, details):
self._details = details
def __repr__(self):
return 'An unhandled exception was raised during remote execution: {}'.format(self._details)
class DispatchHandlerException(ConeyException):
def __init__(self, code):
self.code = code
def __repr__(self):
return 'Error {} occurred during message dispatch'.format(self.code)
|
class ConeyException(Exception):
def __repr__(self):
return 'An unspecified error has occurred'
class CallTimeoutException(ConeyException):
def __repr__(self):
return 'An RPC call did not return before the time out period'
class MalformedRequestException(ConeyException):
def __init__(self, serializer_name, request):
self._serializer_name = serializer_name
self._request = request
def __repr__(self):
return '{} failed to create a Request from string: {}'.format(self._serialier_name, self._request)
class RemoteExecErrorException(ConeyException):
def __init__(self, value, details):
self._value = value
self._details = details
def __repr__(self):
return 'An error occurred during remote execution: ({}) {}'.format(self._value, self._details)
@property
def value(self):
return self._value
@property
def details(self):
return self._details
class RemoteUnhandledExceptionException(ConeyException):
def __init__(self, details):
self._details = details
def __repr__(self):
return 'An unhandled exception was raised during remote execution: {}'.format(self._details)
class DispatchHandlerException(ConeyException):
def __init__(self, code):
self.code = code
def __repr__(self):
return 'Error {} occurred during message dispatch'.format(self.code)
class HandlerNotCallableException(ConeyException):
def __repr__(self):
return 'Handler provided a non-callable object'
| Add a new exception to handle a non-callable handler. | Add a new exception to handle a non-callable handler.
| Python | mit | cbigler/jackrabbit |
class ConeyException(Exception):
def __repr__(self):
return 'An unspecified error has occurred'
class CallTimeoutException(ConeyException):
def __repr__(self):
return 'An RPC call did not return before the time out period'
class MalformedRequestException(ConeyException):
def __init__(self, serializer_name, request):
self._serializer_name = serializer_name
self._request = request
def __repr__(self):
return '{} failed to create a Request from string: {}'.format(self._serialier_name, self._request)
class RemoteExecErrorException(ConeyException):
def __init__(self, value, details):
self._value = value
self._details = details
def __repr__(self):
return 'An error occurred during remote execution: ({}) {}'.format(self._value, self._details)
@property
def value(self):
return self._value
@property
def details(self):
return self._details
class RemoteUnhandledExceptionException(ConeyException):
def __init__(self, details):
self._details = details
def __repr__(self):
return 'An unhandled exception was raised during remote execution: {}'.format(self._details)
class DispatchHandlerException(ConeyException):
def __init__(self, code):
self.code = code
def __repr__(self):
return 'Error {} occurred during message dispatch'.format(self.code)
class HandlerNotCallableException(ConeyException):
def __repr__(self):
return 'Handler provided a non-callable object'
| Add a new exception to handle a non-callable handler.
class ConeyException(Exception):
def __repr__(self):
return 'An unspecified error has occurred'
class CallTimeoutException(ConeyException):
def __repr__(self):
return 'An RPC call did not return before the time out period'
class MalformedRequestException(ConeyException):
def __init__(self, serializer_name, request):
self._serializer_name = serializer_name
self._request = request
def __repr__(self):
return '{} failed to create a Request from string: {}'.format(self._serialier_name, self._request)
class RemoteExecErrorException(ConeyException):
def __init__(self, value, details):
self._value = value
self._details = details
def __repr__(self):
return 'An error occurred during remote execution: ({}) {}'.format(self._value, self._details)
@property
def value(self):
return self._value
@property
def details(self):
return self._details
class RemoteUnhandledExceptionException(ConeyException):
def __init__(self, details):
self._details = details
def __repr__(self):
return 'An unhandled exception was raised during remote execution: {}'.format(self._details)
class DispatchHandlerException(ConeyException):
def __init__(self, code):
self.code = code
def __repr__(self):
return 'Error {} occurred during message dispatch'.format(self.code)
|
51760a4cf96074b9d2eb609451512b3fafff7aaa | make_test_data.py | make_test_data.py | import sqlite3
INSERT_SONG = '''
INSERT INTO jukebox_song_queue VALUES (?)
'''
TEST_URIS = [
'spotify:track:5lB3bZKPhng9s4hKB1sSIe',
'spotify:track:5MSfgtOBZkbxlcwsI9XNpf',
'spotify:track:1shuGbTnKx4AXjlx7IauM5'
]
if __name__ == '__main__':
conn = sqlite3.connect('jukebox.db')
cursor = conn.cursor()
for uri in TEST_URIS:
uri = (uri,)
cursor.execute(INSERT_SONG, uri)
conn.commit()
conn.close()
| Add script to make some test data | Add script to make some test data
| Python | mit | projectweekend/Pi-Jukebox,projectweekend/Pi-Jukebox,projectweekend/Pi-Jukebox | import sqlite3
INSERT_SONG = '''
INSERT INTO jukebox_song_queue VALUES (?)
'''
TEST_URIS = [
'spotify:track:5lB3bZKPhng9s4hKB1sSIe',
'spotify:track:5MSfgtOBZkbxlcwsI9XNpf',
'spotify:track:1shuGbTnKx4AXjlx7IauM5'
]
if __name__ == '__main__':
conn = sqlite3.connect('jukebox.db')
cursor = conn.cursor()
for uri in TEST_URIS:
uri = (uri,)
cursor.execute(INSERT_SONG, uri)
conn.commit()
conn.close()
| Add script to make some test data
|
|
77ae27596c96ef5b8c05fcd02448576b419de074 | config.py | config.py | class Config:
SECRET_KEY = 'jsA5!@z1'
class DevelopmentConfig(Config):
DEBUG = True
SQLALCHEMY_DATABASE_URI = "postgresql://admin:adminpass@localhost/fastmonkeys"
config = {
'development': DevelopmentConfig
}
| class Config:
SECRET_KEY = 'jsA5!@z1'
class DevelopmentConfig(Config):
DEBUG = True
SQLALCHEMY_DATABASE_URI = "postgresql://admin:adminpass@localhost/fastmonkeys"
SQLALCHEMY_COMMIT_ON_TEARDOWN = True
config = {
'development': DevelopmentConfig
}
| Add SQLAlchemy commit on after request end | Add SQLAlchemy commit on after request end
| Python | mit | timzdevz/fm-flask-app | class Config:
SECRET_KEY = 'jsA5!@z1'
class DevelopmentConfig(Config):
DEBUG = True
SQLALCHEMY_DATABASE_URI = "postgresql://admin:adminpass@localhost/fastmonkeys"
SQLALCHEMY_COMMIT_ON_TEARDOWN = True
config = {
'development': DevelopmentConfig
}
| Add SQLAlchemy commit on after request end
class Config:
SECRET_KEY = 'jsA5!@z1'
class DevelopmentConfig(Config):
DEBUG = True
SQLALCHEMY_DATABASE_URI = "postgresql://admin:adminpass@localhost/fastmonkeys"
config = {
'development': DevelopmentConfig
}
|
cee38843bcf4c628b1c1adc6014dbae7ad2e60c0 | acq4/devices/Scanner/scan_program/tests/test_spiral.py | acq4/devices/Scanner/scan_program/tests/test_spiral.py | from __future__ import division
import numpy as np
from acq4.devices.Scanner.scan_program.spiral import SpiralScan
def test_spiral():
r1 = 10e-6
r2 = 20e-6
a1 = 1.
a2 = 30.
ss = SpiralScan((r1, r2), (a1, a2))
# check that analytically computed path length matches numerically computed
# paths
l1 = ss.length()
npts = ss.path(10000, uniform=False)
dif = npts[1:] - npts[:-1]
l2 = ((dif**2).sum(axis=1)**0.5).sum()
assert np.allclose(l1, l2)
upts = ss.path(10000, uniform=True)
dif = upts[1:] - upts[:-1]
ulengths = (dif**2).sum(axis=1)**0.5
l3 = ulengths.sum()
assert np.allclose(l1, l3)
assert ulengths.std() / ulengths.mean() < 1e-5
# check that uniform spacing actually works
assert np.allclose(upts[0], npts[0])
assert np.allclose(upts[-1], npts[-1])
assert np.allclose(ulengths, l1 / (len(upts)-1))
| Add unit tests for spiral scan | Add unit tests for spiral scan
| Python | mit | acq4/acq4,mgraupe/acq4,campagnola/acq4,meganbkratz/acq4,meganbkratz/acq4,acq4/acq4,acq4/acq4,mgraupe/acq4,tropp/acq4,mgraupe/acq4,meganbkratz/acq4,pbmanis/acq4,pbmanis/acq4,acq4/acq4,pbmanis/acq4,mgraupe/acq4,campagnola/acq4,tropp/acq4,campagnola/acq4,pbmanis/acq4,tropp/acq4,campagnola/acq4,meganbkratz/acq4,tropp/acq4,mgraupe/acq4 | from __future__ import division
import numpy as np
from acq4.devices.Scanner.scan_program.spiral import SpiralScan
def test_spiral():
r1 = 10e-6
r2 = 20e-6
a1 = 1.
a2 = 30.
ss = SpiralScan((r1, r2), (a1, a2))
# check that analytically computed path length matches numerically computed
# paths
l1 = ss.length()
npts = ss.path(10000, uniform=False)
dif = npts[1:] - npts[:-1]
l2 = ((dif**2).sum(axis=1)**0.5).sum()
assert np.allclose(l1, l2)
upts = ss.path(10000, uniform=True)
dif = upts[1:] - upts[:-1]
ulengths = (dif**2).sum(axis=1)**0.5
l3 = ulengths.sum()
assert np.allclose(l1, l3)
assert ulengths.std() / ulengths.mean() < 1e-5
# check that uniform spacing actually works
assert np.allclose(upts[0], npts[0])
assert np.allclose(upts[-1], npts[-1])
assert np.allclose(ulengths, l1 / (len(upts)-1))
| Add unit tests for spiral scan
|
|
4a6eb1059f2321b2e54edc6bb614dca7d06c186f | CodeFights/isIPv4Address.py | CodeFights/isIPv4Address.py | #!/usr/local/bin/python
# Code Fights Is IPv4 Address Problem
def isIPv4Address(inputString):
import re
pattern = re.compile(r'^\d{1,3}(?:\.\d{1,3}){3}$')
match = re.search(pattern, inputString)
if match:
segments = inputString.split(".")
return sum([int(x) >= 0 and int(x) <= 255 for x in segments]) == 4
return False
def main():
tests = [
["172.16.254.1", True],
["172.316.254.1", False],
[".254.255.0", False],
["1.1.1.1a", False],
["1", False],
["0.254.255.0", True],
["1.23.256.255.", False],
["1.23.256..", False],
["0..1.0", False],
["1.1.1.1.1", False],
["1.256.1.1", False],
["a0.1.1.1", False],
["0.1.1.256", False],
["129380129831213981.255.255.255", False],
["255.255.255.255abcdekjhf", False],
["7283728", False]
]
for t in tests:
res = isIPv4Address(t[0])
if t[1] == res:
print("PASSED: isIPv4Address({}) returned {}"
.format(t[0], res))
else:
print("FAILED: isIPv4Address({}) returned {}, should have returned {}"
.format(t[0], res, t[1]))
if __name__ == '__main__':
main()
| Solve Code Fights is IPv4 address problem | Solve Code Fights is IPv4 address problem
| Python | mit | HKuz/Test_Code | #!/usr/local/bin/python
# Code Fights Is IPv4 Address Problem
def isIPv4Address(inputString):
import re
pattern = re.compile(r'^\d{1,3}(?:\.\d{1,3}){3}$')
match = re.search(pattern, inputString)
if match:
segments = inputString.split(".")
return sum([int(x) >= 0 and int(x) <= 255 for x in segments]) == 4
return False
def main():
tests = [
["172.16.254.1", True],
["172.316.254.1", False],
[".254.255.0", False],
["1.1.1.1a", False],
["1", False],
["0.254.255.0", True],
["1.23.256.255.", False],
["1.23.256..", False],
["0..1.0", False],
["1.1.1.1.1", False],
["1.256.1.1", False],
["a0.1.1.1", False],
["0.1.1.256", False],
["129380129831213981.255.255.255", False],
["255.255.255.255abcdekjhf", False],
["7283728", False]
]
for t in tests:
res = isIPv4Address(t[0])
if t[1] == res:
print("PASSED: isIPv4Address({}) returned {}"
.format(t[0], res))
else:
print("FAILED: isIPv4Address({}) returned {}, should have returned {}"
.format(t[0], res, t[1]))
if __name__ == '__main__':
main()
| Solve Code Fights is IPv4 address problem
|
|
896c287ad6a5d927febaca4fa957708f783fd51a | shinken/modules/logstore_null.py | shinken/modules/logstore_null.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2012:
# Gabes Jean, naparuba@gmail.com
# Gerhard Lausser, Gerhard.Lausser@consol.de
# Gregory Starck, g.starck@gmail.com
# Hartmut Goebel, h.goebel@goebel-consult.de
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
"""
This class store log broks in a black hole
It is one possibility (!) for an exchangeable storage for log broks
"""
import os
import sys
import time
import datetime
import re
from shinken.objects.service import Service
from shinken.basemodule import BaseModule
from shinken.objects.module import Module
properties = {
'daemons': ['livestatus'],
'type': 'logstore_null',
'external': False,
'phases': ['running'],
}
# called by the plugin manager
def get_instance(plugin):
print "Get an LogStore Null module for plugin %s" % plugin.get_name()
instance = LiveStatusLogStoreNull(plugin)
return instance
class LiveStatusLogStoreNull(BaseModule):
def __init__(self, modconf):
BaseModule.__init__(self, modconf)
self.plugins = []
# Now sleep one second, so that won't get lineno collisions with the last second
time.sleep(1)
Logline.lineno = 0
def load(self, app):
self.app = app
def init(self):
pass
def open(self):
print "open LiveStatusLogStoreNull ok"
def close(self):
pass
def commit(self):
pass
def commit_and_rotate_log_db(self):
pass
def manage_log_brok(self, b):
# log brok successfully stored in the black hole
pass
def add_filter(self, operator, attribute, reference):
pass
def add_filter_and(self, andnum):
pass
def add_filter_or(self, ornum):
pass
def add_filter_not(self):
pass
def get_live_data_log(self):
"""Like get_live_data, but for log objects"""
result = []
return result
| Add a null LogStore backend for livestatus broker | Add a null LogStore backend for livestatus broker
| Python | agpl-3.0 | claneys/shinken,KerkhoffTechnologies/shinken,Simage/shinken,Simage/shinken,claneys/shinken,dfranco/shinken,gst/alignak,h4wkmoon/shinken,mohierf/shinken,geektophe/shinken,KerkhoffTechnologies/shinken,geektophe/shinken,mohierf/shinken,titilambert/alignak,tal-nino/shinken,staute/shinken_package,lets-software/shinken,geektophe/shinken,h4wkmoon/shinken,lets-software/shinken,tal-nino/shinken,rednach/krill,Aimage/shinken,Aimage/shinken,naparuba/shinken,claneys/shinken,dfranco/shinken,kaji-project/shinken,h4wkmoon/shinken,xorpaul/shinken,mohierf/shinken,savoirfairelinux/shinken,ddurieux/alignak,kaji-project/shinken,claneys/shinken,naparuba/shinken,fpeyre/shinken,xorpaul/shinken,tal-nino/shinken,tal-nino/shinken,h4wkmoon/shinken,rledisez/shinken,tal-nino/shinken,lets-software/shinken,lets-software/shinken,KerkhoffTechnologies/shinken,savoirfairelinux/shinken,ddurieux/alignak,naparuba/shinken,Aimage/shinken,staute/shinken_package,geektophe/shinken,geektophe/shinken,titilambert/alignak,Simage/shinken,peeyush-tm/shinken,peeyush-tm/shinken,staute/shinken_deb,gst/alignak,Simage/shinken,KerkhoffTechnologies/shinken,savoirfairelinux/shinken,dfranco/shinken,h4wkmoon/shinken,dfranco/shinken,xorpaul/shinken,rednach/krill,tal-nino/shinken,xorpaul/shinken,kaji-project/shinken,h4wkmoon/shinken,ddurieux/alignak,Simage/shinken,staute/shinken_deb,geektophe/shinken,savoirfairelinux/shinken,dfranco/shinken,claneys/shinken,rednach/krill,lets-software/shinken,fpeyre/shinken,staute/shinken_package,peeyush-tm/shinken,xorpaul/shinken,Alignak-monitoring/alignak,rledisez/shinken,peeyush-tm/shinken,rledisez/shinken,Simage/shinken,peeyush-tm/shinken,fpeyre/shinken,staute/shinken_package,rednach/krill,kaji-project/shinken,ddurieux/alignak,staute/shinken_deb,dfranco/shinken,staute/shinken_package,fpeyre/shinken,titilambert/alignak,rledisez/shinken,kaji-project/shinken,xorpaul/shinken,ddurieux/alignak,kaji-project/shinken,staute/shinken_package,rledisez/shinken,savoirfairelinux/shin
ken,Aimage/shinken,KerkhoffTechnologies/shinken,mohierf/shinken,Aimage/shinken,mohierf/shinken,xorpaul/shinken,staute/shinken_deb,rledisez/shinken,titilambert/alignak,lets-software/shinken,h4wkmoon/shinken,xorpaul/shinken,naparuba/shinken,savoirfairelinux/shinken,staute/shinken_deb,mohierf/shinken,kaji-project/shinken,h4wkmoon/shinken,naparuba/shinken,gst/alignak,rednach/krill,staute/shinken_deb,claneys/shinken,Alignak-monitoring/alignak,Aimage/shinken,naparuba/shinken,peeyush-tm/shinken,fpeyre/shinken,rednach/krill,fpeyre/shinken,KerkhoffTechnologies/shinken,ddurieux/alignak,gst/alignak | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2012:
# Gabes Jean, naparuba@gmail.com
# Gerhard Lausser, Gerhard.Lausser@consol.de
# Gregory Starck, g.starck@gmail.com
# Hartmut Goebel, h.goebel@goebel-consult.de
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
"""
This class store log broks in a black hole
It is one possibility (!) for an exchangeable storage for log broks
"""
import os
import sys
import time
import datetime
import re
from shinken.objects.service import Service
from shinken.basemodule import BaseModule
from shinken.objects.module import Module
properties = {
'daemons': ['livestatus'],
'type': 'logstore_null',
'external': False,
'phases': ['running'],
}
# called by the plugin manager
def get_instance(plugin):
print "Get an LogStore Null module for plugin %s" % plugin.get_name()
instance = LiveStatusLogStoreNull(plugin)
return instance
class LiveStatusLogStoreNull(BaseModule):
def __init__(self, modconf):
BaseModule.__init__(self, modconf)
self.plugins = []
# Now sleep one second, so that won't get lineno collisions with the last second
time.sleep(1)
Logline.lineno = 0
def load(self, app):
self.app = app
def init(self):
pass
def open(self):
print "open LiveStatusLogStoreNull ok"
def close(self):
pass
def commit(self):
pass
def commit_and_rotate_log_db(self):
pass
def manage_log_brok(self, b):
# log brok successfully stored in the black hole
pass
def add_filter(self, operator, attribute, reference):
pass
def add_filter_and(self, andnum):
pass
def add_filter_or(self, ornum):
pass
def add_filter_not(self):
pass
def get_live_data_log(self):
"""Like get_live_data, but for log objects"""
result = []
return result
| Add a null LogStore backend for livestatus broker
|
|
f59f94cae98030172024013faccabaddc031b845 | frontends/etiquette_flask/etiquette_flask/decorators.py | frontends/etiquette_flask/etiquette_flask/decorators.py | import flask
from flask import request
import functools
from etiquette import jsonify
def required_fields(fields, forbid_whitespace=False):
'''
Declare that the endpoint requires certain POST body fields. Without them,
we respond with 400 and a message.
forbid_whitespace:
If True, then providing the field is not good enough. It must also
contain at least some non-whitespace characters.
'''
def wrapper(function):
@functools.wraps(function)
def wrapped(*args, **kwargs):
for requirement in fields:
missing = (
requirement not in request.form or
(forbid_whitespace and request.form[requirement].strip() == '')
)
if missing:
response = {
'error_type': 'MISSING_FIELDS',
'error_message': 'Required fields: %s' % ', '.join(fields),
}
response = jsonify.make_json_response(response, status=400)
return response
return function(*args, **kwargs)
return wrapped
return wrapper
| import flask
from flask import request
import functools
from . import jsonify
def required_fields(fields, forbid_whitespace=False):
'''
Declare that the endpoint requires certain POST body fields. Without them,
we respond with 400 and a message.
forbid_whitespace:
If True, then providing the field is not good enough. It must also
contain at least some non-whitespace characters.
'''
def wrapper(function):
@functools.wraps(function)
def wrapped(*args, **kwargs):
for requirement in fields:
missing = (
requirement not in request.form or
(forbid_whitespace and request.form[requirement].strip() == '')
)
if missing:
response = {
'error_type': 'MISSING_FIELDS',
'error_message': 'Required fields: %s' % ', '.join(fields),
}
response = jsonify.make_json_response(response, status=400)
return response
return function(*args, **kwargs)
return wrapped
return wrapper
| Fix required_fields looking at wrong jsonify file. | Fix required_fields looking at wrong jsonify file.
| Python | bsd-3-clause | voussoir/etiquette,voussoir/etiquette,voussoir/etiquette | import flask
from flask import request
import functools
from . import jsonify
def required_fields(fields, forbid_whitespace=False):
'''
Declare that the endpoint requires certain POST body fields. Without them,
we respond with 400 and a message.
forbid_whitespace:
If True, then providing the field is not good enough. It must also
contain at least some non-whitespace characters.
'''
def wrapper(function):
@functools.wraps(function)
def wrapped(*args, **kwargs):
for requirement in fields:
missing = (
requirement not in request.form or
(forbid_whitespace and request.form[requirement].strip() == '')
)
if missing:
response = {
'error_type': 'MISSING_FIELDS',
'error_message': 'Required fields: %s' % ', '.join(fields),
}
response = jsonify.make_json_response(response, status=400)
return response
return function(*args, **kwargs)
return wrapped
return wrapper
| Fix required_fields looking at wrong jsonify file.
import flask
from flask import request
import functools
from etiquette import jsonify
def required_fields(fields, forbid_whitespace=False):
'''
Declare that the endpoint requires certain POST body fields. Without them,
we respond with 400 and a message.
forbid_whitespace:
If True, then providing the field is not good enough. It must also
contain at least some non-whitespace characters.
'''
def wrapper(function):
@functools.wraps(function)
def wrapped(*args, **kwargs):
for requirement in fields:
missing = (
requirement not in request.form or
(forbid_whitespace and request.form[requirement].strip() == '')
)
if missing:
response = {
'error_type': 'MISSING_FIELDS',
'error_message': 'Required fields: %s' % ', '.join(fields),
}
response = jsonify.make_json_response(response, status=400)
return response
return function(*args, **kwargs)
return wrapped
return wrapper
|
230d7745f2f1bbc5099e1288ab482c92262e4126 | examples/ndbc/buoy_type_request.py | examples/ndbc/buoy_type_request.py | # Copyright (c) 2018 Siphon Contributors.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""
NDBC Buoy Data Request (of any type)
====================================
The NDBC keeps a 40-day recent rolling file for each buoy. This examples shows how to access
the other types of data available for a buoy.
"""
from siphon.simplewebservice.ndbc import NDBC
####################################################
# Request the types of data available from a given buoy.
data_aval = NDBC.buoy_data_types('41002')
print(data_aval)
####################################################
# Get a pandas data frame of all of the observations, meteorological data is the default
# observation set to query.
df = NDBC.realtime_observations('41002', data_type='supl')
df.head()
| # Copyright (c) 2018 Siphon Contributors.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""
NDBC Buoy Data Request (of any type)
====================================
The NDBC keeps a 40-day recent rolling file for each buoy. This examples shows how to access
the other types of data available for a buoy.
"""
from siphon.simplewebservice.ndbc import NDBC
####################################################
# Request the types of data available from a given buoy.
data_aval = NDBC.buoy_data_types('42002')
print(data_aval)
####################################################
# Get a pandas data frame of all of the observations, meteorological data is the default
# observation set to query.
df = NDBC.realtime_observations('42002', data_type='supl')
df.head()
| Update NDBC example for removed buoy | MNT: Update NDBC example for removed buoy
| Python | bsd-3-clause | Unidata/siphon | # Copyright (c) 2018 Siphon Contributors.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""
NDBC Buoy Data Request (of any type)
====================================
The NDBC keeps a 40-day recent rolling file for each buoy. This examples shows how to access
the other types of data available for a buoy.
"""
from siphon.simplewebservice.ndbc import NDBC
####################################################
# Request the types of data available from a given buoy.
data_aval = NDBC.buoy_data_types('42002')
print(data_aval)
####################################################
# Get a pandas data frame of all of the observations, meteorological data is the default
# observation set to query.
df = NDBC.realtime_observations('42002', data_type='supl')
df.head()
| MNT: Update NDBC example for removed buoy
# Copyright (c) 2018 Siphon Contributors.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""
NDBC Buoy Data Request (of any type)
====================================
The NDBC keeps a 40-day recent rolling file for each buoy. This examples shows how to access
the other types of data available for a buoy.
"""
from siphon.simplewebservice.ndbc import NDBC
####################################################
# Request the types of data available from a given buoy.
data_aval = NDBC.buoy_data_types('41002')
print(data_aval)
####################################################
# Get a pandas data frame of all of the observations, meteorological data is the default
# observation set to query.
df = NDBC.realtime_observations('41002', data_type='supl')
df.head()
|
a4013c7f33226915b3c1fb7863f3e96b24413591 | main.py | main.py | # Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
# law or agreed to in writing, software distributed under the License is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and
# limitations under the License.
import urllib2
import json
from google.appengine.ext import vendor
vendor.add('lib')
from flask import Flask
app = Flask(__name__)
from api_key import key
@app.route('/get_author/<title>')
def get_author(title):
host = 'https://www.googleapis.com/books/v1/volumes?q={}&key={}&country=US'.format(title, key)
request = urllib2.Request(host)
try:
response = urllib2.urlopen(request)
except urllib2.HTTPError, error:
contents = error.read()
return str(contents)
html = response.read()
author = json.loads(html)['items'][0]['volumeInfo']['authors'][0]
return author
if __name__ == '__main__':
app.run(debug=True)
| # Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
# law or agreed to in writing, software distributed under the License is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and
# limitations under the License.
import urllib2
import json
from google.appengine.ext import vendor
vendor.add('lib')
from flask import Flask
app = Flask(__name__)
from api_key import key
@app.route('/get_author/<title>')
def get_author(title):
host = 'https://www.googleapis.com/books/v1/volumes?q={}&key={}&country=US'.format(title, key)
request = urllib2.Request(host)
try:
response = urllib2.urlopen(request)
except urllib2.HTTPError, error:
contents = error.read()
print ('Received error from Books API {}'.format(contents))
return str(contents)
html = response.read()
author = json.loads(html)['items'][0]['volumeInfo']['authors'][0]
return author
if __name__ == '__main__':
app.run(debug=True)
| Add Error Message To Server | Add Error Message To Server
| Python | apache-2.0 | bshaffer/appengine-python-vm-hello,googlearchive/appengine-python-vm-hello,bshaffer/appengine-python-vm-hello,googlearchive/appengine-python-vm-hello | # Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
# law or agreed to in writing, software distributed under the License is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and
# limitations under the License.
import urllib2
import json
from google.appengine.ext import vendor
vendor.add('lib')
from flask import Flask
app = Flask(__name__)
from api_key import key
@app.route('/get_author/<title>')
def get_author(title):
host = 'https://www.googleapis.com/books/v1/volumes?q={}&key={}&country=US'.format(title, key)
request = urllib2.Request(host)
try:
response = urllib2.urlopen(request)
except urllib2.HTTPError, error:
contents = error.read()
print ('Received error from Books API {}'.format(contents))
return str(contents)
html = response.read()
author = json.loads(html)['items'][0]['volumeInfo']['authors'][0]
return author
if __name__ == '__main__':
app.run(debug=True)
| Add Error Message To Server
# Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
# law or agreed to in writing, software distributed under the License is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and
# limitations under the License.
import urllib2
import json
from google.appengine.ext import vendor
vendor.add('lib')
from flask import Flask
app = Flask(__name__)
from api_key import key
@app.route('/get_author/<title>')
def get_author(title):
host = 'https://www.googleapis.com/books/v1/volumes?q={}&key={}&country=US'.format(title, key)
request = urllib2.Request(host)
try:
response = urllib2.urlopen(request)
except urllib2.HTTPError, error:
contents = error.read()
return str(contents)
html = response.read()
author = json.loads(html)['items'][0]['volumeInfo']['authors'][0]
return author
if __name__ == '__main__':
app.run(debug=True)
|
c6a65af70acfed68036914b983856e1cbe26a235 | session2/translate_all.py | session2/translate_all.py | import argparse, logging, codecs
from translation_model import TranslationModel
def setup_args():
parser = argparse.ArgumentParser()
parser.add_argument('model', help='trained model')
parser.add_argument('input', help='input sentences')
parser.add_argument('out', help='translated sentences')
args = parser.parse_args()
return args
def main():
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
args = setup_args()
logging.info(args)
tm = TranslationModel(args.model)
fw_out = codecs.open(args.out, 'w', 'utf-8')
for input_line in codecs.open(args.input, 'r', 'utf-8'):
results = tm.translate(input_line.strip())
fw_out.write(results[0][1] + '\n')
fw_out.close()
if __name__ == '__main__':
main() | import argparse, logging, codecs
from translation_model import TranslationModel
from nltk.translate.bleu_score import sentence_bleu as bleu
def setup_args():
parser = argparse.ArgumentParser()
parser.add_argument('model', help='trained model')
parser.add_argument('input', help='input sentences')
parser.add_argument('out', help='translated sentences')
parser.add_argument('--all', dest='all', action='store_true', help='Check all translations')
args = parser.parse_args()
return args
def find_best_translation(input_line, results):
best_bleu_score = 0.0
best_index = 0
for index, result in enumerate(results):
if len(result.split()) == 0:
continue
bleu_score = bleu([input_line.split()], result.split(), weights=(1.0,))
if bleu_score > best_bleu_score:
best_bleu_score = bleu_score
best_index = index
return best_index
def main():
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
args = setup_args()
logging.info(args)
tm = TranslationModel(args.model)
fw_out = codecs.open(args.out, 'w', 'utf-8')
for input_line in codecs.open(args.input, 'r', 'utf-8'):
results = tm.translate(input_line.strip(), k = 20)
if args.all:
index = find_best_translation(input_line, results)
else:
index = 0
fw_out.write(results[0][index] + '\n')
fw_out.close()
if __name__ == '__main__':
main() | Add option to check among 20 translations | Add option to check among 20 translations
| Python | bsd-3-clause | vineetm/dl4mt-material,vineetm/dl4mt-material,vineetm/dl4mt-material,vineetm/dl4mt-material,vineetm/dl4mt-material | import argparse, logging, codecs
from translation_model import TranslationModel
from nltk.translate.bleu_score import sentence_bleu as bleu
def setup_args():
parser = argparse.ArgumentParser()
parser.add_argument('model', help='trained model')
parser.add_argument('input', help='input sentences')
parser.add_argument('out', help='translated sentences')
parser.add_argument('--all', dest='all', action='store_true', help='Check all translations')
args = parser.parse_args()
return args
def find_best_translation(input_line, results):
best_bleu_score = 0.0
best_index = 0
for index, result in enumerate(results):
if len(result.split()) == 0:
continue
bleu_score = bleu([input_line.split()], result.split(), weights=(1.0,))
if bleu_score > best_bleu_score:
best_bleu_score = bleu_score
best_index = index
return best_index
def main():
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
args = setup_args()
logging.info(args)
tm = TranslationModel(args.model)
fw_out = codecs.open(args.out, 'w', 'utf-8')
for input_line in codecs.open(args.input, 'r', 'utf-8'):
results = tm.translate(input_line.strip(), k = 20)
if args.all:
index = find_best_translation(input_line, results)
else:
index = 0
fw_out.write(results[0][index] + '\n')
fw_out.close()
if __name__ == '__main__':
main() | Add option to check among 20 translations
import argparse, logging, codecs
from translation_model import TranslationModel
def setup_args():
parser = argparse.ArgumentParser()
parser.add_argument('model', help='trained model')
parser.add_argument('input', help='input sentences')
parser.add_argument('out', help='translated sentences')
args = parser.parse_args()
return args
def main():
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
args = setup_args()
logging.info(args)
tm = TranslationModel(args.model)
fw_out = codecs.open(args.out, 'w', 'utf-8')
for input_line in codecs.open(args.input, 'r', 'utf-8'):
results = tm.translate(input_line.strip())
fw_out.write(results[0][1] + '\n')
fw_out.close()
if __name__ == '__main__':
main() |
a8f152e9a6a2db98305ee84dfb5b3be3cee91a84 | us_ignite/apps/management/commands/app_import.py | us_ignite/apps/management/commands/app_import.py | import requests
from django.core.management.base import BaseCommand, CommandError
from us_ignite.apps import importer
class Command(BaseCommand):
help = 'Import the given JSON file.'
def handle(self, url, *args, **options):
response = requests.get(url)
if not response.status_code == 200:
raise CommandError('Issue getting the file %s', response.content)
result = importer.digest_payload(response.json())
print u'%s apps have been imported.' % len(result)
print u'Done!'
| Implement importer as a management command. | Implement importer as a management command.
Heroku has limitations on the ammount of time that a request
should take.
By using a management command the application can workaround
the time it takes to perform the import.
| Python | bsd-3-clause | us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite | import requests
from django.core.management.base import BaseCommand, CommandError
from us_ignite.apps import importer
class Command(BaseCommand):
help = 'Import the given JSON file.'
def handle(self, url, *args, **options):
response = requests.get(url)
if not response.status_code == 200:
raise CommandError('Issue getting the file %s', response.content)
result = importer.digest_payload(response.json())
print u'%s apps have been imported.' % len(result)
print u'Done!'
| Implement importer as a management command.
Heroku has limitations on the ammount of time that a request
should take.
By using a management command the application can workaround
the time it takes to perform the import.
|
|
e913ed7d5643c4acc85ed7ec82a70c235053360f | tests/test_token.py | tests/test_token.py | """
NOTE: There are no tests that check for data validation at this point since
the interpreter doesn't have any data validation as a feature.
"""
import pytest
from calc import INTEGER, Token
def test_no_defaults():
# There's no valid defaults at the moment.
with pytest.raises(TypeError):
Token()
def test_known_type():
# There's no valid defaults at the moment.
token = Token(type=INTEGER, value=2)
assert token.value == 2
assert token.type == INTEGER
def test_str_non_string_value():
token = Token(type=INTEGER, value=2)
expected_result = "Token(type=INTEGER, value=2)"
assert str(token) == expected_result
| import pytest
from calc import INTEGER, Token
def test_token_cannot_be_instantiated_with_no_defaults():
"""
Test that there are currently no valid defaults for a :class:`Token`. More
simply, ensure that a :class:`Token` cannot be instantiated without any
arguments.
"""
with pytest.raises(TypeError):
Token()
def test_token_can_be_instantiated_with_known_values():
"""
Test that a :class:`Token` with known good initialization values can be
instantiated.
"""
token = Token(type=INTEGER, value=2)
assert token.value == 2
assert token.type == INTEGER
def test_token_can_be_converted_to_str():
"""
Test that a :class:`Token` can be converted into a string, even when
instantiated with a non-string value such as an integer.
"""
token = Token(type=INTEGER, value=2)
expected_result = "Token(type=INTEGER, value=2)"
assert str(token) == expected_result
| Improve documentation in token tests. Rename functions to be more clear | Improve documentation in token tests. Rename functions to be more clear
| Python | isc | bike-barn/red-green-refactor | import pytest
from calc import INTEGER, Token
def test_token_cannot_be_instantiated_with_no_defaults():
"""
Test that there are currently no valid defaults for a :class:`Token`. More
simply, ensure that a :class:`Token` cannot be instantiated without any
arguments.
"""
with pytest.raises(TypeError):
Token()
def test_token_can_be_instantiated_with_known_values():
"""
Test that a :class:`Token` with known good initialization values can be
instantiated.
"""
token = Token(type=INTEGER, value=2)
assert token.value == 2
assert token.type == INTEGER
def test_token_can_be_converted_to_str():
"""
Test that a :class:`Token` can be converted into a string, even when
instantiated with a non-string value such as an integer.
"""
token = Token(type=INTEGER, value=2)
expected_result = "Token(type=INTEGER, value=2)"
assert str(token) == expected_result
| Improve documentation in token tests. Rename functions to be more clear
"""
NOTE: There are no tests that check for data validation at this point since
the interpreter doesn't have any data validation as a feature.
"""
import pytest
from calc import INTEGER, Token
def test_no_defaults():
# There's no valid defaults at the moment.
with pytest.raises(TypeError):
Token()
def test_known_type():
# There's no valid defaults at the moment.
token = Token(type=INTEGER, value=2)
assert token.value == 2
assert token.type == INTEGER
def test_str_non_string_value():
token = Token(type=INTEGER, value=2)
expected_result = "Token(type=INTEGER, value=2)"
assert str(token) == expected_result
|
2d616924f7dc02458bf0b13a396f3f91b039d321 | hub/views.py | hub/views.py | from rest_framework.decorators import api_view
from rest_framework.response import Response
from .models import FormBuilderPreference
from django.http import HttpResponseRedirect
from django.core.management import call_command
@api_view(['GET'])
def switch_builder(request):
'''
very un-restful, but for ease of testing, a quick 'GET' is hard to beat
'''
if not request.user.is_authenticated():
raise exceptions.NotAuthenticated()
if 'beta' in request.GET:
beta_val = request.GET.get('beta') == '1'
(pref, created) = FormBuilderPreference.objects.get_or_create(
user=request.user)
pref.preferred_builder = FormBuilderPreference.KPI if beta_val \
else FormBuilderPreference.DKOBO
pref.save()
if 'migrate' in request.GET:
call_command(
'import_survey_drafts_from_dkobo', username=request.user.username)
return HttpResponseRedirect('/')
| from .models import FormBuilderPreference
from django.http import HttpResponseRedirect
from django.core.management import call_command
from django.contrib.auth.decorators import login_required
@login_required
def switch_builder(request):
'''
very un-restful, but for ease of testing, a quick 'GET' is hard to beat
'''
if 'beta' in request.GET:
beta_val = request.GET.get('beta') == '1'
(pref, created) = FormBuilderPreference.objects.get_or_create(
user=request.user)
pref.preferred_builder = FormBuilderPreference.KPI if beta_val \
else FormBuilderPreference.DKOBO
pref.save()
if 'migrate' in request.GET:
call_command(
'import_survey_drafts_from_dkobo', username=request.user.username)
return HttpResponseRedirect('/')
| Use `login_required` decorator on `switch_builder` view | Use `login_required` decorator on `switch_builder` view
| Python | agpl-3.0 | kobotoolbox/kpi,onaio/kpi,kobotoolbox/kpi,onaio/kpi,onaio/kpi,onaio/kpi,kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi | from .models import FormBuilderPreference
from django.http import HttpResponseRedirect
from django.core.management import call_command
from django.contrib.auth.decorators import login_required
@login_required
def switch_builder(request):
'''
very un-restful, but for ease of testing, a quick 'GET' is hard to beat
'''
if 'beta' in request.GET:
beta_val = request.GET.get('beta') == '1'
(pref, created) = FormBuilderPreference.objects.get_or_create(
user=request.user)
pref.preferred_builder = FormBuilderPreference.KPI if beta_val \
else FormBuilderPreference.DKOBO
pref.save()
if 'migrate' in request.GET:
call_command(
'import_survey_drafts_from_dkobo', username=request.user.username)
return HttpResponseRedirect('/')
| Use `login_required` decorator on `switch_builder` view
from rest_framework.decorators import api_view
from rest_framework.response import Response
from .models import FormBuilderPreference
from django.http import HttpResponseRedirect
from django.core.management import call_command
@api_view(['GET'])
def switch_builder(request):
'''
very un-restful, but for ease of testing, a quick 'GET' is hard to beat
'''
if not request.user.is_authenticated():
raise exceptions.NotAuthenticated()
if 'beta' in request.GET:
beta_val = request.GET.get('beta') == '1'
(pref, created) = FormBuilderPreference.objects.get_or_create(
user=request.user)
pref.preferred_builder = FormBuilderPreference.KPI if beta_val \
else FormBuilderPreference.DKOBO
pref.save()
if 'migrate' in request.GET:
call_command(
'import_survey_drafts_from_dkobo', username=request.user.username)
return HttpResponseRedirect('/')
|
86678fce3817388641db3d0f4002b3f8d409377d | pdcupdater/tests/handler_tests/test_kerberos_auth.py | pdcupdater/tests/handler_tests/test_kerberos_auth.py | import pytest
import requests_kerberos
from mock import patch, Mock
import pdcupdater.utils
from test.test_support import EnvironmentVarGuard
import os
class TestKerberosAuthentication(object):
@patch('os.path.exists', return_value=True)
@patch('requests_kerberos.HTTPKerberosAuth')
@patch('requests.get')
def test_get_token(self, requests_get, kerb_auth, os_path):
self.url = 'https://pdc.fedoraproject.org/rest_api/v1/'
set_env=patch.dict(os.environ,{'KRB5_CLIENT_KTNAME': '/etc/foo.keytab'})
requests_rv = Mock()
requests_rv.json.return_value = {"token": "12345"}
requests_get.return_value = requests_rv
set_env.start()
rv = pdcupdater.utils.get_token(self.url,
'/etc/foo.keytab')
set_env.stop()
assert rv == '12345'
| import os
from mock import patch, Mock
import pdcupdater.utils
class TestKerberosAuthentication(object):
@patch('os.path.exists', return_value=True)
@patch('requests_kerberos.HTTPKerberosAuth')
@patch('requests.get')
def test_get_token(self, requests_get, kerb_auth, os_path):
self.url = 'https://pdc.fedoraproject.org/rest_api/v1/'
set_env = patch.dict(
os.environ, {'KRB5_CLIENT_KTNAME': '/etc/foo.keytab'})
requests_rv = Mock()
requests_rv.json.return_value = {"token": "12345"}
requests_get.return_value = requests_rv
set_env.start()
rv = pdcupdater.utils.get_token(self.url, '/etc/foo.keytab')
set_env.stop()
assert rv == '12345'
| Remove invalid imports for TestKerberosAuthentication and fix its styling | Remove invalid imports for TestKerberosAuthentication and fix its styling
| Python | lgpl-2.1 | fedora-infra/pdc-updater | import os
from mock import patch, Mock
import pdcupdater.utils
class TestKerberosAuthentication(object):
@patch('os.path.exists', return_value=True)
@patch('requests_kerberos.HTTPKerberosAuth')
@patch('requests.get')
def test_get_token(self, requests_get, kerb_auth, os_path):
self.url = 'https://pdc.fedoraproject.org/rest_api/v1/'
set_env = patch.dict(
os.environ, {'KRB5_CLIENT_KTNAME': '/etc/foo.keytab'})
requests_rv = Mock()
requests_rv.json.return_value = {"token": "12345"}
requests_get.return_value = requests_rv
set_env.start()
rv = pdcupdater.utils.get_token(self.url, '/etc/foo.keytab')
set_env.stop()
assert rv == '12345'
| Remove invalid imports for TestKerberosAuthentication and fix its styling
import pytest
import requests_kerberos
from mock import patch, Mock
import pdcupdater.utils
from test.test_support import EnvironmentVarGuard
import os
class TestKerberosAuthentication(object):
@patch('os.path.exists', return_value=True)
@patch('requests_kerberos.HTTPKerberosAuth')
@patch('requests.get')
def test_get_token(self, requests_get, kerb_auth, os_path):
self.url = 'https://pdc.fedoraproject.org/rest_api/v1/'
set_env=patch.dict(os.environ,{'KRB5_CLIENT_KTNAME': '/etc/foo.keytab'})
requests_rv = Mock()
requests_rv.json.return_value = {"token": "12345"}
requests_get.return_value = requests_rv
set_env.start()
rv = pdcupdater.utils.get_token(self.url,
'/etc/foo.keytab')
set_env.stop()
assert rv == '12345'
|
c37500894b309a691009b87b1305935ee57648cb | tests/test_test.py | tests/test_test.py | import pytest
from web_test_base import *
"""
A class to test new features without running all of the tests.
Usage:
py.test tests/test_test.py -rsx
"""
class TestTest(WebTestBase):
urls_to_get = [
"http://aidtransparency.net/"
]
text_to_find = [
("information", '//*[@id="home-strapline"]/h1')
]
def test_locate_text(self, loaded_request, text_to_find):
"""
Tests that each page contains lthe specified text at the required location.
"""
result = self._get_text_from_xpath(loaded_request, text_to_find[1])
assert self._substring_in_list(text_to_find[0], result)
| import pytest
from web_test_base import *
"""
A class to test new features without running all of the tests.
Usage:
py.test tests/test_test.py -rsx
"""
class TestTest(WebTestBase):
urls_to_get = [
"http://iatistandard.org/"
, "http://iatistandard.org/202/namespaces-extensions/"
]
text_to_find = [
("technical publishing framework", '//*[@id="home-strapline"]/h1')
]
def test_locate_text(self, loaded_request, text_to_find):
"""
Tests that each page contains lthe specified text at the required location.
"""
result = self._get_text_from_xpath(loaded_request, text_to_find[1])
assert self._substring_in_list(text_to_find[0], result)
| Add test text finding that fails | Add test text finding that fails
This indicates that a different method of specifying how and where
to find text within a document is required.
| Python | mit | IATI/IATI-Website-Tests | import pytest
from web_test_base import *
"""
A class to test new features without running all of the tests.
Usage:
py.test tests/test_test.py -rsx
"""
class TestTest(WebTestBase):
urls_to_get = [
"http://iatistandard.org/"
, "http://iatistandard.org/202/namespaces-extensions/"
]
text_to_find = [
("technical publishing framework", '//*[@id="home-strapline"]/h1')
]
def test_locate_text(self, loaded_request, text_to_find):
"""
Tests that each page contains lthe specified text at the required location.
"""
result = self._get_text_from_xpath(loaded_request, text_to_find[1])
assert self._substring_in_list(text_to_find[0], result)
| Add test text finding that fails
This indicates that a different method of specifying how and where
to find text within a document is required.
import pytest
from web_test_base import *
"""
A class to test new features without running all of the tests.
Usage:
py.test tests/test_test.py -rsx
"""
class TestTest(WebTestBase):
urls_to_get = [
"http://aidtransparency.net/"
]
text_to_find = [
("information", '//*[@id="home-strapline"]/h1')
]
def test_locate_text(self, loaded_request, text_to_find):
"""
Tests that each page contains lthe specified text at the required location.
"""
result = self._get_text_from_xpath(loaded_request, text_to_find[1])
assert self._substring_in_list(text_to_find[0], result)
|
48081a925d5b69e18a1f04c74cbe98b590e77c5b | tests/unit/test_pylama_isort.py | tests/unit/test_pylama_isort.py | import os
from isort.pylama_isort import Linter
class TestLinter:
instance = Linter()
def test_allow(self):
assert not self.instance.allow("test_case.pyc")
assert not self.instance.allow("test_case.c")
assert self.instance.allow("test_case.py")
def test_run(self, src_dir, tmpdir):
assert not self.instance.run(os.path.join(src_dir, "api.py"))
incorrect = tmpdir.join("incorrect.py")
incorrect.write("import b\nimport a\n")
assert self.instance.run(str(incorrect))
| import os
from isort.pylama_isort import Linter
class TestLinter:
instance = Linter()
def test_allow(self):
assert not self.instance.allow("test_case.pyc")
assert not self.instance.allow("test_case.c")
assert self.instance.allow("test_case.py")
def test_run(self, src_dir, tmpdir):
assert not self.instance.run(os.path.join(src_dir, "api.py"))
incorrect = tmpdir.join("incorrect.py")
incorrect.write("import b\nimport a\n")
assert self.instance.run(str(incorrect))
def test_skip(self, src_dir, tmpdir):
incorrect = tmpdir.join("incorrect.py")
incorrect.write("# isort: skip_file\nimport b\nimport a\n")
assert not self.instance.run(str(incorrect))
| Add a test for skip functionality | Add a test for skip functionality
| Python | mit | PyCQA/isort,PyCQA/isort | import os
from isort.pylama_isort import Linter
class TestLinter:
instance = Linter()
def test_allow(self):
assert not self.instance.allow("test_case.pyc")
assert not self.instance.allow("test_case.c")
assert self.instance.allow("test_case.py")
def test_run(self, src_dir, tmpdir):
assert not self.instance.run(os.path.join(src_dir, "api.py"))
incorrect = tmpdir.join("incorrect.py")
incorrect.write("import b\nimport a\n")
assert self.instance.run(str(incorrect))
def test_skip(self, src_dir, tmpdir):
incorrect = tmpdir.join("incorrect.py")
incorrect.write("# isort: skip_file\nimport b\nimport a\n")
assert not self.instance.run(str(incorrect))
| Add a test for skip functionality
import os
from isort.pylama_isort import Linter
class TestLinter:
instance = Linter()
def test_allow(self):
assert not self.instance.allow("test_case.pyc")
assert not self.instance.allow("test_case.c")
assert self.instance.allow("test_case.py")
def test_run(self, src_dir, tmpdir):
assert not self.instance.run(os.path.join(src_dir, "api.py"))
incorrect = tmpdir.join("incorrect.py")
incorrect.write("import b\nimport a\n")
assert self.instance.run(str(incorrect))
|
87e3e6b4930b73563027cb0e125ddd9b9c787d6d | api_tests/requests/views/test_request_action_list.py | api_tests/requests/views/test_request_action_list.py | import pytest
from api.base.settings.defaults import API_BASE
from api_tests.requests.mixins import PreprintRequestTestMixin
@pytest.mark.django_db
class TestPreprintRequestActionList(PreprintRequestTestMixin):
def url(self, request):
return '/{}requests/{}/actions/'.format(API_BASE, request._id)
def test_nonmod_cannot_view(self, app, noncontrib, write_contrib, admin, pre_request, post_request, none_request):
for request in [pre_request, post_request, none_request]:
for user in [noncontrib, write_contrib, admin]:
res = app.get(self.url(request), auth=user.auth, expect_errors=True)
assert res.status_code == 403
def test_mod_can_view(self, app, moderator, pre_request, post_request, auto_approved_pre_request):
for request in [pre_request, post_request]:
res = app.get(self.url(request), auth=moderator.auth)
assert res.status_code == 200
assert len(res.json['data']) == 1
assert res.json['data'][0]['attributes']['auto'] is False
res = app.get(self.url(auto_approved_pre_request), auth=moderator.auth)
assert res.status_code == 200
assert len(res.json['data']) == 2
assert res.json['data'][0]['attributes']['auto'] is True
| Add action list permissions tests | Add action list permissions tests
| Python | apache-2.0 | erinspace/osf.io,caseyrollins/osf.io,pattisdr/osf.io,erinspace/osf.io,aaxelb/osf.io,CenterForOpenScience/osf.io,HalcyonChimera/osf.io,CenterForOpenScience/osf.io,baylee-d/osf.io,HalcyonChimera/osf.io,felliott/osf.io,Johnetordoff/osf.io,pattisdr/osf.io,brianjgeiger/osf.io,brianjgeiger/osf.io,baylee-d/osf.io,Johnetordoff/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,mattclark/osf.io,aaxelb/osf.io,felliott/osf.io,cslzchen/osf.io,mattclark/osf.io,baylee-d/osf.io,adlius/osf.io,cslzchen/osf.io,caseyrollins/osf.io,aaxelb/osf.io,saradbowman/osf.io,felliott/osf.io,mfraezz/osf.io,mfraezz/osf.io,saradbowman/osf.io,cslzchen/osf.io,adlius/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,mfraezz/osf.io,felliott/osf.io,aaxelb/osf.io,cslzchen/osf.io,mfraezz/osf.io,caseyrollins/osf.io,adlius/osf.io,pattisdr/osf.io,mattclark/osf.io,adlius/osf.io,brianjgeiger/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,erinspace/osf.io,HalcyonChimera/osf.io | import pytest
from api.base.settings.defaults import API_BASE
from api_tests.requests.mixins import PreprintRequestTestMixin
@pytest.mark.django_db
class TestPreprintRequestActionList(PreprintRequestTestMixin):
def url(self, request):
return '/{}requests/{}/actions/'.format(API_BASE, request._id)
def test_nonmod_cannot_view(self, app, noncontrib, write_contrib, admin, pre_request, post_request, none_request):
for request in [pre_request, post_request, none_request]:
for user in [noncontrib, write_contrib, admin]:
res = app.get(self.url(request), auth=user.auth, expect_errors=True)
assert res.status_code == 403
def test_mod_can_view(self, app, moderator, pre_request, post_request, auto_approved_pre_request):
for request in [pre_request, post_request]:
res = app.get(self.url(request), auth=moderator.auth)
assert res.status_code == 200
assert len(res.json['data']) == 1
assert res.json['data'][0]['attributes']['auto'] is False
res = app.get(self.url(auto_approved_pre_request), auth=moderator.auth)
assert res.status_code == 200
assert len(res.json['data']) == 2
assert res.json['data'][0]['attributes']['auto'] is True
| Add action list permissions tests
|
|
c0a5d8143b87126f78e2c836f9edb5480cb6d317 | setup.py | setup.py | #/usr/bin/env python
import os
from setuptools import setup, find_packages
ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR)
# Dynamically calculate the version based on photologue.VERSION
version_tuple = __import__('photologue').VERSION
if len(version_tuple) == 3:
version = "%d.%d_%s" % version_tuple
else:
version = "%d.%d" % version_tuple[:2]
setup(
name="django-photologue",
version=version,
description="Powerful image management for the Django web framework.",
author="Justin Driscoll, Marcos Daniel Petry, Richard Barran",
author_email="justin@driscolldev.com, marcospetry@gmail.com",
url="https://github.com/jdriscoll/django-photologue",
packages=find_packages(),
package_data={
'photologue': [
'res/*.jpg',
'locale/*/LC_MESSAGES/*',
'templates/photologue/*.html',
'templates/photologue/tags/*.html',
]
},
zip_safe=False,
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
| #/usr/bin/env python
import os
from setuptools import setup, find_packages
ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR)
# Dynamically calculate the version based on photologue.VERSION
version_tuple = __import__('photologue').VERSION
if len(version_tuple) == 3:
version = "%d.%d_%s" % version_tuple
else:
version = "%d.%d" % version_tuple[:2]
setup(
name="django-photologue",
version=version,
description="Powerful image management for the Django web framework.",
author="Justin Driscoll, Marcos Daniel Petry, Richard Barran",
author_email="justin@driscolldev.com, marcospetry@gmail.com",
url="https://github.com/jdriscoll/django-photologue",
packages=find_packages(),
package_data={
'photologue': [
'res/*.jpg',
'locale/*/LC_MESSAGES/*',
'templates/photologue/*.html',
'templates/photologue/tags/*.html',
]
},
zip_safe=False,
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
install_requires=['Django>=1.3', # Change to class-based views means 1.3 minimum.
],
)
| Add dependency on Django 1.3. | Add dependency on Django 1.3.
| Python | bsd-3-clause | rmaceissoft/django-photologue,jlemaes/django-photologue,seedwithroot/django-photologue-clone,rmaceissoft/django-photologue,rmaceissoft/django-photologue,MathieuDuponchelle/my_patched_photologue,RossLYoung/django-photologue,jlemaes/django-photologue,seedwithroot/django-photologue-clone,jlemaes/django-photologue,MathieuDuponchelle/my_patched_photologue,RossLYoung/django-photologue,RossLYoung/django-photologue | #/usr/bin/env python
import os
from setuptools import setup, find_packages
ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR)
# Dynamically calculate the version based on photologue.VERSION
version_tuple = __import__('photologue').VERSION
if len(version_tuple) == 3:
version = "%d.%d_%s" % version_tuple
else:
version = "%d.%d" % version_tuple[:2]
setup(
name="django-photologue",
version=version,
description="Powerful image management for the Django web framework.",
author="Justin Driscoll, Marcos Daniel Petry, Richard Barran",
author_email="justin@driscolldev.com, marcospetry@gmail.com",
url="https://github.com/jdriscoll/django-photologue",
packages=find_packages(),
package_data={
'photologue': [
'res/*.jpg',
'locale/*/LC_MESSAGES/*',
'templates/photologue/*.html',
'templates/photologue/tags/*.html',
]
},
zip_safe=False,
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
install_requires=['Django>=1.3', # Change to class-based views means 1.3 minimum.
],
)
| Add dependency on Django 1.3.
#/usr/bin/env python
import os
from setuptools import setup, find_packages
ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR)
# Dynamically calculate the version based on photologue.VERSION
version_tuple = __import__('photologue').VERSION
if len(version_tuple) == 3:
version = "%d.%d_%s" % version_tuple
else:
version = "%d.%d" % version_tuple[:2]
setup(
name="django-photologue",
version=version,
description="Powerful image management for the Django web framework.",
author="Justin Driscoll, Marcos Daniel Petry, Richard Barran",
author_email="justin@driscolldev.com, marcospetry@gmail.com",
url="https://github.com/jdriscoll/django-photologue",
packages=find_packages(),
package_data={
'photologue': [
'res/*.jpg',
'locale/*/LC_MESSAGES/*',
'templates/photologue/*.html',
'templates/photologue/tags/*.html',
]
},
zip_safe=False,
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
|
3a414d5d4763802bc4bc506a57c1f487655d470a | engineering_project/estimatedtime.py | engineering_project/estimatedtime.py | #!/usr/bin/env python3
import statistics
class estimatedtime:
def __init__(self, numberofpoints):
self.listoftimes = []
self.points = numberofpoints
def append(self, timeinseconds, inferprogress=True):
# print(timeinseconds)
self.listoftimes.append(timeinseconds)
if inferprogress is True:
self.points -= 1
def ETA(self):
return("{0:.5f}".format((statistics.mean(self.listoftimes) * self.points)))
| #!/usr/bin/env python3
import statistics
class ETC:
''' Estimated Time to Completion '''
def __init__(self, numberofpoints):
self.listoftimes = []
self.points = numberofpoints + 1
def append(self, timeinseconds, inferprogress=True):
# print(timeinseconds)
self.listoftimes.append(timeinseconds)
if inferprogress is True:
self.points -= 1
def ETC(self):
return("{0:.5f}".format((statistics.mean(self.listoftimes) * self.points)))
| Change estimated time class to ETC | Change estimated time class to ETC
| Python | mit | DavidLutton/EngineeringProject | #!/usr/bin/env python3
import statistics
class ETC:
''' Estimated Time to Completion '''
def __init__(self, numberofpoints):
self.listoftimes = []
self.points = numberofpoints + 1
def append(self, timeinseconds, inferprogress=True):
# print(timeinseconds)
self.listoftimes.append(timeinseconds)
if inferprogress is True:
self.points -= 1
def ETC(self):
return("{0:.5f}".format((statistics.mean(self.listoftimes) * self.points)))
| Change estimated time class to ETC
#!/usr/bin/env python3
import statistics
class estimatedtime:
def __init__(self, numberofpoints):
self.listoftimes = []
self.points = numberofpoints
def append(self, timeinseconds, inferprogress=True):
# print(timeinseconds)
self.listoftimes.append(timeinseconds)
if inferprogress is True:
self.points -= 1
def ETA(self):
return("{0:.5f}".format((statistics.mean(self.listoftimes) * self.points)))
|
cc21429b99c8dc6a92487081dc8422b16abad85f | zerver/management/commands/dump_messages.py | zerver/management/commands/dump_messages.py | from optparse import make_option
from django.core.management.base import BaseCommand
from zerver.models import Message, Realm, Stream, Recipient
import datetime
import time
class Command(BaseCommand):
default_cutoff = time.time() - 60 * 60 * 24 * 30 # 30 days.
option_list = BaseCommand.option_list + (
make_option('--domain',
dest='domain',
type='str',
help='The domain whose public streams you want to dump.'),
make_option('--since',
dest='since',
type='int',
default=default_cutoff,
help='The time in epoch since from which to start the dump.')
)
def handle(self, *args, **options):
realm = Realm.objects.get(domain=options["domain"])
streams = Stream.objects.filter(realm=realm, invite_only=False)
recipients = Recipient.objects.filter(
type=Recipient.STREAM, type_id__in=[stream.id for stream in streams])
cutoff = datetime.datetime.fromtimestamp(options["since"])
messages = Message.objects.filter(pub_date__gt=cutoff, recipient__in=recipients)
for message in messages:
print message.to_dict(False)
| Add a management command to dump all messages on public streams for a realm. | Add a management command to dump all messages on public streams for a realm.
(imported from commit f4f8bfece408b466af4db93b2da15cf69b68e0a3)
| Python | apache-2.0 | hengqujushi/zulip,stamhe/zulip,wweiradio/zulip,dattatreya303/zulip,ashwinirudrappa/zulip,ikasumiwt/zulip,DazWorrall/zulip,ipernet/zulip,hj3938/zulip,praveenaki/zulip,hackerkid/zulip,mahim97/zulip,so0k/zulip,zofuthan/zulip,babbage/zulip,saitodisse/zulip,joyhchen/zulip,jackrzhang/zulip,Suninus/zulip,dattatreya303/zulip,mohsenSy/zulip,rht/zulip,calvinleenyc/zulip,proliming/zulip,synicalsyntax/zulip,dnmfarrell/zulip,arpitpanwar/zulip,Suninus/zulip,TigorC/zulip,EasonYi/zulip,RobotCaleb/zulip,armooo/zulip,ipernet/zulip,samatdav/zulip,krtkmj/zulip,arpitpanwar/zulip,seapasulli/zulip,ufosky-server/zulip,Gabriel0402/zulip,Qgap/zulip,tbutter/zulip,hackerkid/zulip,aps-sids/zulip,jimmy54/zulip,seapasulli/zulip,reyha/zulip,samatdav/zulip,akuseru/zulip,easyfmxu/zulip,MayB/zulip,jonesgithub/zulip,noroot/zulip,he15his/zulip,so0k/zulip,LAndreas/zulip,technicalpickles/zulip,guiquanz/zulip,xuxiao/zulip,JPJPJPOPOP/zulip,punchagan/zulip,qq1012803704/zulip,shubhamdhama/zulip,EasonYi/zulip,voidException/zulip,jrowan/zulip,MariaFaBella85/zulip,dotcool/zulip,stamhe/zulip,RobotCaleb/zulip,itnihao/zulip,umkay/zulip,m1ssou/zulip,voidException/zulip,Batterfii/zulip,tdr130/zulip,fw1121/zulip,willingc/zulip,joshisa/zulip,huangkebo/zulip,grave-w-grave/zulip,bitemyapp/zulip,xuxiao/zulip,codeKonami/zulip,gkotian/zulip,dxq-git/zulip,schatt/zulip,gkotian/zulip,rht/zulip,dattatreya303/zulip,m1ssou/zulip,Suninus/zulip,souravbadami/zulip,arpith/zulip,schatt/zulip,wdaher/zulip,peiwei/zulip,eeshangarg/zulip,brockwhittaker/zulip,yocome/zulip,zhaoweigg/zulip,moria/zulip,hj3938/zulip,AZtheAsian/zulip,Cheppers/zulip,developerfm/zulip,alliejones/zulip,Diptanshu8/zulip,Gabriel0402/zulip,Frouk/zulip,hengqujushi/zulip,sup95/zulip,suxinde2009/zulip,joshisa/zulip,dhcrzf/zulip,tdr130/zulip,karamcnair/zulip,sonali0901/zulip,LAndreas/zulip,codeKonami/zulip,amanharitsh123/zulip,Vallher/zulip,wweiradio/zulip,tommyip/zulip,pradiptad/zulip,deer-hope/zulip,lfranchi/zulip,punchagan/zulip,kokoar/zulip,j
essedhillon/zulip,PhilSk/zulip,sharmaeklavya2/zulip,KingxBanana/zulip,mansilladev/zulip,Juanvulcano/zulip,stamhe/zulip,tommyip/zulip,sharmaeklavya2/zulip,fw1121/zulip,RobotCaleb/zulip,firstblade/zulip,JPJPJPOPOP/zulip,amallia/zulip,aliceriot/zulip,peguin40/zulip,codeKonami/zulip,hj3938/zulip,huangkebo/zulip,luyifan/zulip,xuanhan863/zulip,vakila/zulip,gigawhitlocks/zulip,schatt/zulip,Frouk/zulip,EasonYi/zulip,levixie/zulip,joshisa/zulip,dotcool/zulip,so0k/zulip,eeshangarg/zulip,ashwinirudrappa/zulip,MayB/zulip,dxq-git/zulip,themass/zulip,guiquanz/zulip,amallia/zulip,Jianchun1/zulip,reyha/zulip,zacps/zulip,praveenaki/zulip,aps-sids/zulip,tommyip/zulip,mdavid/zulip,bowlofstew/zulip,paxapy/zulip,deer-hope/zulip,MariaFaBella85/zulip,hafeez3000/zulip,Drooids/zulip,hengqujushi/zulip,christi3k/zulip,dnmfarrell/zulip,j831/zulip,dotcool/zulip,he15his/zulip,wdaher/zulip,synicalsyntax/zulip,andersk/zulip,bitemyapp/zulip,zorojean/zulip,voidException/zulip,glovebx/zulip,jessedhillon/zulip,qq1012803704/zulip,cosmicAsymmetry/zulip,johnnygaddarr/zulip,wavelets/zulip,TigorC/zulip,akuseru/zulip,lfranchi/zulip,Batterfii/zulip,adnanh/zulip,KJin99/zulip,esander91/zulip,alliejones/zulip,luyifan/zulip,Cheppers/zulip,nicholasbs/zulip,Diptanshu8/zulip,Juanvulcano/zulip,tommyip/zulip,j831/zulip,jerryge/zulip,sup95/zulip,ipernet/zulip,mahim97/zulip,vaidap/zulip,RobotCaleb/zulip,shubhamdhama/zulip,Cheppers/zulip,tiansiyuan/zulip,synicalsyntax/zulip,punchagan/zulip,armooo/zulip,ipernet/zulip,jonesgithub/zulip,babbage/zulip,guiquanz/zulip,thomasboyt/zulip,babbage/zulip,yuvipanda/zulip,hj3938/zulip,tdr130/zulip,aakash-cr7/zulip,showell/zulip,jainayush975/zulip,johnny9/zulip,themass/zulip,jackrzhang/zulip,babbage/zulip,LeeRisk/zulip,Drooids/zulip,zacps/zulip,tdr130/zulip,zacps/zulip,huangkebo/zulip,zulip/zulip,seapasulli/zulip,xuxiao/zulip,lfranchi/zulip,kokoar/zulip,peiwei/zulip,isht3/zulip,hayderimran7/zulip,dnmfarrell/zulip,willingc/zulip,SmartPeople/zulip,ryansnowboarder/zulip,mahim97/zulip,Jan
zTam/zulip,voidException/zulip,kaiyuanheshang/zulip,gigawhitlocks/zulip,amallia/zulip,tommyip/zulip,Batterfii/zulip,zachallaun/zulip,tommyip/zulip,vabs22/zulip,aliceriot/zulip,easyfmxu/zulip,moria/zulip,sharmaeklavya2/zulip,punchagan/zulip,timabbott/zulip,joshisa/zulip,ericzhou2008/zulip,natanovia/zulip,zorojean/zulip,jonesgithub/zulip,blaze225/zulip,deer-hope/zulip,calvinleenyc/zulip,JPJPJPOPOP/zulip,pradiptad/zulip,vaidap/zulip,Drooids/zulip,aps-sids/zulip,zwily/zulip,zhaoweigg/zulip,vakila/zulip,luyifan/zulip,luyifan/zulip,PhilSk/zulip,ikasumiwt/zulip,ipernet/zulip,ryanbackman/zulip,tdr130/zulip,samatdav/zulip,punchagan/zulip,technicalpickles/zulip,glovebx/zulip,mdavid/zulip,avastu/zulip,m1ssou/zulip,ikasumiwt/zulip,saitodisse/zulip,Galexrt/zulip,hafeez3000/zulip,eeshangarg/zulip,dawran6/zulip,Vallher/zulip,tbutter/zulip,so0k/zulip,suxinde2009/zulip,brainwane/zulip,brockwhittaker/zulip,dwrpayne/zulip,jonesgithub/zulip,natanovia/zulip,zulip/zulip,zulip/zulip,punchagan/zulip,SmartPeople/zulip,yuvipanda/zulip,mansilladev/zulip,glovebx/zulip,pradiptad/zulip,avastu/zulip,souravbadami/zulip,jerryge/zulip,PaulPetring/zulip,saitodisse/zulip,alliejones/zulip,eastlhu/zulip,Gabriel0402/zulip,johnny9/zulip,fw1121/zulip,joyhchen/zulip,yuvipanda/zulip,bluesea/zulip,voidException/zulip,dotcool/zulip,jphilipsen05/zulip,shaunstanislaus/zulip,hafeez3000/zulip,johnnygaddarr/zulip,Jianchun1/zulip,Jianchun1/zulip,jainayush975/zulip,Gabriel0402/zulip,moria/zulip,arpitpanwar/zulip,LAndreas/zulip,vikas-parashar/zulip,Suninus/zulip,moria/zulip,lfranchi/zulip,wweiradio/zulip,mdavid/zulip,krtkmj/zulip,praveenaki/zulip,Gabriel0402/zulip,Vallher/zulip,zwily/zulip,synicalsyntax/zulip,kou/zulip,armooo/zulip,zwily/zulip,developerfm/zulip,firstblade/zulip,amallia/zulip,swinghu/zulip,SmartPeople/zulip,moria/zulip,souravbadami/zulip,AZtheAsian/zulip,bluesea/zulip,esander91/zulip,alliejones/zulip,johnnygaddarr/zulip,ApsOps/zulip,PaulPetring/zulip,j831/zulip,Vallher/zulip,eastlhu/zulip,AZtheAsian/zu
lip,jeffcao/zulip,ashwinirudrappa/zulip,blaze225/zulip,cosmicAsymmetry/zulip,praveenaki/zulip,kaiyuanheshang/zulip,firstblade/zulip,dwrpayne/zulip,dotcool/zulip,jphilipsen05/zulip,proliming/zulip,jrowan/zulip,DazWorrall/zulip,vabs22/zulip,zachallaun/zulip,ApsOps/zulip,tiansiyuan/zulip,tbutter/zulip,rht/zulip,amallia/zulip,PaulPetring/zulip,hengqujushi/zulip,seapasulli/zulip,bowlofstew/zulip,moria/zulip,aakash-cr7/zulip,reyha/zulip,gigawhitlocks/zulip,lfranchi/zulip,shaunstanislaus/zulip,hafeez3000/zulip,kokoar/zulip,easyfmxu/zulip,grave-w-grave/zulip,hengqujushi/zulip,jeffcao/zulip,tdr130/zulip,vikas-parashar/zulip,hackerkid/zulip,shrikrishnaholla/zulip,hafeez3000/zulip,armooo/zulip,ericzhou2008/zulip,dnmfarrell/zulip,ryanbackman/zulip,ApsOps/zulip,cosmicAsymmetry/zulip,krtkmj/zulip,jimmy54/zulip,shubhamdhama/zulip,cosmicAsymmetry/zulip,andersk/zulip,vaidap/zulip,jphilipsen05/zulip,wangdeshui/zulip,atomic-labs/zulip,amanharitsh123/zulip,eastlhu/zulip,gkotian/zulip,MariaFaBella85/zulip,ryansnowboarder/zulip,susansls/zulip,zulip/zulip,yocome/zulip,wweiradio/zulip,dwrpayne/zulip,sup95/zulip,aakash-cr7/zulip,shaunstanislaus/zulip,tiansiyuan/zulip,levixie/zulip,verma-varsha/zulip,thomasboyt/zulip,bitemyapp/zulip,LAndreas/zulip,shrikrishnaholla/zulip,hayderimran7/zulip,mohsenSy/zulip,hackerkid/zulip,KingxBanana/zulip,nicholasbs/zulip,Frouk/zulip,technicalpickles/zulip,DazWorrall/zulip,thomasboyt/zulip,ipernet/zulip,bitemyapp/zulip,jerryge/zulip,jonesgithub/zulip,peiwei/zulip,codeKonami/zulip,brockwhittaker/zulip,noroot/zulip,nicholasbs/zulip,wangdeshui/zulip,gigawhitlocks/zulip,isht3/zulip,dwrpayne/zulip,mansilladev/zulip,PhilSk/zulip,brockwhittaker/zulip,grave-w-grave/zulip,dnmfarrell/zulip,niftynei/zulip,shrikrishnaholla/zulip,littledogboy/zulip,avastu/zulip,joyhchen/zulip,aliceriot/zulip,ryanbackman/zulip,rishig/zulip,jonesgithub/zulip,kaiyuanheshang/zulip,ericzhou2008/zulip,andersk/zulip,themass/zulip,ahmadassaf/zulip,TigorC/zulip,xuxiao/zulip,vaidap/zulip,pradiptad/z
ulip,huangkebo/zulip,thomasboyt/zulip,arpith/zulip,itnihao/zulip,vaidap/zulip,luyifan/zulip,wavelets/zulip,dxq-git/zulip,AZtheAsian/zulip,Batterfii/zulip,Batterfii/zulip,shubhamdhama/zulip,deer-hope/zulip,yocome/zulip,codeKonami/zulip,hustlzp/zulip,armooo/zulip,aliceriot/zulip,bluesea/zulip,sonali0901/zulip,xuanhan863/zulip,proliming/zulip,RobotCaleb/zulip,zhaoweigg/zulip,peguin40/zulip,johnnygaddarr/zulip,ikasumiwt/zulip,Juanvulcano/zulip,andersk/zulip,technicalpickles/zulip,bastianh/zulip,Qgap/zulip,bastianh/zulip,SmartPeople/zulip,krtkmj/zulip,reyha/zulip,wweiradio/zulip,calvinleenyc/zulip,ikasumiwt/zulip,joshisa/zulip,rishig/zulip,atomic-labs/zulip,amyliu345/zulip,udxxabp/zulip,Frouk/zulip,thomasboyt/zulip,he15his/zulip,noroot/zulip,kokoar/zulip,jackrzhang/zulip,karamcnair/zulip,gkotian/zulip,brainwane/zulip,wweiradio/zulip,noroot/zulip,willingc/zulip,dawran6/zulip,esander91/zulip,natanovia/zulip,firstblade/zulip,kou/zulip,zorojean/zulip,joshisa/zulip,kou/zulip,KingxBanana/zulip,glovebx/zulip,showell/zulip,gkotian/zulip,TigorC/zulip,JPJPJPOPOP/zulip,yocome/zulip,LeeRisk/zulip,shrikrishnaholla/zulip,PaulPetring/zulip,krtkmj/zulip,kaiyuanheshang/zulip,dawran6/zulip,nicholasbs/zulip,adnanh/zulip,Frouk/zulip,udxxabp/zulip,grave-w-grave/zulip,firstblade/zulip,aps-sids/zulip,bssrdf/zulip,arpith/zulip,sonali0901/zulip,EasonYi/zulip,vikas-parashar/zulip,littledogboy/zulip,guiquanz/zulip,voidException/zulip,j831/zulip,mansilladev/zulip,schatt/zulip,mohsenSy/zulip,bowlofstew/zulip,praveenaki/zulip,wavelets/zulip,udxxabp/zulip,noroot/zulip,yocome/zulip,jimmy54/zulip,mohsenSy/zulip,shaunstanislaus/zulip,Qgap/zulip,ryanbackman/zulip,RobotCaleb/zulip,avastu/zulip,hackerkid/zulip,brockwhittaker/zulip,ApsOps/zulip,atomic-labs/zulip,calvinleenyc/zulip,rishig/zulip,zachallaun/zulip,proliming/zulip,hayderimran7/zulip,brainwane/zulip,littledogboy/zulip,sup95/zulip,tbutter/zulip,mahim97/zulip,bluesea/zulip,esander91/zulip,jrowan/zulip,dnmfarrell/zulip,jainayush975/zulip,voidExceptio
n/zulip,mohsenSy/zulip,ahmadassaf/zulip,codeKonami/zulip,xuanhan863/zulip,zorojean/zulip,dattatreya303/zulip,shaunstanislaus/zulip,Galexrt/zulip,Drooids/zulip,DazWorrall/zulip,zofuthan/zulip,LAndreas/zulip,dxq-git/zulip,reyha/zulip,developerfm/zulip,shaunstanislaus/zulip,TigorC/zulip,verma-varsha/zulip,niftynei/zulip,saitodisse/zulip,MariaFaBella85/zulip,zulip/zulip,guiquanz/zulip,Drooids/zulip,EasonYi/zulip,calvinleenyc/zulip,he15his/zulip,praveenaki/zulip,paxapy/zulip,natanovia/zulip,ufosky-server/zulip,developerfm/zulip,Suninus/zulip,yuvipanda/zulip,samatdav/zulip,technicalpickles/zulip,shaunstanislaus/zulip,peguin40/zulip,littledogboy/zulip,MayB/zulip,arpitpanwar/zulip,praveenaki/zulip,showell/zulip,wangdeshui/zulip,hackerkid/zulip,ikasumiwt/zulip,swinghu/zulip,aps-sids/zulip,johnny9/zulip,bowlofstew/zulip,MayB/zulip,zwily/zulip,joyhchen/zulip,niftynei/zulip,ashwinirudrappa/zulip,jainayush975/zulip,joyhchen/zulip,arpitpanwar/zulip,kou/zulip,vabs22/zulip,huangkebo/zulip,zacps/zulip,zorojean/zulip,natanovia/zulip,dhcrzf/zulip,hustlzp/zulip,bssrdf/zulip,schatt/zulip,vabs22/zulip,hackerkid/zulip,easyfmxu/zulip,bssrdf/zulip,yocome/zulip,adnanh/zulip,jainayush975/zulip,zofuthan/zulip,easyfmxu/zulip,udxxabp/zulip,bssrdf/zulip,Diptanshu8/zulip,ikasumiwt/zulip,jessedhillon/zulip,zhaoweigg/zulip,akuseru/zulip,developerfm/zulip,wdaher/zulip,wangdeshui/zulip,brainwane/zulip,noroot/zulip,stamhe/zulip,jimmy54/zulip,avastu/zulip,bluesea/zulip,atomic-labs/zulip,blaze225/zulip,SmartPeople/zulip,jainayush975/zulip,mdavid/zulip,karamcnair/zulip,jonesgithub/zulip,KJin99/zulip,KJin99/zulip,hengqujushi/zulip,amallia/zulip,Vallher/zulip,ashwinirudrappa/zulip,timabbott/zulip,shubhamdhama/zulip,technicalpickles/zulip,niftynei/zulip,DazWorrall/zulip,zachallaun/zulip,dwrpayne/zulip,fw1121/zulip,avastu/zulip,wavelets/zulip,levixie/zulip,hustlzp/zulip,peiwei/zulip,mahim97/zulip,kokoar/zulip,Cheppers/zulip,jeffcao/zulip,natanovia/zulip,dhcrzf/zulip,kou/zulip,johnnygaddarr/zulip,vabs22/zulip,
isht3/zulip,MayB/zulip,DazWorrall/zulip,luyifan/zulip,paxapy/zulip,qq1012803704/zulip,bowlofstew/zulip,jerryge/zulip,karamcnair/zulip,eeshangarg/zulip,amyliu345/zulip,akuseru/zulip,eastlhu/zulip,grave-w-grave/zulip,amanharitsh123/zulip,ryansnowboarder/zulip,ipernet/zulip,codeKonami/zulip,Cheppers/zulip,MariaFaBella85/zulip,timabbott/zulip,akuseru/zulip,verma-varsha/zulip,hustlzp/zulip,he15his/zulip,ufosky-server/zulip,rht/zulip,LeeRisk/zulip,Qgap/zulip,nicholasbs/zulip,MayB/zulip,bastianh/zulip,developerfm/zulip,littledogboy/zulip,jackrzhang/zulip,karamcnair/zulip,verma-varsha/zulip,dattatreya303/zulip,MariaFaBella85/zulip,jackrzhang/zulip,hustlzp/zulip,zofuthan/zulip,LeeRisk/zulip,glovebx/zulip,itnihao/zulip,brainwane/zulip,PaulPetring/zulip,Gabriel0402/zulip,wdaher/zulip,brainwane/zulip,ApsOps/zulip,timabbott/zulip,ufosky-server/zulip,hafeez3000/zulip,ericzhou2008/zulip,Diptanshu8/zulip,hafeez3000/zulip,AZtheAsian/zulip,udxxabp/zulip,DazWorrall/zulip,zacps/zulip,dxq-git/zulip,bowlofstew/zulip,eastlhu/zulip,tiansiyuan/zulip,showell/zulip,tiansiyuan/zulip,yuvipanda/zulip,cosmicAsymmetry/zulip,j831/zulip,rishig/zulip,vakila/zulip,Galexrt/zulip,Diptanshu8/zulip,jessedhillon/zulip,xuanhan863/zulip,JanzTam/zulip,zachallaun/zulip,xuxiao/zulip,arpith/zulip,mdavid/zulip,bitemyapp/zulip,arpitpanwar/zulip,andersk/zulip,avastu/zulip,ashwinirudrappa/zulip,hj3938/zulip,pradiptad/zulip,zwily/zulip,gigawhitlocks/zulip,ahmadassaf/zulip,guiquanz/zulip,KingxBanana/zulip,jphilipsen05/zulip,Qgap/zulip,mdavid/zulip,sharmaeklavya2/zulip,Diptanshu8/zulip,seapasulli/zulip,easyfmxu/zulip,themass/zulip,karamcnair/zulip,christi3k/zulip,qq1012803704/zulip,SmartPeople/zulip,pradiptad/zulip,christi3k/zulip,jimmy54/zulip,synicalsyntax/zulip,samatdav/zulip,bitemyapp/zulip,dawran6/zulip,amyliu345/zulip,vikas-parashar/zulip,jerryge/zulip,m1ssou/zulip,ericzhou2008/zulip,rht/zulip,swinghu/zulip,dhcrzf/zulip,m1ssou/zulip,armooo/zulip,souravbadami/zulip,isht3/zulip,amallia/zulip,ryansnowboarder/zulip,J
PJPJPOPOP/zulip,xuanhan863/zulip,esander91/zulip,verma-varsha/zulip,tdr130/zulip,m1ssou/zulip,yuvipanda/zulip,JPJPJPOPOP/zulip,christi3k/zulip,dawran6/zulip,alliejones/zulip,JanzTam/zulip,zofuthan/zulip,krtkmj/zulip,arpith/zulip,wdaher/zulip,ryanbackman/zulip,jrowan/zulip,andersk/zulip,swinghu/zulip,peiwei/zulip,vikas-parashar/zulip,Juanvulcano/zulip,jerryge/zulip,hj3938/zulip,noroot/zulip,EasonYi/zulip,Drooids/zulip,mahim97/zulip,stamhe/zulip,seapasulli/zulip,jrowan/zulip,nicholasbs/zulip,thomasboyt/zulip,bastianh/zulip,johnny9/zulip,shrikrishnaholla/zulip,Qgap/zulip,littledogboy/zulip,showell/zulip,levixie/zulip,johnny9/zulip,deer-hope/zulip,proliming/zulip,alliejones/zulip,jessedhillon/zulip,Cheppers/zulip,thomasboyt/zulip,Juanvulcano/zulip,niftynei/zulip,zhaoweigg/zulip,dnmfarrell/zulip,bastianh/zulip,MayB/zulip,sonali0901/zulip,sup95/zulip,LeeRisk/zulip,susansls/zulip,umkay/zulip,eeshangarg/zulip,firstblade/zulip,Frouk/zulip,adnanh/zulip,gkotian/zulip,jerryge/zulip,brockwhittaker/zulip,umkay/zulip,LeeRisk/zulip,LAndreas/zulip,PaulPetring/zulip,JanzTam/zulip,moria/zulip,wavelets/zulip,jessedhillon/zulip,reyha/zulip,suxinde2009/zulip,samatdav/zulip,timabbott/zulip,developerfm/zulip,schatt/zulip,blaze225/zulip,jeffcao/zulip,dxq-git/zulip,PhilSk/zulip,tbutter/zulip,umkay/zulip,zachallaun/zulip,udxxabp/zulip,proliming/zulip,yocome/zulip,Qgap/zulip,arpith/zulip,babbage/zulip,hayderimran7/zulip,dxq-git/zulip,paxapy/zulip,jimmy54/zulip,Frouk/zulip,JanzTam/zulip,wdaher/zulip,amanharitsh123/zulip,mansilladev/zulip,zorojean/zulip,zulip/zulip,deer-hope/zulip,themass/zulip,fw1121/zulip,jeffcao/zulip,susansls/zulip,fw1121/zulip,zulip/zulip,vakila/zulip,adnanh/zulip,wweiradio/zulip,showell/zulip,shrikrishnaholla/zulip,swinghu/zulip,synicalsyntax/zulip,sharmaeklavya2/zulip,xuxiao/zulip,johnny9/zulip,willingc/zulip,rishig/zulip,zorojean/zulip,saitodisse/zulip,dotcool/zulip,levixie/zulip,adnanh/zulip,kou/zulip,so0k/zulip,Jianchun1/zulip,grave-w-grave/zulip,niftynei/zulip,susansl
s/zulip,aps-sids/zulip,vabs22/zulip,yuvipanda/zulip,esander91/zulip,Vallher/zulip,ryansnowboarder/zulip,jimmy54/zulip,Galexrt/zulip,qq1012803704/zulip,mdavid/zulip,stamhe/zulip,udxxabp/zulip,verma-varsha/zulip,sonali0901/zulip,hustlzp/zulip,esander91/zulip,RobotCaleb/zulip,paxapy/zulip,showell/zulip,armooo/zulip,wangdeshui/zulip,j831/zulip,saitodisse/zulip,aliceriot/zulip,glovebx/zulip,amanharitsh123/zulip,wdaher/zulip,joyhchen/zulip,peiwei/zulip,firstblade/zulip,hayderimran7/zulip,amyliu345/zulip,mansilladev/zulip,itnihao/zulip,KJin99/zulip,timabbott/zulip,ryansnowboarder/zulip,xuanhan863/zulip,Galexrt/zulip,Vallher/zulip,atomic-labs/zulip,eeshangarg/zulip,vikas-parashar/zulip,tommyip/zulip,gkotian/zulip,zofuthan/zulip,xuanhan863/zulip,dhcrzf/zulip,ufosky-server/zulip,suxinde2009/zulip,ericzhou2008/zulip,ufosky-server/zulip,Batterfii/zulip,JanzTam/zulip,bowlofstew/zulip,fw1121/zulip,aliceriot/zulip,jphilipsen05/zulip,zwily/zulip,themass/zulip,ryanbackman/zulip,bluesea/zulip,susansls/zulip,kokoar/zulip,wavelets/zulip,peiwei/zulip,vakila/zulip,Cheppers/zulip,vaidap/zulip,johnnygaddarr/zulip,zwily/zulip,shubhamdhama/zulip,jessedhillon/zulip,umkay/zulip,jackrzhang/zulip,m1ssou/zulip,bastianh/zulip,sharmaeklavya2/zulip,willingc/zulip,blaze225/zulip,punchagan/zulip,hengqujushi/zulip,KJin99/zulip,amyliu345/zulip,natanovia/zulip,gigawhitlocks/zulip,guiquanz/zulip,akuseru/zulip,dhcrzf/zulip,LeeRisk/zulip,KJin99/zulip,dwrpayne/zulip,dotcool/zulip,cosmicAsymmetry/zulip,amyliu345/zulip,Gabriel0402/zulip,ApsOps/zulip,he15his/zulip,deer-hope/zulip,zhaoweigg/zulip,huangkebo/zulip,babbage/zulip,peguin40/zulip,mohsenSy/zulip,MariaFaBella85/zulip,umkay/zulip,shrikrishnaholla/zulip,aakash-cr7/zulip,Galexrt/zulip,rht/zulip,zachallaun/zulip,pradiptad/zulip,wangdeshui/zulip,PhilSk/zulip,isht3/zulip,babbage/zulip,bluesea/zulip,tiansiyuan/zulip,levixie/zulip,so0k/zulip,souravbadami/zulip,peguin40/zulip,hustlzp/zulip,schatt/zulip,levixie/zulip,xuxiao/zulip,susansls/zulip,christi3k/zulip,ti
ansiyuan/zulip,so0k/zulip,zhaoweigg/zulip,seapasulli/zulip,mansilladev/zulip,aliceriot/zulip,rishig/zulip,dawran6/zulip,huangkebo/zulip,lfranchi/zulip,kaiyuanheshang/zulip,willingc/zulip,Batterfii/zulip,johnny9/zulip,ahmadassaf/zulip,technicalpickles/zulip,ufosky-server/zulip,aakash-cr7/zulip,blaze225/zulip,eastlhu/zulip,bssrdf/zulip,wavelets/zulip,bssrdf/zulip,amanharitsh123/zulip,EasonYi/zulip,souravbadami/zulip,Galexrt/zulip,jackrzhang/zulip,itnihao/zulip,aakash-cr7/zulip,synicalsyntax/zulip,Suninus/zulip,suxinde2009/zulip,lfranchi/zulip,PhilSk/zulip,suxinde2009/zulip,isht3/zulip,PaulPetring/zulip,kokoar/zulip,ashwinirudrappa/zulip,timabbott/zulip,he15his/zulip,glovebx/zulip,littledogboy/zulip,ApsOps/zulip,jphilipsen05/zulip,hj3938/zulip,jrowan/zulip,Jianchun1/zulip,luyifan/zulip,ryansnowboarder/zulip,qq1012803704/zulip,eastlhu/zulip,ahmadassaf/zulip,KingxBanana/zulip,themass/zulip,kou/zulip,wangdeshui/zulip,itnihao/zulip,aps-sids/zulip,akuseru/zulip,saitodisse/zulip,JanzTam/zulip,umkay/zulip,tbutter/zulip,Suninus/zulip,Drooids/zulip,KingxBanana/zulip,itnihao/zulip,joshisa/zulip,adnanh/zulip,proliming/zulip,TigorC/zulip,swinghu/zulip,easyfmxu/zulip,ericzhou2008/zulip,qq1012803704/zulip,vakila/zulip,zofuthan/zulip,stamhe/zulip,shubhamdhama/zulip,arpitpanwar/zulip,andersk/zulip,AZtheAsian/zulip,zacps/zulip,Juanvulcano/zulip,dwrpayne/zulip,peguin40/zulip,hayderimran7/zulip,atomic-labs/zulip,Jianchun1/zulip,ahmadassaf/zulip,dattatreya303/zulip,rishig/zulip,kaiyuanheshang/zulip,sup95/zulip,paxapy/zulip,bssrdf/zulip,hayderimran7/zulip,kaiyuanheshang/zulip,gigawhitlocks/zulip,ahmadassaf/zulip,swinghu/zulip,bitemyapp/zulip,krtkmj/zulip,sonali0901/zulip,bastianh/zulip,suxinde2009/zulip,jeffcao/zulip,dhcrzf/zulip,jeffcao/zulip,vakila/zulip,nicholasbs/zulip,calvinleenyc/zulip,LAndreas/zulip,johnnygaddarr/zulip,KJin99/zulip,rht/zulip,eeshangarg/zulip,alliejones/zulip,christi3k/zulip,karamcnair/zulip,tbutter/zulip,atomic-labs/zulip,brainwane/zulip,willingc/zulip | from 
optparse import make_option
from django.core.management.base import BaseCommand
from zerver.models import Message, Realm, Stream, Recipient
import datetime
import time
class Command(BaseCommand):
default_cutoff = time.time() - 60 * 60 * 24 * 30 # 30 days.
option_list = BaseCommand.option_list + (
make_option('--domain',
dest='domain',
type='str',
help='The domain whose public streams you want to dump.'),
make_option('--since',
dest='since',
type='int',
default=default_cutoff,
help='The time in epoch since from which to start the dump.')
)
def handle(self, *args, **options):
realm = Realm.objects.get(domain=options["domain"])
streams = Stream.objects.filter(realm=realm, invite_only=False)
recipients = Recipient.objects.filter(
type=Recipient.STREAM, type_id__in=[stream.id for stream in streams])
cutoff = datetime.datetime.fromtimestamp(options["since"])
messages = Message.objects.filter(pub_date__gt=cutoff, recipient__in=recipients)
for message in messages:
print message.to_dict(False)
| Add a management command to dump all messages on public streams for a realm.
(imported from commit f4f8bfece408b466af4db93b2da15cf69b68e0a3)
|
|
31c7be100ed36a39231b302d6306df51375384d1 | setup.py | setup.py | from setuptools import setup
setup(
name='braubuddy',
version='0.2.0',
author='James Stewart',
author_email='jstewart101@gmail.com',
packages=['braubuddy'],
scripts=[],
url='http://pypi.python.org/pypi/Braubuddy/',
license='LICENSE.txt',
description='An extensile thermostat framework',
long_description=open('README.rst').read(),
entry_points={
'console_scripts': [
'braubuddy = braubuddy.runserver:main',
]
},
install_requires=[
'pyserial>=2.0',
'tosr0x>=0.2.0',
'temperusb>=1.2.0',
'ds18b20>=0.01.03',
'cherrypy>=3.2.2',
'pyxdg>=0.25',
'jinja2>=2.7.0',
'alabaster>=0.6.0',
],
)
| from setuptools import setup, find_packages
setup(
name='braubuddy',
version='0.2.0',
author='James Stewart',
author_email='jstewart101@gmail.com',
description='An extensile thermostat framework',
long_description=open('README.rst').read(),
license='LICENSE.txt',
packages=find_packages(),
scripts=[],
tests='braubuddy.tests',
url='http://braubudy.org/',
entry_points={
'console_scripts': [
'braubuddy = braubuddy.runserver:main',
]
},
install_requires=[
'pyserial>=2.0',
'tosr0x>=0.2.0',
'temperusb>=1.2.0',
'ds18b20>=0.01.03',
'cherrypy>=3.2.2',
'pyxdg>=0.25',
'jinja2>=2.7.0',
'alabaster>=0.6.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
| Add automagic package finding and classifiers. | Add automagic package finding and classifiers.
| Python | bsd-3-clause | amorphic/braubuddy,amorphic/braubuddy,amorphic/braubuddy | from setuptools import setup, find_packages
setup(
name='braubuddy',
version='0.2.0',
author='James Stewart',
author_email='jstewart101@gmail.com',
description='An extensile thermostat framework',
long_description=open('README.rst').read(),
license='LICENSE.txt',
packages=find_packages(),
scripts=[],
tests='braubuddy.tests',
url='http://braubudy.org/',
entry_points={
'console_scripts': [
'braubuddy = braubuddy.runserver:main',
]
},
install_requires=[
'pyserial>=2.0',
'tosr0x>=0.2.0',
'temperusb>=1.2.0',
'ds18b20>=0.01.03',
'cherrypy>=3.2.2',
'pyxdg>=0.25',
'jinja2>=2.7.0',
'alabaster>=0.6.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
| Add automagic package finding and classifiers.
from setuptools import setup
setup(
name='braubuddy',
version='0.2.0',
author='James Stewart',
author_email='jstewart101@gmail.com',
packages=['braubuddy'],
scripts=[],
url='http://pypi.python.org/pypi/Braubuddy/',
license='LICENSE.txt',
description='An extensile thermostat framework',
long_description=open('README.rst').read(),
entry_points={
'console_scripts': [
'braubuddy = braubuddy.runserver:main',
]
},
install_requires=[
'pyserial>=2.0',
'tosr0x>=0.2.0',
'temperusb>=1.2.0',
'ds18b20>=0.01.03',
'cherrypy>=3.2.2',
'pyxdg>=0.25',
'jinja2>=2.7.0',
'alabaster>=0.6.0',
],
)
|
77f820fe1286a5d39f2704c3821251bcbe20a2ba | indra/tests/test_rlimsp.py | indra/tests/test_rlimsp.py | from indra.sources import rlimsp
def test_simple_usage():
rp = rlimsp.process_pmc('PMC3717945')
stmts = rp.statements
assert len(stmts) == 6, len(stmts)
def test_ungrounded_usage():
rp = rlimsp.process_pmc('PMC3717945', with_grounding=False)
assert len(rp.statements) == 33, len(rp.statements)
| from indra.sources import rlimsp
def test_simple_usage():
rp = rlimsp.process_pmc('PMC3717945')
stmts = rp.statements
assert len(stmts) == 6, len(stmts)
for s in stmts:
assert len(s.evidence) == 1, "Wrong amount of evidence."
ev = s.evidence[0]
assert ev.annotations, "Missing annotations."
assert 'agents' in ev.annotations.keys()
assert 'trigger' in ev.annotations.keys()
def test_ungrounded_usage():
rp = rlimsp.process_pmc('PMC3717945', with_grounding=False)
assert len(rp.statements) == 33, len(rp.statements)
| Make basic test more particular. | Make basic test more particular.
| Python | bsd-2-clause | sorgerlab/indra,johnbachman/belpy,pvtodorov/indra,pvtodorov/indra,sorgerlab/indra,pvtodorov/indra,sorgerlab/belpy,johnbachman/indra,bgyori/indra,sorgerlab/belpy,pvtodorov/indra,bgyori/indra,johnbachman/belpy,sorgerlab/indra,bgyori/indra,johnbachman/belpy,sorgerlab/belpy,johnbachman/indra,johnbachman/indra | from indra.sources import rlimsp
def test_simple_usage():
rp = rlimsp.process_pmc('PMC3717945')
stmts = rp.statements
assert len(stmts) == 6, len(stmts)
for s in stmts:
assert len(s.evidence) == 1, "Wrong amount of evidence."
ev = s.evidence[0]
assert ev.annotations, "Missing annotations."
assert 'agents' in ev.annotations.keys()
assert 'trigger' in ev.annotations.keys()
def test_ungrounded_usage():
rp = rlimsp.process_pmc('PMC3717945', with_grounding=False)
assert len(rp.statements) == 33, len(rp.statements)
| Make basic test more particular.
from indra.sources import rlimsp
def test_simple_usage():
rp = rlimsp.process_pmc('PMC3717945')
stmts = rp.statements
assert len(stmts) == 6, len(stmts)
def test_ungrounded_usage():
rp = rlimsp.process_pmc('PMC3717945', with_grounding=False)
assert len(rp.statements) == 33, len(rp.statements)
|
991c6164ac5577ce74754a40a33db878d5cd6a6a | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup
setup(
name='django-sirtrevor',
version= '0.2.0',
packages=['sirtrevor'],
include_package_data=True,
license='MIT License',
description='A simple Django app that provides a model field and corresponding widget based on the fantastic Sir Trevor project',
long_description=open('README.rst', 'r').read(),
url='https://github.com/philippbosch/django-sirtrevor/',
author='Philipp Bosch',
author_email='hello@pb.io',
install_requires=['markdown2', 'django-appconf', 'django', 'six', 'importlib'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup
setup(
name='django-sirtrevor',
version= '0.2.1',
packages=['sirtrevor'],
include_package_data=True,
license='MIT License',
description='A simple Django app that provides a model field and corresponding widget based on the fantastic Sir Trevor project',
long_description=open('README.rst', 'r').read(),
url='https://github.com/philippbosch/django-sirtrevor/',
author='Philipp Bosch',
author_email='hello@pb.io',
install_requires=['markdown2', 'django-appconf', 'django', 'six'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| Remove importlib from install_requires because of issues with py3k. This breaks compatibility with Python 2.6, but these users just need to 'pip install importlib' to fix this. | Remove importlib from install_requires because of issues with py3k.
This breaks compatibility with Python 2.6, but these users just need to 'pip install importlib' to fix this.
| Python | mit | zerc/django-sirtrevor,rense/django-sirtrevor,philippbosch/django-sirtrevor,zerc/django-sirtrevor,rense/django-sirtrevor,zerc/django-sirtrevor,zerc/django-sirtrevor,rense/django-sirtrevor,philippbosch/django-sirtrevor,philippbosch/django-sirtrevor,rense/django-sirtrevor | #!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup
setup(
name='django-sirtrevor',
version= '0.2.1',
packages=['sirtrevor'],
include_package_data=True,
license='MIT License',
description='A simple Django app that provides a model field and corresponding widget based on the fantastic Sir Trevor project',
long_description=open('README.rst', 'r').read(),
url='https://github.com/philippbosch/django-sirtrevor/',
author='Philipp Bosch',
author_email='hello@pb.io',
install_requires=['markdown2', 'django-appconf', 'django', 'six'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| Remove importlib from install_requires because of issues with py3k.
This breaks compatibility with Python 2.6, but these users just need to 'pip install importlib' to fix this.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup
setup(
name='django-sirtrevor',
version= '0.2.0',
packages=['sirtrevor'],
include_package_data=True,
license='MIT License',
description='A simple Django app that provides a model field and corresponding widget based on the fantastic Sir Trevor project',
long_description=open('README.rst', 'r').read(),
url='https://github.com/philippbosch/django-sirtrevor/',
author='Philipp Bosch',
author_email='hello@pb.io',
install_requires=['markdown2', 'django-appconf', 'django', 'six', 'importlib'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
c93f4ded0b3eb33a9a06c784963845dd80144989 | setup.py | setup.py | import multiprocessing # noqa # stop tests breaking tox
from setuptools import setup
import tvrenamr
requires = ['pyyaml', 'requests']
setup_requires = ('minimock', 'mock', 'nose', 'pyyaml')
setup(
name=tvrenamr.__title__,
version=tvrenamr.__version__,
description='Rename tv show files using online databases',
long_description=open('README.rst').read() + '\n\n' +
open('CHANGELOG.rst').read(),
author=tvrenamr.__author__,
author_email='george@ghickman.co.uk',
url='http://tvrenamr.info',
license='MIT',
packages=['tvrenamr'],
entry_points={'console_scripts': ['tvr=tvrenamr.frontend:run']},
classifiers=[
'Development Status :: 6 - Mature',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Utilities',
],
install_requires=requires,
setup_requires=setup_requires,
test_suite='nose.collector',
)
| import multiprocessing # noqa # stop tests breaking tox
from setuptools import setup
import tvrenamr
requires = ['pyyaml', 'requests']
setup_requires = ('minimock', 'mock', 'nose', 'pyyaml')
setup(
name=tvrenamr.__title__,
version=tvrenamr.__version__,
description='Rename tv show files using online databases',
long_description=open('README.rst').read() + '\n\n' +
open('CHANGELOG.rst').read(),
author=tvrenamr.__author__,
author_email='george@ghickman.co.uk',
url='http://tvrenamr.info',
license='MIT',
packages=['tvrenamr'],
entry_points={'console_scripts': ['tvr=tvrenamr.frontend:run']},
classifiers=[
'Development Status :: 6 - Mature',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
],
install_requires=requires,
setup_requires=setup_requires,
test_suite='nose.collector',
)
| Update trove classifiers with generic language versions | Update trove classifiers with generic language versions
| Python | mit | ghickman/tvrenamr,wintersandroid/tvrenamr | import multiprocessing # noqa # stop tests breaking tox
from setuptools import setup
import tvrenamr
requires = ['pyyaml', 'requests']
setup_requires = ('minimock', 'mock', 'nose', 'pyyaml')
setup(
name=tvrenamr.__title__,
version=tvrenamr.__version__,
description='Rename tv show files using online databases',
long_description=open('README.rst').read() + '\n\n' +
open('CHANGELOG.rst').read(),
author=tvrenamr.__author__,
author_email='george@ghickman.co.uk',
url='http://tvrenamr.info',
license='MIT',
packages=['tvrenamr'],
entry_points={'console_scripts': ['tvr=tvrenamr.frontend:run']},
classifiers=[
'Development Status :: 6 - Mature',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
],
install_requires=requires,
setup_requires=setup_requires,
test_suite='nose.collector',
)
| Update trove classifiers with generic language versions
import multiprocessing # noqa # stop tests breaking tox
from setuptools import setup
import tvrenamr
requires = ['pyyaml', 'requests']
setup_requires = ('minimock', 'mock', 'nose', 'pyyaml')
setup(
name=tvrenamr.__title__,
version=tvrenamr.__version__,
description='Rename tv show files using online databases',
long_description=open('README.rst').read() + '\n\n' +
open('CHANGELOG.rst').read(),
author=tvrenamr.__author__,
author_email='george@ghickman.co.uk',
url='http://tvrenamr.info',
license='MIT',
packages=['tvrenamr'],
entry_points={'console_scripts': ['tvr=tvrenamr.frontend:run']},
classifiers=[
'Development Status :: 6 - Mature',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Utilities',
],
install_requires=requires,
setup_requires=setup_requires,
test_suite='nose.collector',
)
|
e8d99b27864d32ad149ffc276dfa78bfdff22c56 | __main__.py | __main__.py | #!/usr/bin/env python3
import token
import lexer as l
import parser as p
import evaluator as e
import context as c
def main(ctx):
string = input(">> ")
string = string.replace("\\n", "\n") + ";"
tokens = l.lex(string)
parser = p.Parser(tokens)
program = parser.parse_program()
if len(parser.errors) > 0:
parser.print_errors()
else:
# print(program)
print(e.eval(program, ctx))
if __name__ == "__main__":
ctx = c.Context()
while True:
try:
main(ctx)
except (KeyboardInterrupt, EOFError):
print('Goodbye!')
break
| #!/usr/bin/env python3
import token
import lexer as l
import parser as p
import evaluator as e
import context as c
def main(ctx):
string = input("⧫ ")
string = string.replace("\\n", "\n") + ";"
tokens = l.lex(string)
parser = p.Parser(tokens)
program = parser.parse_program()
if len(parser.errors) > 0:
parser.print_errors()
else:
# print(program)
print(e.eval(program, ctx))
if __name__ == "__main__":
ctx = c.Context()
while True:
try:
main(ctx)
except (KeyboardInterrupt, EOFError):
print('Goodbye!')
break
| Change prompt to a diamond | Change prompt to a diamond
| Python | mit | Zac-Garby/pluto-lang | #!/usr/bin/env python3
import token
import lexer as l
import parser as p
import evaluator as e
import context as c
def main(ctx):
string = input("⧫ ")
string = string.replace("\\n", "\n") + ";"
tokens = l.lex(string)
parser = p.Parser(tokens)
program = parser.parse_program()
if len(parser.errors) > 0:
parser.print_errors()
else:
# print(program)
print(e.eval(program, ctx))
if __name__ == "__main__":
ctx = c.Context()
while True:
try:
main(ctx)
except (KeyboardInterrupt, EOFError):
print('Goodbye!')
break
| Change prompt to a diamond
#!/usr/bin/env python3
import token
import lexer as l
import parser as p
import evaluator as e
import context as c
def main(ctx):
string = input(">> ")
string = string.replace("\\n", "\n") + ";"
tokens = l.lex(string)
parser = p.Parser(tokens)
program = parser.parse_program()
if len(parser.errors) > 0:
parser.print_errors()
else:
# print(program)
print(e.eval(program, ctx))
if __name__ == "__main__":
ctx = c.Context()
while True:
try:
main(ctx)
except (KeyboardInterrupt, EOFError):
print('Goodbye!')
break
|
01edb715a7716627fe3c73af74fa3c5bdd30995e | acq4/modules/MultiPatch/tests/test_logfile.py | acq4/modules/MultiPatch/tests/test_logfile.py | import numpy as np
from acq4.modules.MultiPatch.logfile import MultiPatchLog, IrregularTimeSeries
def test_timeseries_index():
ts1 = [
(10, 0.5),
(12, 13.4),
(29.8, 5),
(29.9, 6),
(30.0, 7),
(30.1, 8),
(35, 0),
]
ts2 = [
(10, (0.5, 13.4)),
(12, (13.4, 5)),
(29.8, (5, 0)),
(29.9, (6, -102.7)),
(30.0, (7, 23.)),
(30.1, (8, 0)),
(35, (0, 0)),
]
ts3 = [
(10, 'a'),
(12, 'b'),
(29.8, 'c'),
(29.9, 'd'),
(30.0, 'e'),
(30.1, 'f'),
(35, 'g'),
]
def lookup(t, ts):
# inefficient (but easier to test) method for doing timeseries lookup
# for comparison
low = None
for i,ev in enumerate(ts.events):
if ev[0] <= t:
low = i
else:
break
if low is None:
return None
if low+1 >= len(ts.events) or ts.interpolate is False:
return ts.events[low][1]
else:
t1, v1 = ts.events[low]
t2, v2 = ts.events[low+1]
return ts._interpolate(t, v1, v2, t1, t2)
for tsdata in (ts1, ts2, ts3):
for interp in (True, False):
if interp and isinstance(tsdata[0][1], str):
# don't test interpolation on strings
continue
for res in (0.1, 1.0, 10.0):
ts = IrregularTimeSeries(interpolate=interp, resolution=res)
for t,v in tsdata:
ts[t] = v
for t in np.arange(-1, 40, 0.1):
assert ts[t] == lookup(t, ts)
| Add multipatch logfile unit tests | Add multipatch logfile unit tests
| Python | mit | pbmanis/acq4,meganbkratz/acq4,meganbkratz/acq4,meganbkratz/acq4,acq4/acq4,pbmanis/acq4,pbmanis/acq4,pbmanis/acq4,acq4/acq4,campagnola/acq4,acq4/acq4,campagnola/acq4,acq4/acq4,campagnola/acq4,meganbkratz/acq4,campagnola/acq4 | import numpy as np
from acq4.modules.MultiPatch.logfile import MultiPatchLog, IrregularTimeSeries
def test_timeseries_index():
ts1 = [
(10, 0.5),
(12, 13.4),
(29.8, 5),
(29.9, 6),
(30.0, 7),
(30.1, 8),
(35, 0),
]
ts2 = [
(10, (0.5, 13.4)),
(12, (13.4, 5)),
(29.8, (5, 0)),
(29.9, (6, -102.7)),
(30.0, (7, 23.)),
(30.1, (8, 0)),
(35, (0, 0)),
]
ts3 = [
(10, 'a'),
(12, 'b'),
(29.8, 'c'),
(29.9, 'd'),
(30.0, 'e'),
(30.1, 'f'),
(35, 'g'),
]
def lookup(t, ts):
# inefficient (but easier to test) method for doing timeseries lookup
# for comparison
low = None
for i,ev in enumerate(ts.events):
if ev[0] <= t:
low = i
else:
break
if low is None:
return None
if low+1 >= len(ts.events) or ts.interpolate is False:
return ts.events[low][1]
else:
t1, v1 = ts.events[low]
t2, v2 = ts.events[low+1]
return ts._interpolate(t, v1, v2, t1, t2)
for tsdata in (ts1, ts2, ts3):
for interp in (True, False):
if interp and isinstance(tsdata[0][1], str):
# don't test interpolation on strings
continue
for res in (0.1, 1.0, 10.0):
ts = IrregularTimeSeries(interpolate=interp, resolution=res)
for t,v in tsdata:
ts[t] = v
for t in np.arange(-1, 40, 0.1):
assert ts[t] == lookup(t, ts)
| Add multipatch logfile unit tests
|
|
156093f3b4872d68663897b8525f4706ec5a555c | pyfr/template.py | pyfr/template.py | # -*- coding: utf-8 -*-
import os
import pkgutil
from mako.lookup import TemplateLookup
from mako.template import Template
class DottedTemplateLookup(TemplateLookup):
def __init__(self, pkg):
self.dfltpkg = pkg
def adjust_uri(self, uri, relto):
return uri
def get_template(self, name):
div = name.rfind('.')
# Break apart name into a package and base file name
if div >= 0:
pkg = name[:div]
basename = name[div + 1:]
else:
pkg = self.dfltpkg
basename = name
# Attempt to load the template
try:
tpl = pkgutil.get_data(pkg, basename + '.mako')
return Template(tpl, lookup=self)
except IOError:
raise RuntimeError('Template "{}" not found'.format(name))
| # -*- coding: utf-8 -*-
import os
import pkgutil
from mako.lookup import TemplateLookup
from mako.template import Template
class DottedTemplateLookup(TemplateLookup):
def __init__(self, pkg):
self.dfltpkg = pkg
def adjust_uri(self, uri, relto):
return uri
def get_template(self, name):
div = name.rfind('.')
# Break apart name into a package and base file name
if div >= 0:
pkg = name[:div]
basename = name[div + 1:]
else:
pkg = self.dfltpkg
basename = name
# Attempt to load the template
src = pkgutil.get_data(pkg, basename + '.mako')
if not src:
raise RuntimeError('Template "{}" not found'.format(name))
return Template(src, lookup=self)
| Enhance the dotted name lookup functionality. | Enhance the dotted name lookup functionality.
| Python | bsd-3-clause | tjcorona/PyFR,tjcorona/PyFR,tjcorona/PyFR,BrianVermeire/PyFR,iyer-arvind/PyFR,Aerojspark/PyFR | # -*- coding: utf-8 -*-
import os
import pkgutil
from mako.lookup import TemplateLookup
from mako.template import Template
class DottedTemplateLookup(TemplateLookup):
def __init__(self, pkg):
self.dfltpkg = pkg
def adjust_uri(self, uri, relto):
return uri
def get_template(self, name):
div = name.rfind('.')
# Break apart name into a package and base file name
if div >= 0:
pkg = name[:div]
basename = name[div + 1:]
else:
pkg = self.dfltpkg
basename = name
# Attempt to load the template
src = pkgutil.get_data(pkg, basename + '.mako')
if not src:
raise RuntimeError('Template "{}" not found'.format(name))
return Template(src, lookup=self)
| Enhance the dotted name lookup functionality.
# -*- coding: utf-8 -*-
import os
import pkgutil
from mako.lookup import TemplateLookup
from mako.template import Template
class DottedTemplateLookup(TemplateLookup):
def __init__(self, pkg):
self.dfltpkg = pkg
def adjust_uri(self, uri, relto):
return uri
def get_template(self, name):
div = name.rfind('.')
# Break apart name into a package and base file name
if div >= 0:
pkg = name[:div]
basename = name[div + 1:]
else:
pkg = self.dfltpkg
basename = name
# Attempt to load the template
try:
tpl = pkgutil.get_data(pkg, basename + '.mako')
return Template(tpl, lookup=self)
except IOError:
raise RuntimeError('Template "{}" not found'.format(name))
|
621fc3e10ad296c21a27160a8a1263cf69e3079f | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import uuid
from pip.req import parse_requirements
from setuptools import setup, find_packages
requirements = parse_requirements('requirements.txt', session=uuid.uuid1())
reqs = [str(ir.req) for ir in requirements]
readme = open('README.rst').read()
setup(name='nuts',
version='1.1',
description='A Network Unit Test System',
author='Andreas Stalder, David Meister, Matthias Gabriel, Urs Baumann',
author_email='astalder@hsr.ch, dmeister@hsr.ch, mgabriel@hsr.ch, ubaumann@ins.hsr.ch',
url='https://github.com/HSRNetwork/Nuts',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
license='MIT',
keywords='network testing unit system',
long_description=readme,
install_requires=reqs,
entry_points={
'console_scripts': [
'nuts = nuts.main:main',
]
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: System :: Networking',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.6',
],
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import uuid
from pip.req import parse_requirements
from setuptools import setup, find_packages
requirements = parse_requirements('requirements.txt', session=uuid.uuid1())
reqs = [str(ir.req) for ir in requirements]
readme = open('README.rst').read()
setup(name='nuts',
version='1.1.1',
description='A Network Unit Test System',
author='Andreas Stalder, David Meister, Matthias Gabriel, Urs Baumann',
author_email='astalder@hsr.ch, dmeister@hsr.ch, mgabriel@hsr.ch, ubaumann@ins.hsr.ch',
url='https://github.com/HSRNetwork/Nuts',
packages=find_packages(),
data_files=[('lib/python2.7/site-packages/nuts/service', ['nuts/service/testSchema.yaml'])],
zip_safe=False,
include_package_data=True,
license='MIT',
keywords='network testing unit system',
long_description=readme,
install_requires=reqs,
entry_points={
'console_scripts': [
'nuts = nuts.main:main',
]
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: System :: Networking',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.6',
],
)
| Fix missing testSchema in package | Fix missing testSchema in package
| Python | mit | HSRNetwork/Nuts | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import uuid
from pip.req import parse_requirements
from setuptools import setup, find_packages
requirements = parse_requirements('requirements.txt', session=uuid.uuid1())
reqs = [str(ir.req) for ir in requirements]
readme = open('README.rst').read()
setup(name='nuts',
version='1.1.1',
description='A Network Unit Test System',
author='Andreas Stalder, David Meister, Matthias Gabriel, Urs Baumann',
author_email='astalder@hsr.ch, dmeister@hsr.ch, mgabriel@hsr.ch, ubaumann@ins.hsr.ch',
url='https://github.com/HSRNetwork/Nuts',
packages=find_packages(),
data_files=[('lib/python2.7/site-packages/nuts/service', ['nuts/service/testSchema.yaml'])],
zip_safe=False,
include_package_data=True,
license='MIT',
keywords='network testing unit system',
long_description=readme,
install_requires=reqs,
entry_points={
'console_scripts': [
'nuts = nuts.main:main',
]
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: System :: Networking',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.6',
],
)
| Fix missing testSchema in package
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import uuid
from pip.req import parse_requirements
from setuptools import setup, find_packages
requirements = parse_requirements('requirements.txt', session=uuid.uuid1())
reqs = [str(ir.req) for ir in requirements]
readme = open('README.rst').read()
setup(name='nuts',
version='1.1',
description='A Network Unit Test System',
author='Andreas Stalder, David Meister, Matthias Gabriel, Urs Baumann',
author_email='astalder@hsr.ch, dmeister@hsr.ch, mgabriel@hsr.ch, ubaumann@ins.hsr.ch',
url='https://github.com/HSRNetwork/Nuts',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
license='MIT',
keywords='network testing unit system',
long_description=readme,
install_requires=reqs,
entry_points={
'console_scripts': [
'nuts = nuts.main:main',
]
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: System :: Networking',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.6',
],
)
|
5e8d64bcbb53da0984ac0b41a470417a05c530d7 | microcosm_postgres/factories.py | microcosm_postgres/factories.py | """
Factory that configures SQLAlchemy for PostgreSQL.
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from microcosm.api import binding, defaults
@binding("postgres")
@defaults(
host="localhost",
port=5432,
password="secret",
)
def configure_sqlalchemy_engine(graph):
"""
Create the SQLAlchemy engine.
"""
# use different database name for testing
if graph.metadata.testing:
database_name = "{}_test_db".format(graph.metadata.name)
else:
database_name = "{}_db".format(graph.metadata.name)
# use the metadata name as the username
username = graph.metadata.name
password = graph.config.postgres.password or ""
uri = "postgresql://{}:{}@{}:{}/{}".format(
username,
password,
graph.config.postgres.host,
graph.config.postgres.port,
database_name,
)
return create_engine(uri)
def configure_sqlalchemy_session(graph):
"""
Create the SQLAlchemy session class.
"""
return sessionmaker(bind=graph.postgres)
| """
Factory that configures SQLAlchemy for PostgreSQL.
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from microcosm.api import binding, defaults
@binding("postgres")
@defaults(
host="localhost",
port=5432,
password="secret",
)
def configure_sqlalchemy_engine(graph):
"""
Create the SQLAlchemy engine.
"""
# use different database name for testing
if graph.metadata.testing:
database_name = "{}_test_db".format(graph.metadata.name)
else:
database_name = "{}_db".format(graph.metadata.name)
# use the metadata name as the username
username = graph.metadata.name
password = graph.config.postgres.password or ""
uri = "postgresql://{}:{}@{}:{}/{}".format(
username,
password,
graph.config.postgres.host,
graph.config.postgres.port,
database_name,
)
return create_engine(uri)
def configure_sqlalchemy_sessionmaker(graph):
"""
Create the SQLAlchemy session class.
"""
return sessionmaker(bind=graph.postgres)
| Rename factory to match what it creates | Rename factory to match what it creates
| Python | apache-2.0 | globality-corp/microcosm-postgres,globality-corp/microcosm-postgres | """
Factory that configures SQLAlchemy for PostgreSQL.
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from microcosm.api import binding, defaults
@binding("postgres")
@defaults(
    host="localhost",
    port=5432,
    password="secret",
)
def configure_sqlalchemy_engine(graph):
    """
    Create the SQLAlchemy engine.

    Registered on the graph as the "postgres" component; the host, port
    and password defaults above can be overridden via configuration.
    """
    # use different database name for testing
    if graph.metadata.testing:
        database_name = "{}_test_db".format(graph.metadata.name)
    else:
        database_name = "{}_db".format(graph.metadata.name)
    # use the metadata name as the username
    username = graph.metadata.name
    # password may be unset in config; fall back to an empty string
    password = graph.config.postgres.password or ""
    uri = "postgresql://{}:{}@{}:{}/{}".format(
        username,
        password,
        graph.config.postgres.host,
        graph.config.postgres.port,
        database_name,
    )
    return create_engine(uri)
def configure_sqlalchemy_sessionmaker(graph):
    """
    Create the SQLAlchemy session class.

    Returns a ``sessionmaker`` factory bound to the ``postgres`` engine
    component; call it to obtain new sessions.
    """
    return sessionmaker(bind=graph.postgres)
| Rename factory to match what it creates
"""
Factory that configures SQLAlchemy for PostgreSQL.
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from microcosm.api import binding, defaults
@binding("postgres")
@defaults(
host="localhost",
port=5432,
password="secret",
)
def configure_sqlalchemy_engine(graph):
"""
Create the SQLAlchemy engine.
"""
# use different database name for testing
if graph.metadata.testing:
database_name = "{}_test_db".format(graph.metadata.name)
else:
database_name = "{}_db".format(graph.metadata.name)
# use the metadata name as the username
username = graph.metadata.name
password = graph.config.postgres.password or ""
uri = "postgresql://{}:{}@{}:{}/{}".format(
username,
password,
graph.config.postgres.host,
graph.config.postgres.port,
database_name,
)
return create_engine(uri)
def configure_sqlalchemy_session(graph):
    """
    Create the SQLAlchemy session class.

    Returns a ``sessionmaker`` factory bound to the ``postgres`` engine
    component; call it to obtain new sessions.
    """
    return sessionmaker(bind=graph.postgres)
|
55e506489e93bad1d000acd747a272103e789a59 | rml/element.py | rml/element.py | ''' Representation of an element
@param element_type: type of the element
@param length: length of the element
'''
import pkg_resources
from rml.exceptions import ConfigException
pkg_resources.require('cothread')
from cothread.catools import caget
class Element(object):
    """A lattice element with a type, a length, family tags and per-field PVs."""

    def __init__(self, element_type, length, **kwargs):
        self.element_type = element_type
        self.length = length
        self.families = set()
        # Optional initial pv; also kept up to date by set_pv() for
        # backwards compatibility with callers that read .pv directly.
        self.pv = kwargs.get('pv', None)
        # Maps field name -> pv name; the authoritative store for get_pv().
        self._field = {}

    def add_to_family(self, family):
        """Tag this element as a member of the given family."""
        self.families.add(family)

    def get_pv(self, field, handle='readback'):
        """
        Get pv value for the given field.
        Currently only supports readback handle
        """
        if field not in self._field:
            raise ConfigException("Field {0} doesn't exist.".format(field))
        elif handle == 'readback':
            # Read the pv registered for *this* field.  The previous code
            # returned caget(self.pv) -- the last pv set on the element,
            # regardless of field -- and left a stray Python 2 debug print.
            return caget(self._field[field])
        else:
            raise ValueError("Unknown handle {0}".format(handle))

    def set_pv(self, field, pv_name):
        """Register pv_name as the pv backing the given field."""
        self.pv = pv_name
        self._field[field] = pv_name

    def get_type(self):
        return self.element_type

    def get_length(self):
        return self.length

    def get_families(self):
        return self.families
| ''' Representation of an element
@param element_type: type of the element
@param length: length of the element
'''
import pkg_resources
from rml.exceptions import ConfigException
pkg_resources.require('cothread')
from cothread.catools import caget
class Element(object):
    """A lattice element with a type, a length, family tags and per-field PVs."""

    def __init__(self, element_type, length, **kwargs):
        # NOTE: extra keyword arguments are accepted and silently ignored.
        self.element_type = element_type
        self.length = length
        self.families = set()
        # For storing the pv. Dictionary where keys are fields and
        # values are pv names
        self.pv = dict()

    def add_to_family(self, family):
        """Tag this element as a member of the given family."""
        self.families.add(family)

    def get_pv(self, field, handle='readback'):
        """
        Get pv value for the given field.
        Currently only supports readback handle
        """
        # PEP 8 idiom: `field not in self.pv` instead of `not field in ...`.
        if field not in self.pv:
            raise ConfigException("Field {0} doesn't exist.".format(field))
        elif handle == 'readback':
            return caget(self.pv[field])
        else:
            raise ValueError("Unknown handle {0}".format(handle))

    def set_pv(self, field, pv_name):
        """Register pv_name as the pv backing the given field."""
        self.pv[field] = pv_name

    def get_type(self):
        return self.element_type

    def get_length(self):
        return self.length

    def get_families(self):
        return self.families
| Add support for y field of a pv | Add support for y field of a pv
| Python | apache-2.0 | willrogers/pml,razvanvasile/RML,willrogers/pml | ''' Representation of an element
@param element_type: type of the element
@param length: length of the element
'''
import pkg_resources
from rml.exceptions import ConfigException
pkg_resources.require('cothread')
from cothread.catools import caget
class Element(object):
def __init__(self, element_type, length, **kwargs):
self.element_type = element_type
self.length = length
self.families = set()
# For storing the pv. Dictionary where keys are fields and
# values are pv names
self.pv = dict()
def add_to_family(self, family):
self.families.add(family)
def get_pv(self, field, handle='readback'):
"""
Get pv value for the given field.
Currently only supports readback handle
"""
if not field in self.pv:
raise ConfigException("Field {0} doesn't exist.".format(field))
elif handle == 'readback':
return caget(self.pv[field])
else:
raise ValueError("Unknown handle {0}".format(handle))
def set_pv(self, field, pv_name):
self.pv[field] = pv_name
def get_type(self):
return self.element_type
def get_length(self):
return self.length
def get_families(self):
return self.families
| Add support for y field of a pv
''' Representation of an element
@param element_type: type of the element
@param length: length of the element
'''
import pkg_resources
from rml.exceptions import ConfigException
pkg_resources.require('cothread')
from cothread.catools import caget
class Element(object):
    # NOTE(review): Python 2 module (see the print statement in get_pv).
    def __init__(self, element_type, length, **kwargs):
        self.element_type = element_type
        self.length = length
        self.families = set()
        # Getting the pv value
        self.pv = kwargs.get('pv', None)
        # Maps field name -> pv name, filled in by set_pv().
        self._field = {}
    def add_to_family(self, family):
        # Families are a set, so repeated adds are harmless.
        self.families.add(family)
    def get_pv(self, field, handle='readback'):
        """
        Get pv value for the given field.
        Currently only supports readback handle
        """
        if not field in self._field:
            raise ConfigException("Field {0} doesn't exist.".format(field))
        elif handle == 'readback':
            # NOTE(review): debug leftover below; this also reads self.pv
            # (the last pv set on the element) rather than the pv stored
            # for the requested field in self._field.
            print 'abc'
            return caget(self.pv)
        else:
            raise ValueError("Unknown handle {0}".format(handle))
    def set_pv(self, field, pv_name):
        self.pv = pv_name
        self._field[field] = pv_name
    def get_type(self):
        return self.element_type
    def get_length(self):
        return self.length
    def get_families(self):
        return self.families
|
07823ae7f7368f4bc4a4e4436129319f7215150b | faker/utils/distribution.py | faker/utils/distribution.py | # coding=utf-8
import bisect
from faker.generator import random
def random_sample():
    """Draw one uniform sample in [0.0, 1.0] from the module's random source."""
    sample = random.uniform(0.0, 1.0)
    return sample
def cumsum(it):
    """Yield the running (cumulative) total of the values in *it*."""
    running_total = 0
    for value in it:
        running_total = running_total + value
        yield running_total
def choice_distribution(a, p):
    """Pick one element of *a* at random, weighted by the values in *p*."""
    assert len(a) == len(p)
    # Build the cumulative distribution and normalise it to [0, 1].
    cumulative = list(cumsum(p))
    total = cumulative[-1]
    normalised = [float(weight) / float(total) for weight in cumulative]
    # Invert the CDF at a uniform sample to select an index.
    chosen = bisect.bisect_right(normalised, random_sample())
    return a[chosen]
| # coding=utf-8
import bisect
from sys import version_info
from faker.generator import random
def random_sample():
    """Return one uniform sample in [0.0, 1.0] from the module's random source."""
    return random.uniform(0.0, 1.0)
def cumsum(it):
    """Yield the running (cumulative) total of the values in *it*."""
    total = 0
    for x in it:
        total += x
        yield total
def choice_distribution(a, p):
    """Pick one element of *a* at random, weighted by the values in *p*.

    On Python 3.6+ this delegates to the C-implemented random.choices();
    older interpreters fall back to inverse-CDF sampling via bisect.
    """
    assert len(a) == len(p)
    # Compare the whole version tuple.  The old check
    # `major >= 3 and minor >= 6` is wrong for any future X.Y release
    # with Y < 6 (e.g. 4.0) and obscures the intent.
    if version_info >= (3, 6):
        from random import choices
        return choices(a, weights=p)[0]
    else:
        cdf = list(cumsum(p))
        normal = cdf[-1]
        cdf2 = [float(i) / float(normal) for i in cdf]
        uniform_sample = random_sample()
        idx = bisect.bisect_right(cdf2, uniform_sample)
        return a[idx]
| Use random.choices when available for better performance | Use random.choices when available for better performance
| Python | mit | joke2k/faker,joke2k/faker,danhuss/faker | # coding=utf-8
import bisect
from sys import version_info
from faker.generator import random
def random_sample():
return random.uniform(0.0, 1.0)
def cumsum(it):
total = 0
for x in it:
total += x
yield total
def choice_distribution(a, p):
assert len(a) == len(p)
if version_info.major >= 3 and version_info.minor >= 6:
from random import choices
return choices(a, weights=p)[0]
else:
cdf = list(cumsum(p))
normal = cdf[-1]
cdf2 = [float(i) / float(normal) for i in cdf]
uniform_sample = random_sample()
idx = bisect.bisect_right(cdf2, uniform_sample)
return a[idx]
| Use random.choices when available for better performance
# coding=utf-8
import bisect
from faker.generator import random
def random_sample():
return random.uniform(0.0, 1.0)
def cumsum(it):
total = 0
for x in it:
total += x
yield total
def choice_distribution(a, p):
    """Pick one element of *a* at random, weighted by the values in *p*."""
    assert len(a) == len(p)
    # Cumulative distribution of the weights, normalised to [0, 1].
    cdf = list(cumsum(p))
    normal = cdf[-1]
    cdf2 = [float(i) / float(normal) for i in cdf]
    # Inverse-CDF sampling: find where a uniform draw lands.
    uniform_sample = random_sample()
    idx = bisect.bisect_right(cdf2, uniform_sample)
    return a[idx]
|
c26ebf61079fc783d23000ee4e023e1111d8a75e | blog/manage.py | blog/manage.py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # `socket` was referenced without being imported, which crashed this
    # script with NameError; import it locally where it is needed.
    import socket
    # Pick the Django settings module based on the host we are running on.
    if socket.gethostname() == 'blog':
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings.production")
    else:
        # Fixed: this call was missing its closing parenthesis (SyntaxError).
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings.local")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Both branches of the old hostname check set the same settings module,
    # and `socket` was never imported (NameError at runtime) -- so drop the
    # check and set "settings.base" unconditionally.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings.base")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| Switch settings used to just settings/base.py | Switch settings used to just settings/base.py
| Python | bsd-3-clause | giovannicode/giovanniblog,giovannicode/giovanniblog | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Both branches of the old hostname check set the same settings module,
    # and `socket` was never imported (NameError at runtime) -- so drop the
    # check and set "settings.base" unconditionally.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings.base")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| Switch settings used to just settings/base.py
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # `socket` was referenced without being imported, which crashed this
    # script with NameError; import it locally where it is needed.
    import socket
    # Pick the Django settings module based on the host we are running on.
    if socket.gethostname() == 'blog':
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings.production")
    else:
        # Fixed: this call was missing its closing parenthesis (SyntaxError).
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings.local")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
3b7dcc4d2a19b5ac03eebae35600c25dd038fe33 | tests/test_server.py | tests/test_server.py | import hashlib
import json
from unittest.mock import Mock
from unittest.mock import ANY
from queue_functions import do_work
from server import handle_post
from uploaders.s3 import get_url
from uploaders.s3 import upload
def test_post():
    """handle_post should enqueue the work and return the asset URLs as JSON."""
    q = Mock()
    filename = 'afakefilename'
    files = {'file': [{'body': b'a-fake-file-body', 'filename': filename}]}
    # The audio filename is prefixed with md5(filename); the analysis file
    # name is derived from the audio name.
    hash_object = hashlib.md5(filename.encode())
    audio_filename = hash_object.hexdigest() + "-" + filename
    analysis_filename = audio_filename + '.analysis.json'
    expected = {'analysis': get_url(analysis_filename), 'audio': get_url(audio_filename)}
    # Fixed: the json module has no `reads`; parse the response with loads().
    actual = json.loads(handle_post(q, files, get_url, upload))
    q.enqueue.assert_called_with(do_work, (ANY, audio_filename, analysis_filename, upload))
    assert expected == actual
| import hashlib
import json
from unittest.mock import Mock
from unittest.mock import ANY
from queue_functions import do_work
from server import handle_post
from uploaders.s3 import get_url
from uploaders.s3 import upload
def test_post():
    """handle_post should enqueue the work and return the asset URLs as JSON."""
    queue = Mock()
    filename = 'afakefilename'
    files = {'file': [{'body': b'a-fake-file-body', 'filename': filename}]}
    # Audio files are stored under an md5(filename) prefix.
    prefix = hashlib.md5(filename.encode()).hexdigest()
    audio_filename = "-".join([prefix, filename])
    analysis_filename = audio_filename + '.analysis.json'
    expected = {'analysis': get_url(analysis_filename), 'audio': get_url(audio_filename)}
    response = handle_post(queue, files, get_url, upload)
    actual = json.loads(response)
    queue.enqueue.assert_called_with(do_work, (ANY, audio_filename, analysis_filename, upload))
    assert expected == actual
| Test against dictionary, not a string | Test against dictionary, not a string
| Python | bsd-2-clause | algorithmic-music-exploration/amen-server,algorithmic-music-exploration/amen-server | import hashlib
import json
from unittest.mock import Mock
from unittest.mock import ANY
from queue_functions import do_work
from server import handle_post
from uploaders.s3 import get_url
from uploaders.s3 import upload
def test_post():
q = Mock()
filename = 'afakefilename'
files = {'file': [{'body': b'a-fake-file-body', 'filename': filename}]}
hash_object = hashlib.md5(filename.encode())
audio_filename = hash_object.hexdigest() + "-" + filename
analysis_filename = audio_filename + '.analysis.json'
expected = {'analysis': get_url(analysis_filename), 'audio': get_url(audio_filename)}
actual = json.loads(handle_post(q, files, get_url, upload))
q.enqueue.assert_called_with(do_work, (ANY, audio_filename, analysis_filename, upload))
assert expected == actual
| Test against dictionary, not a string
import hashlib
import json
from unittest.mock import Mock
from unittest.mock import ANY
from queue_functions import do_work
from server import handle_post
from uploaders.s3 import get_url
from uploaders.s3 import upload
def test_post():
    """handle_post should enqueue the work and return the asset URLs as JSON."""
    q = Mock()
    filename = 'afakefilename'
    files = {'file': [{'body': b'a-fake-file-body', 'filename': filename}]}
    # The audio filename is prefixed with md5(filename); the analysis file
    # name is derived from the audio name.
    hash_object = hashlib.md5(filename.encode())
    audio_filename = hash_object.hexdigest() + "-" + filename
    analysis_filename = audio_filename + '.analysis.json'
    expected = {'analysis': get_url(analysis_filename), 'audio': get_url(audio_filename)}
    # Fixed: the json module has no `reads`; parse the response with loads().
    actual = json.loads(handle_post(q, files, get_url, upload))
    q.enqueue.assert_called_with(do_work, (ANY, audio_filename, analysis_filename, upload))
    assert expected == actual
|
2ef0571e5468ac72f712a69180fa5dc18652e8d7 | app/applier.py | app/applier.py | import random
from collections import namedtuple
Rule = namedtuple('Rule', ['changes', 'environments'])
sonorization = Rule({'p': 'b', 't': 'd', 'ʈ': 'ɖ', 'c':'ɟ', 'k': 'g', 'q': 'ɢ'},
['^.', 'V.V'])
rules = [sonorization]
words = ['potato', 'tobado', 'tabasco']
def choose_rule(words, rules):
    '''Returns a sound change rule from rules applicable to the given word list.'''
    # NOTE(review): work in progress -- both filters below are still stubs
    # and the final random selection is commented out, so this currently
    # returns None.
    filtered_rules = filter_rules_by_phonemes(words, rules)
    filtered_rules = filter_rules_by_environments(words, filtered_rules)
    # selected_rule = random.choice(filtered_rules)
def filter_rules_by_phonemes(words, rules):
    '''Returns a list of rules which contain phonemes that are present in the given
    word list.
    '''
    # TODO: not implemented yet; currently returns None, not a list.
    pass
def filter_rules_by_environments(words, rules):
    '''Returns a list of rules which apply to at least one word in the given word
    list, taking into account the environments in which the rule applies.
    '''
    # TODO: not implemented yet; currently returns None, not a list.
    pass
if __name__ == '__main__':
choose_rule(words, rules)
| import random
from collections import namedtuple
# A sound change: `changes` maps each input phoneme to its replacement and
# `environments` lists the context patterns in which the rule applies.
Rule = namedtuple('Rule', ['changes', 'environments'])
# Sonorization: each voiceless stop is mapped to its voiced counterpart,
# word-initially ('^.') or between vowels ('V.V').
sonorization = Rule({'p': 'b', 't': 'd', 'ʈ': 'ɖ', 'c':'ɟ', 'k': 'g', 'q': 'ɢ'},
                    ['^.', 'V.V'])
# Sample data used by the __main__ smoke run at the bottom of the file.
rules = [sonorization]
words = ['potato', 'tobado', 'tabasco']
def choose_rule(words, rules):
    '''Returns a sound change rule from rules applicable to the given word list.'''
    # NOTE(review): still a work in progress -- environment filtering and
    # the final random selection are commented out, so this currently
    # returns None.
    filtered_rules = filter_rules_by_phonemes(words, rules)
    # filtered_rules = filter_rules_by_environments(words, filtered_rules)
    # selected_rule = random.choice(filtered_rules)
def intersecting(set_1, set_2):
    '''Return true if the intersection of the two sets isn't empty, false
    otherwise.
    '''
    # isdisjoint() short-circuits on the first common element instead of
    # materialising the whole intersection as the old code did.
    return not set_1.isdisjoint(set_2)
def filter_rules_by_phonemes(words, rules):
    '''Returns a list of rules which contain phonemes that are present in the given
    word list.
    '''
    # Every character that occurs anywhere in the word list.
    word_phonemes = set(''.join(words))
    applicable = []
    for rule in rules:
        if intersecting(word_phonemes, set(rule.changes.keys())):
            applicable.append(rule)
    return applicable
def filter_rules_by_environments(words, rules):
    '''Returns a list of rules which apply to at least one word in the given word
    list, taking into account the environments in which the rule applies.
    '''
    # TODO: not implemented yet; currently returns None, not a list.
    pass
# Ad-hoc smoke run over the sample data defined above.
if __name__ == '__main__':
    choose_rule(words, rules)
| Implement rule filtering by phoneme. | Implement rule filtering by phoneme.
| Python | mit | kdelwat/LangEvolve,kdelwat/LangEvolve,kdelwat/LangEvolve | import random
from collections import namedtuple
Rule = namedtuple('Rule', ['changes', 'environments'])
sonorization = Rule({'p': 'b', 't': 'd', 'ʈ': 'ɖ', 'c':'ɟ', 'k': 'g', 'q': 'ɢ'},
['^.', 'V.V'])
rules = [sonorization]
words = ['potato', 'tobado', 'tabasco']
def choose_rule(words, rules):
'''Returns a sound change rule from rules applicable to the given word list.'''
filtered_rules = filter_rules_by_phonemes(words, rules)
# filtered_rules = filter_rules_by_environments(words, filtered_rules)
# selected_rule = random.choice(filtered_rules)
def intersecting(set_1, set_2):
'''Return true if the intersection of the two sets isn't empty, false
otherwise.
'''
return (len(set_1.intersection(set_2)) != 0)
def filter_rules_by_phonemes(words, rules):
'''Returns a list of rules which contain phonemes that are present in the given
word list.
'''
word_phonemes = set(''.join(words))
return [rule for rule in rules if intersecting(word_phonemes,
set(rule.changes.keys()))]
def filter_rules_by_environments(words, rules):
'''Returns a list of rules which apply to at least one word in the given word
list, taking into account the environments in which the rule applies.
'''
pass
if __name__ == '__main__':
choose_rule(words, rules)
| Implement rule filtering by phoneme.
import random
from collections import namedtuple
Rule = namedtuple('Rule', ['changes', 'environments'])
sonorization = Rule({'p': 'b', 't': 'd', 'ʈ': 'ɖ', 'c':'ɟ', 'k': 'g', 'q': 'ɢ'},
['^.', 'V.V'])
rules = [sonorization]
words = ['potato', 'tobado', 'tabasco']
def choose_rule(words, rules):
'''Returns a sound change rule from rules applicable to the given word list.'''
filtered_rules = filter_rules_by_phonemes(words, rules)
filtered_rules = filter_rules_by_environments(words, filtered_rules)
# selected_rule = random.choice(filtered_rules)
def filter_rules_by_phonemes(words, rules):
'''Returns a list of rules which contain phonemes that are present in the given
word list.
'''
pass
def filter_rules_by_environments(words, rules):
'''Returns a list of rules which apply to at least one word in the given word
list, taking into account the environments in which the rule applies.
'''
pass
if __name__ == '__main__':
choose_rule(words, rules)
|
e2479e3f8748fbfa34c89ecda7d2f3e72e94fa57 | pydata/urls.py | pydata/urls.py | from django.conf.urls import url, include
from . import views
# URL routes for the importer views; trailing slashes are optional ('/?')
# on the leaf patterns.
urlpatterns = [
    url(r'^events/import/?$',
        views.ConferenceImport.as_view(),
        name='event_import'),
    url(r'^persons/import/?$',
        views.PersonImport.as_view(),
        name='person_import'),
    url(r'^tasks/import/?$',
        views.TaskImport.as_view(),
        name='task_import'),
    url(r'^sponsorships/import/?$',
        views.SponsorshipImport.as_view(),
        name='sponsorship_import'),
    # Fixed: the include() prefix ended in '/?', which made the slash
    # optional, so slug-less matches and reversed URLs came out malformed
    # (e.g. 'bulk-import<slug>/person' with no separating slash).  An
    # include() prefix must end in a literal '/'.
    url(r'^bulk-import/', include([
        url(r'^$',
            views.BulkImportEventSelect.as_view(),
            name='bulk_import_select'),
        url(r'^(?P<slug>[\w-]+)/person/?$',
            views.PersonBulkImport.as_view(),
            name='bulk_import_person'),
        url(r'^(?P<slug>[\w-]+)/task/?$',
            views.TaskBulkImport.as_view(),
            name='bulk_import_task'),
        url(r'^(?P<slug>[\w-]+)/sponsorship/?$',
            views.SponsorshipBulkImport.as_view(),
            name='bulk_import_sponsorship'),
    ])),
]
| from django.conf.urls import url, include
from . import views
# URL routes for the importer views; trailing slashes are optional ('/?')
# on the leaf patterns.
urlpatterns = [
    url(r'^events/import/?$',
        views.ConferenceImport.as_view(),
        name='event_import'),
    url(r'^persons/import/?$',
        views.PersonImport.as_view(),
        name='person_import'),
    url(r'^tasks/import/?$',
        views.TaskImport.as_view(),
        name='task_import'),
    url(r'^sponsorships/import/?$',
        views.SponsorshipImport.as_view(),
        name='sponsorship_import'),
    # Bulk-import pages for one event, selected by slug.  The include()
    # prefix deliberately ends in a literal '/' so nested and reversed
    # URLs are well-formed.
    url(r'^bulk-import/', include([
        url(r'^$',
            views.BulkImportEventSelect.as_view(),
            name='bulk_import_select'),
        url(r'^(?P<slug>[\w-]+)/person/?$',
            views.PersonBulkImport.as_view(),
            name='bulk_import_person'),
        url(r'^(?P<slug>[\w-]+)/task/?$',
            views.TaskBulkImport.as_view(),
            name='bulk_import_task'),
        url(r'^(?P<slug>[\w-]+)/sponsorship/?$',
            views.SponsorshipBulkImport.as_view(),
            name='bulk_import_sponsorship'),
    ])),
]
| Fix malformed URLs in bulk import | Fix malformed URLs in bulk import
| Python | mit | swcarpentry/amy,vahtras/amy,pbanaszkiewicz/amy,pbanaszkiewicz/amy,swcarpentry/amy,vahtras/amy,swcarpentry/amy,vahtras/amy,pbanaszkiewicz/amy | from django.conf.urls import url, include
from . import views
urlpatterns = [
url(r'^events/import/?$',
views.ConferenceImport.as_view(),
name='event_import'),
url(r'^persons/import/?$',
views.PersonImport.as_view(),
name='person_import'),
url(r'^tasks/import/?$',
views.TaskImport.as_view(),
name='task_import'),
url(r'^sponsorships/import/?$',
views.SponsorshipImport.as_view(),
name='sponsorship_import'),
url(r'^bulk-import/', include([
url(r'^$',
views.BulkImportEventSelect.as_view(),
name='bulk_import_select'),
url(r'^(?P<slug>[\w-]+)/person/?$',
views.PersonBulkImport.as_view(),
name='bulk_import_person'),
url(r'^(?P<slug>[\w-]+)/task/?$',
views.TaskBulkImport.as_view(),
name='bulk_import_task'),
url(r'^(?P<slug>[\w-]+)/sponsorship/?$',
views.SponsorshipBulkImport.as_view(),
name='bulk_import_sponsorship'),
])),
]
| Fix malformed URLs in bulk import
from django.conf.urls import url, include
from . import views
# URL routes for the importer views; trailing slashes are optional ('/?')
# on the leaf patterns.
urlpatterns = [
    url(r'^events/import/?$',
        views.ConferenceImport.as_view(),
        name='event_import'),
    url(r'^persons/import/?$',
        views.PersonImport.as_view(),
        name='person_import'),
    url(r'^tasks/import/?$',
        views.TaskImport.as_view(),
        name='task_import'),
    url(r'^sponsorships/import/?$',
        views.SponsorshipImport.as_view(),
        name='sponsorship_import'),
    # Fixed: the include() prefix ended in '/?', which made the slash
    # optional, so slug-less matches and reversed URLs came out malformed
    # (e.g. 'bulk-import<slug>/person' with no separating slash).  An
    # include() prefix must end in a literal '/'.
    url(r'^bulk-import/', include([
        url(r'^$',
            views.BulkImportEventSelect.as_view(),
            name='bulk_import_select'),
        url(r'^(?P<slug>[\w-]+)/person/?$',
            views.PersonBulkImport.as_view(),
            name='bulk_import_person'),
        url(r'^(?P<slug>[\w-]+)/task/?$',
            views.TaskBulkImport.as_view(),
            name='bulk_import_task'),
        url(r'^(?P<slug>[\w-]+)/sponsorship/?$',
            views.SponsorshipBulkImport.as_view(),
            name='bulk_import_sponsorship'),
    ])),
]
|
5beba531b85d719039c2faf371d83d2957cea5c3 | rpifake/__init__.py | rpifake/__init__.py | from __future__ import print_function
import sys
is_active = False
# After this function runs, any further imports of RPi.GPIO
# will resolve to the fake .gpio.Gpio implementation instead
def patch_fake_gpio():
import sys
import mock
from .gpio import Gpio as FakeGpio
global is_active
print('Warning, not in RPi, using mock GPIO', file=sys.stderr)
# Idea taken from RPLCD who commented it as being from:
# reddit.com/r/Python/comments/5eddp5/mock_testing_rpigpio
MockRPi = mock.MagicMock()
MockRPi.GPIO = FakeGpio()
modules = {
'RPi': MockRPi,
'RPi.GPIO': MockRPi.GPIO,
}
sys.modules.update(modules)
is_active = True
# Test if we have RPi.GPIO or not.
# Fixed: the flag started out False and the Python 3 branch cleared it when
# the package *was* found ('is not None'), so the real RPi.GPIO was always
# shadowed by the mock.  Start from True and clear the flag only when the
# package genuinely cannot be found.
rpi_gpio_exists = True
if sys.version_info < (3,):
    # Python 2: imp.find_module raises ImportError when the package is absent.
    import imp
    try:
        imp.find_module('RPi')
    except ImportError:
        rpi_gpio_exists = False
else:
    # Python 3: find_spec returns None when the package is absent.
    import importlib.util
    if importlib.util.find_spec('RPi') is None:
        rpi_gpio_exists = False
if not rpi_gpio_exists:
    patch_fake_gpio()
# now that the patching is done, we can import RPLCD anywhere
| from __future__ import print_function
import sys
is_active = False
# After this function runs, any further imports of RPi.GPIO
# will resolve to the fake .gpio.Gpio implementation instead
def patch_fake_gpio():
    """Install a mock RPi/RPi.GPIO into sys.modules so imports get the fake."""
    import sys
    import mock
    from .gpio import Gpio as FakeGpio
    global is_active
    print('Warning, not in RPi, using mock GPIO', file=sys.stderr)
    # Idea taken from RPLCD who commented it as being from:
    # reddit.com/r/Python/comments/5eddp5/mock_testing_rpigpio
    MockRPi = mock.MagicMock()
    MockRPi.GPIO = FakeGpio()
    modules = {
        'RPi': MockRPi,
        'RPi.GPIO': MockRPi.GPIO,
    }
    sys.modules.update(modules)
    # Record that the fake is in place for code that wants to check.
    is_active = True
# Test if we have RPi.GPIO or not
# Assume the real package is present; clear the flag only when lookup fails.
rpi_gpio_exists = True
if sys.version_info < (3,):
    # Python 2: imp.find_module raises ImportError when the package is absent.
    import imp
    try:
        imp.find_module('RPi')
    except ImportError:
        rpi_gpio_exists = False
else:
    # Python 3: find_spec returns None when the package is absent.
    import importlib.util
    if importlib.util.find_spec('RPi') is None:
        rpi_gpio_exists = False
if not rpi_gpio_exists:
    patch_fake_gpio()
# now that the patching is done, we can import RPLCD anywhere
| Fix bad logic for missing RPi package | Fix bad logic for missing RPi package
| Python | mit | rfarley3/lcd-restful,rfarley3/lcd-restful | from __future__ import print_function
import sys
is_active = False
# After this function, any futher calls to import RPi.GPIO
# will instead import .gpio.Gpio instead
def patch_fake_gpio():
import sys
import mock
from .gpio import Gpio as FakeGpio
global is_active
print('Warning, not in RPi, using mock GPIO', file=sys.stderr)
# Idea taken from RPLCD who commented it as being from:
# reddit.com/r/Python/comments/5eddp5/mock_testing_rpigpio
MockRPi = mock.MagicMock()
MockRPi.GPIO = FakeGpio()
modules = {
'RPi': MockRPi,
'RPi.GPIO': MockRPi.GPIO,
}
sys.modules.update(modules)
is_active = True
# Test if we have RPi.GPIO or not
rpi_gpio_exists = True
if sys.version_info < (3,):
import imp
try:
imp.find_module('RPi')
except ImportError:
rpi_gpio_exists = False
else:
import importlib.util
if importlib.util.find_spec('RPi') is None:
rpi_gpio_exists = False
if not rpi_gpio_exists:
patch_fake_gpio()
# now that the patching is done, we can import RPLCD anywhere
| Fix bad logic for missing RPi package
from __future__ import print_function
import sys
is_active = False
# After this function, any futher calls to import RPi.GPIO
# will instead import .gpio.Gpio instead
def patch_fake_gpio():
import sys
import mock
from .gpio import Gpio as FakeGpio
global is_active
print('Warning, not in RPi, using mock GPIO', file=sys.stderr)
# Idea taken from RPLCD who commented it as being from:
# reddit.com/r/Python/comments/5eddp5/mock_testing_rpigpio
MockRPi = mock.MagicMock()
MockRPi.GPIO = FakeGpio()
modules = {
'RPi': MockRPi,
'RPi.GPIO': MockRPi.GPIO,
}
sys.modules.update(modules)
is_active = True
# Test if we have RPi.GPIO or not.
# Fixed: the flag started out False and the Python 3 branch cleared it when
# the package *was* found ('is not None'), so the real RPi.GPIO was always
# shadowed by the mock.  Start from True and clear the flag only when the
# package genuinely cannot be found.
rpi_gpio_exists = True
if sys.version_info < (3,):
    # Python 2: imp.find_module raises ImportError when the package is absent.
    import imp
    try:
        imp.find_module('RPi')
    except ImportError:
        rpi_gpio_exists = False
else:
    # Python 3: find_spec returns None when the package is absent.
    import importlib.util
    if importlib.util.find_spec('RPi') is None:
        rpi_gpio_exists = False
if not rpi_gpio_exists:
    patch_fake_gpio()
# now that the patching is done, we can import RPLCD anywhere
|
212aaed11103a9442745715ae88573fa8fcf3a2c | trac/upgrades/db43.py | trac/upgrades/db43.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.com/license.html.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/.
from trac.upgrades import backup_config_file
from trac.util.text import printout
from trac.util.translation import _
# The earlier default value of [trac] permission_policies; only a
# configuration that still matches this exactly is rewritten automatically.
_old_default = ['ReadonlyWikiPolicy', 'DefaultPermissionPolicy',
                'LegacyAttachmentPolicy']
# Replacement default: ReadonlyWikiPolicy gives way to DefaultWikiPolicy,
# and DefaultTicketPolicy is added ahead of the generic policies.
_new_default = ['DefaultWikiPolicy', 'DefaultTicketPolicy',
                'DefaultPermissionPolicy', 'LegacyAttachmentPolicy']
def do_upgrade(env, version, cursor):
    """Automatically append DefaultWikiPolicy and DefaultTicketPolicy
    if permission_policies is the default value. Otherwise, echo a message
    about the need to manually add DefaultWikiPolicy and DefaultTicketPolicy
    to the list of permission_policies.
    """
    policies = env.config.getlist('trac', 'permission_policies')
    if policies == _old_default:
        # Untouched default configuration: safe to rewrite in place,
        # after saving a backup copy of the configuration file.
        backup_config_file(env, '.db43.bak')
        env.config.set('trac', 'permission_policies', ', '.join(_new_default))
        env.config.save()
        env.log.info("Enabled DefaultWikiPolicy and DefaultTicketPolicy.")
    else:
        # Customised configuration: never rewrite it automatically; tell
        # the admin which policies still need to be enabled by hand.
        print_upgrade_msg = False
        if 'DefaultWikiPolicy' not in policies:
            env.log.info("DefaultWikiPolicy must be manually enabled.")
            # TRANSLATOR: Wrap message to 80 columns
            printout(_("""\
Notice: To enable the default permission policy for the *wiki* system,
trac.ini must be manually edited to add DefaultWikiPolicy to the list
of permission_policies in the [trac] section.
"""))
            print_upgrade_msg = True
        else:
            env.log.info("DefaultWikiPolicy already enabled.")
        if 'DefaultTicketPolicy' not in policies:
            env.log.info("DefaultTicketPolicy must be manually enabled.")
            # TRANSLATOR: Wrap message to 80 columns
            printout(_("""\
Notice: To enable the default permission policy for the *ticket* system,
trac.ini must be manually edited to add DefaultTicketPolicy to the list
of permission_policies in the [trac] section.
"""))
            print_upgrade_msg = True
        else:
            env.log.info("DefaultTicketPolicy already enabled.")
        # Point at the upgrade guide once, if any manual step was printed.
        if print_upgrade_msg:
            printout(_("""\
For more details see: http://trac.edgewall.org/wiki/TracUpgrade
"""))
| Add upgrade script missing from r15749 | 1.3.2dev: Add upgrade script missing from r15749
Refs #12719.
git-svn-id: 0d96b0c1a6983ccc08b3732614f4d6bfcf9cbb42@15765 af82e41b-90c4-0310-8c96-b1721e28e2e2
| Python | bsd-3-clause | rbaumg/trac,rbaumg/trac,rbaumg/trac,rbaumg/trac | # -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.com/license.html.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/.
from trac.upgrades import backup_config_file
from trac.util.text import printout
from trac.util.translation import _
_old_default = ['ReadonlyWikiPolicy', 'DefaultPermissionPolicy',
'LegacyAttachmentPolicy']
_new_default = ['DefaultWikiPolicy', 'DefaultTicketPolicy',
'DefaultPermissionPolicy', 'LegacyAttachmentPolicy']
def do_upgrade(env, version, cursor):
"""Automatically append DefaultWikiPolicy and DefaultTicketPolicy
if permission_policies is the default value. Otherwise, echo a message
about the need to manually add DefaultWikiPolicy and DefaultTicketPolicy
to the list of permission_policies.
"""
policies = env.config.getlist('trac', 'permission_policies')
if policies == _old_default:
backup_config_file(env, '.db43.bak')
env.config.set('trac', 'permission_policies', ', '.join(_new_default))
env.config.save()
env.log.info("Enabled DefaultWikiPolicy and DefaultTicketPolicy.")
else:
print_upgrade_msg = False
if 'DefaultWikiPolicy' not in policies:
env.log.info("DefaultWikiPolicy must be manually enabled.")
# TRANSLATOR: Wrap message to 80 columns
printout(_("""\
Notice: To enable the default permission policy for the *wiki* system,
trac.ini must be manually edited to add DefaultWikiPolicy to the list
of permission_policies in the [trac] section.
"""))
print_upgrade_msg = True
else:
env.log.info("DefaultWikiPolicy already enabled.")
if 'DefaultTicketPolicy' not in policies:
env.log.info("DefaultTicketPolicy must be manually enabled.")
# TRANSLATOR: Wrap message to 80 columns
printout(_("""\
Notice: To enable the default permission policy for the *ticket* system,
trac.ini must be manually edited to add DefaultTicketPolicy to the list
of permission_policies in the [trac] section.
"""))
print_upgrade_msg = True
else:
env.log.info("DefaultTicketPolicy already enabled.")
if print_upgrade_msg:
printout(_("""\
For more details see: http://trac.edgewall.org/wiki/TracUpgrade
"""))
| 1.3.2dev: Add upgrade script missing from r15749
Refs #12719.
git-svn-id: 0d96b0c1a6983ccc08b3732614f4d6bfcf9cbb42@15765 af82e41b-90c4-0310-8c96-b1721e28e2e2
|
|
d559edb42f7a60958a4861e1cdb504e658f5f279 | python2/setup.py | python2/setup.py | #!/usr/bin/env python
from distutils.core import setup
setup(name='futures',
version='1.0',
description='Java-style futures implementation in Python 2.x',
author='Brian Quinlan',
author_email='brian@sweetapp.com',
url='http://code.google.com/p/pythonfutures',
download_url='http://pypi.python.org/pypi/futures3/',
packages=['futures'],
license='BSD',
classifiers=['License :: OSI Approved :: BSD License',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2']
)
| #!/usr/bin/env python
from distutils.core import setup
# Distutils metadata for the 'futures' backport package.
setup(name='futures',
      version='2.0',
      description='Java-style futures implementation in Python 2.x',
      author='Brian Quinlan',
      author_email='brian@sweetapp.com',
      url='http://code.google.com/p/pythonfutures',
      download_url='http://pypi.python.org/pypi/futures3/',
      packages=['futures'],
      license='BSD',
      # Trove classifiers shown on PyPI.
      classifiers=['License :: OSI Approved :: BSD License',
                   'Development Status :: 5 - Production/Stable',
                   'Intended Audience :: Developers',
                   'Programming Language :: Python :: 2']
      )
| Bump version number for Python 3.2-matching release | Bump version number for Python 3.2-matching release
| Python | bsd-2-clause | danielj7/pythonfutures,danielj7/pythonfutures | #!/usr/bin/env python
from distutils.core import setup
setup(name='futures',
version='2.0',
description='Java-style futures implementation in Python 2.x',
author='Brian Quinlan',
author_email='brian@sweetapp.com',
url='http://code.google.com/p/pythonfutures',
download_url='http://pypi.python.org/pypi/futures3/',
packages=['futures'],
license='BSD',
classifiers=['License :: OSI Approved :: BSD License',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2']
)
| Bump version number for Python 3.2-matching release
#!/usr/bin/env python
from distutils.core import setup
setup(name='futures',
version='1.0',
description='Java-style futures implementation in Python 2.x',
author='Brian Quinlan',
author_email='brian@sweetapp.com',
url='http://code.google.com/p/pythonfutures',
download_url='http://pypi.python.org/pypi/futures3/',
packages=['futures'],
license='BSD',
classifiers=['License :: OSI Approved :: BSD License',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2']
)
|