| column | type | value stats |
|---|---|---|
| commit | string | lengths 40 to 40 |
| old_file | string | lengths 5 to 117 |
| new_file | string | lengths 5 to 117 |
| old_contents | string | lengths 0 to 1.93k |
| new_contents | string | lengths 19 to 3.3k |
| subject | string | lengths 17 to 320 |
| message | string | lengths 18 to 3.28k |
| lang | string | 1 distinct value |
| license | string | 13 distinct values |
| repos | string | lengths 7 to 42.4k |
| completion | string | lengths 19 to 3.3k |
| prompt | string | lengths 21 to 3.65k |
0748838525cb2c2ee838da3a3e906ebf8dd25a3b
setup.py
setup.py
from setuptools import setup import curtsies setup(name='curtsies', version=curtsies.__version__, description='Curses-like terminal wrapper, with colored strings!', url='https://github.com/thomasballinger/curtsies', author='Thomas Ballinger', author_email='thomasballinger@gmail.com', license='MIT', packages=['curtsies'], install_requires = [ 'blessings==1.5.1' ], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Programming Language :: Python', ], zip_safe=False)
from setuptools import setup import ast import os def version(): """Return version string.""" with open(os.path.join('curtsies', '__init__.py')) as input_file: for line in input_file: if line.startswith('__version__'): return ast.parse(line).body[0].value.s setup(name='curtsies', version=version(), description='Curses-like terminal wrapper, with colored strings!', url='https://github.com/thomasballinger/curtsies', author='Thomas Ballinger', author_email='thomasballinger@gmail.com', license='MIT', packages=['curtsies'], install_requires = [ 'blessings==1.5.1' ], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Programming Language :: Python', ], zip_safe=False)
Fix installation, broken since started doing import in __init__
Fix installation, broken since started doing import in __init__ Thanks @myint for the catch and code suggestion
Python
mit
sebastinas/curtsies,thomasballinger/curtsies,spthaolt/curtsies
from setuptools import setup import ast import os def version(): """Return version string.""" with open(os.path.join('curtsies', '__init__.py')) as input_file: for line in input_file: if line.startswith('__version__'): return ast.parse(line).body[0].value.s setup(name='curtsies', version=version(), description='Curses-like terminal wrapper, with colored strings!', url='https://github.com/thomasballinger/curtsies', author='Thomas Ballinger', author_email='thomasballinger@gmail.com', license='MIT', packages=['curtsies'], install_requires = [ 'blessings==1.5.1' ], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Programming Language :: Python', ], zip_safe=False)
Fix installation, broken since started doing import in __init__ Thanks @myint for the catch and code suggestion from setuptools import setup import curtsies setup(name='curtsies', version=curtsies.__version__, description='Curses-like terminal wrapper, with colored strings!', url='https://github.com/thomasballinger/curtsies', author='Thomas Ballinger', author_email='thomasballinger@gmail.com', license='MIT', packages=['curtsies'], install_requires = [ 'blessings==1.5.1' ], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Programming Language :: Python', ], zip_safe=False)
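The fix in this row stops `setup.py` from importing the package (which breaks installation once `__init__` pulls in dependencies) and instead parses `__version__` out of `curtsies/__init__.py` with `ast`. A minimal sketch of the same pattern, run against an in-memory line rather than a real file so it stays self-contained; the sample version string is invented:

```python
import ast


def version_from_source(source_text):
    """Extract __version__ from module source without importing the module."""
    for line in source_text.splitlines():
        if line.startswith('__version__'):
            # ast.parse gives an Assign node; .value.s is the string literal
            return ast.parse(line).body[0].value.s
    raise RuntimeError('No __version__ line found')


# The kind of line found at the top of a package __init__.py
sample = "__version__ = '0.1.18'\n"
print(version_from_source(sample))  # -> 0.1.18
```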
ee85d2fffc0e42022be66bf667005eb44391cb9e
django/similarities/utils.py
django/similarities/utils.py
import echonest from artists.models import Artist from echonest.models import SimilarResponse from users.models import User from .models import (GeneralArtist, UserSimilarity, Similarity, update_similarities) def add_new_similarities(artist, force_update=False): similarities = [] responses = SimilarResponse.objects.filter( normalized_name=artist.normalized_name) if responses.exists() and not force_update: return # Echo Nest similarities already added user = User.objects.get(email='echonest') artist_names = echonest.get_similar(artist.name) cc_artists = Artist.objects.filter(name__in=artist_names) for cc_artist in cc_artists: kwargs = dict( cc_artist=cc_artist, other_artist=artist, ) UserSimilarity.objects.get_or_create(defaults={'weight': 1}, user=user, **kwargs) similarities.append(Similarity.objects.get_or_create(**kwargs)[0]) update_similarities(similarities) def get_similar(name): artist, _ = GeneralArtist.objects.get_or_create( normalized_name=name.upper(), defaults={'name': name}) add_new_similarities(artist) return Artist.objects.filter(similarity__other_artist=artist, similarity__weight__gt=0)
from django.db.models import Q import echonest from artists.models import Artist from echonest.models import SimilarResponse from users.models import User from .models import (GeneralArtist, UserSimilarity, Similarity, update_similarities) def add_new_similarities(artist, force_update=False): similarities = [] responses = SimilarResponse.objects.filter( normalized_name=artist.normalized_name) if responses.exists() and not force_update: return # Echo Nest similarities already added user = User.objects.get(email='echonest') artist_names = echonest.get_similar(artist.name) cc_artists = Artist.objects.filter(name__in=artist_names) for cc_artist in cc_artists: kwargs = dict( cc_artist=cc_artist, other_artist=artist, ) UserSimilarity.objects.get_or_create(defaults={'weight': 1}, user=user, **kwargs) similarities.append(Similarity.objects.get_or_create(**kwargs)[0]) update_similarities(similarities) def get_similar(name): artist, _ = GeneralArtist.objects.get_or_create( normalized_name=name.upper(), defaults={'name': name}) add_new_similarities(artist) similar = Q(similarity__other_artist=artist, similarity__weight__gt=0) return Artist.objects.filter(similar).order_by('-similarity__weight')
Order similar artist results properly
Order similar artist results properly
Python
bsd-3-clause
FreeMusicNinja/freemusic.ninja,FreeMusicNinja/freemusic.ninja
from django.db.models import Q import echonest from artists.models import Artist from echonest.models import SimilarResponse from users.models import User from .models import (GeneralArtist, UserSimilarity, Similarity, update_similarities) def add_new_similarities(artist, force_update=False): similarities = [] responses = SimilarResponse.objects.filter( normalized_name=artist.normalized_name) if responses.exists() and not force_update: return # Echo Nest similarities already added user = User.objects.get(email='echonest') artist_names = echonest.get_similar(artist.name) cc_artists = Artist.objects.filter(name__in=artist_names) for cc_artist in cc_artists: kwargs = dict( cc_artist=cc_artist, other_artist=artist, ) UserSimilarity.objects.get_or_create(defaults={'weight': 1}, user=user, **kwargs) similarities.append(Similarity.objects.get_or_create(**kwargs)[0]) update_similarities(similarities) def get_similar(name): artist, _ = GeneralArtist.objects.get_or_create( normalized_name=name.upper(), defaults={'name': name}) add_new_similarities(artist) similar = Q(similarity__other_artist=artist, similarity__weight__gt=0) return Artist.objects.filter(similar).order_by('-similarity__weight')
Order similar artist results properly import echonest from artists.models import Artist from echonest.models import SimilarResponse from users.models import User from .models import (GeneralArtist, UserSimilarity, Similarity, update_similarities) def add_new_similarities(artist, force_update=False): similarities = [] responses = SimilarResponse.objects.filter( normalized_name=artist.normalized_name) if responses.exists() and not force_update: return # Echo Nest similarities already added user = User.objects.get(email='echonest') artist_names = echonest.get_similar(artist.name) cc_artists = Artist.objects.filter(name__in=artist_names) for cc_artist in cc_artists: kwargs = dict( cc_artist=cc_artist, other_artist=artist, ) UserSimilarity.objects.get_or_create(defaults={'weight': 1}, user=user, **kwargs) similarities.append(Similarity.objects.get_or_create(**kwargs)[0]) update_similarities(similarities) def get_similar(name): artist, _ = GeneralArtist.objects.get_or_create( normalized_name=name.upper(), defaults={'name': name}) add_new_similarities(artist) return Artist.objects.filter(similarity__other_artist=artist, similarity__weight__gt=0)
041123e7348cf05dd1432d8550cc497a1995351d
setup.py
setup.py
try: from setuptools import setup except ImportError: from distutils.core import setup import os.path ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) README_FILE = os.path.join(ROOT_DIR, "README.rst") with open(README_FILE) as f: long_description = f.read() setup( name="xutils", version="0.8.2", description="A Fragmentary Python Library.", long_description=long_description, author="xgfone", author_email="xgfone@126.com", maintainer="xgfone", maintainer_email="xgfone@126.com", url="https://github.com/xgfone/xutils", packages=["xutils"], classifiers=[ "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", ], )
try: from setuptools import setup except ImportError: from distutils.core import setup import os.path ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) README_FILE = os.path.join(ROOT_DIR, "README.rst") with open(README_FILE) as f: long_description = f.read() setup( name="xutils", version="0.9", description="A Fragmentary Python Library.", long_description=long_description, author="xgfone", author_email="xgfone@126.com", maintainer="xgfone", maintainer_email="xgfone@126.com", url="https://github.com/xgfone/xutils", packages=["xutils"], classifiers=[ "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", ], )
Set the version to 0.9
Set the version to 0.9
Python
mit
xgfone/xutils,xgfone/pycom
try: from setuptools import setup except ImportError: from distutils.core import setup import os.path ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) README_FILE = os.path.join(ROOT_DIR, "README.rst") with open(README_FILE) as f: long_description = f.read() setup( name="xutils", version="0.9", description="A Fragmentary Python Library.", long_description=long_description, author="xgfone", author_email="xgfone@126.com", maintainer="xgfone", maintainer_email="xgfone@126.com", url="https://github.com/xgfone/xutils", packages=["xutils"], classifiers=[ "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", ], )
Set the version to 0.9 try: from setuptools import setup except ImportError: from distutils.core import setup import os.path ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) README_FILE = os.path.join(ROOT_DIR, "README.rst") with open(README_FILE) as f: long_description = f.read() setup( name="xutils", version="0.8.2", description="A Fragmentary Python Library.", long_description=long_description, author="xgfone", author_email="xgfone@126.com", maintainer="xgfone", maintainer_email="xgfone@126.com", url="https://github.com/xgfone/xutils", packages=["xutils"], classifiers=[ "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", ], )
7be606951b22d77a53274d014cd94aae30af93f5
samples/oauth2_for_devices.py
samples/oauth2_for_devices.py
# -*- coding: utf-8 -*- # See: https://developers.google.com/accounts/docs/OAuth2ForDevices import httplib2 from six.moves import input from oauth2client.client import OAuth2WebServerFlow from googleapiclient.discovery import build CLIENT_ID = "some+client+id" CLIENT_SECRET = "some+client+secret" SCOPES = ("https://www.googleapis.com/auth/youtube",) flow = OAuth2WebServerFlow(CLIENT_ID, CLIENT_SECRET, " ".join(SCOPES)) # Step 1: get user code and verification URL # https://developers.google.com/accounts/docs/OAuth2ForDevices#obtainingacode flow_info = flow.step1_get_device_and_user_codes() print "Enter the following code at %s: %s" % (flow_info.verification_url, flow_info.user_code) print "Then press Enter." input() # Step 2: get credentials # https://developers.google.com/accounts/docs/OAuth2ForDevices#obtainingatoken credentials = flow.step2_exchange(device_flow_info=flow_info) print "Access token:", credentials.access_token print "Refresh token:", credentials.refresh_token # Get YouTube service # https://developers.google.com/accounts/docs/OAuth2ForDevices#callinganapi youtube = build("youtube", "v3", http=credentials.authorize(httplib2.Http()))
# -*- coding: utf-8 -*- # See: https://developers.google.com/accounts/docs/OAuth2ForDevices import httplib2 from six.moves import input from oauth2client.client import OAuth2WebServerFlow from googleapiclient.discovery import build CLIENT_ID = "some+client+id" CLIENT_SECRET = "some+client+secret" SCOPES = ("https://www.googleapis.com/auth/youtube",) flow = OAuth2WebServerFlow(CLIENT_ID, CLIENT_SECRET, " ".join(SCOPES)) # Step 1: get user code and verification URL # https://developers.google.com/accounts/docs/OAuth2ForDevices#obtainingacode flow_info = flow.step1_get_device_and_user_codes() print("Enter the following code at {0}: {1}".format(flow_info.verification_url, flow_info.user_code)) print("Then press Enter.") input() # Step 2: get credentials # https://developers.google.com/accounts/docs/OAuth2ForDevices#obtainingatoken credentials = flow.step2_exchange(device_flow_info=flow_info) print("Access token: {0}".format(credentials.access_token)) print("Refresh token: {0}".format(credentials.refresh_token)) # Get YouTube service # https://developers.google.com/accounts/docs/OAuth2ForDevices#callinganapi youtube = build("youtube", "v3", http=credentials.authorize(httplib2.Http()))
Fix example to be Python3 compatible, use format()
Fix example to be Python3 compatible, use format() Both print() and format() are compatible from 2.6. Also, format() is much nicer to use for internationalization since you can define the location of your substitutions. It works similarly to Java and .net's format() as well. Great stuff! Should I tackle the other examples as well, or is piece meal all right?
Python
apache-2.0
googleapis/oauth2client,jonparrott/oauth2client,google/oauth2client,jonparrott/oauth2client,clancychilds/oauth2client,googleapis/oauth2client,google/oauth2client,clancychilds/oauth2client
# -*- coding: utf-8 -*- # See: https://developers.google.com/accounts/docs/OAuth2ForDevices import httplib2 from six.moves import input from oauth2client.client import OAuth2WebServerFlow from googleapiclient.discovery import build CLIENT_ID = "some+client+id" CLIENT_SECRET = "some+client+secret" SCOPES = ("https://www.googleapis.com/auth/youtube",) flow = OAuth2WebServerFlow(CLIENT_ID, CLIENT_SECRET, " ".join(SCOPES)) # Step 1: get user code and verification URL # https://developers.google.com/accounts/docs/OAuth2ForDevices#obtainingacode flow_info = flow.step1_get_device_and_user_codes() print("Enter the following code at {0}: {1}".format(flow_info.verification_url, flow_info.user_code)) print("Then press Enter.") input() # Step 2: get credentials # https://developers.google.com/accounts/docs/OAuth2ForDevices#obtainingatoken credentials = flow.step2_exchange(device_flow_info=flow_info) print("Access token: {0}".format(credentials.access_token)) print("Refresh token: {0}".format(credentials.refresh_token)) # Get YouTube service # https://developers.google.com/accounts/docs/OAuth2ForDevices#callinganapi youtube = build("youtube", "v3", http=credentials.authorize(httplib2.Http()))
Fix example to be Python3 compatible, use format() Both print() and format() are compatible from 2.6. Also, format() is much nicer to use for internationalization since you can define the location of your substitutions. It works similarly to Java and .net's format() as well. Great stuff! Should I tackle the other examples as well, or is piece meal all right? # -*- coding: utf-8 -*- # See: https://developers.google.com/accounts/docs/OAuth2ForDevices import httplib2 from six.moves import input from oauth2client.client import OAuth2WebServerFlow from googleapiclient.discovery import build CLIENT_ID = "some+client+id" CLIENT_SECRET = "some+client+secret" SCOPES = ("https://www.googleapis.com/auth/youtube",) flow = OAuth2WebServerFlow(CLIENT_ID, CLIENT_SECRET, " ".join(SCOPES)) # Step 1: get user code and verification URL # https://developers.google.com/accounts/docs/OAuth2ForDevices#obtainingacode flow_info = flow.step1_get_device_and_user_codes() print "Enter the following code at %s: %s" % (flow_info.verification_url, flow_info.user_code) print "Then press Enter." input() # Step 2: get credentials # https://developers.google.com/accounts/docs/OAuth2ForDevices#obtainingatoken credentials = flow.step2_exchange(device_flow_info=flow_info) print "Access token:", credentials.access_token print "Refresh token:", credentials.refresh_token # Get YouTube service # https://developers.google.com/accounts/docs/OAuth2ForDevices#callinganapi youtube = build("youtube", "v3", http=credentials.authorize(httplib2.Http()))
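The row above replaces Python 2 `print` statements and `%` interpolation with `print()` and `str.format()`, both of which work from Python 2.6 onward as well as on Python 3. A small standalone illustration of the same substitution style; the URL and code values are placeholders, not real OAuth output:

```python
from __future__ import print_function

verification_url = "https://example.com/device"  # placeholder, not a real endpoint
user_code = "ABCD-EFGH"                           # placeholder user code

# Python 2 only:  print "Enter the following code at %s: %s" % (url, code)
# Works on 2.6+ and 3.x; the {0}/{1} indices can be reordered for translation:
print("Enter the following code at {0}: {1}".format(verification_url, user_code))
```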
04182bff7a097b8842073f96bac834abb34f7118
setup.py
setup.py
from setuptools import setup, find_packages long_description = ( open('README.rst').read() + '\n' + open('CHANGES.txt').read()) setup( name='more.static', version='0.10.dev0', description="BowerStatic integration for Morepath", long_description=long_description, author="Martijn Faassen", author_email="faassen@startifact.com", keywords='morepath bowerstatic bower', license="BSD", url="http://pypi.python.org/pypi/more.static", namespace_packages=['more'], packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=[ 'setuptools', 'morepath >= 0.13', 'bowerstatic >= 0.8', ], extras_require=dict( test=[ 'pytest >= 2.0', 'pytest-cov', 'WebTest >= 2.0.14' ], ), )
import io from setuptools import setup, find_packages long_description = '\n'.join(( io.open('README.rst', encoding='utf-8').read(), io.open('CHANGES.txt', encoding='utf-8').read() )) setup( name='more.static', version='0.10.dev0', description="BowerStatic integration for Morepath", long_description=long_description, author="Martijn Faassen", author_email="faassen@startifact.com", keywords='morepath bowerstatic bower', license="BSD", url="http://pypi.python.org/pypi/more.static", namespace_packages=['more'], packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=[ 'setuptools', 'morepath >= 0.13', 'bowerstatic >= 0.8', ], extras_require=dict( test=[ 'pytest >= 2.0', 'pytest-cov', 'WebTest >= 2.0.14' ], ), )
Use io.open with encoding='utf-8' and flake8 compliance
Use io.open with encoding='utf-8' and flake8 compliance
Python
bsd-3-clause
morepath/more.static
import io from setuptools import setup, find_packages long_description = '\n'.join(( io.open('README.rst', encoding='utf-8').read(), io.open('CHANGES.txt', encoding='utf-8').read() )) setup( name='more.static', version='0.10.dev0', description="BowerStatic integration for Morepath", long_description=long_description, author="Martijn Faassen", author_email="faassen@startifact.com", keywords='morepath bowerstatic bower', license="BSD", url="http://pypi.python.org/pypi/more.static", namespace_packages=['more'], packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=[ 'setuptools', 'morepath >= 0.13', 'bowerstatic >= 0.8', ], extras_require=dict( test=[ 'pytest >= 2.0', 'pytest-cov', 'WebTest >= 2.0.14' ], ), )
Use io.open with encoding='utf-8' and flake8 compliance from setuptools import setup, find_packages long_description = ( open('README.rst').read() + '\n' + open('CHANGES.txt').read()) setup( name='more.static', version='0.10.dev0', description="BowerStatic integration for Morepath", long_description=long_description, author="Martijn Faassen", author_email="faassen@startifact.com", keywords='morepath bowerstatic bower', license="BSD", url="http://pypi.python.org/pypi/more.static", namespace_packages=['more'], packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=[ 'setuptools', 'morepath >= 0.13', 'bowerstatic >= 0.8', ], extras_require=dict( test=[ 'pytest >= 2.0', 'pytest-cov', 'WebTest >= 2.0.14' ], ), )
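The change in this row reads README.rst and CHANGES.txt through `io.open(..., encoding='utf-8')` so the long description decodes the same way on Python 2 and 3 regardless of locale. A self-contained sketch that writes and re-reads a small UTF-8 file; the temporary file name is illustrative and not part of the original project:

```python
import io
import os
import tempfile

# Create a throwaway file containing non-ASCII text.
tmp_dir = tempfile.mkdtemp()
path = os.path.join(tmp_dir, 'README.rst')
with io.open(path, 'w', encoding='utf-8') as f:
    f.write(u'more.static \u2013 BowerStatic integration\n')

# io.open with an explicit encoding returns text (unicode) on both Python 2 and 3.
with io.open(path, encoding='utf-8') as f:
    long_description = f.read()

print(long_description)
```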
4a817aff14ca6bc9717bd617d5bc49d15e698272
teuthology/orchestra/test/test_console.py
teuthology/orchestra/test/test_console.py
from teuthology.config import config as teuth_config from .. import console class TestConsole(object): pass class TestPhysicalConsole(TestConsole): klass = console.PhysicalConsole def setup(self): teuth_config.ipmi_domain = 'ipmi_domain' teuth_config.ipmi_user = 'ipmi_user' teuth_config.ipmi_password = 'ipmi_pass' self.hostname = 'host' def test_build_command(self): cmd_templ = 'ipmitool -H {h}.{d} -I lanplus -U {u} -P {p} {c}' cons = self.klass( self.hostname, teuth_config.ipmi_user, teuth_config.ipmi_password, teuth_config.ipmi_domain, ) sol_cmd = cons._build_command('sol activate') assert sol_cmd == cmd_templ.format( h=self.hostname, d=teuth_config.ipmi_domain, u=teuth_config.ipmi_user, p=teuth_config.ipmi_password, c='sol activate', ) pc_cmd = cons._build_command('power cycle') assert pc_cmd == sol_cmd.replace('sol activate', 'power cycle')
Add some tests for the console module
Add some tests for the console module ... better late than never? Signed-off-by: Zack Cerza <d7cdf09fc0f0426e98c9978ee42da5d61fa54986@redhat.com>
Python
mit
ceph/teuthology,dmick/teuthology,SUSE/teuthology,dmick/teuthology,SUSE/teuthology,ktdreyer/teuthology,dmick/teuthology,ktdreyer/teuthology,ceph/teuthology,SUSE/teuthology
from teuthology.config import config as teuth_config from .. import console class TestConsole(object): pass class TestPhysicalConsole(TestConsole): klass = console.PhysicalConsole def setup(self): teuth_config.ipmi_domain = 'ipmi_domain' teuth_config.ipmi_user = 'ipmi_user' teuth_config.ipmi_password = 'ipmi_pass' self.hostname = 'host' def test_build_command(self): cmd_templ = 'ipmitool -H {h}.{d} -I lanplus -U {u} -P {p} {c}' cons = self.klass( self.hostname, teuth_config.ipmi_user, teuth_config.ipmi_password, teuth_config.ipmi_domain, ) sol_cmd = cons._build_command('sol activate') assert sol_cmd == cmd_templ.format( h=self.hostname, d=teuth_config.ipmi_domain, u=teuth_config.ipmi_user, p=teuth_config.ipmi_password, c='sol activate', ) pc_cmd = cons._build_command('power cycle') assert pc_cmd == sol_cmd.replace('sol activate', 'power cycle')
Add some tests for the console module ... better late than never? Signed-off-by: Zack Cerza <d7cdf09fc0f0426e98c9978ee42da5d61fa54986@redhat.com>
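The new test in this row checks that `PhysicalConsole._build_command` produces the expected `ipmitool` invocation by formatting the same template itself and comparing. The core of that check is plain string templating, sketched here without the teuthology classes; the host and credential values are stand-ins mirroring the test fixtures:

```python
# Stand-alone version of the assertion logic in test_build_command.
cmd_templ = 'ipmitool -H {h}.{d} -I lanplus -U {u} -P {p} {c}'

hostname = 'host'
ipmi_domain = 'ipmi_domain'
ipmi_user = 'ipmi_user'
ipmi_password = 'ipmi_pass'

sol_cmd = cmd_templ.format(h=hostname, d=ipmi_domain, u=ipmi_user,
                           p=ipmi_password, c='sol activate')
pc_cmd = cmd_templ.format(h=hostname, d=ipmi_domain, u=ipmi_user,
                          p=ipmi_password, c='power cycle')

# The two commands should differ only in the trailing subcommand.
assert pc_cmd == sol_cmd.replace('sol activate', 'power cycle')
print(sol_cmd)
```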
c41115875ce46be3eacc1ec7c539010b430b0374
kegg_adapter/kegg.py
kegg_adapter/kegg.py
import urllib2 import json #response = urllib2.urlopen('http://rest.kegg.jp/list/pathway/ath') #html = response.read() #lines = html.split('\n'); #data = {}; #for line in lines: # parts = line.split('\t'); # if len(parts) >= 2: # data[parts[0]] = parts[1] #json_data = json.dumps(data) #print json_data def search(args): if not 'operation' in args.keys(): exit(1); if not 'argument' in args.keys(): exit(1); url = 'http://rest.kegg.jp/' operation = args['operation'] argument = args['argument'] url+= operation + '/' + argument if 'argument2' in args.keys(): url+= '/' + args['argument2'] if 'option' in args.keys(): url+= '/' + args['option'] response = urllib2.urlopen(url) html = response.read() data = {} if operation == 'find' or operation == 'list'\ or operation == 'link' or operation == 'conv': print "jsonizing" lines = html.split('\n') for line in lines: parts = line.split('\t'); if len(parts) >= 2: data[parts[0]] = parts[1] result = {} result['results'] = data result['args'] = args print json.dumps(result);
import urllib2 import json #response = urllib2.urlopen('http://rest.kegg.jp/list/pathway/ath') #html = response.read() #lines = html.split('\n'); #data = {}; #for line in lines: # parts = line.split('\t'); # if len(parts) >= 2: # data[parts[0]] = parts[1] #json_data = json.dumps(data) #print json_data def search(args): if not 'operation' in args.keys(): exit(0); if not 'argument' in args.keys(): exit(0); url = 'http://rest.kegg.jp/' operation = args['operation'] argument = args['argument'] url+= operation + '/' + argument if 'argument2' in args.keys(): url+= '/' + args['argument2'] if 'option' in args.keys(): url+= '/' + args['option'] response = urllib2.urlopen(url) html = response.read() data = {} if operation == 'find' or operation == 'list'\ or operation == 'link' or operation == 'conv': lines = html.split('\n') for line in lines: parts = line.split('\t'); if len(parts) >= 2: data[parts[0]] = parts[1] result = {} result['results'] = data result['args'] = args print json.dumps(result);
Remove debugging print statements changed exit status from 1 to 0
Remove debugging print statements changed exit status from 1 to 0
Python
artistic-2.0
Arabidopsis-Information-Portal/Intern-Hello-World,Arabidopsis-Information-Portal/KEGG-Pathway-API
import urllib2 import json #response = urllib2.urlopen('http://rest.kegg.jp/list/pathway/ath') #html = response.read() #lines = html.split('\n'); #data = {}; #for line in lines: # parts = line.split('\t'); # if len(parts) >= 2: # data[parts[0]] = parts[1] #json_data = json.dumps(data) #print json_data def search(args): if not 'operation' in args.keys(): exit(0); if not 'argument' in args.keys(): exit(0); url = 'http://rest.kegg.jp/' operation = args['operation'] argument = args['argument'] url+= operation + '/' + argument if 'argument2' in args.keys(): url+= '/' + args['argument2'] if 'option' in args.keys(): url+= '/' + args['option'] response = urllib2.urlopen(url) html = response.read() data = {} if operation == 'find' or operation == 'list'\ or operation == 'link' or operation == 'conv': lines = html.split('\n') for line in lines: parts = line.split('\t'); if len(parts) >= 2: data[parts[0]] = parts[1] result = {} result['results'] = data result['args'] = args print json.dumps(result);
Remove debugging print statements changed exit status from 1 to 0 import urllib2 import json #response = urllib2.urlopen('http://rest.kegg.jp/list/pathway/ath') #html = response.read() #lines = html.split('\n'); #data = {}; #for line in lines: # parts = line.split('\t'); # if len(parts) >= 2: # data[parts[0]] = parts[1] #json_data = json.dumps(data) #print json_data def search(args): if not 'operation' in args.keys(): exit(1); if not 'argument' in args.keys(): exit(1); url = 'http://rest.kegg.jp/' operation = args['operation'] argument = args['argument'] url+= operation + '/' + argument if 'argument2' in args.keys(): url+= '/' + args['argument2'] if 'option' in args.keys(): url+= '/' + args['option'] response = urllib2.urlopen(url) html = response.read() data = {} if operation == 'find' or operation == 'list'\ or operation == 'link' or operation == 'conv': print "jsonizing" lines = html.split('\n') for line in lines: parts = line.split('\t'); if len(parts) >= 2: data[parts[0]] = parts[1] result = {} result['results'] = data result['args'] = args print json.dumps(result);
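The KEGG adapter in this row builds a `rest.kegg.jp` URL, fetches it, and turns the tab-separated response into a dict keyed on the first column before JSON-encoding the result. The parsing step can be shown without any network call; the response text below is a made-up two-line sample in the same `id<TAB>description` shape:

```python
import json

# Hypothetical response body in KEGG's list/find TSV format (id <TAB> description).
html = ("path:ath00010\tGlycolysis / Gluconeogenesis\n"
        "path:ath00020\tCitrate cycle (TCA cycle)\n")

data = {}
for line in html.split('\n'):
    parts = line.split('\t')
    if len(parts) >= 2:
        data[parts[0]] = parts[1]

result = {'results': data, 'args': {'operation': 'list', 'argument': 'pathway/ath'}}
print(json.dumps(result))
```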
6358f3fb8a3ece53adeb71f9b59f96a5a3a9ca70
examples/system/ulp_adc/example_test.py
examples/system/ulp_adc/example_test.py
from __future__ import unicode_literals from tiny_test_fw import Utility import re import ttfw_idf @ttfw_idf.idf_example_test(env_tag='Example_GENERIC') def test_examples_ulp_adc(env, extra_data): dut = env.get_dut('ulp_adc', 'examples/system/ulp_adc') dut.start_app() dut.expect_all('Not ULP wakeup', 'Entering deep sleep', timeout=30) for _ in range(5): dut.expect('Deep sleep wakeup', timeout=60) measurements = int(dut.expect(re.compile(r'ULP did (\d+) measurements'), timeout=5)[0], 10) Utility.console_log('ULP did {} measurements'.format(measurements)) dut.expect('Thresholds: low=1500 high=2000', timeout=5) value = int(dut.expect(re.compile(r'Value=(\d+) was (?:below)|(?:above) threshold'), timeout=5)[0], 10) Utility.console_log('Value {} was outside the boundaries'.format(value)) dut.expect('Entering deep sleep', timeout=60) if __name__ == '__main__': test_examples_ulp_adc()
from __future__ import unicode_literals from tiny_test_fw import Utility import re import ttfw_idf @ttfw_idf.idf_example_test(env_tag='Example_GENERIC') def test_examples_ulp_adc(env, extra_data): dut = env.get_dut('ulp_adc', 'examples/system/ulp_adc') dut.start_app() dut.expect_all('Not ULP wakeup', 'Entering deep sleep', timeout=30) for _ in range(5): dut.expect('Deep sleep wakeup', timeout=60) measurements_str = dut.expect(re.compile(r'ULP did (\d+) measurements'), timeout=5)[0] assert measurements_str is not None measurements = int(measurements_str) Utility.console_log('ULP did {} measurements'.format(measurements)) dut.expect('Thresholds: low=1500 high=2000', timeout=5) value_str = dut.expect(re.compile(r'Value=(\d+) was (above|below) threshold'), timeout=5)[0] assert value_str is not None value = int(value_str) Utility.console_log('Value {} was outside the boundaries'.format(value)) dut.expect('Entering deep sleep', timeout=60) if __name__ == '__main__': test_examples_ulp_adc()
Fix regex in ulp_adc example test
CI: Fix regex in ulp_adc example test
Python
apache-2.0
espressif/esp-idf,espressif/esp-idf,espressif/esp-idf,espressif/esp-idf
from __future__ import unicode_literals from tiny_test_fw import Utility import re import ttfw_idf @ttfw_idf.idf_example_test(env_tag='Example_GENERIC') def test_examples_ulp_adc(env, extra_data): dut = env.get_dut('ulp_adc', 'examples/system/ulp_adc') dut.start_app() dut.expect_all('Not ULP wakeup', 'Entering deep sleep', timeout=30) for _ in range(5): dut.expect('Deep sleep wakeup', timeout=60) measurements_str = dut.expect(re.compile(r'ULP did (\d+) measurements'), timeout=5)[0] assert measurements_str is not None measurements = int(measurements_str) Utility.console_log('ULP did {} measurements'.format(measurements)) dut.expect('Thresholds: low=1500 high=2000', timeout=5) value_str = dut.expect(re.compile(r'Value=(\d+) was (above|below) threshold'), timeout=5)[0] assert value_str is not None value = int(value_str) Utility.console_log('Value {} was outside the boundaries'.format(value)) dut.expect('Entering deep sleep', timeout=60) if __name__ == '__main__': test_examples_ulp_adc()
CI: Fix regex in ulp_adc example test from __future__ import unicode_literals from tiny_test_fw import Utility import re import ttfw_idf @ttfw_idf.idf_example_test(env_tag='Example_GENERIC') def test_examples_ulp_adc(env, extra_data): dut = env.get_dut('ulp_adc', 'examples/system/ulp_adc') dut.start_app() dut.expect_all('Not ULP wakeup', 'Entering deep sleep', timeout=30) for _ in range(5): dut.expect('Deep sleep wakeup', timeout=60) measurements = int(dut.expect(re.compile(r'ULP did (\d+) measurements'), timeout=5)[0], 10) Utility.console_log('ULP did {} measurements'.format(measurements)) dut.expect('Thresholds: low=1500 high=2000', timeout=5) value = int(dut.expect(re.compile(r'Value=(\d+) was (?:below)|(?:above) threshold'), timeout=5)[0], 10) Utility.console_log('Value {} was outside the boundaries'.format(value)) dut.expect('Entering deep sleep', timeout=60) if __name__ == '__main__': test_examples_ulp_adc()
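The regex fix in this row matters because in the original pattern `r'Value=(\d+) was (?:below)|(?:above) threshold'` the `|` splits the whole expression, so a line containing "above threshold" matches through the second branch alone and group 1 never captures the number. Scoping the alternation as `(above|below)` keeps the prefix mandatory. A quick standalone check of the difference, using an invented log line:

```python
import re

line = 'Value=123 was above threshold'

old = re.compile(r'Value=(\d+) was (?:below)|(?:above) threshold')
new = re.compile(r'Value=(\d+) was (above|below) threshold')

m_old = old.search(line)
m_new = new.search(line)

# With the un-scoped alternation the second branch matches on its own,
# so group 1 comes back empty for "above" lines.
print(m_old.group(1))  # None
print(m_new.group(1))  # '123'
print(m_new.group(2))  # 'above'
```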
a565235303e1f2572ed34490e25c7e0f31aba74c
turngeneration/serializers.py
turngeneration/serializers.py
from django.contrib.contenttypes.models import ContentType from rest_framework import serializers from . import models class ContentTypeField(serializers.Field): def to_representation(self, obj): ct = ContentType.objects.get_for_model(obj) return u'{ct.app_label}.{ct.model}'.format(ct=ct) def get_attribute(self, obj): return obj class RealmSerializer(serializers.Serializer): content_type = ContentTypeField() object_id = serializers.IntegerField(source='pk') repr = serializers.CharField(source='__repr__')
from django.contrib.contenttypes.models import ContentType from rest_framework import serializers from . import models class ContentTypeField(serializers.Field): def to_representation(self, value): return u'{value.app_label}.{value.model}'.format(value=value) def to_internal_value(self, data): app_label, model = data.split('.') return ContentType.objects.get_by_natural_key(app_label, model) class ReadOnlyDefault(object): def set_context(self, serializer_field): self.current_value = getattr(serializer_field.parent.instance, serializer_field.name, None) def __call__(self): return self.current_value def __repr__(self): return '%s()' % (self.__class__.__name__,) class GeneratorSerializer(serializers.ModelSerializer): content_type = ContentTypeField(read_only=True, default=ReadOnlyDefault()) object_id = serializers.IntegerField(read_only=True, default=ReadOnlyDefault()) class Meta(object): model = models.Generator fields = ('content_type', 'object_id', 'generating', 'generation_time', 'autogenerate', 'allow_pauses', 'minimum_between_generations') read_only_fields = ('generating', 'generation_time') class RealmSerializer(serializers.Serializer): content_type = serializers.SerializerMethodField() object_id = serializers.IntegerField(source='pk') repr = serializers.CharField(source='__repr__') generator = serializers.SerializerMethodField(required=False) def get_content_type(self, obj): ct = ContentType.objects.get_for_model(obj) return u'{ct.app_label}.{ct.model}'.format(ct=ct) def get_generator(self, obj): ct = ContentType.objects.get_for_model(obj) try: generator = models.Generator.objects.get( content_type=ct, object_id=obj.pk) except models.Generator.DoesNotExist: return None return GeneratorSerializer(generator).data
Support nested generator inside the realm.
Support nested generator inside the realm.
Python
mit
jbradberry/django-turn-generation,jbradberry/django-turn-generation
from django.contrib.contenttypes.models import ContentType from rest_framework import serializers from . import models class ContentTypeField(serializers.Field): def to_representation(self, value): return u'{value.app_label}.{value.model}'.format(value=value) def to_internal_value(self, data): app_label, model = data.split('.') return ContentType.objects.get_by_natural_key(app_label, model) class ReadOnlyDefault(object): def set_context(self, serializer_field): self.current_value = getattr(serializer_field.parent.instance, serializer_field.name, None) def __call__(self): return self.current_value def __repr__(self): return '%s()' % (self.__class__.__name__,) class GeneratorSerializer(serializers.ModelSerializer): content_type = ContentTypeField(read_only=True, default=ReadOnlyDefault()) object_id = serializers.IntegerField(read_only=True, default=ReadOnlyDefault()) class Meta(object): model = models.Generator fields = ('content_type', 'object_id', 'generating', 'generation_time', 'autogenerate', 'allow_pauses', 'minimum_between_generations') read_only_fields = ('generating', 'generation_time') class RealmSerializer(serializers.Serializer): content_type = serializers.SerializerMethodField() object_id = serializers.IntegerField(source='pk') repr = serializers.CharField(source='__repr__') generator = serializers.SerializerMethodField(required=False) def get_content_type(self, obj): ct = ContentType.objects.get_for_model(obj) return u'{ct.app_label}.{ct.model}'.format(ct=ct) def get_generator(self, obj): ct = ContentType.objects.get_for_model(obj) try: generator = models.Generator.objects.get( content_type=ct, object_id=obj.pk) except models.Generator.DoesNotExist: return None return GeneratorSerializer(generator).data
Support nested generator inside the realm. from django.contrib.contenttypes.models import ContentType from rest_framework import serializers from . import models class ContentTypeField(serializers.Field): def to_representation(self, obj): ct = ContentType.objects.get_for_model(obj) return u'{ct.app_label}.{ct.model}'.format(ct=ct) def get_attribute(self, obj): return obj class RealmSerializer(serializers.Serializer): content_type = ContentTypeField() object_id = serializers.IntegerField(source='pk') repr = serializers.CharField(source='__repr__')
7531ed0c9ae25f04884250c84b39a630ae7ef34b
raiden/storage/migrations/v20_to_v21.py
raiden/storage/migrations/v20_to_v21.py
import json from raiden.storage.sqlite import SQLiteStorage SOURCE_VERSION = 20 TARGET_VERSION = 21 def _transform_snapshot(raw_snapshot: str) -> str: snapshot = json.loads(raw_snapshot) for task in snapshot['payment_mapping']['secrethashes_to_task'].values(): if 'raiden.transfer.state.InitiatorTask' in task['_type']: for initiator in task['manager_task']['initiator_transfers'].values(): initiator['transfer_description']['allocated_fee'] = 0 ids_to_addrs = dict() for payment_network in snapshot['identifiers_to_paymentnetworks'].values(): for token_network in payment_network['tokenidentifiers_to_tokennetworks'].values(): ids_to_addrs[payment_network['address']] = token_network['token_address'] snapshot['tokennetworkaddresses_to_paymentnetworkaddresses'] = ids_to_addrs for payment_network in snapshot['identifiers_to_paymentnetworks'].values(): for token_network in payment_network['tokenidentifiers_to_tokennetworks'].values(): for channel_state in token_network['channelidentifiers_to_channels'].values(): channel_state['mediation_fee'] = 0 return json.dumps(snapshot) def _update_snapshots(storage: SQLiteStorage): updated_snapshots_data = [] for snapshot in storage.get_snapshots(): new_snapshot = _transform_snapshot(snapshot.data) updated_snapshots_data.append((new_snapshot, snapshot.identifier)) storage.update_snapshots(updated_snapshots_data) def _update_statechanges(storage: SQLiteStorage): batch_size = 50 batch_query = storage.batch_query_state_changes( batch_size=batch_size, filters=[ ('_type', 'raiden.transfer.state_change.ContractReceiveChannelNew'), ], ) for state_changes_batch in batch_query: for state_change in state_changes_batch: state_change['channel_state']['mediation_fee'] = 0 storage.update_state_changes(state_changes_batch) batch_query = storage.batch_query_state_changes( batch_size=batch_size, filters=[ ('_type', 'raiden.transfer.mediated_transfer.state_change.ActionInitInitiator'), ], ) for state_changes_batch in batch_query: for state_change in state_changes_batch: state_change['transfer']['allocated_fee'] = 0 storage.update_state_changes(state_changes_batch) def upgrade_v19_to_v20( storage: SQLiteStorage, old_version: int, **kwargs, # pylint: disable=unused-argument ) -> int: if old_version == SOURCE_VERSION: _update_snapshots(storage) _update_statechanges(storage) return TARGET_VERSION
Move migration 21 to it's proper file
Move migration 21 to it's proper file
Python
mit
hackaugusto/raiden,hackaugusto/raiden
import json from raiden.storage.sqlite import SQLiteStorage SOURCE_VERSION = 20 TARGET_VERSION = 21 def _transform_snapshot(raw_snapshot: str) -> str: snapshot = json.loads(raw_snapshot) for task in snapshot['payment_mapping']['secrethashes_to_task'].values(): if 'raiden.transfer.state.InitiatorTask' in task['_type']: for initiator in task['manager_task']['initiator_transfers'].values(): initiator['transfer_description']['allocated_fee'] = 0 ids_to_addrs = dict() for payment_network in snapshot['identifiers_to_paymentnetworks'].values(): for token_network in payment_network['tokenidentifiers_to_tokennetworks'].values(): ids_to_addrs[payment_network['address']] = token_network['token_address'] snapshot['tokennetworkaddresses_to_paymentnetworkaddresses'] = ids_to_addrs for payment_network in snapshot['identifiers_to_paymentnetworks'].values(): for token_network in payment_network['tokenidentifiers_to_tokennetworks'].values(): for channel_state in token_network['channelidentifiers_to_channels'].values(): channel_state['mediation_fee'] = 0 return json.dumps(snapshot) def _update_snapshots(storage: SQLiteStorage): updated_snapshots_data = [] for snapshot in storage.get_snapshots(): new_snapshot = _transform_snapshot(snapshot.data) updated_snapshots_data.append((new_snapshot, snapshot.identifier)) storage.update_snapshots(updated_snapshots_data) def _update_statechanges(storage: SQLiteStorage): batch_size = 50 batch_query = storage.batch_query_state_changes( batch_size=batch_size, filters=[ ('_type', 'raiden.transfer.state_change.ContractReceiveChannelNew'), ], ) for state_changes_batch in batch_query: for state_change in state_changes_batch: state_change['channel_state']['mediation_fee'] = 0 storage.update_state_changes(state_changes_batch) batch_query = storage.batch_query_state_changes( batch_size=batch_size, filters=[ ('_type', 'raiden.transfer.mediated_transfer.state_change.ActionInitInitiator'), ], ) for state_changes_batch in batch_query: for state_change in state_changes_batch: state_change['transfer']['allocated_fee'] = 0 storage.update_state_changes(state_changes_batch) def upgrade_v19_to_v20( storage: SQLiteStorage, old_version: int, **kwargs, # pylint: disable=unused-argument ) -> int: if old_version == SOURCE_VERSION: _update_snapshots(storage) _update_statechanges(storage) return TARGET_VERSION
Move migration 21 to it's proper file
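The v20-to-v21 migration in this row walks each stored snapshot and zeroes out fee fields that did not exist before (`allocated_fee` on initiator transfer descriptions, `mediation_fee` on channel states) while recording a token-network to payment-network address map. A toy run of the snapshot transform on a hand-built dict with the same keys; the addresses and identifiers are invented placeholders:

```python
import json

# Minimal snapshot containing only the keys _transform_snapshot touches.
snapshot = {
    'payment_mapping': {'secrethashes_to_task': {}},
    'identifiers_to_paymentnetworks': {
        '1': {
            'address': '0xPAYMENTNETWORK',
            'tokenidentifiers_to_tokennetworks': {
                '2': {
                    'token_address': '0xTOKEN',
                    'channelidentifiers_to_channels': {'3': {}},
                },
            },
        },
    },
}

ids_to_addrs = {}
for payment_network in snapshot['identifiers_to_paymentnetworks'].values():
    for token_network in payment_network['tokenidentifiers_to_tokennetworks'].values():
        ids_to_addrs[payment_network['address']] = token_network['token_address']
        for channel_state in token_network['channelidentifiers_to_channels'].values():
            channel_state['mediation_fee'] = 0
snapshot['tokennetworkaddresses_to_paymentnetworkaddresses'] = ids_to_addrs

print(json.dumps(snapshot, indent=2))
```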
5545bd1df34e6d3bb600b78b92d757ea12e3861b
printer/PlatformPhysicsOperation.py
printer/PlatformPhysicsOperation.py
from UM.Operations.Operation import Operation from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation from UM.Operations.TranslateOperation import TranslateOperation ## A specialised operation designed specifically to modify the previous operation. class PlatformPhysicsOperation(Operation): def __init__(self, node, translation): super().__init__() self._node = node self._translation = translation def undo(self): pass def redo(self): pass def mergeWith(self, other): if type(other) is AddSceneNodeOperation: other._node.translate(self._translation) return other elif type(other) is TranslateOperation: other._translation += self._translation return other else: return False
from UM.Operations.Operation import Operation from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation from UM.Operations.TranslateOperation import TranslateOperation from UM.Operations.GroupedOperation import GroupedOperation ## A specialised operation designed specifically to modify the previous operation. class PlatformPhysicsOperation(Operation): def __init__(self, node, translation): super().__init__() self._node = node self._transform = node.getLocalTransformation() self._position = node.getPosition() + translation self._always_merge = True def undo(self): self._node.setLocalTransformation(self._transform) def redo(self): self._node.setPosition(self._position) def mergeWith(self, other): group = GroupedOperation() group.addOperation(self) group.addOperation(other) return group def __repr__(self): return 'PlatformPhysicsOperation(t = {0})'.format(self._position)
Use GroupedOperation for merging PlatformPhyisicsOperation
Use GroupedOperation for merging PlatformPhyisicsOperation
Python
agpl-3.0
onitake/Uranium,onitake/Uranium
from UM.Operations.Operation import Operation from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation from UM.Operations.TranslateOperation import TranslateOperation from UM.Operations.GroupedOperation import GroupedOperation ## A specialised operation designed specifically to modify the previous operation. class PlatformPhysicsOperation(Operation): def __init__(self, node, translation): super().__init__() self._node = node self._transform = node.getLocalTransformation() self._position = node.getPosition() + translation self._always_merge = True def undo(self): self._node.setLocalTransformation(self._transform) def redo(self): self._node.setPosition(self._position) def mergeWith(self, other): group = GroupedOperation() group.addOperation(self) group.addOperation(other) return group def __repr__(self): return 'PlatformPhysicsOperation(t = {0})'.format(self._position)
Use GroupedOperation for merging PlatformPhyisicsOperation from UM.Operations.Operation import Operation from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation from UM.Operations.TranslateOperation import TranslateOperation ## A specialised operation designed specifically to modify the previous operation. class PlatformPhysicsOperation(Operation): def __init__(self, node, translation): super().__init__() self._node = node self._translation = translation def undo(self): pass def redo(self): pass def mergeWith(self, other): if type(other) is AddSceneNodeOperation: other._node.translate(self._translation) return other elif type(other) is TranslateOperation: other._translation += self._translation return other else: return False
b1963f00e5290c11654eefbd24fbce185bbcd8b4
packages/Preferences/define.py
packages/Preferences/define.py
import os _CURRENTPATH = os.path.dirname(os.path.realpath(__file__)) preferencesIconPath = os.path.join(_CURRENTPATH, 'static', 'gear.svg') preferencesUIPath = os.path.join(_CURRENTPATH, 'ui', 'preferences.ui') version = '0.1.0'
import os _CURRENTPATH = os.path.dirname(os.path.realpath(__file__)) config_name = 'mantle_config.ini' preferencesIconPath = os.path.join(_CURRENTPATH, 'static', 'gear.svg') preferencesUIPath = os.path.join(_CURRENTPATH, 'ui', 'preferences.ui') version = '0.1.0'
Add config ini file name.
Add config ini file name.
Python
mit
takavfx/Mantle
import os _CURRENTPATH = os.path.dirname(os.path.realpath(__file__)) config_name = 'mantle_config.ini' preferencesIconPath = os.path.join(_CURRENTPATH, 'static', 'gear.svg') preferencesUIPath = os.path.join(_CURRENTPATH, 'ui', 'preferences.ui') version = '0.1.0'
Add config ini file name. import os _CURRENTPATH = os.path.dirname(os.path.realpath(__file__)) preferencesIconPath = os.path.join(_CURRENTPATH, 'static', 'gear.svg') preferencesUIPath = os.path.join(_CURRENTPATH, 'ui', 'preferences.ui') version = '0.1.0'
567e12bfb8d0f4e2a4f6fddf0fab9ffbcbf6d49f
requests/_bug.py
requests/_bug.py
"""Module containing bug report helper(s).""" from __future__ import print_function import json import platform import sys import ssl from . import __version__ as requests_version try: from .packages.urllib3.contrib import pyopenssl except ImportError: pyopenssl = None OpenSSL = None cryptography = None else: import OpenSSL import cryptography def _implementation(): """Return a dict with the Python implementation and verison. Provide both the name and the version of the Python implementation currently running. For example, on CPython 2.7.5 it will return {'name': 'CPython', 'version': '2.7.5'}. This function works best on CPython and PyPy: in particular, it probably doesn't work for Jython or IronPython. Future investigation should be done to work out the correct shape of the code for those platforms. """ implementation = platform.python_implementation() if implementation == 'CPython': implementation_version = platform.python_version() elif implementation == 'PyPy': implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major, sys.pypy_version_info.minor, sys.pypy_version_info.micro) if sys.pypy_version_info.releaselevel != 'final': implementation_version = ''.join([ implementation_version, sys.pypy_version_info.releaselevel ]) elif implementation == 'Jython': implementation_version = platform.python_version() # Complete Guess elif implementation == 'IronPython': implementation_version = platform.python_version() # Complete Guess else: implementation_version = 'Unknown' return {'name': implementation, 'version': implementation_version} def information(): """Generate information for a bug report.""" try: platform_info = { 'system': platform.system(), 'release': platform.release(), } except IOError: platform_info = { 'system': 'Unknown', 'release': 'Unknown', } implementation_info = _implementation() pyopenssl_info = { 'version': None, 'openssl_version': '', } if OpenSSL: pyopenssl_info = { 'version': OpenSSL.__version__, 'openssl_version': '%x' % OpenSSL.SSL.OPENSSL_VERSION_NUMBER, } cryptography_info = { 'version': getattr(cryptography, '__version__', ''), } return { 'platform': platform_info, 'implementation': implementation_info, 'system_ssl': { 'version': '%x' % ssl.OPENSSL_VERSION_NUMBER, }, 'using_pyopenssl': pyopenssl is not None, 'pyOpenSSL': pyopenssl_info, 'cryptography': cryptography_info, 'requests': { 'version': requests_version, }, } def print_information(): """Pretty-print the bug information as JSON.""" print(json.dumps(information(), sort_keys=True, indent=2))
Add debugging submodule for bug reporters
Add debugging submodule for bug reporters The suggested usage in a bug report would be python -c 'from requests import _bug; _bug.print_information()' This should generate most of the information we tend to ask for repeatedly from bug reporters.
Python
apache-2.0
psf/requests
"""Module containing bug report helper(s).""" from __future__ import print_function import json import platform import sys import ssl from . import __version__ as requests_version try: from .packages.urllib3.contrib import pyopenssl except ImportError: pyopenssl = None OpenSSL = None cryptography = None else: import OpenSSL import cryptography def _implementation(): """Return a dict with the Python implementation and verison. Provide both the name and the version of the Python implementation currently running. For example, on CPython 2.7.5 it will return {'name': 'CPython', 'version': '2.7.5'}. This function works best on CPython and PyPy: in particular, it probably doesn't work for Jython or IronPython. Future investigation should be done to work out the correct shape of the code for those platforms. """ implementation = platform.python_implementation() if implementation == 'CPython': implementation_version = platform.python_version() elif implementation == 'PyPy': implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major, sys.pypy_version_info.minor, sys.pypy_version_info.micro) if sys.pypy_version_info.releaselevel != 'final': implementation_version = ''.join([ implementation_version, sys.pypy_version_info.releaselevel ]) elif implementation == 'Jython': implementation_version = platform.python_version() # Complete Guess elif implementation == 'IronPython': implementation_version = platform.python_version() # Complete Guess else: implementation_version = 'Unknown' return {'name': implementation, 'version': implementation_version} def information(): """Generate information for a bug report.""" try: platform_info = { 'system': platform.system(), 'release': platform.release(), } except IOError: platform_info = { 'system': 'Unknown', 'release': 'Unknown', } implementation_info = _implementation() pyopenssl_info = { 'version': None, 'openssl_version': '', } if OpenSSL: pyopenssl_info = { 'version': OpenSSL.__version__, 'openssl_version': '%x' % OpenSSL.SSL.OPENSSL_VERSION_NUMBER, } cryptography_info = { 'version': getattr(cryptography, '__version__', ''), } return { 'platform': platform_info, 'implementation': implementation_info, 'system_ssl': { 'version': '%x' % ssl.OPENSSL_VERSION_NUMBER, }, 'using_pyopenssl': pyopenssl is not None, 'pyOpenSSL': pyopenssl_info, 'cryptography': cryptography_info, 'requests': { 'version': requests_version, }, } def print_information(): """Pretty-print the bug information as JSON.""" print(json.dumps(information(), sort_keys=True, indent=2))
Add debugging submodule for bug reporters The suggested usage in a bug report would be python -c 'from requests import _bug; _bug.print_information()' This should generate most of the information we tend to ask for repeatedly from bug reporters.
e561c1354d2f9a550f2b27bb88d8e4d0f3f76203
common/djangoapps/student/management/commands/recover_truncated_anonymous_ids.py
common/djangoapps/student/management/commands/recover_truncated_anonymous_ids.py
""" Generate sql commands to fix truncated anonymous student ids in the ORA database """ import sys from django.core.management.base import NoArgsCommand from student.models import AnonymousUserId, anonymous_id_for_user class Command(NoArgsCommand): help = __doc__ def handle_noargs(self, **options): """ Reads a list of ids (newline separated) from stdin, and dumps sql queries to run on the ORA database to fix those ids from their truncated form to the full 32 character change. The following query will generate the list of ids needed to be fixed from the ORA database: SELECT student_id FROM peer_grading_calibrationhistory WHERE LENGTH(student_id) = 16 UNION SELECT student_id FROM controller_submission WHERE LENGTH(student_id) = 16 UNION SELECT student_id FROM metrics_timing WHERE LENGTH(student_id) = 16 UNION SELECT student_id FROM metrics_studentcourseprofile WHERE LENGTH(student_id) = 16 UNION SELECT student_id FROM metrics_studentprofile WHERE LENGTH(student_id) = 16; """ ids = [line.strip() for line in sys.stdin] old_ids = AnonymousUserId.objects.raw( """ SELECT * FROM student_anonymoususerid_temp_archive WHERE anonymous_user_id IN ({}) """.format(','.join(['%s']*len(ids))), ids ) for old_id in old_ids: new_id = anonymous_id_for_user(old_id.user, old_id.course_id) for table in ('peer_grading_calibrationhistory', 'controller_submission', 'metrics_timing'): self.stdout.write( "UPDATE {} " "SET student_id = '{}' " "WHERE student_id = '{}';\n".format( table, new_id, old_id.anonymous_user_id, ) ) self.stdout.write( "DELETE FROM metrics_studentcourseprofile " "WHERE student_id = '{}' " "AND problems_attempted = 0;\n".format(old_id.anonymous_user_id) ) self.stdout.write( "DELETE FROM metrics_studentprofile " "WHERE student_id = '{}' " "AND messages_sent = 0 " "AND messages_received = 0 " "AND average_message_feedback_length = 0 " "AND student_is_staff_banned = 0 " "AND student_cannot_submit_more_for_peer_grading = 0;\n".format(old_id.anonymous_user_id) )
Add managemant command to generate sql to clean up tp truncated student ids in ORA db
Add managemant command to generate sql to clean up tp truncated student ids in ORA db
Python
agpl-3.0
openfun/edx-platform,synergeticsedx/deployment-wipro,shashank971/edx-platform,bigdatauniversity/edx-platform,shabab12/edx-platform,philanthropy-u/edx-platform,openfun/edx-platform,motion2015/edx-platform,ubc/edx-platform,jolyonb/edx-platform,cognitiveclass/edx-platform,ferabra/edx-platform,jswope00/griffinx,proversity-org/edx-platform,ampax/edx-platform,proversity-org/edx-platform,jazztpt/edx-platform,yokose-ks/edx-platform,UXE/local-edx,pabloborrego93/edx-platform,ahmadiga/min_edx,eemirtekin/edx-platform,hkawasaki/kawasaki-aio8-1,arifsetiawan/edx-platform,alu042/edx-platform,alexthered/kienhoc-platform,appliedx/edx-platform,IndonesiaX/edx-platform,4eek/edx-platform,ak2703/edx-platform,mahendra-r/edx-platform,LearnEra/LearnEraPlaftform,jswope00/griffinx,zerobatu/edx-platform,doganov/edx-platform,xuxiao19910803/edx-platform,jbassen/edx-platform,nanolearning/edx-platform,mitocw/edx-platform,WatanabeYasumasa/edx-platform,jbassen/edx-platform,abdoosh00/edraak,alexthered/kienhoc-platform,antonve/s4-project-mooc,Edraak/edraak-platform,longmen21/edx-platform,edx/edx-platform,chrisndodge/edx-platform,sameetb-cuelogic/edx-platform-test,cselis86/edx-platform,jamesblunt/edx-platform,hkawasaki/kawasaki-aio8-2,jonathan-beard/edx-platform,hamzehd/edx-platform,jonathan-beard/edx-platform,shubhdev/edx-platform,mushtaqak/edx-platform,romain-li/edx-platform,Stanford-Online/edx-platform,nagyistoce/edx-platform,shubhdev/edx-platform,longmen21/edx-platform,motion2015/edx-platform,AkA84/edx-platform,analyseuc3m/ANALYSE-v1,shurihell/testasia,Stanford-Online/edx-platform,torchingloom/edx-platform,chauhanhardik/populo,bdero/edx-platform,jamesblunt/edx-platform,BehavioralInsightsTeam/edx-platform,cyanna/edx-platform,peterm-itr/edx-platform,Unow/edx-platform,franosincic/edx-platform,Edraak/circleci-edx-platform,rhndg/openedx,ahmadiga/min_edx,proversity-org/edx-platform,beni55/edx-platform,etzhou/edx-platform,jswope00/griffinx,olexiim/edx-platform,kamalx/edx-platform,JioEducation/edx-platform,auferack08/edx-platform,doismellburning/edx-platform,utecuy/edx-platform,kmoocdev2/edx-platform,jruiperezv/ANALYSE,synergeticsedx/deployment-wipro,chudaol/edx-platform,edx-solutions/edx-platform,JioEducation/edx-platform,10clouds/edx-platform,4eek/edx-platform,alu042/edx-platform,jruiperezv/ANALYSE,vikas1885/test1,chauhanhardik/populo_2,nanolearningllc/edx-platform-cypress,eemirtekin/edx-platform,benpatterson/edx-platform,iivic/BoiseStateX,shubhdev/openedx,etzhou/edx-platform,louyihua/edx-platform,dsajkl/123,shubhdev/openedx,itsjeyd/edx-platform,10clouds/edx-platform,chand3040/cloud_that,CourseTalk/edx-platform,jzoldak/edx-platform,romain-li/edx-platform,sameetb-cuelogic/edx-platform-test,unicri/edx-platform,IONISx/edx-platform,motion2015/a3,Semi-global/edx-platform,zadgroup/edx-platform,halvertoluke/edx-platform,motion2015/edx-platform,ovnicraft/edx-platform,J861449197/edx-platform,benpatterson/edx-platform,vasyarv/edx-platform,arifsetiawan/edx-platform,philanthropy-u/edx-platform,romain-li/edx-platform,valtech-mooc/edx-platform,ampax/edx-platform-backup,synergeticsedx/deployment-wipro,zadgroup/edx-platform,ferabra/edx-platform,J861449197/edx-platform,dsajkl/reqiop,mbareta/edx-platform-ft,chrisndodge/edx-platform,morenopc/edx-platform,JioEducation/edx-platform,mjg2203/edx-platform-seas,DNFcode/edx-platform,LICEF/edx-platform,playm2mboy/edx-platform,shurihell/testasia,Shrhawk/edx-platform,unicri/edx-platform,mushtaqak/edx-platform,nikolas/edx-platform,4eek/edx-platform,mushtaqak/edx-platform,Softmotions/edx-platform,don-github/ed
x-platform,DefyVentures/edx-platform,shubhdev/edx-platform,WatanabeYasumasa/edx-platform,pelikanchik/edx-platform,nttks/jenkins-test,defance/edx-platform,IndonesiaX/edx-platform,edry/edx-platform,arifsetiawan/edx-platform,raccoongang/edx-platform,doismellburning/edx-platform,nanolearning/edx-platform,etzhou/edx-platform,olexiim/edx-platform,motion2015/a3,bdero/edx-platform,JCBarahona/edX,xuxiao19910803/edx,procangroup/edx-platform,torchingloom/edx-platform,hkawasaki/kawasaki-aio8-1,dkarakats/edx-platform,eduNEXT/edunext-platform,ampax/edx-platform-backup,Livit/Livit.Learn.EdX,simbs/edx-platform,zerobatu/edx-platform,ak2703/edx-platform,ubc/edx-platform,jonathan-beard/edx-platform,miptliot/edx-platform,a-parhom/edx-platform,dsajkl/123,carsongee/edx-platform,J861449197/edx-platform,openfun/edx-platform,knehez/edx-platform,jbzdak/edx-platform,hkawasaki/kawasaki-aio8-0,beacloudgenius/edx-platform,Softmotions/edx-platform,unicri/edx-platform,ovnicraft/edx-platform,Semi-global/edx-platform,B-MOOC/edx-platform,louyihua/edx-platform,jswope00/GAI,doganov/edx-platform,beni55/edx-platform,IONISx/edx-platform,IndonesiaX/edx-platform,deepsrijit1105/edx-platform,Edraak/edx-platform,shubhdev/edxOnBaadal,chauhanhardik/populo_2,nttks/edx-platform,SravanthiSinha/edx-platform,romain-li/edx-platform,devs1991/test_edx_docmode,chauhanhardik/populo,Kalyzee/edx-platform,hastexo/edx-platform,mjg2203/edx-platform-seas,Stanford-Online/edx-platform,beni55/edx-platform,kmoocdev2/edx-platform,eduNEXT/edx-platform,simbs/edx-platform,playm2mboy/edx-platform,ESOedX/edx-platform,longmen21/edx-platform,hamzehd/edx-platform,kamalx/edx-platform,jazkarta/edx-platform,pomegranited/edx-platform,SravanthiSinha/edx-platform,naresh21/synergetics-edx-platform,procangroup/edx-platform,zofuthan/edx-platform,chand3040/cloud_that,torchingloom/edx-platform,benpatterson/edx-platform,adoosii/edx-platform,lduarte1991/edx-platform,MakeHer/edx-platform,OmarIthawi/edx-platform,jamiefolsom/edx-platform,bigdatauniversity/edx-platform,miptliot/edx-platform,Shrhawk/edx-platform,ferabra/edx-platform,apigee/edx-platform,mjirayu/sit_academy,nanolearningllc/edx-platform-cypress,xinjiguaike/edx-platform,nanolearningllc/edx-platform-cypress-2,olexiim/edx-platform,vikas1885/test1,caesar2164/edx-platform,pku9104038/edx-platform,appsembler/edx-platform,leansoft/edx-platform,ESOedX/edx-platform,longmen21/edx-platform,edx-solutions/edx-platform,eduNEXT/edx-platform,B-MOOC/edx-platform,EDUlib/edx-platform,B-MOOC/edx-platform,don-github/edx-platform,jazkarta/edx-platform-for-isc,eemirtekin/edx-platform,openfun/edx-platform,msegado/edx-platform,motion2015/a3,arbrandes/edx-platform,appliedx/edx-platform,gymnasium/edx-platform,JCBarahona/edX,zhenzhai/edx-platform,shubhdev/edxOnBaadal,JCBarahona/edX,DNFcode/edx-platform,LICEF/edx-platform,xingyepei/edx-platform,polimediaupv/edx-platform,franosincic/edx-platform,pomegranited/edx-platform,EDUlib/edx-platform,eduNEXT/edunext-platform,UOMx/edx-platform,doganov/edx-platform,abdoosh00/edx-rtl-final,polimediaupv/edx-platform,jazkarta/edx-platform,itsjeyd/edx-platform,cselis86/edx-platform,edx/edx-platform,edry/edx-platform,hastexo/edx-platform,sameetb-cuelogic/edx-platform-test,kursitet/edx-platform,jazkarta/edx-platform,morenopc/edx-platform,pku9104038/edx-platform,jjmiranda/edx-platform,doismellburning/edx-platform,LearnEra/LearnEraPlaftform,pomegranited/edx-platform,pepeportela/edx-platform,kamalx/edx-platform,chauhanhardik/populo,xingyepei/edx-platform,nttks/edx-platform,sameetb-cuelogic/edx-platform-test,lduarte1991/
edx-platform,miptliot/edx-platform,chand3040/cloud_that,tanmaykm/edx-platform,teltek/edx-platform,mbareta/edx-platform-ft,ahmadio/edx-platform,iivic/BoiseStateX,cecep-edu/edx-platform,TeachAtTUM/edx-platform,knehez/edx-platform,RPI-OPENEDX/edx-platform,SivilTaram/edx-platform,Endika/edx-platform,xuxiao19910803/edx-platform,gsehub/edx-platform,beni55/edx-platform,MSOpenTech/edx-platform,antoviaque/edx-platform,10clouds/edx-platform,a-parhom/edx-platform,4eek/edx-platform,MSOpenTech/edx-platform,fintech-circle/edx-platform,arbrandes/edx-platform,yokose-ks/edx-platform,jswope00/GAI,angelapper/edx-platform,vismartltd/edx-platform,franosincic/edx-platform,martynovp/edx-platform,jamiefolsom/edx-platform,nikolas/edx-platform,antonve/s4-project-mooc,stvstnfrd/edx-platform,nttks/edx-platform,carsongee/edx-platform,prarthitm/edxplatform,nagyistoce/edx-platform,Edraak/circleci-edx-platform,vasyarv/edx-platform,angelapper/edx-platform,rue89-tech/edx-platform,waheedahmed/edx-platform,alu042/edx-platform,vismartltd/edx-platform,mitocw/edx-platform,Edraak/circleci-edx-platform,jbassen/edx-platform,Edraak/edraak-platform,atsolakid/edx-platform,zhenzhai/edx-platform,sudheerchintala/LearnEraPlatForm,ovnicraft/edx-platform,teltek/edx-platform,torchingloom/edx-platform,kursitet/edx-platform,jbassen/edx-platform,jazkarta/edx-platform-for-isc,dcosentino/edx-platform,valtech-mooc/edx-platform,devs1991/test_edx_docmode,mbareta/edx-platform-ft,LearnEra/LearnEraPlaftform,alexthered/kienhoc-platform,cpennington/edx-platform,fly19890211/edx-platform,doganov/edx-platform,CredoReference/edx-platform,shubhdev/openedx,chudaol/edx-platform,atsolakid/edx-platform,mahendra-r/edx-platform,deepsrijit1105/edx-platform,alu042/edx-platform,simbs/edx-platform,dkarakats/edx-platform,zubair-arbi/edx-platform,DefyVentures/edx-platform,DNFcode/edx-platform,knehez/edx-platform,andyzsf/edx,antonve/s4-project-mooc,J861449197/edx-platform,simbs/edx-platform,Livit/Livit.Learn.EdX,jelugbo/tundex,utecuy/edx-platform,rue89-tech/edx-platform,appliedx/edx-platform,edry/edx-platform,ahmedaljazzar/edx-platform,stvstnfrd/edx-platform,SravanthiSinha/edx-platform,peterm-itr/edx-platform,J861449197/edx-platform,apigee/edx-platform,wwj718/ANALYSE,pomegranited/edx-platform,procangroup/edx-platform,rhndg/openedx,mjg2203/edx-platform-seas,LICEF/edx-platform,beacloudgenius/edx-platform,eduNEXT/edunext-platform,ahmadio/edx-platform,ferabra/edx-platform,dsajkl/reqiop,TeachAtTUM/edx-platform,inares/edx-platform,wwj718/edx-platform,TeachAtTUM/edx-platform,Ayub-Khan/edx-platform,LICEF/edx-platform,antoviaque/edx-platform,longmen21/edx-platform,defance/edx-platform,fintech-circle/edx-platform,SivilTaram/edx-platform,leansoft/edx-platform,martynovp/edx-platform,hamzehd/edx-platform,zerobatu/edx-platform,ahmedaljazzar/edx-platform,franosincic/edx-platform,xingyepei/edx-platform,ampax/edx-platform,kmoocdev2/edx-platform,DefyVentures/edx-platform,prarthitm/edxplatform,cognitiveclass/edx-platform,adoosii/edx-platform,Edraak/edx-platform,carsongee/edx-platform,msegado/edx-platform,jazkarta/edx-platform,jswope00/GAI,inares/edx-platform,motion2015/a3,eestay/edx-platform,Endika/edx-platform,pabloborrego93/edx-platform,kursitet/edx-platform,abdoosh00/edx-rtl-final,peterm-itr/edx-platform,jonathan-beard/edx-platform,itsjeyd/edx-platform,peterm-itr/edx-platform,wwj718/ANALYSE,wwj718/edx-platform,jelugbo/tundex,amir-qayyum-khan/edx-platform,eduNEXT/edunext-platform,jbzdak/edx-platform,prarthitm/edxplatform,y12uc231/edx-platform,xuxiao19910803/edx,appliedx/edx-platform,kamal
x/edx-platform,jamesblunt/edx-platform,bigdatauniversity/edx-platform,MakeHer/edx-platform,polimediaupv/edx-platform,rhndg/openedx,jamiefolsom/edx-platform,MakeHer/edx-platform,AkA84/edx-platform,atsolakid/edx-platform,Kalyzee/edx-platform,Lektorium-LLC/edx-platform,xuxiao19910803/edx-platform,IONISx/edx-platform,mcgachey/edx-platform,mjirayu/sit_academy,jazztpt/edx-platform,wwj718/ANALYSE,ahmadio/edx-platform,jzoldak/edx-platform,lduarte1991/edx-platform,procangroup/edx-platform,shubhdev/openedx,motion2015/edx-platform,cselis86/edx-platform,chand3040/cloud_that,hamzehd/edx-platform,tiagochiavericosta/edx-platform,zubair-arbi/edx-platform,teltek/edx-platform,y12uc231/edx-platform,nttks/edx-platform,ahmadiga/min_edx,cognitiveclass/edx-platform,nikolas/edx-platform,shubhdev/edxOnBaadal,utecuy/edx-platform,jazztpt/edx-platform,zubair-arbi/edx-platform,ahmadio/edx-platform,antonve/s4-project-mooc,shashank971/edx-platform,jelugbo/tundex,lduarte1991/edx-platform,waheedahmed/edx-platform,UOMx/edx-platform,dcosentino/edx-platform,benpatterson/edx-platform,sudheerchintala/LearnEraPlatForm,SivilTaram/edx-platform,inares/edx-platform,wwj718/edx-platform,pelikanchik/edx-platform,mbareta/edx-platform-ft,kmoocdev/edx-platform,kxliugang/edx-platform,benpatterson/edx-platform,eduNEXT/edx-platform,pelikanchik/edx-platform,pku9104038/edx-platform,synergeticsedx/deployment-wipro,IONISx/edx-platform,motion2015/edx-platform,JCBarahona/edX,kmoocdev2/edx-platform,nanolearningllc/edx-platform-cypress-2,jazztpt/edx-platform,UOMx/edx-platform,DNFcode/edx-platform,kxliugang/edx-platform,iivic/BoiseStateX,apigee/edx-platform,Endika/edx-platform,JCBarahona/edX,EDUlib/edx-platform,chudaol/edx-platform,beni55/edx-platform,rismalrv/edx-platform,xinjiguaike/edx-platform,mcgachey/edx-platform,MSOpenTech/edx-platform,dkarakats/edx-platform,nttks/jenkins-test,analyseuc3m/ANALYSE-v1,nikolas/edx-platform,dsajkl/reqiop,jolyonb/edx-platform,solashirai/edx-platform,nikolas/edx-platform,y12uc231/edx-platform,zofuthan/edx-platform,shashank971/edx-platform,edry/edx-platform,zubair-arbi/edx-platform,ESOedX/edx-platform,cpennington/edx-platform,halvertoluke/edx-platform,itsjeyd/edx-platform,Unow/edx-platform,kxliugang/edx-platform,antoviaque/edx-platform,bitifirefly/edx-platform,CredoReference/edx-platform,shurihell/testasia,amir-qayyum-khan/edx-platform,ZLLab-Mooc/edx-platform,SravanthiSinha/edx-platform,olexiim/edx-platform,ovnicraft/edx-platform,jelugbo/tundex,bdero/edx-platform,kamalx/edx-platform,abdoosh00/edx-rtl-final,mjg2203/edx-platform-seas,proversity-org/edx-platform,polimediaupv/edx-platform,morenopc/edx-platform,jbzdak/edx-platform,SravanthiSinha/edx-platform,leansoft/edx-platform,hamzehd/edx-platform,DefyVentures/edx-platform,pku9104038/edx-platform,caesar2164/edx-platform,TeachAtTUM/edx-platform,halvertoluke/edx-platform,edx-solutions/edx-platform,zofuthan/edx-platform,andyzsf/edx,fly19890211/edx-platform,cyanna/edx-platform,jruiperezv/ANALYSE,jbzdak/edx-platform,cognitiveclass/edx-platform,jelugbo/tundex,angelapper/edx-platform,SivilTaram/edx-platform,defance/edx-platform,nanolearningllc/edx-platform-cypress,zofuthan/edx-platform,xuxiao19910803/edx,angelapper/edx-platform,naresh21/synergetics-edx-platform,pepeportela/edx-platform,nanolearningllc/edx-platform-cypress-2,Semi-global/edx-platform,iivic/BoiseStateX,y12uc231/edx-platform,a-parhom/edx-platform,ampax/edx-platform-backup,hkawasaki/kawasaki-aio8-0,Ayub-Khan/edx-platform,msegado/edx-platform,wwj718/edx-platform,jswope00/GAI,shubhdev/edxOnBaadal,CredoReference/edx-
platform,waheedahmed/edx-platform,edx/edx-platform,ampax/edx-platform,rue89-tech/edx-platform,cecep-edu/edx-platform,jbzdak/edx-platform,shabab12/edx-platform,simbs/edx-platform,Shrhawk/edx-platform,andyzsf/edx,playm2mboy/edx-platform,halvertoluke/edx-platform,Unow/edx-platform,leansoft/edx-platform,inares/edx-platform,arifsetiawan/edx-platform,jazkarta/edx-platform-for-isc,jolyonb/edx-platform,pabloborrego93/edx-platform,playm2mboy/edx-platform,edx/edx-platform,abdoosh00/edraak,mushtaqak/edx-platform,chand3040/cloud_that,msegado/edx-platform,hkawasaki/kawasaki-aio8-0,Kalyzee/edx-platform,utecuy/edx-platform,hmcmooc/muddx-platform,chauhanhardik/populo_2,ESOedX/edx-platform,zerobatu/edx-platform,shurihell/testasia,rue89-tech/edx-platform,MSOpenTech/edx-platform,don-github/edx-platform,UOMx/edx-platform,devs1991/test_edx_docmode,vasyarv/edx-platform,jazztpt/edx-platform,tiagochiavericosta/edx-platform,louyihua/edx-platform,dcosentino/edx-platform,hmcmooc/muddx-platform,IONISx/edx-platform,rismalrv/edx-platform,RPI-OPENEDX/edx-platform,fintech-circle/edx-platform,mitocw/edx-platform,raccoongang/edx-platform,shubhdev/edx-platform,vikas1885/test1,RPI-OPENEDX/edx-platform,DefyVentures/edx-platform,shubhdev/edxOnBaadal,etzhou/edx-platform,antonve/s4-project-mooc,nttks/jenkins-test,jruiperezv/ANALYSE,cecep-edu/edx-platform,cecep-edu/edx-platform,amir-qayyum-khan/edx-platform,dkarakats/edx-platform,jswope00/griffinx,UXE/local-edx,CredoReference/edx-platform,cyanna/edx-platform,martynovp/edx-platform,mjirayu/sit_academy,fly19890211/edx-platform,Edraak/edx-platform,Shrhawk/edx-platform,ahmedaljazzar/edx-platform,bdero/edx-platform,zadgroup/edx-platform,dsajkl/123,rismalrv/edx-platform,hkawasaki/kawasaki-aio8-2,gsehub/edx-platform,JioEducation/edx-platform,philanthropy-u/edx-platform,mtlchun/edx,jzoldak/edx-platform,ahmedaljazzar/edx-platform,jbassen/edx-platform,jamesblunt/edx-platform,ahmadiga/min_edx,analyseuc3m/ANALYSE-v1,AkA84/edx-platform,gymnasium/edx-platform,nttks/edx-platform,stvstnfrd/edx-platform,10clouds/edx-platform,dcosentino/edx-platform,dsajkl/123,beacloudgenius/edx-platform,AkA84/edx-platform,openfun/edx-platform,raccoongang/edx-platform,kmoocdev/edx-platform,nanolearning/edx-platform,motion2015/a3,Semi-global/edx-platform,vikas1885/test1,hmcmooc/muddx-platform,nttks/jenkins-test,rue89-tech/edx-platform,ak2703/edx-platform,eestay/edx-platform,xuxiao19910803/edx,nanolearningllc/edx-platform-cypress,tanmaykm/edx-platform,4eek/edx-platform,yokose-ks/edx-platform,romain-li/edx-platform,rhndg/openedx,ampax/edx-platform-backup,ubc/edx-platform,bigdatauniversity/edx-platform,fly19890211/edx-platform,bitifirefly/edx-platform,nanolearning/edx-platform,zerobatu/edx-platform,ubc/edx-platform,a-parhom/edx-platform,alexthered/kienhoc-platform,marcore/edx-platform,RPI-OPENEDX/edx-platform,msegado/edx-platform,edx-solutions/edx-platform,hkawasaki/kawasaki-aio8-1,WatanabeYasumasa/edx-platform,BehavioralInsightsTeam/edx-platform,ahmadio/edx-platform,bigdatauniversity/edx-platform,gymnasium/edx-platform,pepeportela/edx-platform,carsongee/edx-platform,yokose-ks/edx-platform,cpennington/edx-platform,polimediaupv/edx-platform,kxliugang/edx-platform,jazkarta/edx-platform-for-isc,rhndg/openedx,zadgroup/edx-platform,auferack08/edx-platform,vikas1885/test1,defance/edx-platform,edry/edx-platform,appliedx/edx-platform,atsolakid/edx-platform,naresh21/synergetics-edx-platform,Ayub-Khan/edx-platform,alexthered/kienhoc-platform,playm2mboy/edx-platform,etzhou/edx-platform,zhenzhai/edx-platform,nanolearningllc/edx-pl
atform-cypress,marcore/edx-platform,pabloborrego93/edx-platform,wwj718/edx-platform,miptliot/edx-platform,rismalrv/edx-platform,sameetb-cuelogic/edx-platform-test,caesar2164/edx-platform,shubhdev/openedx,halvertoluke/edx-platform,mtlchun/edx,tanmaykm/edx-platform,doismellburning/edx-platform,appsembler/edx-platform,marcore/edx-platform,mcgachey/edx-platform,DNFcode/edx-platform,cecep-edu/edx-platform,bitifirefly/edx-platform,BehavioralInsightsTeam/edx-platform,kmoocdev/edx-platform,vismartltd/edx-platform,kmoocdev2/edx-platform,valtech-mooc/edx-platform,fly19890211/edx-platform,kursitet/edx-platform,fintech-circle/edx-platform,pepeportela/edx-platform,IndonesiaX/edx-platform,atsolakid/edx-platform,UXE/local-edx,Softmotions/edx-platform,cyanna/edx-platform,gymnasium/edx-platform,xuxiao19910803/edx-platform,CourseTalk/edx-platform,caesar2164/edx-platform,Livit/Livit.Learn.EdX,Kalyzee/edx-platform,jonathan-beard/edx-platform,eemirtekin/edx-platform,Lektorium-LLC/edx-platform,xinjiguaike/edx-platform,cselis86/edx-platform,Edraak/edx-platform,Edraak/edraak-platform,ZLLab-Mooc/edx-platform,beacloudgenius/edx-platform,sudheerchintala/LearnEraPlatForm,vasyarv/edx-platform,Edraak/edraak-platform,morenopc/edx-platform,solashirai/edx-platform,antoviaque/edx-platform,cognitiveclass/edx-platform,valtech-mooc/edx-platform,y12uc231/edx-platform,shubhdev/edx-platform,MakeHer/edx-platform,zubair-arbi/edx-platform,hkawasaki/kawasaki-aio8-1,chauhanhardik/populo,mitocw/edx-platform,deepsrijit1105/edx-platform,IndonesiaX/edx-platform,kmoocdev/edx-platform,Unow/edx-platform,jamesblunt/edx-platform,gsehub/edx-platform,zhenzhai/edx-platform,devs1991/test_edx_docmode,tiagochiavericosta/edx-platform,Stanford-Online/edx-platform,eduNEXT/edx-platform,xuxiao19910803/edx,jamiefolsom/edx-platform,devs1991/test_edx_docmode,arbrandes/edx-platform,xingyepei/edx-platform,jzoldak/edx-platform,martynovp/edx-platform,chrisndodge/edx-platform,nanolearning/edx-platform,philanthropy-u/edx-platform,jjmiranda/edx-platform,mtlchun/edx,nanolearningllc/edx-platform-cypress-2,hastexo/edx-platform,olexiim/edx-platform,zofuthan/edx-platform,mtlchun/edx,mcgachey/edx-platform,ak2703/edx-platform,MSOpenTech/edx-platform,Shrhawk/edx-platform,appsembler/edx-platform,ak2703/edx-platform,shashank971/edx-platform,B-MOOC/edx-platform,raccoongang/edx-platform,Lektorium-LLC/edx-platform,Edraak/circleci-edx-platform,adoosii/edx-platform,hkawasaki/kawasaki-aio8-2,nanolearningllc/edx-platform-cypress-2,Ayub-Khan/edx-platform,xinjiguaike/edx-platform,CourseTalk/edx-platform,mahendra-r/edx-platform,nttks/jenkins-test,teltek/edx-platform,ferabra/edx-platform,auferack08/edx-platform,ampax/edx-platform-backup,andyzsf/edx,eemirtekin/edx-platform,Endika/edx-platform,Softmotions/edx-platform,bitifirefly/edx-platform,kursitet/edx-platform,jruiperezv/ANALYSE,marcore/edx-platform,jjmiranda/edx-platform,ubc/edx-platform,abdoosh00/edraak,knehez/edx-platform,vismartltd/edx-platform,sudheerchintala/LearnEraPlatForm,zadgroup/edx-platform,chudaol/edx-platform,Edraak/edx-platform,hastexo/edx-platform,CourseTalk/edx-platform,B-MOOC/edx-platform,shabab12/edx-platform,mjirayu/sit_academy,xinjiguaike/edx-platform,eestay/edx-platform,arifsetiawan/edx-platform,UXE/local-edx,jazkarta/edx-platform,torchingloom/edx-platform,morenopc/edx-platform,jazkarta/edx-platform-for-isc,OmarIthawi/edx-platform,cyanna/edx-platform,iivic/BoiseStateX,jolyonb/edx-platform,solashirai/edx-platform,unicri/edx-platform,waheedahmed/edx-platform,shurihell/testasia,devs1991/test_edx_docmode,louyihua/e
dx-platform,mushtaqak/edx-platform,OmarIthawi/edx-platform,wwj718/ANALYSE,dsajkl/123,yokose-ks/edx-platform,auferack08/edx-platform,gsehub/edx-platform,ovnicraft/edx-platform,analyseuc3m/ANALYSE-v1,solashirai/edx-platform,doismellburning/edx-platform,hmcmooc/muddx-platform,chudaol/edx-platform,adoosii/edx-platform,Edraak/circleci-edx-platform,AkA84/edx-platform,kxliugang/edx-platform,chauhanhardik/populo,nagyistoce/edx-platform,tiagochiavericosta/edx-platform,Semi-global/edx-platform,martynovp/edx-platform,prarthitm/edxplatform,chauhanhardik/populo_2,dkarakats/edx-platform,Kalyzee/edx-platform,jamiefolsom/edx-platform,ZLLab-Mooc/edx-platform,EDUlib/edx-platform,adoosii/edx-platform,mahendra-r/edx-platform,mcgachey/edx-platform,OmarIthawi/edx-platform,unicri/edx-platform,pelikanchik/edx-platform,MakeHer/edx-platform,doganov/edx-platform,leansoft/edx-platform,stvstnfrd/edx-platform,ZLLab-Mooc/edx-platform,eestay/edx-platform,xingyepei/edx-platform,chauhanhardik/populo_2,Lektorium-LLC/edx-platform,deepsrijit1105/edx-platform,ZLLab-Mooc/edx-platform,zhenzhai/edx-platform,inares/edx-platform,jjmiranda/edx-platform,kmoocdev/edx-platform,BehavioralInsightsTeam/edx-platform,cpennington/edx-platform,bitifirefly/edx-platform,vismartltd/edx-platform,pomegranited/edx-platform,utecuy/edx-platform,mahendra-r/edx-platform,abdoosh00/edx-rtl-final,WatanabeYasumasa/edx-platform,nagyistoce/edx-platform,Ayub-Khan/edx-platform,vasyarv/edx-platform,mtlchun/edx,LearnEra/LearnEraPlaftform,hkawasaki/kawasaki-aio8-0,nagyistoce/edx-platform,devs1991/test_edx_docmode,mjirayu/sit_academy,abdoosh00/edraak,Livit/Livit.Learn.EdX,franosincic/edx-platform,don-github/edx-platform,LICEF/edx-platform,waheedahmed/edx-platform,arbrandes/edx-platform,naresh21/synergetics-edx-platform,tiagochiavericosta/edx-platform,ahmadiga/min_edx,solashirai/edx-platform,tanmaykm/edx-platform,valtech-mooc/edx-platform,appsembler/edx-platform,Softmotions/edx-platform,dsajkl/reqiop,chrisndodge/edx-platform,rismalrv/edx-platform,amir-qayyum-khan/edx-platform,ampax/edx-platform,RPI-OPENEDX/edx-platform,jswope00/griffinx,cselis86/edx-platform,wwj718/ANALYSE,shabab12/edx-platform,dcosentino/edx-platform,SivilTaram/edx-platform,xuxiao19910803/edx-platform,hkawasaki/kawasaki-aio8-2,shashank971/edx-platform,apigee/edx-platform,devs1991/test_edx_docmode,don-github/edx-platform,knehez/edx-platform,beacloudgenius/edx-platform,eestay/edx-platform
""" Generate sql commands to fix truncated anonymous student ids in the ORA database """ import sys from django.core.management.base import NoArgsCommand from student.models import AnonymousUserId, anonymous_id_for_user class Command(NoArgsCommand): help = __doc__ def handle_noargs(self, **options): """ Reads a list of ids (newline separated) from stdin, and dumps sql queries to run on the ORA database to fix those ids from their truncated form to the full 32 character change. The following query will generate the list of ids needed to be fixed from the ORA database: SELECT student_id FROM peer_grading_calibrationhistory WHERE LENGTH(student_id) = 16 UNION SELECT student_id FROM controller_submission WHERE LENGTH(student_id) = 16 UNION SELECT student_id FROM metrics_timing WHERE LENGTH(student_id) = 16 UNION SELECT student_id FROM metrics_studentcourseprofile WHERE LENGTH(student_id) = 16 UNION SELECT student_id FROM metrics_studentprofile WHERE LENGTH(student_id) = 16; """ ids = [line.strip() for line in sys.stdin] old_ids = AnonymousUserId.objects.raw( """ SELECT * FROM student_anonymoususerid_temp_archive WHERE anonymous_user_id IN ({}) """.format(','.join(['%s']*len(ids))), ids ) for old_id in old_ids: new_id = anonymous_id_for_user(old_id.user, old_id.course_id) for table in ('peer_grading_calibrationhistory', 'controller_submission', 'metrics_timing'): self.stdout.write( "UPDATE {} " "SET student_id = '{}' " "WHERE student_id = '{}';\n".format( table, new_id, old_id.anonymous_user_id, ) ) self.stdout.write( "DELETE FROM metrics_studentcourseprofile " "WHERE student_id = '{}' " "AND problems_attempted = 0;\n".format(old_id.anonymous_user_id) ) self.stdout.write( "DELETE FROM metrics_studentprofile " "WHERE student_id = '{}' " "AND messages_sent = 0 " "AND messages_received = 0 " "AND average_message_feedback_length = 0 " "AND student_is_staff_banned = 0 " "AND student_cannot_submit_more_for_peer_grading = 0;\n".format(old_id.anonymous_user_id) )
Add management command to generate sql to clean up truncated student ids in ORA db
52189e2161e92b36df47a04c2150dff38f81f5e9
tests/unit/tests/test_activations.py
tests/unit/tests/test_activations.py
from unittest import mock from django.test import TestCase from viewflow import activation, flow from viewflow.models import Task class TestActivations(TestCase): def test_start_activation_lifecycle(self): flow_task_mock = mock.Mock(spec=flow.Start()) act = activation.StartActivation() act.initialize(flow_task_mock) act.prepare() act.done() act.task.prepare.assert_called_once_with() act.task.done.assert_called_once_with() act.process.start.assert_called_once_with() flow_task_mock.activate_next.assert_any_call(act) def test_view_activation_activate(self): flow_task_mock = mock.Mock(spec=flow.View(lambda *args, **kwargs: None)) prev_activation_mock = mock.Mock(spec=activation.StartActivation()) act = activation.ViewActivation.activate(flow_task_mock, prev_activation_mock) act.task.save.assert_has_calls(()) def test_view_activation_lifecycle(self): flow_task_mock = mock.Mock(spec=flow.View(lambda *args, **kwargs: None)) task_mock = mock.Mock(spec=Task()) act = activation.ViewActivation() act.initialize(flow_task_mock, task_mock) act.prepare() act.done() act.task.prepare.assert_called_once_with() act.task.done.assert_called_once_with() flow_task_mock.activate_next.assert_any_call(act)
Add mocked tests for activation
Add mocked tests for activation
Python
agpl-3.0
pombredanne/viewflow,ribeiro-ucl/viewflow,codingjoe/viewflow,codingjoe/viewflow,pombredanne/viewflow,viewflow/viewflow,viewflow/viewflow,viewflow/viewflow,ribeiro-ucl/viewflow,codingjoe/viewflow,ribeiro-ucl/viewflow
from unittest import mock from django.test import TestCase from viewflow import activation, flow from viewflow.models import Task class TestActivations(TestCase): def test_start_activation_lifecycle(self): flow_task_mock = mock.Mock(spec=flow.Start()) act = activation.StartActivation() act.initialize(flow_task_mock) act.prepare() act.done() act.task.prepare.assert_called_once_with() act.task.done.assert_called_once_with() act.process.start.assert_called_once_with() flow_task_mock.activate_next.assert_any_call(act) def test_view_activation_activate(self): flow_task_mock = mock.Mock(spec=flow.View(lambda *args, **kwargs: None)) prev_activation_mock = mock.Mock(spec=activation.StartActivation()) act = activation.ViewActivation.activate(flow_task_mock, prev_activation_mock) act.task.save.assert_has_calls(()) def test_view_activation_lifecycle(self): flow_task_mock = mock.Mock(spec=flow.View(lambda *args, **kwargs: None)) task_mock = mock.Mock(spec=Task()) act = activation.ViewActivation() act.initialize(flow_task_mock, task_mock) act.prepare() act.done() act.task.prepare.assert_called_once_with() act.task.done.assert_called_once_with() flow_task_mock.activate_next.assert_any_call(act)
Add mocked tests for activation
c78c4b4bd56453fe1f3a7db71222c12336c2dcf5
future/tests/test_str_is_unicode.py
future/tests/test_str_is_unicode.py
from __future__ import absolute_import from future import str_is_unicode import unittest class TestIterators(unittest.TestCase): def test_str(self): self.assertIsNot(str, bytes) # Py2: assertIsNot only in 2.7 self.assertEqual(str('blah'), u'blah') # Py3.3 and Py2 only unittest.main()
Add tests for str_is_unicode module
Add tests for str_is_unicode module
Python
mit
michaelpacer/python-future,michaelpacer/python-future,krischer/python-future,QuLogic/python-future,QuLogic/python-future,PythonCharmers/python-future,PythonCharmers/python-future,krischer/python-future
from __future__ import absolute_import from future import str_is_unicode import unittest class TestIterators(unittest.TestCase): def test_str(self): self.assertIsNot(str, bytes) # Py2: assertIsNot only in 2.7 self.assertEqual(str('blah'), u'blah') # Py3.3 and Py2 only unittest.main()
Add tests for str_is_unicode module
83e0394dc837e55a3ed544e54f6e84954f9311b0
onepercentclub/settings/travis.py
onepercentclub/settings/travis.py
# TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q==' from .test_runner import * # Use firefox for running tests on Travis SELENIUM_WEBDRIVER = 'firefox' ROOT_URLCONF = 'onepercentclub.urls'
# TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q==' from .test_runner import * # Use firefox for running tests on Travis SELENIUM_WEBDRIVER = 'remote' SELENIUM_TESTS = False ROOT_URLCONF = 'onepercentclub.urls'
Disable front end tests on Travis for now.
Disable front end tests on Travis for now.
Python
bsd-3-clause
onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site
# TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q==' from .test_runner import * # Use firefox for running tests on Travis SELENIUM_WEBDRIVER = 'remote' SELENIUM_TESTS = False ROOT_URLCONF = 'onepercentclub.urls'
Disable front end tests on Travis for now. # TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q==' from .test_runner import * # Use firefox for running tests on Travis SELENIUM_WEBDRIVER = 'firefox' ROOT_URLCONF = 'onepercentclub.urls'
1c397202b6df7b62cbd22509ee7cc366c2c09d6c
setup.py
setup.py
try: from setuptools import setup, find_packages except ImportError: from ez_setup import use_setuptools use_setuptools() from setuptools import setup, find_packages setup( name='debexpo', version="", #description='', #author='', #author_email='', #url='', install_requires=[ "Pylons>=1.0", "SQLAlchemy>=0.6", "Webhelpers>=0.6.1", "Babel>=0.9.6", "ZSI", "python-debian==0.1.16", "soaplib==0.8.1"], packages=find_packages(exclude=['ez_setup']), include_package_data=True, test_suite='nose.collector', package_data={'debexpo': ['i18n/*/LC_MESSAGES/*.mo']}, message_extractors = {'debexpo': [ ('**.py', 'python', None), ('templates/**.mako', 'mako', None), ('public/**', 'ignore', None)]}, entry_points=""" [paste.app_factory] main = debexpo.config.middleware:make_app [paste.app_install] main = pylons.util:PylonsInstaller [console_scripts] debexpo-importer = debexpo.scripts.debexpo_importer:main debexpo-user-importer = debexpo.scripts.user_importer:main """, )
try: from setuptools import setup, find_packages except ImportError: from ez_setup import use_setuptools use_setuptools() from setuptools import setup, find_packages setup( name='debexpo', version="", #description='', #author='', #author_email='', #url='', install_requires=[ "Pylons>=1.0", "SQLAlchemy>=0.6", "Webhelpers>=0.6.1", "Babel>=0.9.6", "ZSI", "python-debian>=0.1.16", "soaplib==0.8.1"], packages=find_packages(exclude=['ez_setup']), include_package_data=True, test_suite='nose.collector', package_data={'debexpo': ['i18n/*/LC_MESSAGES/*.mo']}, message_extractors = {'debexpo': [ ('**.py', 'python', None), ('templates/**.mako', 'mako', None), ('public/**', 'ignore', None)]}, entry_points=""" [paste.app_factory] main = debexpo.config.middleware:make_app [paste.app_install] main = pylons.util:PylonsInstaller [console_scripts] debexpo-importer = debexpo.scripts.debexpo_importer:main debexpo-user-importer = debexpo.scripts.user_importer:main """, )
Make library dependencies python-debian a bit more sane
Make library dependencies python-debian a bit more sane
Python
mit
jadonk/debexpo,jonnylamb/debexpo,jadonk/debexpo,jonnylamb/debexpo,swvist/Debexpo,jadonk/debexpo,swvist/Debexpo,swvist/Debexpo,jonnylamb/debexpo
try: from setuptools import setup, find_packages except ImportError: from ez_setup import use_setuptools use_setuptools() from setuptools import setup, find_packages setup( name='debexpo', version="", #description='', #author='', #author_email='', #url='', install_requires=[ "Pylons>=1.0", "SQLAlchemy>=0.6", "Webhelpers>=0.6.1", "Babel>=0.9.6", "ZSI", "python-debian>=0.1.16", "soaplib==0.8.1"], packages=find_packages(exclude=['ez_setup']), include_package_data=True, test_suite='nose.collector', package_data={'debexpo': ['i18n/*/LC_MESSAGES/*.mo']}, message_extractors = {'debexpo': [ ('**.py', 'python', None), ('templates/**.mako', 'mako', None), ('public/**', 'ignore', None)]}, entry_points=""" [paste.app_factory] main = debexpo.config.middleware:make_app [paste.app_install] main = pylons.util:PylonsInstaller [console_scripts] debexpo-importer = debexpo.scripts.debexpo_importer:main debexpo-user-importer = debexpo.scripts.user_importer:main """, )
Make library dependencies python-debian a bit more sane try: from setuptools import setup, find_packages except ImportError: from ez_setup import use_setuptools use_setuptools() from setuptools import setup, find_packages setup( name='debexpo', version="", #description='', #author='', #author_email='', #url='', install_requires=[ "Pylons>=1.0", "SQLAlchemy>=0.6", "Webhelpers>=0.6.1", "Babel>=0.9.6", "ZSI", "python-debian==0.1.16", "soaplib==0.8.1"], packages=find_packages(exclude=['ez_setup']), include_package_data=True, test_suite='nose.collector', package_data={'debexpo': ['i18n/*/LC_MESSAGES/*.mo']}, message_extractors = {'debexpo': [ ('**.py', 'python', None), ('templates/**.mako', 'mako', None), ('public/**', 'ignore', None)]}, entry_points=""" [paste.app_factory] main = debexpo.config.middleware:make_app [paste.app_install] main = pylons.util:PylonsInstaller [console_scripts] debexpo-importer = debexpo.scripts.debexpo_importer:main debexpo-user-importer = debexpo.scripts.user_importer:main """, )
78821f2df84bbb822e076fb1591dfccc09bcb43c
cpm_data/migrations/0004_add_seasons_data.py
cpm_data/migrations/0004_add_seasons_data.py
# -*- coding: utf-8 -*- # Generated by Django 1.9.8 on 2016-08-27 22:21 from __future__ import unicode_literals from django.db import migrations def _get_seasons(): return '2012 2013 2014 2015 2016 2017'.split() def add_seasons(apps, schema_editor): Season = apps.get_model('cpm_data.Season') Season.objects.bulk_create( [Season(name_en=s, name_be=s, name_ru=s) for s in _get_seasons()] ) def remove_seasons(apps, schema_editor): Season = apps.get_model('cpm_data.Season') Season.objects.delete(name_en__in=_get_seasons()) class Migration(migrations.Migration): dependencies = [ ('cpm_data', '0003_seasonrelatedjurymember_seasonrelatedpartner'), ] operations = [ migrations.RunPython(add_seasons, remove_seasons), ]
Add migrations for adding seasons
Add migrations for adding seasons
Python
unlicense
kinaklub/next.filmfest.by,nott/next.filmfest.by,nott/next.filmfest.by,nott/next.filmfest.by,kinaklub/next.filmfest.by,kinaklub/next.filmfest.by,kinaklub/next.filmfest.by,nott/next.filmfest.by
# -*- coding: utf-8 -*- # Generated by Django 1.9.8 on 2016-08-27 22:21 from __future__ import unicode_literals from django.db import migrations def _get_seasons(): return '2012 2013 2014 2015 2016 2017'.split() def add_seasons(apps, schema_editor): Season = apps.get_model('cpm_data.Season') Season.objects.bulk_create( [Season(name_en=s, name_be=s, name_ru=s) for s in _get_seasons()] ) def remove_seasons(apps, schema_editor): Season = apps.get_model('cpm_data.Season') Season.objects.delete(name_en__in=_get_seasons()) class Migration(migrations.Migration): dependencies = [ ('cpm_data', '0003_seasonrelatedjurymember_seasonrelatedpartner'), ] operations = [ migrations.RunPython(add_seasons, remove_seasons), ]
Add migrations for adding seasons
5e9c6c527902fd8361391f111a88a8f4b4ce71df
aospy/proj.py
aospy/proj.py
"""proj.py: aospy.Proj class for organizing work in single project.""" import time from .utils import dict_name_keys class Proj(object): """Project parameters: models, regions, directories, etc.""" def __init__(self, name, vars={}, models={}, default_models={}, regions={}, direc_out='', nc_dir_struc=False, verbose=True): self.verbose = verbose if self.verbose: print ("Initializing Project instance: %s (%s)" % (name, time.ctime())) self.name = name self.direc_out = direc_out self.nc_dir_struc = nc_dir_struc self.vars = dict_name_keys(vars) if models: self.models = dict_name_keys(models) else: self.models = {} if default_models == 'all': self.default_models = self.models elif default_models: self.default_models = dict_name_keys(default_models) else: self.default_models = {} if regions: self.regions = dict_name_keys(regions) else: self.regions = {} for obj_dict in (self.vars, self.models, self.regions): for obj in obj_dict.values(): setattr(obj, 'proj', self) def __str__(self): return 'Project instance "' + self.name + '"' __repr__ = __str__
"""proj.py: aospy.Proj class for organizing work in single project.""" import time from .utils import dict_name_keys class Proj(object): """Project parameters: models, regions, directories, etc.""" def __init__(self, name, vars={}, models={}, default_models={}, regions={}, direc_out='', nc_dir_struc=False, verbose=True): self.verbose = verbose if self.verbose: print ("Initializing Project instance: %s (%s)" % (name, time.ctime())) self.name = name self.direc_out = direc_out self.nc_dir_struc = nc_dir_struc if models: self.models = dict_name_keys(models) else: self.models = {} if default_models == 'all': self.default_models = self.models elif default_models: self.default_models = dict_name_keys(default_models) else: self.default_models = {} if regions: self.regions = dict_name_keys(regions) else: self.regions = {} for obj_dict in (self.models, self.regions): for obj in obj_dict.values(): setattr(obj, 'proj', self) def __str__(self): return 'Project instance "' + self.name + '"' __repr__ = __str__
Delete unnecessary vars attr of Proj
Delete unnecessary vars attr of Proj
Python
apache-2.0
spencerkclark/aospy,spencerahill/aospy
"""proj.py: aospy.Proj class for organizing work in single project.""" import time from .utils import dict_name_keys class Proj(object): """Project parameters: models, regions, directories, etc.""" def __init__(self, name, vars={}, models={}, default_models={}, regions={}, direc_out='', nc_dir_struc=False, verbose=True): self.verbose = verbose if self.verbose: print ("Initializing Project instance: %s (%s)" % (name, time.ctime())) self.name = name self.direc_out = direc_out self.nc_dir_struc = nc_dir_struc if models: self.models = dict_name_keys(models) else: self.models = {} if default_models == 'all': self.default_models = self.models elif default_models: self.default_models = dict_name_keys(default_models) else: self.default_models = {} if regions: self.regions = dict_name_keys(regions) else: self.regions = {} for obj_dict in (self.models, self.regions): for obj in obj_dict.values(): setattr(obj, 'proj', self) def __str__(self): return 'Project instance "' + self.name + '"' __repr__ = __str__
Delete unnecessary vars attr of Proj """proj.py: aospy.Proj class for organizing work in single project.""" import time from .utils import dict_name_keys class Proj(object): """Project parameters: models, regions, directories, etc.""" def __init__(self, name, vars={}, models={}, default_models={}, regions={}, direc_out='', nc_dir_struc=False, verbose=True): self.verbose = verbose if self.verbose: print ("Initializing Project instance: %s (%s)" % (name, time.ctime())) self.name = name self.direc_out = direc_out self.nc_dir_struc = nc_dir_struc self.vars = dict_name_keys(vars) if models: self.models = dict_name_keys(models) else: self.models = {} if default_models == 'all': self.default_models = self.models elif default_models: self.default_models = dict_name_keys(default_models) else: self.default_models = {} if regions: self.regions = dict_name_keys(regions) else: self.regions = {} for obj_dict in (self.vars, self.models, self.regions): for obj in obj_dict.values(): setattr(obj, 'proj', self) def __str__(self): return 'Project instance "' + self.name + '"' __repr__ = __str__
cb08d632fac453403bc8b91391b14669dbe932cc
circonus/__init__.py
circonus/__init__.py
from __future__ import absolute_import __title__ = "circonus" __version__ = "0.0.0" from logging import NullHandler import logging from circonus.client import CirconusClient logging.getLogger(__name__).addHandler(NullHandler())
__title__ = "circonus" __version__ = "0.0.0" from logging import NullHandler import logging from circonus.client import CirconusClient logging.getLogger(__name__).addHandler(NullHandler())
Remove unnecessary absolute import statement.
Remove unnecessary absolute import statement.
Python
mit
monetate/circonus,monetate/circonus
__title__ = "circonus" __version__ = "0.0.0" from logging import NullHandler import logging from circonus.client import CirconusClient logging.getLogger(__name__).addHandler(NullHandler())
Remove unnecessary absolute import statement. from __future__ import absolute_import __title__ = "circonus" __version__ = "0.0.0" from logging import NullHandler import logging from circonus.client import CirconusClient logging.getLogger(__name__).addHandler(NullHandler())
14d223068e2d8963dfe1f4e71854e9ea9c194bc5
Datasnakes/Tools/sge/qsubber.py
Datasnakes/Tools/sge/qsubber.py
import argparse import textwrap from qstat import Qstat __author__ = 'Datasnakes' parser = argparse.ArgumentParser( formatter_class=argparse.RawDescriptionHelpFormatter, description=textwrap.dedent('''\ This is a command line wrapper for the SGE module. ' ''')) parser.add_argument("-o", "--output", help="Qstat info output type", required=True) q = Qstat() args = parser.parse_args(namespace=q)
Set up shell argparser for sge module
Set up shell argparser for sge module
Python
mit
datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts
import argparse import textwrap from qstat import Qstat __author__ = 'Datasnakes' parser = argparse.ArgumentParser( formatter_class=argparse.RawDescriptionHelpFormatter, description=textwrap.dedent('''\ This is a command line wrapper for the SGE module. ' ''')) parser.add_argument("-o", "--output", help="Qstat info output type", required=True) q = Qstat() args = parser.parse_args(namespace=q)
Set up shell argparser for sge module
59927047347b7db3f46ab99152d2d99f60039043
trac/versioncontrol/web_ui/__init__.py
trac/versioncontrol/web_ui/__init__.py
from trac.versioncontrol.web_ui.browser import * from trac.versioncontrol.web_ui.changeset import * from trac.versioncontrol.web_ui.log import *
from trac.versioncontrol.web_ui.browser import * from trac.versioncontrol.web_ui.changeset import * from trac.versioncontrol.web_ui.log import *
Add missing `svn:eol-style : native` prop, which prevented making clean patches against the early 0.9b1 archives (now both the .zip and the .tar.gz have CRLFs for this file)
Add missing `svn:eol-style : native` prop, which prevented making clean patches against the early 0.9b1 archives (now both the .zip and the .tar.gz have CRLFs for this file) git-svn-id: eda3d06fcef731589ace1b284159cead3416df9b@2214 af82e41b-90c4-0310-8c96-b1721e28e2e2
Python
bsd-3-clause
jun66j5/trac-ja,walty8/trac,netjunki/trac-Pygit2,jun66j5/trac-ja,jun66j5/trac-ja,walty8/trac,walty8/trac,jun66j5/trac-ja,walty8/trac,netjunki/trac-Pygit2,netjunki/trac-Pygit2
from trac.versioncontrol.web_ui.browser import * from trac.versioncontrol.web_ui.changeset import * from trac.versioncontrol.web_ui.log import *
Add missing `svn:eol-style : native` prop, which prevented making clean patches against the early 0.9b1 archives (now both the .zip and the .tar.gz have CRLFs for this file) git-svn-id: eda3d06fcef731589ace1b284159cead3416df9b@2214 af82e41b-90c4-0310-8c96-b1721e28e2e2 from trac.versioncontrol.web_ui.browser import * from trac.versioncontrol.web_ui.changeset import * from trac.versioncontrol.web_ui.log import *
f2506c07caf66b3ad42f6f1c09325097edd2e169
src/django_healthchecks/contrib.py
src/django_healthchecks/contrib.py
import uuid from django.core.cache import cache from django.db import connection def check_database(): """Check if the application can perform a dummy sql query""" cursor = connection.cursor() cursor.execute('SELECT 1; -- Healthcheck') row = cursor.fetchone() return row[0] == 1 def check_cache_default(): """Check if the application can connect to the default cached and read/write some dummy data. """ dummy = str(uuid.uuid4()) key = 'healthcheck:%s' % dummy cache.set(key, dummy, timeout=5) cached_value = cache.get(key) return cached_value == dummy def check_dummy_true(): return True def check_dummy_false(): return False def check_remote_addr(request): return request.META['REMOTE_ADDR']
import uuid from django.core.cache import cache from django.db import connection def check_database(): """Check if the application can perform a dummy sql query""" with connection.cursor() as cursor: cursor.execute('SELECT 1; -- Healthcheck') row = cursor.fetchone() return row[0] == 1 def check_cache_default(): """Check if the application can connect to the default cached and read/write some dummy data. """ dummy = str(uuid.uuid4()) key = 'healthcheck:%s' % dummy cache.set(key, dummy, timeout=5) cached_value = cache.get(key) return cached_value == dummy def check_dummy_true(): return True def check_dummy_false(): return False def check_remote_addr(request): return request.META['REMOTE_ADDR']
Make sure the cursor is properly closed after usage
Make sure the cursor is properly closed after usage
Python
mit
mvantellingen/django-healthchecks
import uuid from django.core.cache import cache from django.db import connection def check_database(): """Check if the application can perform a dummy sql query""" with connection.cursor() as cursor: cursor.execute('SELECT 1; -- Healthcheck') row = cursor.fetchone() return row[0] == 1 def check_cache_default(): """Check if the application can connect to the default cached and read/write some dummy data. """ dummy = str(uuid.uuid4()) key = 'healthcheck:%s' % dummy cache.set(key, dummy, timeout=5) cached_value = cache.get(key) return cached_value == dummy def check_dummy_true(): return True def check_dummy_false(): return False def check_remote_addr(request): return request.META['REMOTE_ADDR']
Make sure the cursor is properly closed after usage import uuid from django.core.cache import cache from django.db import connection def check_database(): """Check if the application can perform a dummy sql query""" cursor = connection.cursor() cursor.execute('SELECT 1; -- Healthcheck') row = cursor.fetchone() return row[0] == 1 def check_cache_default(): """Check if the application can connect to the default cached and read/write some dummy data. """ dummy = str(uuid.uuid4()) key = 'healthcheck:%s' % dummy cache.set(key, dummy, timeout=5) cached_value = cache.get(key) return cached_value == dummy def check_dummy_true(): return True def check_dummy_false(): return False def check_remote_addr(request): return request.META['REMOTE_ADDR']
54a345eb96bce8c3035b402ce009b1e3fda46a42
quran_text/serializers.py
quran_text/serializers.py
from rest_framework import serializers from .models import Sura, Ayah class SuraSerializer(serializers.ModelSerializer): class Meta: model = Sura fields = ['index', 'name'] class AyahSerializer(serializers.ModelSerializer): class Meta: model = Ayah fields = ['sura', 'number', 'text']
from rest_framework import serializers from .models import Sura, Ayah class SuraSerializer(serializers.ModelSerializer): class Meta: model = Sura fields = ['index', 'name'] class AyahSerializer(serializers.ModelSerializer): sura_id = serializers.IntegerField(source='sura.pk') sura_name = serializers.CharField(source='sura.name') ayah_number = serializers.IntegerField(source='number') class Meta: model = Ayah fields = ['sura_id', 'sura_name', 'ayah_number', 'text']
Change label and add Sura name to Ayah Serializer
Change label and add Sura name to Ayah Serializer
Python
mit
EmadMokhtar/tafseer_api
from rest_framework import serializers from .models import Sura, Ayah class SuraSerializer(serializers.ModelSerializer): class Meta: model = Sura fields = ['index', 'name'] class AyahSerializer(serializers.ModelSerializer): sura_id = serializers.IntegerField(source='sura.pk') sura_name = serializers.CharField(source='sura.name') ayah_number = serializers.IntegerField(source='number') class Meta: model = Ayah fields = ['sura_id', 'sura_name', 'ayah_number', 'text']
Change label and add Sura name to Ayah Serializer from rest_framework import serializers from .models import Sura, Ayah class SuraSerializer(serializers.ModelSerializer): class Meta: model = Sura fields = ['index', 'name'] class AyahSerializer(serializers.ModelSerializer): class Meta: model = Ayah fields = ['sura', 'number', 'text']
e68b8146c6ae509489fde97faf10d5748904a20c
sentrylogs/helpers.py
sentrylogs/helpers.py
""" Helper functions for Sentry Logs """ from sentry_sdk import capture_message, configure_scope from .conf.settings import SENTRY_LOG_LEVEL, SENTRY_LOG_LEVELS def send_message(message, level, data): """Send a message to the Sentry server""" # Only send messages for desired log level if (SENTRY_LOG_LEVELS.index(level) < SENTRY_LOG_LEVELS.index(SENTRY_LOG_LEVEL)): return with configure_scope() as scope: for key, value in data.items(): scope.set_extra(key, value) capture_message(message, level)
""" Helper functions for Sentry Logs """ from sentry_sdk import capture_message, configure_scope from .conf.settings import SENTRY_LOG_LEVEL, SENTRY_LOG_LEVELS def send_message(message, level, data): """Send a message to the Sentry server""" # Only send messages for desired log level if (SENTRY_LOG_LEVELS.index(level) < SENTRY_LOG_LEVELS.index(SENTRY_LOG_LEVEL)): return with configure_scope() as scope: for key, value in data.items(): scope.set_context(key, value) capture_message(message, level)
Use structured context instead of additional data
Use structured context instead of additional data Additional Data is deprecated https://docs.sentry.io/platforms/python/enriching-events/context/#additional-data
Python
bsd-3-clause
mdgart/sentrylogs
""" Helper functions for Sentry Logs """ from sentry_sdk import capture_message, configure_scope from .conf.settings import SENTRY_LOG_LEVEL, SENTRY_LOG_LEVELS def send_message(message, level, data): """Send a message to the Sentry server""" # Only send messages for desired log level if (SENTRY_LOG_LEVELS.index(level) < SENTRY_LOG_LEVELS.index(SENTRY_LOG_LEVEL)): return with configure_scope() as scope: for key, value in data.items(): scope.set_context(key, value) capture_message(message, level)
Use structured context instead of additional data Additional Data is deprecated https://docs.sentry.io/platforms/python/enriching-events/context/#additional-data """ Helper functions for Sentry Logs """ from sentry_sdk import capture_message, configure_scope from .conf.settings import SENTRY_LOG_LEVEL, SENTRY_LOG_LEVELS def send_message(message, level, data): """Send a message to the Sentry server""" # Only send messages for desired log level if (SENTRY_LOG_LEVELS.index(level) < SENTRY_LOG_LEVELS.index(SENTRY_LOG_LEVEL)): return with configure_scope() as scope: for key, value in data.items(): scope.set_extra(key, value) capture_message(message, level)
cbe773d051168e05118774708ff7a0ce881617f4
ganglia/settings.py
ganglia/settings.py
DEBUG = True GANGLIA_PATH = '/usr/local/etc' # where gmetad.conf is located API_SERVER = 'http://ganglia-api.example.com:8080' # where ganglia-api.py is hosted BASE_URL = '/ganglia/api/v2' LOGFILE = '/var/log/ganglia-api.log' PIDFILE = '/var/run/ganglia-api.pid'
DEBUG = True GANGLIA_PATH = '/etc/ganglia' # where gmetad.conf is located API_SERVER = 'http://ganglia-api.example.com:8080' # where ganglia-api.py is hosted BASE_URL = '/ganglia/api/v2' LOGFILE = '/var/log/ganglia-api.log' PIDFILE = '/var/run/ganglia-api.pid'
Make GANGLIA_PATH default to /etc/ganglia
Make GANGLIA_PATH default to /etc/ganglia
Python
apache-2.0
guardian/ganglia-api
DEBUG = True GANGLIA_PATH = '/etc/ganglia' # where gmetad.conf is located API_SERVER = 'http://ganglia-api.example.com:8080' # where ganglia-api.py is hosted BASE_URL = '/ganglia/api/v2' LOGFILE = '/var/log/ganglia-api.log' PIDFILE = '/var/run/ganglia-api.pid'
Make GANGLIA_PATH default to /etc/ganglia DEBUG = True GANGLIA_PATH = '/usr/local/etc' # where gmetad.conf is located API_SERVER = 'http://ganglia-api.example.com:8080' # where ganglia-api.py is hosted BASE_URL = '/ganglia/api/v2' LOGFILE = '/var/log/ganglia-api.log' PIDFILE = '/var/run/ganglia-api.pid'
df89f96113d73017a9e18964bfd456b06a2e2a6d
jsk_apc2015_common/scripts/create_mask_applied_dataset.py
jsk_apc2015_common/scripts/create_mask_applied_dataset.py
#!/usr/bin/env python # -*- coding: utf-8 -*- import argparse import os import re import cv2 from jsk_recognition_utils import bounding_rect_of_mask parser = argparse.ArgumentParser() parser.add_argument('container_path') args = parser.parse_args() container_path = args.container_path output_dir = os.path.abspath(container_path + '_mask_applied') if not os.path.exists(output_dir): print('creating output directory: {}'.format(output_dir)) os.mkdir(output_dir) categs = os.listdir(container_path) os.chdir(container_path) for categ in categs: os.chdir(categ) print('processing category: {}'.format(categ)) files = os.listdir('.') img_files = filter(lambda x: re.match('^N\d*?_\d*?.jpg', x), files) print('found {} images'.format(len(img_files))) categ_output_dir = os.path.join(output_dir, categ) if not os.path.exists(categ_output_dir): os.mkdir(categ_output_dir) for img_file in img_files: base, _ = os.path.splitext(img_file) mask_file = os.path.join('masks', base + '_mask.pbm') img = cv2.imread(img_file) mask = cv2.imread(mask_file, 0) applied = bounding_rect_of_mask(img, ~mask) cv2.imwrite(os.path.join(output_dir, categ, img_file), applied) os.chdir('..') os.chdir('..')
Add script to create mask applied dataset
Add script to create mask applied dataset
Python
bsd-3-clause
pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc
#!/usr/bin/env python # -*- coding: utf-8 -*- import argparse import os import re import cv2 from jsk_recognition_utils import bounding_rect_of_mask parser = argparse.ArgumentParser() parser.add_argument('container_path') args = parser.parse_args() container_path = args.container_path output_dir = os.path.abspath(container_path + '_mask_applied') if not os.path.exists(output_dir): print('creating output directory: {}'.format(output_dir)) os.mkdir(output_dir) categs = os.listdir(container_path) os.chdir(container_path) for categ in categs: os.chdir(categ) print('processing category: {}'.format(categ)) files = os.listdir('.') img_files = filter(lambda x: re.match('^N\d*?_\d*?.jpg', x), files) print('found {} images'.format(len(img_files))) categ_output_dir = os.path.join(output_dir, categ) if not os.path.exists(categ_output_dir): os.mkdir(categ_output_dir) for img_file in img_files: base, _ = os.path.splitext(img_file) mask_file = os.path.join('masks', base + '_mask.pbm') img = cv2.imread(img_file) mask = cv2.imread(mask_file, 0) applied = bounding_rect_of_mask(img, ~mask) cv2.imwrite(os.path.join(output_dir, categ, img_file), applied) os.chdir('..') os.chdir('..')
Add script to create mask applied dataset
bd2f5a6c62e446fc8b720b94e75313b5117767cb
trac/upgrades/db11.py
trac/upgrades/db11.py
import os.path import shutil sql = """ -- Remove empty values from the milestone list DELETE FROM milestone WHERE COALESCE(name,'')=''; -- Add a description column to the version table, and remove unnamed versions CREATE TEMP TABLE version_old AS SELECT * FROM version; DROP TABLE version; CREATE TABLE version ( name text PRIMARY KEY, time integer, description text ); INSERT INTO version(name,time,description) SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>''; -- Add a description column to the component table, and remove unnamed components CREATE TEMP TABLE component_old AS SELECT * FROM component; DROP TABLE component; CREATE TABLE component ( name text PRIMARY KEY, owner text, description text ); INSERT INTO component(name,owner,description) SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>''; """ def do_upgrade(env, ver, cursor): cursor.execute(sql) # Copy the new default wiki macros over to the environment from trac.siteconfig import __default_macro_dir__ as macro_dir for f in os.listdir(macro_dir): if not f.endswith('.py'): continue src = os.path.join(macro_dir, f) dst = os.path.join(env.path, 'wiki-macros', f) if not os.path.isfile(dst): shutil.copy2(src, dst)
import os.path import shutil sql = """ -- Remove empty values from the milestone list DELETE FROM milestone WHERE COALESCE(name,'')=''; -- Add a description column to the version table, and remove unnamed versions CREATE TEMP TABLE version_old AS SELECT * FROM version; DROP TABLE version; CREATE TABLE version ( name text PRIMARY KEY, time integer, description text ); INSERT INTO version(name,time,description) SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>''; -- Add a description column to the component table, and remove unnamed components CREATE TEMP TABLE component_old AS SELECT * FROM component; DROP TABLE component; CREATE TABLE component ( name text PRIMARY KEY, owner text, description text ); INSERT INTO component(name,owner,description) SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>''; """ def do_upgrade(env, ver, cursor): cursor.execute(sql) # Copy the new default wiki macros over to the environment from trac.siteconfig import __default_macros_dir__ as macros_dir for f in os.listdir(macros_dir): if not f.endswith('.py'): continue src = os.path.join(macros_dir, f) dst = os.path.join(env.path, 'wiki-macros', f) if not os.path.isfile(dst): shutil.copy2(src, dst)
Fix typo in upgrade script
Fix typo in upgrade script git-svn-id: 0d96b0c1a6983ccc08b3732614f4d6bfcf9cbb42@1647 af82e41b-90c4-0310-8c96-b1721e28e2e2
Python
bsd-3-clause
rbaumg/trac,rbaumg/trac,rbaumg/trac,rbaumg/trac
import os.path import shutil sql = """ -- Remove empty values from the milestone list DELETE FROM milestone WHERE COALESCE(name,'')=''; -- Add a description column to the version table, and remove unnamed versions CREATE TEMP TABLE version_old AS SELECT * FROM version; DROP TABLE version; CREATE TABLE version ( name text PRIMARY KEY, time integer, description text ); INSERT INTO version(name,time,description) SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>''; -- Add a description column to the component table, and remove unnamed components CREATE TEMP TABLE component_old AS SELECT * FROM component; DROP TABLE component; CREATE TABLE component ( name text PRIMARY KEY, owner text, description text ); INSERT INTO component(name,owner,description) SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>''; """ def do_upgrade(env, ver, cursor): cursor.execute(sql) # Copy the new default wiki macros over to the environment from trac.siteconfig import __default_macros_dir__ as macros_dir for f in os.listdir(macros_dir): if not f.endswith('.py'): continue src = os.path.join(macros_dir, f) dst = os.path.join(env.path, 'wiki-macros', f) if not os.path.isfile(dst): shutil.copy2(src, dst)
Fix typo in upgrade script git-svn-id: 0d96b0c1a6983ccc08b3732614f4d6bfcf9cbb42@1647 af82e41b-90c4-0310-8c96-b1721e28e2e2 import os.path import shutil sql = """ -- Remove empty values from the milestone list DELETE FROM milestone WHERE COALESCE(name,'')=''; -- Add a description column to the version table, and remove unnamed versions CREATE TEMP TABLE version_old AS SELECT * FROM version; DROP TABLE version; CREATE TABLE version ( name text PRIMARY KEY, time integer, description text ); INSERT INTO version(name,time,description) SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>''; -- Add a description column to the component table, and remove unnamed components CREATE TEMP TABLE component_old AS SELECT * FROM component; DROP TABLE component; CREATE TABLE component ( name text PRIMARY KEY, owner text, description text ); INSERT INTO component(name,owner,description) SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>''; """ def do_upgrade(env, ver, cursor): cursor.execute(sql) # Copy the new default wiki macros over to the environment from trac.siteconfig import __default_macro_dir__ as macro_dir for f in os.listdir(macro_dir): if not f.endswith('.py'): continue src = os.path.join(macro_dir, f) dst = os.path.join(env.path, 'wiki-macros', f) if not os.path.isfile(dst): shutil.copy2(src, dst)
6037d11a8da5ea15c8de468dd730670ba10a44c6
setup.py
setup.py
try: from setuptools import setup except ImportError: from distutils.core import setup import toml with open("README.rst") as readme_file: readme_string = readme_file.read() setup( name="toml", version=toml.__version__, description="Python Library for Tom's Obvious, Minimal Language", author="Uiri Noyb", author_email="uiri@xqz.ca", url="https://github.com/uiri/toml", packages=['toml'], license="License :: OSI Approved :: MIT License", long_description=readme_string, classifiers=[ 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6'] )
try: from setuptools import setup except ImportError: from distutils.core import setup import toml with open("README.rst") as readme_file: readme_string = readme_file.read() setup( name="toml", version=toml.__version__, description="Python Library for Tom's Obvious, Minimal Language", author="Uiri Noyb", author_email="uiri@xqz.ca", url="https://github.com/uiri/toml", packages=['toml'], license="MIT", long_description=readme_string, classifiers=[ 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6'] )
Add trove classifier for license
Add trove classifier for license The trove classifiers are listed on PyPI to help users know -- at a glance -- what license the project uses. Helps users decide if the library is appropriate for integration. A full list of available trove classifiers can be found at: https://pypi.org/pypi?%3Aaction=list_classifiers The setuptools "license" argument is not intended to use trove classifier notation. Simplify it to "MIT". Details can be found: https://docs.python.org/3/distutils/setupscript.html#additional-meta-data
Python
mit
uiri/toml,uiri/toml
try: from setuptools import setup except ImportError: from distutils.core import setup import toml with open("README.rst") as readme_file: readme_string = readme_file.read() setup( name="toml", version=toml.__version__, description="Python Library for Tom's Obvious, Minimal Language", author="Uiri Noyb", author_email="uiri@xqz.ca", url="https://github.com/uiri/toml", packages=['toml'], license="MIT", long_description=readme_string, classifiers=[ 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6'] )
Add trove classifier for license The trove classifiers are listed on PyPI to help users know -- at a glance -- what license the project uses. Helps users decide if the library is appropriate for integration. A full list of available trove classifiers can be found at: https://pypi.org/pypi?%3Aaction=list_classifiers The setuptools "license" argument is not intended to use trove classifier notation. Simplify it to "MIT". Details can be found: https://docs.python.org/3/distutils/setupscript.html#additional-meta-data try: from setuptools import setup except ImportError: from distutils.core import setup import toml with open("README.rst") as readme_file: readme_string = readme_file.read() setup( name="toml", version=toml.__version__, description="Python Library for Tom's Obvious, Minimal Language", author="Uiri Noyb", author_email="uiri@xqz.ca", url="https://github.com/uiri/toml", packages=['toml'], license="License :: OSI Approved :: MIT License", long_description=readme_string, classifiers=[ 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6'] )
1619c955c75f91b9d61c3195704f17fc88ef9e04
aybu/manager/utils/pshell.py
aybu/manager/utils/pshell.py
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Copyright 2010 Asidev s.r.l. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from sqlalchemy import engine_from_config import aybu.manager.models def setup(env): settings = env['request'].registry.settings env['models'] = aybu.manager.models env['engine'] = engine_from_config(settings, 'sqlalchemy.') env['request'].set_db_engine = env['engine'] aybu.core.models.Base.metadata.bind = env['engine']
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Copyright 2010 Asidev s.r.l. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from sqlalchemy import engine_from_config import aybu.manager.models def setup(env): settings = env['request'].registry.settings env['models'] = aybu.manager.models env['engine'] = engine_from_config(settings, 'sqlalchemy.') env['request'].set_db_engine = env['engine'] aybu.manager.models.Base.metadata.bind = env['engine'] aybu.manager.models.Environment.initialize(settings) env['session'] = env['request'].db_session
Initialize session and environment in shell
Initialize session and environment in shell
Python
apache-2.0
asidev/aybu-manager
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Copyright 2010 Asidev s.r.l. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from sqlalchemy import engine_from_config import aybu.manager.models def setup(env): settings = env['request'].registry.settings env['models'] = aybu.manager.models env['engine'] = engine_from_config(settings, 'sqlalchemy.') env['request'].set_db_engine = env['engine'] aybu.manager.models.Base.metadata.bind = env['engine'] aybu.manager.models.Environment.initialize(settings) env['session'] = env['request'].db_session
Initialize session and environment in shell #!/usr/bin/env python # -*- coding: utf-8 -*- """ Copyright 2010 Asidev s.r.l. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from sqlalchemy import engine_from_config import aybu.manager.models def setup(env): settings = env['request'].registry.settings env['models'] = aybu.manager.models env['engine'] = engine_from_config(settings, 'sqlalchemy.') env['request'].set_db_engine = env['engine'] aybu.core.models.Base.metadata.bind = env['engine']
a25e6fb5f9e63ffa30a6c655a6775eead4206bcb
setup.py
setup.py
from distutils.core import setup import os, glob, string, shutil # Packages packages = ['neuroimaging', 'neuroimaging.statistics', 'neuroimaging.image', 'neuroimaging.reference', 'neuroimaging.data', 'neuroimaging.image.formats', 'neuroimaging.image.formats.analyze', 'neuroimaging.fmri', 'neuroimaging.fmri.fmristat', 'neuroimaging.visualization', 'neuroimaging.visualization.cmap'] def main(): setup (name = 'neuroimaging', version = '0.01a', description = 'This is a neuroimaging python package', author = 'Various, one of whom is Jonathan Taylor', author_email = 'jonathan.taylor@stanford.edu', ext_package = 'neuroimaging', packages=packages, package_dir = {'neuroimaging': 'lib'}, url = 'http://neuroimaging.scipy.org', long_description = ''' ''') if __name__ == "__main__": main()
import os, glob, string, shutil from distutils.core import setup # Packages packages = ['neuroimaging', 'neuroimaging.statistics', 'neuroimaging.image', 'neuroimaging.reference', 'neuroimaging.data', 'neuroimaging.image.formats', 'neuroimaging.image.formats.analyze', 'neuroimaging.fmri', 'neuroimaging.fmri.fmristat', 'neuroimaging.visualization', 'neuroimaging.visualization.cmap'] def main(): setup (name = 'neuroimaging', version = '0.01a', description = 'This is a neuroimaging python package', author = 'Various, one of whom is Jonathan Taylor', author_email = 'jonathan.taylor@stanford.edu', ext_package = 'neuroimaging', packages=packages, package_dir = {'neuroimaging': 'lib'}, url = 'http://neuroimaging.scipy.org', long_description = ''' ''') if __name__ == "__main__": main()
Test edit - to check svn email hook
Test edit - to check svn email hook
Python
bsd-3-clause
gef756/statsmodels,kiyoto/statsmodels,hainm/statsmodels,wdurhamh/statsmodels,detrout/debian-statsmodels,kiyoto/statsmodels,cbmoore/statsmodels,edhuckle/statsmodels,alekz112/statsmodels,hainm/statsmodels,bsipocz/statsmodels,phobson/statsmodels,huongttlan/statsmodels,ChadFulton/statsmodels,wkfwkf/statsmodels,josef-pkt/statsmodels,nguyentu1602/statsmodels,ChadFulton/statsmodels,bashtage/statsmodels,statsmodels/statsmodels,yl565/statsmodels,nguyentu1602/statsmodels,waynenilsen/statsmodels,bashtage/statsmodels,nguyentu1602/statsmodels,DonBeo/statsmodels,detrout/debian-statsmodels,bsipocz/statsmodels,bsipocz/statsmodels,bavardage/statsmodels,musically-ut/statsmodels,pprett/statsmodels,yarikoptic/pystatsmodels,wzbozon/statsmodels,cbmoore/statsmodels,YihaoLu/statsmodels,bert9bert/statsmodels,saketkc/statsmodels,astocko/statsmodels,bert9bert/statsmodels,wwf5067/statsmodels,astocko/statsmodels,pprett/statsmodels,bavardage/statsmodels,nvoron23/statsmodels,ChadFulton/statsmodels,jseabold/statsmodels,jstoxrocky/statsmodels,ChadFulton/statsmodels,wesm/statsmodels,wdurhamh/statsmodels,waynenilsen/statsmodels,wkfwkf/statsmodels,Averroes/statsmodels,bsipocz/statsmodels,jseabold/statsmodels,wwf5067/statsmodels,kiyoto/statsmodels,wzbozon/statsmodels,jstoxrocky/statsmodels,huongttlan/statsmodels,wesm/statsmodels,wkfwkf/statsmodels,bzero/statsmodels,wdurhamh/statsmodels,adammenges/statsmodels,bashtage/statsmodels,rgommers/statsmodels,nvoron23/statsmodels,DonBeo/statsmodels,bert9bert/statsmodels,bzero/statsmodels,saketkc/statsmodels,wkfwkf/statsmodels,astocko/statsmodels,alekz112/statsmodels,wdurhamh/statsmodels,kiyoto/statsmodels,rgommers/statsmodels,saketkc/statsmodels,detrout/debian-statsmodels,wwf5067/statsmodels,jstoxrocky/statsmodels,YihaoLu/statsmodels,yarikoptic/pystatsmodels,pprett/statsmodels,nvoron23/statsmodels,statsmodels/statsmodels,yl565/statsmodels,kiyoto/statsmodels,hainm/statsmodels,bzero/statsmodels,ChadFulton/statsmodels,statsmodels/statsmodels,edhuckle/statsmodels,wzbozon/statsmodels,josef-pkt/statsmodels,musically-ut/statsmodels,bashtage/statsmodels,musically-ut/statsmodels,hlin117/statsmodels,saketkc/statsmodels,wkfwkf/statsmodels,gef756/statsmodels,YihaoLu/statsmodels,adammenges/statsmodels,statsmodels/statsmodels,josef-pkt/statsmodels,bzero/statsmodels,cbmoore/statsmodels,pprett/statsmodels,hlin117/statsmodels,bavardage/statsmodels,huongttlan/statsmodels,wwf5067/statsmodels,phobson/statsmodels,alekz112/statsmodels,hlin117/statsmodels,Averroes/statsmodels,wzbozon/statsmodels,statsmodels/statsmodels,astocko/statsmodels,nguyentu1602/statsmodels,adammenges/statsmodels,jseabold/statsmodels,edhuckle/statsmodels,YihaoLu/statsmodels,alekz112/statsmodels,hainm/statsmodels,bashtage/statsmodels,bzero/statsmodels,yl565/statsmodels,phobson/statsmodels,DonBeo/statsmodels,wesm/statsmodels,adammenges/statsmodels,phobson/statsmodels,yarikoptic/pystatsmodels,yl565/statsmodels,josef-pkt/statsmodels,YihaoLu/statsmodels,bert9bert/statsmodels,nvoron23/statsmodels,waynenilsen/statsmodels,gef756/statsmodels,statsmodels/statsmodels,bert9bert/statsmodels,saketkc/statsmodels,gef756/statsmodels,DonBeo/statsmodels,Averroes/statsmodels,josef-pkt/statsmodels,yl565/statsmodels,rgommers/statsmodels,gef756/statsmodels,cbmoore/statsmodels,edhuckle/statsmodels,wzbozon/statsmodels,jseabold/statsmodels,bashtage/statsmodels,ChadFulton/statsmodels,jseabold/statsmodels,rgommers/statsmodels,wdurhamh/statsmodels,waynenilsen/statsmodels,detrout/debian-statsmodels,huongttlan/statsmodels,cbmoore/statsmodels,edhuckle/statsmodels,bava
rdage/statsmodels,bavardage/statsmodels,josef-pkt/statsmodels,jstoxrocky/statsmodels,musically-ut/statsmodels,Averroes/statsmodels,DonBeo/statsmodels,rgommers/statsmodels,phobson/statsmodels,hlin117/statsmodels,nvoron23/statsmodels
import os, glob, string, shutil from distutils.core import setup # Packages packages = ['neuroimaging', 'neuroimaging.statistics', 'neuroimaging.image', 'neuroimaging.reference', 'neuroimaging.data', 'neuroimaging.image.formats', 'neuroimaging.image.formats.analyze', 'neuroimaging.fmri', 'neuroimaging.fmri.fmristat', 'neuroimaging.visualization', 'neuroimaging.visualization.cmap'] def main(): setup (name = 'neuroimaging', version = '0.01a', description = 'This is a neuroimaging python package', author = 'Various, one of whom is Jonathan Taylor', author_email = 'jonathan.taylor@stanford.edu', ext_package = 'neuroimaging', packages=packages, package_dir = {'neuroimaging': 'lib'}, url = 'http://neuroimaging.scipy.org', long_description = ''' ''') if __name__ == "__main__": main()
Test edit - to check svn email hook from distutils.core import setup import os, glob, string, shutil # Packages packages = ['neuroimaging', 'neuroimaging.statistics', 'neuroimaging.image', 'neuroimaging.reference', 'neuroimaging.data', 'neuroimaging.image.formats', 'neuroimaging.image.formats.analyze', 'neuroimaging.fmri', 'neuroimaging.fmri.fmristat', 'neuroimaging.visualization', 'neuroimaging.visualization.cmap'] def main(): setup (name = 'neuroimaging', version = '0.01a', description = 'This is a neuroimaging python package', author = 'Various, one of whom is Jonathan Taylor', author_email = 'jonathan.taylor@stanford.edu', ext_package = 'neuroimaging', packages=packages, package_dir = {'neuroimaging': 'lib'}, url = 'http://neuroimaging.scipy.org', long_description = ''' ''') if __name__ == "__main__": main()
47dedd31b9ee0f768ca3f9f781133458ddc99f4f
setup.py
setup.py
from setuptools import setup name = 'turbasen' VERSION = '2.5.0' setup( name=name, packages=[name], version=VERSION, description='Client for Nasjonal Turbase REST API', long_description='Documentation: https://turbasenpy.readthedocs.io/', author='Ali Kaafarani', author_email='ali.kaafarani@dnt.no', url='https://github.com/Turbasen/turbasen.py', download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION), keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'], license='MIT', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: Norwegian', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', ], install_requires=['requests>=2.10.0,<3'], extras_require={ 'dev': ['ipython', 'flake8'], } )
from setuptools import setup name = 'turbasen' VERSION = '2.5.0' setup( name=name, packages=[name], version=VERSION, description='Client for Nasjonal Turbase REST API', long_description='Documentation: https://turbasenpy.readthedocs.io/', author='Ali Kaafarani', author_email='ali.kaafarani@dnt.no', url='https://github.com/Turbasen/turbasen.py', download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION), keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'], license='MIT', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: Norwegian', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', ], install_requires=['requests>=2.10.0,<3'], extras_require={ 'dev': ['sphinx', 'ipython', 'flake8'], } )
Add sphinx to dev requirements
Add sphinx to dev requirements
Python
mit
Turbasen/turbasen.py
from setuptools import setup name = 'turbasen' VERSION = '2.5.0' setup( name=name, packages=[name], version=VERSION, description='Client for Nasjonal Turbase REST API', long_description='Documentation: https://turbasenpy.readthedocs.io/', author='Ali Kaafarani', author_email='ali.kaafarani@dnt.no', url='https://github.com/Turbasen/turbasen.py', download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION), keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'], license='MIT', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: Norwegian', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', ], install_requires=['requests>=2.10.0,<3'], extras_require={ 'dev': ['sphinx', 'ipython', 'flake8'], } )
Add sphinx to dev requirements from setuptools import setup name = 'turbasen' VERSION = '2.5.0' setup( name=name, packages=[name], version=VERSION, description='Client for Nasjonal Turbase REST API', long_description='Documentation: https://turbasenpy.readthedocs.io/', author='Ali Kaafarani', author_email='ali.kaafarani@dnt.no', url='https://github.com/Turbasen/turbasen.py', download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION), keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'], license='MIT', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: Norwegian', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', ], install_requires=['requests>=2.10.0,<3'], extras_require={ 'dev': ['ipython', 'flake8'], } )
01d3027e568bcd191e7e25337c6597eb75b82789
setup.py
setup.py
#!/usr/bin/env python3 from setuptools import setup setup( name='todoman', description='A simple CalDav-based todo manager.', author='Hugo Osvaldo Barrera', author_email='hugo@barrera.io', url='https://github.com/pimutils/todoman', license='MIT', packages=['todoman'], entry_points={ 'console_scripts': [ 'todo = todoman.cli:cli', ] }, install_requires=[ open('requirements.txt').readlines() ], long_description=open('README.rst').read(), use_scm_version={ 'version_scheme': 'post-release', 'write_to': 'todoman/version.py', }, setup_requires=['setuptools_scm != 1.12.0', 'pytest-runner'], tests_require=['pytest'], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Environment :: Console :: Curses', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Topic :: Office/Business :: Scheduling', 'Topic :: Utilities', ] )
#!/usr/bin/env python3 from setuptools import setup setup( name='todoman', description='A simple CalDav-based todo manager.', author='Hugo Osvaldo Barrera', author_email='hugo@barrera.io', url='https://github.com/pimutils/todoman', license='MIT', packages=['todoman'], entry_points={ 'console_scripts': [ 'todo = todoman.cli:cli', ] }, install_requires=[ open('requirements.txt').readlines() ], long_description=open('README.rst').read(), use_scm_version={ 'version_scheme': 'post-release', 'write_to': 'todoman/version.py', }, setup_requires=['setuptools_scm != 1.12.0', 'pytest-runner'], tests_require=['pytest'], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Environment :: Console :: Curses', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Topic :: Office/Business :: Scheduling', 'Topic :: Utilities', ] )
Add classifiers for supported python versions
Add classifiers for supported python versions
Python
isc
Sakshisaraswat/todoman,AnubhaAgrawal/todoman,hobarrera/todoman,pimutils/todoman,asalminen/todoman,rimshaakhan/todoman
#!/usr/bin/env python3 from setuptools import setup setup( name='todoman', description='A simple CalDav-based todo manager.', author='Hugo Osvaldo Barrera', author_email='hugo@barrera.io', url='https://github.com/pimutils/todoman', license='MIT', packages=['todoman'], entry_points={ 'console_scripts': [ 'todo = todoman.cli:cli', ] }, install_requires=[ open('requirements.txt').readlines() ], long_description=open('README.rst').read(), use_scm_version={ 'version_scheme': 'post-release', 'write_to': 'todoman/version.py', }, setup_requires=['setuptools_scm != 1.12.0', 'pytest-runner'], tests_require=['pytest'], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Environment :: Console :: Curses', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Topic :: Office/Business :: Scheduling', 'Topic :: Utilities', ] )
Add classifiers for supported python versions #!/usr/bin/env python3 from setuptools import setup setup( name='todoman', description='A simple CalDav-based todo manager.', author='Hugo Osvaldo Barrera', author_email='hugo@barrera.io', url='https://github.com/pimutils/todoman', license='MIT', packages=['todoman'], entry_points={ 'console_scripts': [ 'todo = todoman.cli:cli', ] }, install_requires=[ open('requirements.txt').readlines() ], long_description=open('README.rst').read(), use_scm_version={ 'version_scheme': 'post-release', 'write_to': 'todoman/version.py', }, setup_requires=['setuptools_scm != 1.12.0', 'pytest-runner'], tests_require=['pytest'], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Environment :: Console :: Curses', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Topic :: Office/Business :: Scheduling', 'Topic :: Utilities', ] )
8147dab8fffb8d9d9753009f43b27afc1729febc
setup.py
setup.py
from setuptools import setup, find_packages import os setup( name="cpgintegrate", version="0.2.17-SNAPSHOT", packages=find_packages(), include_package_data=True, install_requires=[ 'requests>=2.18.4', 'pandas>=0.23.0', 'xlrd', 'sqlalchemy>=1.0', 'beautifulsoup4', 'lxml<4.0', 'numpy', 'scipy', ], extras_require={'dev': [ 'pytest>=3.2.2', 'apache-airflow>=1.10.0', ], 'win_auto': [ 'pywinauto', 'patool', ], }, data_files=[ (os.path.join(os.environ.get('AIRFLOW_HOME', 'airflow'), 'plugins'), ['cpgintegrate/airflow/cpg_airflow_plugin.py']) ], )
from setuptools import setup, find_packages import os setup( name="cpgintegrate", version="0.2.17", packages=find_packages(), include_package_data=True, install_requires=[ 'requests>=2.18.4', 'pandas>=0.23.0', 'xlrd', 'sqlalchemy>=1.0', 'beautifulsoup4', 'lxml<5.0', 'numpy', 'scipy', ], extras_require={'dev': [ 'pytest>=3.2.2', 'apache-airflow>=1.10.0', ], 'win_auto': [ 'pywinauto', 'patool', ], }, data_files=[ (os.path.join(os.environ.get('AIRFLOW_HOME', 'airflow'), 'plugins'), ['cpgintegrate/airflow/cpg_airflow_plugin.py']) ], )
Bump version, allow newer lxml
Bump version, allow newer lxml
Python
agpl-3.0
PointyShinyBurning/cpgintegrate
from setuptools import setup, find_packages import os setup( name="cpgintegrate", version="0.2.17", packages=find_packages(), include_package_data=True, install_requires=[ 'requests>=2.18.4', 'pandas>=0.23.0', 'xlrd', 'sqlalchemy>=1.0', 'beautifulsoup4', 'lxml<5.0', 'numpy', 'scipy', ], extras_require={'dev': [ 'pytest>=3.2.2', 'apache-airflow>=1.10.0', ], 'win_auto': [ 'pywinauto', 'patool', ], }, data_files=[ (os.path.join(os.environ.get('AIRFLOW_HOME', 'airflow'), 'plugins'), ['cpgintegrate/airflow/cpg_airflow_plugin.py']) ], )
Bump version, allow newer lxml from setuptools import setup, find_packages import os setup( name="cpgintegrate", version="0.2.17-SNAPSHOT", packages=find_packages(), include_package_data=True, install_requires=[ 'requests>=2.18.4', 'pandas>=0.23.0', 'xlrd', 'sqlalchemy>=1.0', 'beautifulsoup4', 'lxml<4.0', 'numpy', 'scipy', ], extras_require={'dev': [ 'pytest>=3.2.2', 'apache-airflow>=1.10.0', ], 'win_auto': [ 'pywinauto', 'patool', ], }, data_files=[ (os.path.join(os.environ.get('AIRFLOW_HOME', 'airflow'), 'plugins'), ['cpgintegrate/airflow/cpg_airflow_plugin.py']) ], )
ab63395c1d8c9ec6bce13811965c8335463b0b78
setup.py
setup.py
from distutils.core import setup, Extension setup(name = "Indexer", version = "0.1", ext_modules = [Extension("rabin", ["src/rabin.c", ])])
from distutils.core import setup, Extension import os os.environ['CFLAGS'] = "-Qunused-arguments" setup(name = "Indexer", version = "0.1", ext_modules = [Extension("rabin", ["src/rabin.c", ])])
Fix compile error on OS X 10.9
Fix compile error on OS X 10.9
Python
apache-2.0
pombredanne/python-rabin-fingerprint,pombredanne/python-rabin-fingerprint,cschwede/python-rabin-fingerprint,cschwede/python-rabin-fingerprint
from distutils.core import setup, Extension import os os.environ['CFLAGS'] = "-Qunused-arguments" setup(name = "Indexer", version = "0.1", ext_modules = [Extension("rabin", ["src/rabin.c", ])])
Fix compile error on OS X 10.9 from distutils.core import setup, Extension setup(name = "Indexer", version = "0.1", ext_modules = [Extension("rabin", ["src/rabin.c", ])])
638b8be8a07262803c087e796e40a51858c08983
__init__.py
__init__.py
from . import LayerView def getMetaData(): return { "name": "LayerView", "type": "View" } def register(app): return LayerView.LayerView()
from . import LayerView def getMetaData(): return { 'type': 'view', 'plugin': { "name": "Layer View" }, 'view': { 'name': 'Layers' } } def register(app): return LayerView.LayerView()
Update plugin metadata to the new format
Update plugin metadata to the new format
Python
agpl-3.0
totalretribution/Cura,markwal/Cura,quillford/Cura,DeskboxBrazil/Cura,lo0ol/Ultimaker-Cura,senttech/Cura,bq/Ultimaker-Cura,ad1217/Cura,fieldOfView/Cura,fieldOfView/Cura,DeskboxBrazil/Cura,Curahelper/Cura,Curahelper/Cura,hmflash/Cura,bq/Ultimaker-Cura,hmflash/Cura,markwal/Cura,quillford/Cura,derekhe/Cura,totalretribution/Cura,lo0ol/Ultimaker-Cura,ynotstartups/Wanhao,fxtentacle/Cura,fxtentacle/Cura,senttech/Cura,ynotstartups/Wanhao,derekhe/Cura,ad1217/Cura
from . import LayerView def getMetaData(): return { 'type': 'view', 'plugin': { "name": "Layer View" }, 'view': { 'name': 'Layers' } } def register(app): return LayerView.LayerView()
Update plugin metadata to the new format from . import LayerView def getMetaData(): return { "name": "LayerView", "type": "View" } def register(app): return LayerView.LayerView()
ca6891f3b867fd691c0b682566ffec1fd7f0ac2a
pryvate/blueprints/simple/simple.py
pryvate/blueprints/simple/simple.py
"""Simple blueprint.""" import os from flask import Blueprint, current_app, make_response, render_template blueprint = Blueprint('simple', __name__, url_prefix='/simple', template_folder='templates') @blueprint.route('', methods=['POST']) def search_simple(): """Handling pip search.""" return make_response('Not implemented', 501) @blueprint.route('', methods=['GET']) def get_simple(): """List all packages.""" packages = os.listdir(current_app.config['BASEDIR']) return render_template('simple.html', packages=packages) @blueprint.route('/<package>', methods=['GET']) @blueprint.route('/<package>/', methods=['GET']) def get_package(package): """List versions of a package.""" package_path = os.path.join(current_app.config['BASEDIR'], package.lower()) files = os.listdir(package_path) packages = [] for filename in files: if filename.endswith('md5'): with open(os.path.join(package_path, filename), 'r') as md5_digest: item = { 'name': package, 'version': filename.replace('.md5', ''), 'digest': md5_digest.read() } packages.append(item) return render_template('simple_package.html', packages=packages, letter=package[:1].lower())
"""Simple blueprint.""" import os from flask import Blueprint, current_app, make_response, render_template blueprint = Blueprint('simple', __name__, url_prefix='/simple', template_folder='templates') @blueprint.route('', methods=['POST']) def search_simple(): """Handling pip search.""" return make_response('Not implemented', 501) @blueprint.route('', methods=['GET']) def get_simple(): """List all packages.""" packages = os.listdir(current_app.config['BASEDIR']) return render_template('simple.html', packages=packages) @blueprint.route('/<package>', methods=['GET']) @blueprint.route('/<package>/', methods=['GET']) def get_package(package): """List versions of a package.""" package_path = os.path.join(current_app.config['BASEDIR'], package.lower()) if os.path.isdir(package_path): files = os.listdir(package_path) packages = [] for filename in files: if filename.endswith('md5'): digest_file = os.path.join(package_path, filename) with open(digest_file, 'r') as md5_digest: item = { 'name': package, 'version': filename.replace('.md5', ''), 'digest': md5_digest.read() } packages.append(item) return render_template('simple_package.html', packages=packages, letter=package[:1].lower()) else: return make_response('404', 404)
Return 404 if package was not found instead of raising an exception
Return 404 if package was not found instead of raising an exception
Python
mit
Dinoshauer/pryvate,Dinoshauer/pryvate
"""Simple blueprint.""" import os from flask import Blueprint, current_app, make_response, render_template blueprint = Blueprint('simple', __name__, url_prefix='/simple', template_folder='templates') @blueprint.route('', methods=['POST']) def search_simple(): """Handling pip search.""" return make_response('Not implemented', 501) @blueprint.route('', methods=['GET']) def get_simple(): """List all packages.""" packages = os.listdir(current_app.config['BASEDIR']) return render_template('simple.html', packages=packages) @blueprint.route('/<package>', methods=['GET']) @blueprint.route('/<package>/', methods=['GET']) def get_package(package): """List versions of a package.""" package_path = os.path.join(current_app.config['BASEDIR'], package.lower()) if os.path.isdir(package_path): files = os.listdir(package_path) packages = [] for filename in files: if filename.endswith('md5'): digest_file = os.path.join(package_path, filename) with open(digest_file, 'r') as md5_digest: item = { 'name': package, 'version': filename.replace('.md5', ''), 'digest': md5_digest.read() } packages.append(item) return render_template('simple_package.html', packages=packages, letter=package[:1].lower()) else: return make_response('404', 404)
Return 404 if package was not found instead of raising an exception """Simple blueprint.""" import os from flask import Blueprint, current_app, make_response, render_template blueprint = Blueprint('simple', __name__, url_prefix='/simple', template_folder='templates') @blueprint.route('', methods=['POST']) def search_simple(): """Handling pip search.""" return make_response('Not implemented', 501) @blueprint.route('', methods=['GET']) def get_simple(): """List all packages.""" packages = os.listdir(current_app.config['BASEDIR']) return render_template('simple.html', packages=packages) @blueprint.route('/<package>', methods=['GET']) @blueprint.route('/<package>/', methods=['GET']) def get_package(package): """List versions of a package.""" package_path = os.path.join(current_app.config['BASEDIR'], package.lower()) files = os.listdir(package_path) packages = [] for filename in files: if filename.endswith('md5'): with open(os.path.join(package_path, filename), 'r') as md5_digest: item = { 'name': package, 'version': filename.replace('.md5', ''), 'digest': md5_digest.read() } packages.append(item) return render_template('simple_package.html', packages=packages, letter=package[:1].lower())
995f06a33bf92dcff185a50f84743323170a8b7a
setup.py
setup.py
from setuptools import setup, find_packages long_description = ( open('README.rst').read() + '\n' + open('CHANGES.txt').read()) tests_require = [ 'pytest >= 2.0', 'pytest-cov', 'WebTest >= 2.0.14', 'mock', ] setup( name='bowerstatic', version='0.10.dev0', description="A Bower-centric static file server for WSGI", long_description=long_description, author="Martijn Faassen", author_email="faassen@startifact.com", license="BSD", url='http://bowerstatic.readthedocs.org', keywords='wsgi bower', packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=[ 'setuptools', 'WebOb', ], tests_require=tests_require, extras_require=dict( test=tests_require, ) )
import io from setuptools import setup, find_packages long_description = '\n'.join(( io.open('README.rst', encoding='utf-8').read(), io.open('CHANGES.txt', encoding='utf-8').read() )) tests_require = [ 'pytest >= 2.0', 'pytest-cov', 'WebTest >= 2.0.14', 'mock', ] setup( name='bowerstatic', version='0.10.dev0', description="A Bower-centric static file server for WSGI", long_description=long_description, author="Martijn Faassen", author_email="faassen@startifact.com", license="BSD", url='http://bowerstatic.readthedocs.org', keywords='wsgi bower', packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=[ 'setuptools', 'WebOb', ], tests_require=tests_require, extras_require=dict( test=tests_require, ) )
Use io.open with encoding='utf-8' and flake8 compliance
Use io.open with encoding='utf-8' and flake8 compliance
Python
bsd-3-clause
faassen/bowerstatic,faassen/bowerstatic
import io from setuptools import setup, find_packages long_description = '\n'.join(( io.open('README.rst', encoding='utf-8').read(), io.open('CHANGES.txt', encoding='utf-8').read() )) tests_require = [ 'pytest >= 2.0', 'pytest-cov', 'WebTest >= 2.0.14', 'mock', ] setup( name='bowerstatic', version='0.10.dev0', description="A Bower-centric static file server for WSGI", long_description=long_description, author="Martijn Faassen", author_email="faassen@startifact.com", license="BSD", url='http://bowerstatic.readthedocs.org', keywords='wsgi bower', packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=[ 'setuptools', 'WebOb', ], tests_require=tests_require, extras_require=dict( test=tests_require, ) )
Use io.open with encoding='utf-8' and flake8 compliance from setuptools import setup, find_packages long_description = ( open('README.rst').read() + '\n' + open('CHANGES.txt').read()) tests_require = [ 'pytest >= 2.0', 'pytest-cov', 'WebTest >= 2.0.14', 'mock', ] setup( name='bowerstatic', version='0.10.dev0', description="A Bower-centric static file server for WSGI", long_description=long_description, author="Martijn Faassen", author_email="faassen@startifact.com", license="BSD", url='http://bowerstatic.readthedocs.org', keywords='wsgi bower', packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=[ 'setuptools', 'WebOb', ], tests_require=tests_require, extras_require=dict( test=tests_require, ) )
3520217e38849ad18b11245c6cac51d79db8422d
pytablereader/loadermanager/_base.py
pytablereader/loadermanager/_base.py
# encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ from __future__ import absolute_import from ..interface import TableLoaderInterface class TableLoaderManager(TableLoaderInterface): def __init__(self, loader): self.__loader = loader @property def loader(self): return self.__loader @property def format_name(self): return self.__loader.format_name @property def source_type(self): return self.__loader.source_type @property def encoding(self): try: return self.__loader.encoding except AttributeError: return None @encoding.setter def encoding(self, codec_name): self.__loader.encoding = codec_name def load(self): return self.__loader.load() def inc_table_count(self): self.__loader.inc_table_count()
# encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ from __future__ import absolute_import from ..interface import TableLoaderInterface class TableLoaderManager(TableLoaderInterface): def __init__(self, loader): self.__loader = loader @property def loader(self): return self.__loader @property def format_name(self): return self.__loader.format_name @property def source_type(self): return self.__loader.source_type @property def table_name(self): return self.__loader.table_name @table_name.setter def table_name(self, value): self.__loader.table_name = value @property def encoding(self): try: return self.__loader.encoding except AttributeError: return None @encoding.setter def encoding(self, codec_name): self.__loader.encoding = codec_name def load(self): return self.__loader.load() def inc_table_count(self): self.__loader.inc_table_count()
Add an interface to change table_name
Add an interface to change table_name
Python
mit
thombashi/pytablereader,thombashi/pytablereader,thombashi/pytablereader
# encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ from __future__ import absolute_import from ..interface import TableLoaderInterface class TableLoaderManager(TableLoaderInterface): def __init__(self, loader): self.__loader = loader @property def loader(self): return self.__loader @property def format_name(self): return self.__loader.format_name @property def source_type(self): return self.__loader.source_type @property def table_name(self): return self.__loader.table_name @table_name.setter def table_name(self, value): self.__loader.table_name = value @property def encoding(self): try: return self.__loader.encoding except AttributeError: return None @encoding.setter def encoding(self, codec_name): self.__loader.encoding = codec_name def load(self): return self.__loader.load() def inc_table_count(self): self.__loader.inc_table_count()
Add an interface to change table_name # encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ from __future__ import absolute_import from ..interface import TableLoaderInterface class TableLoaderManager(TableLoaderInterface): def __init__(self, loader): self.__loader = loader @property def loader(self): return self.__loader @property def format_name(self): return self.__loader.format_name @property def source_type(self): return self.__loader.source_type @property def encoding(self): try: return self.__loader.encoding except AttributeError: return None @encoding.setter def encoding(self, codec_name): self.__loader.encoding = codec_name def load(self): return self.__loader.load() def inc_table_count(self): self.__loader.inc_table_count()
72a573c24d5234003b9eeb9e0cc487d174908a2e
typeahead_search/trie.py
typeahead_search/trie.py
"""A Trie (prefix tree) class for use in typeahead search. Every node in the TypeaheadSearchTrie is another TypeaheadSearchTrie instance. """ from weakref import WeakSet class TypeaheadSearchTrie(object): def __init__(self): # The children of this node. Because ordered traversals are not # important, these are stored in a dictionary. self.children = {} # Data entries associated with the word stored in the path to # this node. Stored in a WeakSet so that entries disappear # automatically when data entries are deleted. self.entries = WeakSet() def add(self, word, entry): """Adds the given data entry to the given Trie word. The word is created in the Trie if it doesn't already exist. """ if word: self.children.setdefault( word[0], TypeaheadSearchTrie() ).add(word[1:], entry) else: self.entries.add(entry) def search(self, word): """Return a set of all data entries represented by prefix `word`. Returns an empty set if this prefix is not in the Trie. """ if word: try: return self.children[word[0]].search(word[1:]) except KeyError: return set() else: return self.gather_entries() def gather_entries(self): """Gather all data entries stored in this node and its children.""" return set(self.entries).update( child.gather_entries() for child in self.children.itervalues() )
Add a Trie for storage of data string tokens.
[typeahead_search] Add a Trie for storage of data string tokens.
Python
mit
geekofalltrades/quora-coding-challenges
"""A Trie (prefix tree) class for use in typeahead search. Every node in the TypeaheadSearchTrie is another TypeaheadSearchTrie instance. """ from weakref import WeakSet class TypeaheadSearchTrie(object): def __init__(self): # The children of this node. Because ordered traversals are not # important, these are stored in a dictionary. self.children = {} # Data entries associated with the word stored in the path to # this node. Stored in a WeakSet so that entries disappear # automatically when data entries are deleted. self.entries = WeakSet() def add(self, word, entry): """Adds the given data entry to the given Trie word. The word is created in the Trie if it doesn't already exist. """ if word: self.children.setdefault( word[0], TypeaheadSearchTrie() ).add(word[1:], entry) else: self.entries.add(entry) def search(self, word): """Return a set of all data entries represented by prefix `word`. Returns an empty set if this prefix is not in the Trie. """ if word: try: return self.children[word[0]].search(word[1:]) except KeyError: return set() else: return self.gather_entries() def gather_entries(self): """Gather all data entries stored in this node and its children.""" return set(self.entries).update( child.gather_entries() for child in self.children.itervalues() )
[typeahead_search] Add a Trie for storage of data string tokens.