commit
stringlengths 40
40
| old_file
stringlengths 5
117
| new_file
stringlengths 5
117
| old_contents
stringlengths 0
1.93k
| new_contents
stringlengths 19
3.3k
| subject
stringlengths 17
320
| message
stringlengths 18
3.28k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 7
42.4k
| completion
stringlengths 19
3.3k
| prompt
stringlengths 21
3.65k
|
---|---|---|---|---|---|---|---|---|---|---|---|
b77e8f9a081517701cccf9f177c81eaca877e8c7 | pombola/images/admin.py | pombola/images/admin.py | from django.contrib import admin
from django.contrib.contenttypes.generic import GenericTabularInline
from sorl.thumbnail import get_thumbnail
from sorl.thumbnail.admin import AdminImageMixin
from pombola.images import models
class ImageAdmin(AdminImageMixin, admin.ModelAdmin):
list_display = [ 'thumbnail', 'content_object', 'is_primary', 'source', ]
def thumbnail(self, obj):
im = get_thumbnail(obj.image, '100x100')
return '<img src="%s" />' % ( im.url )
thumbnail.allow_tags = True
class ImageAdminInline(AdminImageMixin, GenericTabularInline):
model = models.Image
extra = 0
can_delete = True
admin.site.register( models.Image, ImageAdmin )
| from django.contrib import admin
from django.contrib.contenttypes.generic import GenericTabularInline
from sorl.thumbnail import get_thumbnail
from sorl.thumbnail.admin import AdminImageMixin
from pombola.images import models
class ImageAdmin(AdminImageMixin, admin.ModelAdmin):
list_display = [ 'thumbnail', 'content_object', 'is_primary', 'source', ]
def thumbnail(self, obj):
if obj.image:
im = get_thumbnail(obj.image, '100x100')
return '<img src="%s" />' % ( im.url )
else:
return "NO IMAGE FOUND"
thumbnail.allow_tags = True
class ImageAdminInline(AdminImageMixin, GenericTabularInline):
model = models.Image
extra = 0
can_delete = True
admin.site.register( models.Image, ImageAdmin )
| Handle entries that have no image associated with them | Handle entries that have no image associated with them
| Python | agpl-3.0 | ken-muturi/pombola,mysociety/pombola,geoffkilpin/pombola,hzj123/56th,ken-muturi/pombola,patricmutwiri/pombola,geoffkilpin/pombola,ken-muturi/pombola,mysociety/pombola,ken-muturi/pombola,mysociety/pombola,hzj123/56th,mysociety/pombola,patricmutwiri/pombola,patricmutwiri/pombola,geoffkilpin/pombola,hzj123/56th,ken-muturi/pombola,ken-muturi/pombola,patricmutwiri/pombola,geoffkilpin/pombola,hzj123/56th,patricmutwiri/pombola,patricmutwiri/pombola,hzj123/56th,geoffkilpin/pombola,mysociety/pombola,geoffkilpin/pombola,mysociety/pombola,hzj123/56th | from django.contrib import admin
from django.contrib.contenttypes.generic import GenericTabularInline
from sorl.thumbnail import get_thumbnail
from sorl.thumbnail.admin import AdminImageMixin
from pombola.images import models
class ImageAdmin(AdminImageMixin, admin.ModelAdmin):
list_display = [ 'thumbnail', 'content_object', 'is_primary', 'source', ]
def thumbnail(self, obj):
if obj.image:
im = get_thumbnail(obj.image, '100x100')
return '<img src="%s" />' % ( im.url )
else:
return "NO IMAGE FOUND"
thumbnail.allow_tags = True
class ImageAdminInline(AdminImageMixin, GenericTabularInline):
model = models.Image
extra = 0
can_delete = True
admin.site.register( models.Image, ImageAdmin )
| Handle entries that have no image associated with them
from django.contrib import admin
from django.contrib.contenttypes.generic import GenericTabularInline
from sorl.thumbnail import get_thumbnail
from sorl.thumbnail.admin import AdminImageMixin
from pombola.images import models
class ImageAdmin(AdminImageMixin, admin.ModelAdmin):
list_display = [ 'thumbnail', 'content_object', 'is_primary', 'source', ]
def thumbnail(self, obj):
im = get_thumbnail(obj.image, '100x100')
return '<img src="%s" />' % ( im.url )
thumbnail.allow_tags = True
class ImageAdminInline(AdminImageMixin, GenericTabularInline):
model = models.Image
extra = 0
can_delete = True
admin.site.register( models.Image, ImageAdmin )
|
aaaaad4ea3109406268471b6605eb6078848db0d | falcom/api/uri/fake_mapping.py | falcom/api/uri/fake_mapping.py | # Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
class FakeMappingThatRecordsAccessions:
def __init__ (self):
self.__set = set()
def __getitem__ (self, key):
self.__set.add(key)
return 0
def get_set (self):
return self.__set
def check_on_format_str (self, format_str):
format_str.format_map(self)
| # Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
class FakeMappingThatRecordsAccessions:
def __init__ (self):
self.__set = set()
def __getitem__ (self, key):
self.__set.add(key)
return 0
def get_set (self):
return self.__set
def check_on_format_str (self, format_str):
format_str.format_map(self)
def get_expected_args_from_format_str (format_str):
mapping = FakeMappingThatRecordsAccessions()
format_str.format_map(mapping)
return mapping.get_set()
| Write function for getting expected args | Write function for getting expected args
| Python | bsd-3-clause | mlibrary/image-conversion-and-validation,mlibrary/image-conversion-and-validation | # Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
class FakeMappingThatRecordsAccessions:
def __init__ (self):
self.__set = set()
def __getitem__ (self, key):
self.__set.add(key)
return 0
def get_set (self):
return self.__set
def check_on_format_str (self, format_str):
format_str.format_map(self)
def get_expected_args_from_format_str (format_str):
mapping = FakeMappingThatRecordsAccessions()
format_str.format_map(mapping)
return mapping.get_set()
| Write function for getting expected args
# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
class FakeMappingThatRecordsAccessions:
def __init__ (self):
self.__set = set()
def __getitem__ (self, key):
self.__set.add(key)
return 0
def get_set (self):
return self.__set
def check_on_format_str (self, format_str):
format_str.format_map(self)
|
6fe0966cc6c0532ee6d0076ff6306ceda3418928 | bluebottle/homepage/model.py | bluebottle/homepage/model.py | from bluebottle.slides.models import Slide
from bluebottle.projects import get_project_model
from bluebottle.quotes.models import Quote
PROJECT_MODEL = get_project_model()
class HomePage(object):
def get(self, language):
self.id = 1
self.quotes = Quote.objects.published().filter(language=language)
self.slides = Slide.objects.published().filter(language=language)
projects = PROJECT_MODEL.objects.filter(status__viewable=True, favorite=True).order_by('?')
if len(projects) > 4:
self.projects = projects[0:4]
elif len(projects) > 0:
self.projects = projects[0:len(projects)]
else:
self.projects = None
return self
| from bluebottle.slides.models import Slide
from bluebottle.projects import get_project_model
from bluebottle.quotes.models import Quote
PROJECT_MODEL = get_project_model()
class HomePage(object):
def get(self, language):
# FIXME: Hack to make sure quotes and slides load.
if language == 'en-us':
language = 'en_US'
self.id = 1
self.quotes = Quote.objects.published().filter(language=language)
self.slides = Slide.objects.published().filter(language=language)
projects = PROJECT_MODEL.objects.filter(status__viewable=True).order_by('?')
if len(projects) > 4:
self.projects = projects[0:4]
elif len(projects) > 0:
self.projects = projects[0:len(projects)]
else:
self.projects = None
return self
| Fix Slides & Quotes for home | Fix Slides & Quotes for home
| Python | bsd-3-clause | jfterpstra/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle | from bluebottle.slides.models import Slide
from bluebottle.projects import get_project_model
from bluebottle.quotes.models import Quote
PROJECT_MODEL = get_project_model()
class HomePage(object):
def get(self, language):
# FIXME: Hack to make sure quotes and slides load.
if language == 'en-us':
language = 'en_US'
self.id = 1
self.quotes = Quote.objects.published().filter(language=language)
self.slides = Slide.objects.published().filter(language=language)
projects = PROJECT_MODEL.objects.filter(status__viewable=True).order_by('?')
if len(projects) > 4:
self.projects = projects[0:4]
elif len(projects) > 0:
self.projects = projects[0:len(projects)]
else:
self.projects = None
return self
| Fix Slides & Quotes for home
from bluebottle.slides.models import Slide
from bluebottle.projects import get_project_model
from bluebottle.quotes.models import Quote
PROJECT_MODEL = get_project_model()
class HomePage(object):
def get(self, language):
self.id = 1
self.quotes = Quote.objects.published().filter(language=language)
self.slides = Slide.objects.published().filter(language=language)
projects = PROJECT_MODEL.objects.filter(status__viewable=True, favorite=True).order_by('?')
if len(projects) > 4:
self.projects = projects[0:4]
elif len(projects) > 0:
self.projects = projects[0:len(projects)]
else:
self.projects = None
return self
|
0ae360b675f2dd0b3607af1bc7b72864e43236b2 | userreport/settings_local.EXAMPLE.py | userreport/settings_local.EXAMPLE.py | # Fill in this file and save as settings_local.py
PROJECT_NAME = 'SuperTuxKart'
PROJECT_URL = 'http://supertuxkart.net/'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
# Add the name/ip of the server that is running the stats server
ALLOWED_HOSTS = ["api.stkaddons.net"]
ADMINS = (
('Your Name', 'you@example.com'),
)
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'stkstats',
'USER': 'stkstats_user',
'PASSWORD': '',
'HOST': 'localhost',
'PORT': '3306',
}
}
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '#################################################'
| # Fill in this file and save as settings_local.py
PROJECT_NAME = 'SuperTuxKart'
PROJECT_URL = 'http://supertuxkart.net/'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
# Add the name/ip of the server that is running the stats server
ALLOWED_HOSTS = ["addons.supertuxkart.net"]
ADMINS = (
('Your Name', 'you@example.com'),
)
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'stkstats',
'USER': 'stkstats_user',
'PASSWORD': '',
'HOST': 'localhost',
'PORT': '3306',
}
}
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '#################################################'
| Change default example for allowed hosts | Change default example for allowed hosts
| Python | mit | leyyin/stk-stats,supertuxkart/stk-stats,leyyin/stk-stats,supertuxkart/stk-stats | # Fill in this file and save as settings_local.py
PROJECT_NAME = 'SuperTuxKart'
PROJECT_URL = 'http://supertuxkart.net/'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
# Add the name/ip of the server that is running the stats server
ALLOWED_HOSTS = ["addons.supertuxkart.net"]
ADMINS = (
('Your Name', 'you@example.com'),
)
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'stkstats',
'USER': 'stkstats_user',
'PASSWORD': '',
'HOST': 'localhost',
'PORT': '3306',
}
}
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '#################################################'
| Change default example for allowed hosts
# Fill in this file and save as settings_local.py
PROJECT_NAME = 'SuperTuxKart'
PROJECT_URL = 'http://supertuxkart.net/'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
# Add the name/ip of the server that is running the stats server
ALLOWED_HOSTS = ["api.stkaddons.net"]
ADMINS = (
('Your Name', 'you@example.com'),
)
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'stkstats',
'USER': 'stkstats_user',
'PASSWORD': '',
'HOST': 'localhost',
'PORT': '3306',
}
}
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '#################################################'
|
f6013aa29fddf9883f8f0bea4b7733718b9d8846 | core/admin/migrations/versions/3f6994568962_.py | core/admin/migrations/versions/3f6994568962_.py | """ Add keep as an option in fetches
Revision ID: 3f6994568962
Revises: 2335c80a6bc3
Create Date: 2017-02-02 22:31:00.719703
"""
# revision identifiers, used by Alembic.
revision = '3f6994568962'
down_revision = '2335c80a6bc3'
from alembic import op
import sqlalchemy as sa
from mailu import app
fetch_table = sa.Table(
'fetch',
sa.MetaData(),
sa.Column('keep', sa.Boolean())
)
def upgrade():
connection = op.get_bind()
op.add_column('fetch', sa.Column('keep', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()))
# also apply the current config value if set
if app.config.get("FETCHMAIL_KEEP", "False") == "True":
connection.execute(
fetch_table.update().values(keep=True)
)
def downgrade():
with op.batch_alter_table('fetch') as batch:
batch.drop_column('keep')
| """ Add keep as an option in fetches
Revision ID: 3f6994568962
Revises: 2335c80a6bc3
Create Date: 2017-02-02 22:31:00.719703
"""
# revision identifiers, used by Alembic.
revision = '3f6994568962'
down_revision = '2335c80a6bc3'
from alembic import op
import sqlalchemy as sa
fetch_table = sa.Table(
'fetch',
sa.MetaData(),
sa.Column('keep', sa.Boolean())
)
def upgrade():
op.add_column('fetch', sa.Column('keep', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()))
def downgrade():
with op.batch_alter_table('fetch') as batch:
batch.drop_column('keep')
| Fix an old migration that was reading configuration before migrating | Fix an old migration that was reading configuration before migrating
| Python | mit | kaiyou/freeposte.io,kaiyou/freeposte.io,kaiyou/freeposte.io,kaiyou/freeposte.io | """ Add keep as an option in fetches
Revision ID: 3f6994568962
Revises: 2335c80a6bc3
Create Date: 2017-02-02 22:31:00.719703
"""
# revision identifiers, used by Alembic.
revision = '3f6994568962'
down_revision = '2335c80a6bc3'
from alembic import op
import sqlalchemy as sa
fetch_table = sa.Table(
'fetch',
sa.MetaData(),
sa.Column('keep', sa.Boolean())
)
def upgrade():
op.add_column('fetch', sa.Column('keep', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()))
def downgrade():
with op.batch_alter_table('fetch') as batch:
batch.drop_column('keep')
| Fix an old migration that was reading configuration before migrating
""" Add keep as an option in fetches
Revision ID: 3f6994568962
Revises: 2335c80a6bc3
Create Date: 2017-02-02 22:31:00.719703
"""
# revision identifiers, used by Alembic.
revision = '3f6994568962'
down_revision = '2335c80a6bc3'
from alembic import op
import sqlalchemy as sa
from mailu import app
fetch_table = sa.Table(
'fetch',
sa.MetaData(),
sa.Column('keep', sa.Boolean())
)
def upgrade():
connection = op.get_bind()
op.add_column('fetch', sa.Column('keep', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()))
# also apply the current config value if set
if app.config.get("FETCHMAIL_KEEP", "False") == "True":
connection.execute(
fetch_table.update().values(keep=True)
)
def downgrade():
with op.batch_alter_table('fetch') as batch:
batch.drop_column('keep')
|
b69f69b0e89a49b427292ec179e12bdee6fb4743 | samples/web/content/testrtc/testrtc.py | samples/web/content/testrtc/testrtc.py | #!/usr/bin/python2.4
#
# Copyright 2014 Google Inc. All Rights Reserved.
"""WebRTC Test
This module serves the WebRTC Test Page.
"""
import cgi
import logging
import os
import jinja2
import webapp2
jinja_environment = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
class MainPage(webapp2.RequestHandler):
"""The main UI page, renders the 'index.html' template."""
def get(self):
template = jinja_environment.get_template('index.html')
content = template.render({})
self.response.out.write(content)
app = webapp2.WSGIApplication([
('/', MainPage),
], debug=True)
| #!/usr/bin/python2.4
#
# Copyright 2014 Google Inc. All Rights Reserved.
"""WebRTC Test
This module serves the WebRTC Test Page.
"""
import cgi
import logging
import random
import os
import jinja2
import webapp2
jinja_environment = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
# Generate 10 kilobytes of random data and create a 10MB buffer from it.
random_file = bytearray([random.randint(0,127) for i in xrange(0,10000)] * 1000)
class MainPage(webapp2.RequestHandler):
"""The main UI page, renders the 'index.html' template."""
def get(self):
template = jinja_environment.get_template('index.html')
content = template.render({})
self.response.out.write(content)
class TestDownloadFile(webapp2.RequestHandler):
def get(self, size_kbytes):
self.response.headers.add_header("Access-Control-Allow-Origin", "*")
self.response.headers['Content-Type'] = 'application/octet-stream'
self.response.out.write(random_file[0: int(size_kbytes)*1000])
app = webapp2.WSGIApplication([
('/', MainPage),
(r'/test-download-file/(\d?\d00)KB.data', TestDownloadFile),
], debug=True)
| Add ability to download a random file with a given size from any domain. | Add ability to download a random file with a given size from any domain.
| Python | bsd-3-clause | xdumaine/adapter,mulyoved/samples,ralic/testrtc,82488059/apprtc,mauricionr/samples,smayoorans/samples,shelsonjava/testrtc,4lejandrito/adapter,TribeMedia/testrtc,YouthAndra/apprtc,JiYou/apprtc,fetterov/samples,82488059/apprtc,webrtc/samples,TribeMedia/samples,dengshaodong/docker-apprtc,4lejandrito/adapter,diddie06/webrtc,smbale/samples,jjrasche/cell-based-RC-control,bpyoung92/apprtc,todotobe1/samples,fippo/webrtc,virajs/samples,webrtc/testrtc,oliverhuangchao/samples,jjrasche/cell-based-RC-control,samdutton/webrtc,YouthAndra/apprtc,volkanh/volkanh.github.io,MahmoudFouad/samples,jjrasche/cell-based-RC-control,tsruban/samples,calebboyd/adapter,b-cuts/samples,jan-ivar/samples,tsruban/samples,Edward-Shawn/samples,JiYou/apprtc,shelsonjava/apprtc,procandi/apprtc,aadebuger/docker-apprtc,TribeMedia/apprtc,smadhusu/AppRTC,smbale/samples,jarl-alejandro/apprtc,shaohung001/samples,MahmoudFouad/samples,guoweis/webrtc,mvenkatesh431/apprtc,shelsonjava/apprtc,overtakermtg/samples,arnauorriols/apprtc,arnauorriols/apprtc,shaohung001/samples,oliverhuangchao/samples,jessetane/testrtc,todotobe1/apprtc,smadhusu/AppRTC,samdutton/webrtc,TribeMedia/testrtc,akashrchoksi/newone,shelsonjava/testrtc,dushmis/webrtc,bbandaru/samples,pquochoang/samples,myself659/samples,smadhusu/AppRTC,bemasc/samples,arnauorriols/apprtc,YouthAndra/apprtc,dengshaodong/docker-apprtc,samdutton/apprtc,kod3r/samples,martin7890/samples,webrtc/apprtc,jan-ivar/samples,TribeMedia/apprtc,fippo/apprtc,shelsonjava/samples,jarl-alejandro/apprtc,mvenkatesh431/apprtc,jarl-alejandro/apprtc,arnauorriols/apprtc,harme199497/adapter,mauricionr/samples,shelsonjava/apprtc,webrtc/adapter,TribeMedia/testrtc,82488059/apprtc,Edward-Shawn/samples,bpyoung92/apprtc,aadebuger/docker-apprtc,YouthAndra/apprtc,virajs/testrtc,Roarz/samples,pquochoang/samples,kod3r/samples,bbandaru/samples,Edward-Shawn/samples,todotobe1/samples,askdaddy/samples,4lejandrito/adapter,jiayliu/apprtc,xdumaine/samples,virajs/apprtc,dajise/samples,
martin7890/samples,bpyoung92/apprtc,Roarz/samples,webrtc/apprtc,arnauorriols/apprtc,mvenkatesh431/samples,jarl-alejandro/apprtc,knightsofaa/webrtc,mulyoved/samples,leehz/samples,mvenkatesh431/apprtc,fippo/apprtc,shelsonjava/samples,jiayliu/apprtc,akashrchoksi/newone,b-cuts/samples,Acidburn0zzz/adapter,todotobe1/apprtc,dajise/samples,calebboyd/adapter,virajs/apprtc,procandi/apprtc,smayoorans/samples,JiYou/apprtc,keshwans/samples,mvenkatesh431/apprtc,TribeMedia/samples,harme199497/adapter,fippo/webrtc,TheKnarf/apprtc,virajs/samples,EmreAkkoyun/sample,fitraditya/samples,overtakermtg/samples,xdumaine/samples,virajs/apprtc,YouthAndra/apprtc,TribeMedia/apprtc,Roarz/samples,ralic/testrtc,samdutton/apprtc,fitraditya/samples,Zauberstuhl/adapter,jessetane/testrtc,juberti/samples,jiayliu/apprtc,jessetane/testrtc,shelsonjava/testrtc,shelsonjava/samples,fetterov/samples,ralic/testrtc,Acidburn0zzz/adapter,jarl-alejandro/apprtc,fippo/apprtc,TribeMedia/testrtc,JiYou/apprtc,TribeMedia/apprtc,shines/adapter,smayoorans/samples,myself659/samples,bemasc/samples,jan-ivar/adapter,EmreAkkoyun/sample,oliverhuangchao/samples,virajs/testrtc,webrtc/testrtc,smadhusu/AppRTC,overtakermtg/samples,calebboyd/adapter,procandi/apprtc,pquochoang/samples,virajs/samples,TheKnarf/apprtc,mauricionr/samples,dajise/samples,martin7890/samples,bemasc/samples,Zauberstuhl/adapter,shelsonjava/apprtc,smadhusu/AppRTC,virajs/apprtc,jjrasche/cell-based-RC-control,samdutton/apprtc,leehz/samples,todotobe1/apprtc,juberti/samples,tsruban/samples,fippo/apprtc,xdumaine/samples,dengshaodong/docker-apprtc,dengshaodong/docker-apprtc,EmreAkkoyun/sample,bpyoung92/apprtc,b-cuts/samples,guoweis/webrtc,MahmoudFouad/samples,Acidburn0zzz/adapter,taylor-b/samples,leehz/samples,jessetane/testrtc,mvenkatesh431/apprtc,todotobe1/apprtc,smbale/samples,aadebuger/docker-apprtc,samdutton/apprtc,shines/adapter,guoweis/webrtc,aadebuger/docker-apprtc,procandi/apprtc,jiayliu/apprtc,procandi/samples,webrtc/apprtc,JiYou/apprtc,mvenkatesh431/sample
s,harme199497/adapter,TheKnarf/apprtc,TheKnarf/apprtc,keshwans/samples,procandi/samples,jan-ivar/adapter,mvenkatesh431/samples,bbandaru/samples,82488059/apprtc,shelsonjava/apprtc,samdutton/apprtc,webrtc/adapter,volkanh/volkanh.github.io,TribeMedia/apprtc,ralic/samples,shelsonjava/testrtc,mulyoved/samples,shines/adapter,bpyoung92/apprtc,ralic/testrtc,fetterov/samples,virajs/testrtc,virajs/testrtc,fitraditya/samples,dengshaodong/docker-apprtc,82488059/apprtc,TheKnarf/apprtc,procandi/apprtc,webrtc/apprtc,myself659/samples,Zauberstuhl/adapter,fippo/apprtc,knightsofaa/webrtc,TribeMedia/samples,kod3r/samples,todotobe1/apprtc,jiayliu/apprtc,akashrchoksi/newone,askdaddy/samples,ralic/samples,volkanh/volkanh.github.io,xdumaine/adapter,virajs/apprtc,procandi/samples,diddie06/webrtc,taylor-b/samples,dushmis/webrtc,aadebuger/docker-apprtc,todotobe1/samples,webrtc/apprtc,webrtc/samples,ralic/samples | #!/usr/bin/python2.4
#
# Copyright 2014 Google Inc. All Rights Reserved.
"""WebRTC Test
This module serves the WebRTC Test Page.
"""
import cgi
import logging
import random
import os
import jinja2
import webapp2
jinja_environment = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
# Generate 10 kilobytes of random data and create a 10MB buffer from it.
random_file = bytearray([random.randint(0,127) for i in xrange(0,10000)] * 1000)
class MainPage(webapp2.RequestHandler):
"""The main UI page, renders the 'index.html' template."""
def get(self):
template = jinja_environment.get_template('index.html')
content = template.render({})
self.response.out.write(content)
class TestDownloadFile(webapp2.RequestHandler):
def get(self, size_kbytes):
self.response.headers.add_header("Access-Control-Allow-Origin", "*")
self.response.headers['Content-Type'] = 'application/octet-stream'
self.response.out.write(random_file[0: int(size_kbytes)*1000])
app = webapp2.WSGIApplication([
('/', MainPage),
(r'/test-download-file/(\d?\d00)KB.data', TestDownloadFile),
], debug=True)
| Add ability to download a random file with a given size from any domain.
#!/usr/bin/python2.4
#
# Copyright 2014 Google Inc. All Rights Reserved.
"""WebRTC Test
This module serves the WebRTC Test Page.
"""
import cgi
import logging
import os
import jinja2
import webapp2
jinja_environment = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
class MainPage(webapp2.RequestHandler):
"""The main UI page, renders the 'index.html' template."""
def get(self):
template = jinja_environment.get_template('index.html')
content = template.render({})
self.response.out.write(content)
app = webapp2.WSGIApplication([
('/', MainPage),
], debug=True)
|
18ade96032dbbeaee6f96ff364cb3dc8726970da | tests/test_q_function.py | tests/test_q_function.py | import unittest
import random
import numpy as np
import q_function
class TestQFunction(unittest.TestCase):
def setUp(self):
pass
def test_sample(self):
q_func = q_function.FCSIQFunction(1, 2, 10, 2)
N = 1000
greedy_count = 0
for _ in xrange(N):
random_state = np.asarray([[random.random()]], dtype=np.float32)
values = q_func.forward(random_state).data
print 'q values:', values
# Greedy
a, q = q_func.sample_greedily_with_value(random_state)
self.assertEquals(float(q.data), values.max())
self.assertEquals(a[0], values.argmax())
# Epsilon-greedy with epsilon=0, equivalent to greedy
a, q = q_func.sample_epsilon_greedily_with_value(random_state, 0)
self.assertEquals(float(q.data), values.max())
self.assertEquals(a[0], values.argmax())
# Epsilon-greedy with epsilon=0.5, which should be result in 75
# percents of greedy actions
a, q = q_func.sample_epsilon_greedily_with_value(random_state, 0.5)
if a[0] == values.argmax():
self.assertEquals(float(q.data), values.max())
greedy_count += 1
print 'greedy_count', greedy_count
self.assertLess(N * 0.7, greedy_count)
self.assertGreater(N * 0.8, greedy_count)
| Add a test for QFunction | Add a test for QFunction
| Python | mit | toslunar/chainerrl,toslunar/chainerrl | import unittest
import random
import numpy as np
import q_function
class TestQFunction(unittest.TestCase):
def setUp(self):
pass
def test_sample(self):
q_func = q_function.FCSIQFunction(1, 2, 10, 2)
N = 1000
greedy_count = 0
for _ in xrange(N):
random_state = np.asarray([[random.random()]], dtype=np.float32)
values = q_func.forward(random_state).data
print 'q values:', values
# Greedy
a, q = q_func.sample_greedily_with_value(random_state)
self.assertEquals(float(q.data), values.max())
self.assertEquals(a[0], values.argmax())
# Epsilon-greedy with epsilon=0, equivalent to greedy
a, q = q_func.sample_epsilon_greedily_with_value(random_state, 0)
self.assertEquals(float(q.data), values.max())
self.assertEquals(a[0], values.argmax())
# Epsilon-greedy with epsilon=0.5, which should be result in 75
# percents of greedy actions
a, q = q_func.sample_epsilon_greedily_with_value(random_state, 0.5)
if a[0] == values.argmax():
self.assertEquals(float(q.data), values.max())
greedy_count += 1
print 'greedy_count', greedy_count
self.assertLess(N * 0.7, greedy_count)
self.assertGreater(N * 0.8, greedy_count)
| Add a test for QFunction
|
|
4dfc0c49cec86f3c03b90fa66e1fc9de2ac665e6 | samples/migrations/0012_auto_20170512_1138.py | samples/migrations/0012_auto_20170512_1138.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-12 14:38
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('samples', '0011_fluvaccine_date_applied'),
]
operations = [
migrations.AlterField(
model_name='collectedsample',
name='collection_date',
field=models.DateField(blank=True, null=True, verbose_name='Data de coleta'),
),
migrations.AlterField(
model_name='fluvaccine',
name='date_applied',
field=models.DateField(blank=True, null=True, verbose_name='Data de aplicação'),
),
]
| Add migration file (fix fields) | :rocket: Add migration file (fix fields)
| Python | mit | gems-uff/labsys,gems-uff/labsys,gems-uff/labsys | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-12 14:38
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('samples', '0011_fluvaccine_date_applied'),
]
operations = [
migrations.AlterField(
model_name='collectedsample',
name='collection_date',
field=models.DateField(blank=True, null=True, verbose_name='Data de coleta'),
),
migrations.AlterField(
model_name='fluvaccine',
name='date_applied',
field=models.DateField(blank=True, null=True, verbose_name='Data de aplicação'),
),
]
| :rocket: Add migration file (fix fields)
|
|
ea20424f2645f1157c0a7ea76b32a4f834c1e27b | pythonect/internal/__init__.py | pythonect/internal/__init__.py | # Copyright (c) 2012-2013, Itzik Kotler
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of the author nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
| # Copyright (c) 2012-2013, Itzik Kotler
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of the author nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
| Delete blank line at end of file | Delete blank line at end of file
| Python | bsd-3-clause | ikotler/pythonect,fr34k8/pythonect,fr34k8/pythonect,TOSPIO/pythonect,ikotler/pythonect | # Copyright (c) 2012-2013, Itzik Kotler
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of the author nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
| Delete blank line at end of file
# Copyright (c) 2012-2013, Itzik Kotler
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of the author nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
921977589a6837575ab7aadaa6238b20d0771ae2 | mesonbuild/dependencies/platform.py | mesonbuild/dependencies/platform.py | # Copyright 2013-2017 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file contains the detection logic for external dependencies that are
# platform-specific (generally speaking).
from .. import mesonlib
from .base import ExternalDependency, DependencyException
class AppleFrameworks(ExternalDependency):
def __init__(self, env, kwargs):
super().__init__('appleframeworks', env, None, kwargs)
modules = kwargs.get('modules', [])
if isinstance(modules, str):
modules = [modules]
if not modules:
raise DependencyException("AppleFrameworks dependency requires at least one module.")
self.frameworks = modules
# FIXME: Use self.clib_compiler to check if the frameworks are available
for f in self.frameworks:
self.link_args += ['-framework', f]
def found(self):
return mesonlib.is_osx()
def get_version(self):
return 'unknown'
| # Copyright 2013-2017 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file contains the detection logic for external dependencies that are
# platform-specific (generally speaking).
from .. import mesonlib
from .base import ExternalDependency, DependencyException
class AppleFrameworks(ExternalDependency):
def __init__(self, env, kwargs):
super().__init__('appleframeworks', env, None, kwargs)
modules = kwargs.get('modules', [])
if isinstance(modules, str):
modules = [modules]
if not modules:
raise DependencyException("AppleFrameworks dependency requires at least one module.")
self.frameworks = modules
# FIXME: Use self.clib_compiler to check if the frameworks are available
for f in self.frameworks:
self.link_args += ['-framework', f]
self.is_found = mesonlib.is_osx()
def get_version(self):
return 'unknown'
| Set is_found in AppleFrameworks constructor | Set is_found in AppleFrameworks constructor
Set is_found in AppleFrameworks constructor, rather than overriding the
found() method, as other superclass methods may access is_found.
| Python | apache-2.0 | QuLogic/meson,pexip/meson,QuLogic/meson,pexip/meson,QuLogic/meson,pexip/meson,becm/meson,jeandet/meson,pexip/meson,becm/meson,pexip/meson,becm/meson,jpakkane/meson,pexip/meson,jeandet/meson,mesonbuild/meson,jeandet/meson,MathieuDuponchelle/meson,mesonbuild/meson,QuLogic/meson,jeandet/meson,mesonbuild/meson,jeandet/meson,jpakkane/meson,jeandet/meson,MathieuDuponchelle/meson,becm/meson,pexip/meson,QuLogic/meson,jpakkane/meson,jeandet/meson,MathieuDuponchelle/meson,becm/meson,mesonbuild/meson,MathieuDuponchelle/meson,jpakkane/meson,MathieuDuponchelle/meson,QuLogic/meson,jpakkane/meson,jeandet/meson,MathieuDuponchelle/meson,MathieuDuponchelle/meson,QuLogic/meson,pexip/meson,pexip/meson,becm/meson,mesonbuild/meson,jpakkane/meson,QuLogic/meson,mesonbuild/meson,becm/meson,mesonbuild/meson,jeandet/meson,QuLogic/meson,jpakkane/meson,jpakkane/meson,MathieuDuponchelle/meson,jpakkane/meson,becm/meson,mesonbuild/meson,becm/meson,mesonbuild/meson,becm/meson,pexip/meson,MathieuDuponchelle/meson,mesonbuild/meson | # Copyright 2013-2017 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file contains the detection logic for external dependencies that are
# platform-specific (generally speaking).
from .. import mesonlib
from .base import ExternalDependency, DependencyException
class AppleFrameworks(ExternalDependency):
def __init__(self, env, kwargs):
super().__init__('appleframeworks', env, None, kwargs)
modules = kwargs.get('modules', [])
if isinstance(modules, str):
modules = [modules]
if not modules:
raise DependencyException("AppleFrameworks dependency requires at least one module.")
self.frameworks = modules
# FIXME: Use self.clib_compiler to check if the frameworks are available
for f in self.frameworks:
self.link_args += ['-framework', f]
self.is_found = mesonlib.is_osx()
def get_version(self):
return 'unknown'
| Set is_found in AppleFrameworks constructor
Set is_found in AppleFrameworks constructor, rather than overriding the
found() method, as other superclass methods may access is_found.
# Copyright 2013-2017 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file contains the detection logic for external dependencies that are
# platform-specific (generally speaking).
from .. import mesonlib
from .base import ExternalDependency, DependencyException
class AppleFrameworks(ExternalDependency):
def __init__(self, env, kwargs):
super().__init__('appleframeworks', env, None, kwargs)
modules = kwargs.get('modules', [])
if isinstance(modules, str):
modules = [modules]
if not modules:
raise DependencyException("AppleFrameworks dependency requires at least one module.")
self.frameworks = modules
# FIXME: Use self.clib_compiler to check if the frameworks are available
for f in self.frameworks:
self.link_args += ['-framework', f]
def found(self):
return mesonlib.is_osx()
def get_version(self):
return 'unknown'
|
41795bf65f6d834007c7f352fd079084f5ed940f | calc.py | calc.py | # -*- coding: utf-8 -*-
def add(x, y):
"""
引数xとyを加算した結果を返す
>>> add(2, 3)
5
"""
return x + y
| Implement sample module and doctest | Implement sample module and doctest
| Python | mit | raimon49/python-local-wheels-sample | # -*- coding: utf-8 -*-
def add(x, y):
"""
引数xとyを加算した結果を返す
>>> add(2, 3)
5
"""
return x + y
| Implement sample module and doctest
|
|
cfec967be4602dff636adb951b582d1db114f578 | tensorflow_datasets/scripts/document_dataset_version.py | tensorflow_datasets/scripts/document_dataset_version.py | # coding=utf-8
# Copyright 2020 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
r"""Dump the list of all registered datasets/config/version in a `.txt` file.
Instructions:
```
python tensorflow_datasets/scripts/freeze_dataset_version.py
```
"""
import os
from absl import app
from absl import flags
import tensorflow.compat.v2 as tf
import tensorflow_datasets as tfds
from tensorflow_datasets.scripts.document_datasets import document_single_builder
FLAGS = flags.FLAGS
flags.DEFINE_string('tfds_dir', tfds.core.utils.tfds_dir(),
'Path to tensorflow_datasets directory')
DATASET_TO_TESTS = ['waymo_open_dataset']
def version_doc(ds_name):
builder = tfds.builder(ds_name)
version_path = os.path.join(FLAGS.tfds_dir, 'stable_versions.txt')
catalog_dir = tfds.core.get_tfds_path('../docs/catalog/')
with tf.io.gfile.GFile(os.path.join(catalog_dir, ds_name + ".md"), 'w') as f:
doc_builder = document_single_builder(builder)
f.write(doc_builder)
def main(_):
for name in DATASET_TO_TESTS:
version_doc(name)
if __name__ == '__main__':
app.run(main) | Add single dataset doc gen | Add single dataset doc gen
| Python | apache-2.0 | tensorflow/datasets,tensorflow/datasets,tensorflow/datasets,tensorflow/datasets,tensorflow/datasets | # coding=utf-8
# Copyright 2020 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
r"""Dump the list of all registered datasets/config/version in a `.txt` file.
Instructions:
```
python tensorflow_datasets/scripts/freeze_dataset_version.py
```
"""
import os
from absl import app
from absl import flags
import tensorflow.compat.v2 as tf
import tensorflow_datasets as tfds
from tensorflow_datasets.scripts.document_datasets import document_single_builder
FLAGS = flags.FLAGS
flags.DEFINE_string('tfds_dir', tfds.core.utils.tfds_dir(),
'Path to tensorflow_datasets directory')
DATASET_TO_TESTS = ['waymo_open_dataset']
def version_doc(ds_name):
builder = tfds.builder(ds_name)
version_path = os.path.join(FLAGS.tfds_dir, 'stable_versions.txt')
catalog_dir = tfds.core.get_tfds_path('../docs/catalog/')
with tf.io.gfile.GFile(os.path.join(catalog_dir, ds_name + ".md"), 'w') as f:
doc_builder = document_single_builder(builder)
f.write(doc_builder)
def main(_):
for name in DATASET_TO_TESTS:
version_doc(name)
if __name__ == '__main__':
app.run(main) | Add single dataset doc gen
|
|
ee61dab40c4c3eaa1553397091a17a3292dcf9d6 | grako/ast.py | grako/ast.py | from collections import OrderedDict, Mapping
import json
class AST(Mapping):
def __init__(self, **kwargs):
self._elements = OrderedDict(**kwargs)
def add(self, key, value):
previous = self._elements.get(key, None)
if previous is None:
self._elements[key] = [value]
else:
previous.append(value)
def update(self, *args, **kwargs):
for dct in args:
for k, v in dct:
self.add(k, v)
for k, v in kwargs.items():
self.add(k, v)
@property
def first(self):
key = self.elements.keys[0]
return self.elements[key]
def __iter__(self):
return iter(self._elements)
def __contains__(self, key):
return key in self._elements
def __len__(self):
return len(self._elements)
def __getitem__(self, key):
if key not in self._elements:
self._elements[key] = list()
return self._elements[key]
def __getattr__(self, key):
return self.__getitem__(key)
if key in self._elements:
return self.__getitem__(key)
raise KeyError(key)
@staticmethod
def serializable(obj):
if isinstance(obj, AST):
return obj._elements
return obj
def __repr__(self):
return self.serializable(self._elements)
def __str__(self):
return json.dumps(self._elements, indent=4, default=self.serializable)
| from collections import OrderedDict, Mapping
import json
__all__ = ['AST']
class AST(Mapping):
def __init__(self, **kwargs):
self._elements = OrderedDict(**kwargs)
def add(self, key, value):
previous = self._elements.get(key, None)
if previous is None:
self._elements[key] = [value]
else:
previous.append(value)
def update(self, *args, **kwargs):
for dct in args:
for k, v in dct:
self.add(k, v)
for k, v in kwargs.items():
self.add(k, v)
@property
def first(self):
key = self.elements.keys[0]
return self.elements[key]
def __iter__(self):
return iter(self._elements)
def __contains__(self, key):
return key in self._elements
def __len__(self):
return len(self._elements)
def __getitem__(self, key):
if key not in self._elements:
self._elements[key] = list()
return self._elements[key]
def __setitem__(self, key, value):
self._elements[key] = value
def __getattr__(self, key):
return self.__getitem__(key)
if key in self._elements:
return self.__getitem__(key)
raise KeyError(key)
@staticmethod
def serializable(obj):
if isinstance(obj, AST):
return obj._elements
return obj
def __repr__(self):
return self.serializable(self._elements)
def __str__(self):
return json.dumps(self._elements, indent=4, default=self.serializable)
| Allow to set items in AST. | Allow to set items in AST.
| Python | bsd-2-clause | frnknglrt/grako,vmuriart/grako | from collections import OrderedDict, Mapping
import json
__all__ = ['AST']
class AST(Mapping):
def __init__(self, **kwargs):
self._elements = OrderedDict(**kwargs)
def add(self, key, value):
previous = self._elements.get(key, None)
if previous is None:
self._elements[key] = [value]
else:
previous.append(value)
def update(self, *args, **kwargs):
for dct in args:
for k, v in dct:
self.add(k, v)
for k, v in kwargs.items():
self.add(k, v)
@property
def first(self):
key = self.elements.keys[0]
return self.elements[key]
def __iter__(self):
return iter(self._elements)
def __contains__(self, key):
return key in self._elements
def __len__(self):
return len(self._elements)
def __getitem__(self, key):
if key not in self._elements:
self._elements[key] = list()
return self._elements[key]
def __setitem__(self, key, value):
self._elements[key] = value
def __getattr__(self, key):
return self.__getitem__(key)
if key in self._elements:
return self.__getitem__(key)
raise KeyError(key)
@staticmethod
def serializable(obj):
if isinstance(obj, AST):
return obj._elements
return obj
def __repr__(self):
return self.serializable(self._elements)
def __str__(self):
return json.dumps(self._elements, indent=4, default=self.serializable)
| Allow to set items in AST.
from collections import OrderedDict, Mapping
import json
class AST(Mapping):
def __init__(self, **kwargs):
self._elements = OrderedDict(**kwargs)
def add(self, key, value):
previous = self._elements.get(key, None)
if previous is None:
self._elements[key] = [value]
else:
previous.append(value)
def update(self, *args, **kwargs):
for dct in args:
for k, v in dct:
self.add(k, v)
for k, v in kwargs.items():
self.add(k, v)
@property
def first(self):
key = self.elements.keys[0]
return self.elements[key]
def __iter__(self):
return iter(self._elements)
def __contains__(self, key):
return key in self._elements
def __len__(self):
return len(self._elements)
def __getitem__(self, key):
if key not in self._elements:
self._elements[key] = list()
return self._elements[key]
def __getattr__(self, key):
return self.__getitem__(key)
if key in self._elements:
return self.__getitem__(key)
raise KeyError(key)
@staticmethod
def serializable(obj):
if isinstance(obj, AST):
return obj._elements
return obj
def __repr__(self):
return self.serializable(self._elements)
def __str__(self):
return json.dumps(self._elements, indent=4, default=self.serializable)
|
7fde39b0d4a41e8119893254d38460cf6914f028 | bashhub/view/status.py | bashhub/view/status.py | import dateutil.parser
import datetime
import humanize
status_view = """\
=== Bashhub Status
https://bashhub.com/u/{0}
Total Commands: {1}
Total Sessions: {2}
Total Systems: {3}
===
Session PID {4} Started {5}
Commands In Session: {6}
Commands Today: {7}
"""
def build_status_view(model):
date = datetime.datetime.fromtimestamp(model.session_start_time / 1000.0)
date_str = humanize.naturaltime(date)
return status_view.format(
model.username, model.total_commands, model.total_sessions,
model.total_systems, model.session_name, date_str,
model.session_total_commands, model.total_commands_today)
| import dateutil.parser
import datetime
import humanize
status_view = """\
=== Bashhub Status
https://bashhub.com/{0}
Total Commands: {1}
Total Sessions: {2}
Total Systems: {3}
===
Session PID {4} Started {5}
Commands In Session: {6}
Commands Today: {7}
"""
def build_status_view(model):
date = datetime.datetime.fromtimestamp(model.session_start_time / 1000.0)
date_str = humanize.naturaltime(date)
return status_view.format(
model.username, model.total_commands, model.total_sessions,
model.total_systems, model.session_name, date_str,
model.session_total_commands, model.total_commands_today)
| Remove /u/ from username path | Remove /u/ from username path
| Python | apache-2.0 | rcaloras/bashhub-client,rcaloras/bashhub-client | import dateutil.parser
import datetime
import humanize
status_view = """\
=== Bashhub Status
https://bashhub.com/{0}
Total Commands: {1}
Total Sessions: {2}
Total Systems: {3}
===
Session PID {4} Started {5}
Commands In Session: {6}
Commands Today: {7}
"""
def build_status_view(model):
date = datetime.datetime.fromtimestamp(model.session_start_time / 1000.0)
date_str = humanize.naturaltime(date)
return status_view.format(
model.username, model.total_commands, model.total_sessions,
model.total_systems, model.session_name, date_str,
model.session_total_commands, model.total_commands_today)
| Remove /u/ from username path
import dateutil.parser
import datetime
import humanize
status_view = """\
=== Bashhub Status
https://bashhub.com/u/{0}
Total Commands: {1}
Total Sessions: {2}
Total Systems: {3}
===
Session PID {4} Started {5}
Commands In Session: {6}
Commands Today: {7}
"""
def build_status_view(model):
date = datetime.datetime.fromtimestamp(model.session_start_time / 1000.0)
date_str = humanize.naturaltime(date)
return status_view.format(
model.username, model.total_commands, model.total_sessions,
model.total_systems, model.session_name, date_str,
model.session_total_commands, model.total_commands_today)
|
351815d8da1dc1b2227e8fd832e690f8aee47747 | setup.py | setup.py | import codecs
from setuptools import setup
lines = codecs.open('README', 'r', 'utf-8').readlines()[3:]
lines.extend(codecs.open('CHANGES', 'r', 'utf-8').readlines()[1:])
desc = ''.join(lines).lstrip()
import translitcodec
version = translitcodec.__version__
setup(name='translitcodec',
version=version,
description='Unicode to 8-bit charset transliteration codec',
long_description=desc,
author='Jason Kirtland',
author_email='jek@discorporate.us',
url='https://github.com/claudep/translitcodec',
packages=['translitcodec'],
license='MIT License',
python_requires='>=3',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Software Development :: Libraries',
'Topic :: Utilities',
],
)
| import codecs
from setuptools import setup
lines = codecs.open('README', 'r', 'utf-8').readlines()[3:]
lines.append('\n')
lines.extend(codecs.open('CHANGES', 'r', 'utf-8').readlines()[1:])
desc = ''.join(lines).lstrip()
import translitcodec
version = translitcodec.__version__
setup(name='translitcodec',
version=version,
description='Unicode to 8-bit charset transliteration codec',
long_description=desc,
long_description_content_type='text/x-rst',
author='Jason Kirtland',
author_email='jek@discorporate.us',
url='https://github.com/claudep/translitcodec',
packages=['translitcodec'],
license='MIT License',
python_requires='>=3',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Software Development :: Libraries',
'Topic :: Utilities',
],
)
| Add blank line between README and CHANGES in long_description | Add blank line between README and CHANGES in long_description
| Python | mit | claudep/translitcodec,claudep/translitcodec | import codecs
from setuptools import setup
lines = codecs.open('README', 'r', 'utf-8').readlines()[3:]
lines.append('\n')
lines.extend(codecs.open('CHANGES', 'r', 'utf-8').readlines()[1:])
desc = ''.join(lines).lstrip()
import translitcodec
version = translitcodec.__version__
setup(name='translitcodec',
version=version,
description='Unicode to 8-bit charset transliteration codec',
long_description=desc,
long_description_content_type='text/x-rst',
author='Jason Kirtland',
author_email='jek@discorporate.us',
url='https://github.com/claudep/translitcodec',
packages=['translitcodec'],
license='MIT License',
python_requires='>=3',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Software Development :: Libraries',
'Topic :: Utilities',
],
)
| Add blank line between README and CHANGES in long_description
import codecs
from setuptools import setup
lines = codecs.open('README', 'r', 'utf-8').readlines()[3:]
lines.extend(codecs.open('CHANGES', 'r', 'utf-8').readlines()[1:])
desc = ''.join(lines).lstrip()
import translitcodec
version = translitcodec.__version__
setup(name='translitcodec',
version=version,
description='Unicode to 8-bit charset transliteration codec',
long_description=desc,
author='Jason Kirtland',
author_email='jek@discorporate.us',
url='https://github.com/claudep/translitcodec',
packages=['translitcodec'],
license='MIT License',
python_requires='>=3',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Software Development :: Libraries',
'Topic :: Utilities',
],
)
|
685fa68b79b4d21f69fe55f66724191d30bbbaa8 | contact/views.py | contact/views.py | from rest_framework import serializers
from rest_framework.views import APIView
from rest_framework.response import Response
from django import http
from .tasks import send_contact_form_inquiry
# Serializers define the API representation.
class ContactSerializer(serializers.Serializer):
email = serializers.EmailField()
name = serializers.CharField(max_length=255)
content = serializers.CharField()
source = serializers.CharField(max_length=255)
organization = serializers.CharField(max_length=255, required=False)
def create(self, validated_data):
return validated_data
class ContactAPIView(APIView):
permission_classes = ()
def post(self, request, *args, **kwargs):
serializer = ContactSerializer(data=request.data, context={'request': request})
serializer.is_valid(raise_exception=True)
# call async task to send email
send_contact_form_inquiry.delay(**serializer.data)
if request.GET.get('next') and not request.is_ajax():
# TODO should this be validated?
return http.HttpResponseRedirect(request.GET.get('next'))
data = {"status": "sent"}
return Response(data)
| from rest_framework import serializers
from rest_framework.views import APIView
from rest_framework.response import Response
from django import http
from .tasks import send_contact_form_inquiry
# Serializers define the API representation.
class ContactSerializer(serializers.Serializer):
email = serializers.EmailField()
name = serializers.CharField(max_length=255)
content = serializers.CharField()
source = serializers.CharField(max_length=255)
organization = serializers.CharField(max_length=255, required=False)
def create(self, validated_data):
return validated_data
class ContactAPIView(APIView):
authentication_classes = []
permission_classes = []
def post(self, request, *args, **kwargs):
serializer = ContactSerializer(data=request.data, context={'request': request})
serializer.is_valid(raise_exception=True)
# call async task to send email
send_contact_form_inquiry.delay(**serializer.data)
if request.GET.get('next') and not request.is_ajax():
# TODO should this be validated?
return http.HttpResponseRedirect(request.GET.get('next'))
data = {"status": "sent"}
return Response(data)
| Remove all permisson for contact API | Remove all permisson for contact API
| Python | mit | p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles | from rest_framework import serializers
from rest_framework.views import APIView
from rest_framework.response import Response
from django import http
from .tasks import send_contact_form_inquiry
# Serializers define the API representation.
class ContactSerializer(serializers.Serializer):
email = serializers.EmailField()
name = serializers.CharField(max_length=255)
content = serializers.CharField()
source = serializers.CharField(max_length=255)
organization = serializers.CharField(max_length=255, required=False)
def create(self, validated_data):
return validated_data
class ContactAPIView(APIView):
authentication_classes = []
permission_classes = []
def post(self, request, *args, **kwargs):
serializer = ContactSerializer(data=request.data, context={'request': request})
serializer.is_valid(raise_exception=True)
# call async task to send email
send_contact_form_inquiry.delay(**serializer.data)
if request.GET.get('next') and not request.is_ajax():
# TODO should this be validated?
return http.HttpResponseRedirect(request.GET.get('next'))
data = {"status": "sent"}
return Response(data)
| Remove all permisson for contact API
from rest_framework import serializers
from rest_framework.views import APIView
from rest_framework.response import Response
from django import http
from .tasks import send_contact_form_inquiry
# Serializers define the API representation.
class ContactSerializer(serializers.Serializer):
email = serializers.EmailField()
name = serializers.CharField(max_length=255)
content = serializers.CharField()
source = serializers.CharField(max_length=255)
organization = serializers.CharField(max_length=255, required=False)
def create(self, validated_data):
return validated_data
class ContactAPIView(APIView):
permission_classes = ()
def post(self, request, *args, **kwargs):
serializer = ContactSerializer(data=request.data, context={'request': request})
serializer.is_valid(raise_exception=True)
# call async task to send email
send_contact_form_inquiry.delay(**serializer.data)
if request.GET.get('next') and not request.is_ajax():
# TODO should this be validated?
return http.HttpResponseRedirect(request.GET.get('next'))
data = {"status": "sent"}
return Response(data)
|
8baf08fd22a0e66734e927607aaab9b1a0bdd7f4 | time-complexity/time_complexity.py | time-complexity/time_complexity.py | #Comparison of different time complexities.
#####################
#constant time - O(1)
#####################
def constant(n):
result = n * n
return result
##############################
#Logarithmic time - O(log(n))
##############################
def logarithmic(n):
result = 0
while n > 1:
n // 2
result += 1
return result
###################
#Linear Time - O(n)
###################
def linear(n,A):
for i in range(n):
if A[i] == 0:
return 0
return 1
#########################
# Quadratic time - O(n^2)
#########################
def quadratic(n):
result = 0
for i in range(n):
for j in range(1, n):
result += 1
return result
| Add time-complexity: basic python examples | Add time-complexity: basic python examples
| Python | cc0-1.0 | ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms | #Comparison of different time complexities.
#####################
#constant time - O(1)
#####################
def constant(n):
result = n * n
return result
##############################
#Logarithmic time - O(log(n))
##############################
def logarithmic(n):
result = 0
while n > 1:
n // 2
result += 1
return result
###################
#Linear Time - O(n)
###################
def linear(n,A):
for i in range(n):
if A[i] == 0:
return 0
return 1
#########################
# Quadratic time - O(n^2)
#########################
def quadratic(n):
result = 0
for i in range(n):
for j in range(1, n):
result += 1
return result
| Add time-complexity: basic python examples
|
|
bab5a974c78a2b7042de449c6d3b01f9297809f1 | src/python/utexas/tools/generate_ddl.py | src/python/utexas/tools/generate_ddl.py | """
utexas/tools/generate_ddl.py
Print or apply the research data schema.
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
if __name__ == "__main__":
from utexas.tools.generate_ddl import main
raise SystemExit(main())
from cargo.flags import (
Flag,
Flags,
)
module_flags = \
Flags(
"Research Data Storage",
Flag(
"-a",
"--apply",
action = "store_true",
help = "create the generated schema",
),
Flag(
"-r",
"--reflect",
action = "store_true",
help = "load the reflected schema",
),
Flag(
"-t",
"--topological",
action = "store_true",
help = "print topologically sorted by dependency",
),
)
def main():
"""
Create core database metadata.
"""
# connect to the database
from cargo.flags import parse_given
from utexas.data import research_connect
parse_given(usage = "usage: %prog [options]")
engine = research_connect()
# load the appropriate schema
if module_flags.given.reflect:
# use the database's schema
from sqlalchemy.schema import MetaData
metadata = MetaData()
metadata.reflect(bind = engine)
else:
# use the project-defined schema
from utexas.data import DatumBase
metadata = DatumBase.metadata
# then do something with it
if module_flags.given.apply:
# apply the DDL to the database
metadata.create_all(engine)
else:
# print the DDL
from sqlalchemy.schema import CreateTable
if module_flags.given.topological:
sorted_tables = metadata.sorted_tables
else:
sorted_tables = sorted(metadata.sorted_tables, key = lambda t: t.name)
for table in sorted_tables:
print CreateTable(table).compile(engine)
| Add a standalone DDL print/reflection tool. | Add a standalone DDL print/reflection tool.
| Python | mit | borg-project/borg | """
utexas/tools/generate_ddl.py
Print or apply the research data schema.
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
if __name__ == "__main__":
from utexas.tools.generate_ddl import main
raise SystemExit(main())
from cargo.flags import (
Flag,
Flags,
)
module_flags = \
Flags(
"Research Data Storage",
Flag(
"-a",
"--apply",
action = "store_true",
help = "create the generated schema",
),
Flag(
"-r",
"--reflect",
action = "store_true",
help = "load the reflected schema",
),
Flag(
"-t",
"--topological",
action = "store_true",
help = "print topologically sorted by dependency",
),
)
def main():
"""
Create core database metadata.
"""
# connect to the database
from cargo.flags import parse_given
from utexas.data import research_connect
parse_given(usage = "usage: %prog [options]")
engine = research_connect()
# load the appropriate schema
if module_flags.given.reflect:
# use the database's schema
from sqlalchemy.schema import MetaData
metadata = MetaData()
metadata.reflect(bind = engine)
else:
# use the project-defined schema
from utexas.data import DatumBase
metadata = DatumBase.metadata
# then do something with it
if module_flags.given.apply:
# apply the DDL to the database
metadata.create_all(engine)
else:
# print the DDL
from sqlalchemy.schema import CreateTable
if module_flags.given.topological:
sorted_tables = metadata.sorted_tables
else:
sorted_tables = sorted(metadata.sorted_tables, key = lambda t: t.name)
for table in sorted_tables:
print CreateTable(table).compile(engine)
| Add a standalone DDL print/reflection tool.
|
|
1fac10d27f00322e34c3b89527c32b1dcb02decd | linter.py | linter.py | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = 'stylus'
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):?(?P<col>\d+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
| #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):?(?P<col>\d+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
| Support Stylus blocks in Vue single-file components | Support Stylus blocks in Vue single-file components
| Python | mit | jackbrewer/SublimeLinter-contrib-stylint | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):?(?P<col>\d+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
| Support Stylus blocks in Vue single-file components
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = 'stylus'
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):?(?P<col>\d+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
|
91c35078c7a8aad153d9aabe0b02fc3c48cfc76a | hesiod.py | hesiod.py | #!/usr/bin/env python
from _hesiod import bind, resolve
| #!/usr/bin/env python
"""
Present both functional and object-oriented interfaces for executing
lookups in Hesiod, Project Athena's service name resolution protocol.
"""
from _hesiod import bind, resolve
from pwd import struct_passwd
class HesiodParseError(Exception):
pass
class Lookup(object):
"""
A Generic Hesiod lookup
"""
def __init__(self, hes_name, hes_type):
self.results = resolve(hes_name, hes_type)
self.parseRecords()
def parseRecords(self):
pass
class FilsysLookup(Lookup):
def __init__(self, name):
Lookup.__init__(self, name, 'filsys')
def parseRecords(self):
Lookup.parseRecords(self)
self.filsys = []
self.multiRecords = (len(self.results) > 1)
for result in self.results:
priority = 0
if self.multiRecords:
result, priority = result.rsplit(" ", 1)
priority = int(priority)
parts = result.split(" ")
type = parts[0]
if type == 'AFS':
self.filsys.append(dict(type=type,
location=parts[1],
mode=parts[2],
mountpoint=parts[3],
priority=priority))
elif type == 'NFS':
self.filsys.append(dict(type=type,
remote_location=parts[1],
server=parts[2],
mode=parts[3],
mountpoint=parts[4],
priority=priority))
elif type == 'ERR':
self.filsys.append(dict(type=type,
message=parts[1],
priority=priority))
elif type == 'UFS':
self.filsys.append(dict(type=type,
device=parts[1],
mode=parts[2],
mountpoint=parts[3],
priority=priority))
elif type == 'LOC':
self.filsys.append(dict(type=type,
location=parts[1],
mode=parts[2],
mountpoint=parts[3],
priority=priority))
else:
raise HesiodParseError('Unknown filsys type: %s' % type)
class PasswdLookup(Lookup):
def __init__(self, name):
Lookup.__init__(self, name, 'passwd')
def parseRecords(self):
self.passwd = struct_passwd(self.results[0].split(':'))
class UidLookup(PasswdLookup):
def __init__(self, uid):
Lookup.__init__(self, uid, 'uid')
__all__ = ['bind', 'resolve',
'Lookup', 'FilsysLookup', 'PasswdLookup', 'UidLookup',
'HesiodParseError']
| Add object-oriented-style lookups for filsys, passwd, and uid lookups | Add object-oriented-style lookups for filsys, passwd, and uid lookups
The filsys entry parsing code is taken from pyHesiodFS and was explicitly
relicensed under the MIT license by Quentin Smith <quentin@mit.edu>
| Python | mit | ebroder/python-hesiod | #!/usr/bin/env python
"""
Present both functional and object-oriented interfaces for executing
lookups in Hesiod, Project Athena's service name resolution protocol.
"""
from _hesiod import bind, resolve
from pwd import struct_passwd
class HesiodParseError(Exception):
pass
class Lookup(object):
"""
A Generic Hesiod lookup
"""
def __init__(self, hes_name, hes_type):
self.results = resolve(hes_name, hes_type)
self.parseRecords()
def parseRecords(self):
pass
class FilsysLookup(Lookup):
def __init__(self, name):
Lookup.__init__(self, name, 'filsys')
def parseRecords(self):
Lookup.parseRecords(self)
self.filsys = []
self.multiRecords = (len(self.results) > 1)
for result in self.results:
priority = 0
if self.multiRecords:
result, priority = result.rsplit(" ", 1)
priority = int(priority)
parts = result.split(" ")
type = parts[0]
if type == 'AFS':
self.filsys.append(dict(type=type,
location=parts[1],
mode=parts[2],
mountpoint=parts[3],
priority=priority))
elif type == 'NFS':
self.filsys.append(dict(type=type,
remote_location=parts[1],
server=parts[2],
mode=parts[3],
mountpoint=parts[4],
priority=priority))
elif type == 'ERR':
self.filsys.append(dict(type=type,
message=parts[1],
priority=priority))
elif type == 'UFS':
self.filsys.append(dict(type=type,
device=parts[1],
mode=parts[2],
mountpoint=parts[3],
priority=priority))
elif type == 'LOC':
self.filsys.append(dict(type=type,
location=parts[1],
mode=parts[2],
mountpoint=parts[3],
priority=priority))
else:
raise HesiodParseError('Unknown filsys type: %s' % type)
class PasswdLookup(Lookup):
def __init__(self, name):
Lookup.__init__(self, name, 'passwd')
def parseRecords(self):
self.passwd = struct_passwd(self.results[0].split(':'))
class UidLookup(PasswdLookup):
def __init__(self, uid):
Lookup.__init__(self, uid, 'uid')
__all__ = ['bind', 'resolve',
'Lookup', 'FilsysLookup', 'PasswdLookup', 'UidLookup',
'HesiodParseError']
| Add object-oriented-style lookups for filsys, passwd, and uid lookups
The filsys entry parsing code is taken from pyHesiodFS and was explicitly
relicensed under the MIT license by Quentin Smith <quentin@mit.edu>
#!/usr/bin/env python
from _hesiod import bind, resolve
|
6a36df252e5f7c17cb2dfc63062472c65645ad2f | unleashed/api.py | unleashed/api.py | import requests
from auth import UnleashedAuth
class UnleashedApi(object):
"""
Unleashed API client library.
"""
def __init__(self, api_url, api_id, api_key):
self.api_url = api_url
self.auth = UnleashedAuth(api_id, api_key)
def _get_request(self, method, params=None):
params = params or {}
headers = {
'content-type': 'application/json',
'accept': 'application/json',
}
resp = requests.get(
self.api_url + '/' + method,
headers=headers,
params=params,
auth=self.auth
)
return resp
def _post_request(self, method, body):
headers = {
'content-type': 'application/json',
'accept': 'application/json',
}
resp = requests.post(
self.api_url + '/' + method,
body,
headers=headers,
auth=self.auth
)
return resp | Add basic API which can make GET and POST requests | Add basic API which can make GET and POST requests
| Python | mit | jsok/unleashed | import requests
from auth import UnleashedAuth
class UnleashedApi(object):
"""
Unleashed API client library.
"""
def __init__(self, api_url, api_id, api_key):
self.api_url = api_url
self.auth = UnleashedAuth(api_id, api_key)
def _get_request(self, method, params=None):
params = params or {}
headers = {
'content-type': 'application/json',
'accept': 'application/json',
}
resp = requests.get(
self.api_url + '/' + method,
headers=headers,
params=params,
auth=self.auth
)
return resp
def _post_request(self, method, body):
headers = {
'content-type': 'application/json',
'accept': 'application/json',
}
resp = requests.post(
self.api_url + '/' + method,
body,
headers=headers,
auth=self.auth
)
return resp | Add basic API which can make GET and POST requests
|
|
32f4c67cda624f1840b1ab92d1d1afc826f13dd5 | examples/plot_gmm_pdf.py | examples/plot_gmm_pdf.py | """
=================================
Gaussian Mixture Model Ellipsoids
=================================
Plot the confidence ellipsoids of a mixture of two gaussians.
"""
import numpy as np
from scikits.learn import gmm
import itertools
import pylab as pl
import matplotlib as mpl
import matplotlib.pyplot as plt
n, m = 300, 2
# generate random sample, two components
np.random.seed(0)
C = np.array([[0., -0.7], [3.5, .7]])
X_train = np.r_[np.dot(np.random.randn(n, 2), C),
np.random.randn(n, 2) + np.array([20, 20])]
clf = gmm.GMM(2, cvtype='full')
clf.fit(X_train)
x = np.linspace(-20.0, 30.0)
y = np.linspace(-20.0, 40.0)
X, Y = np.meshgrid(x, y)
XX = np.c_[X.ravel(), Y.ravel()]
Z = np.log(-clf.eval(XX)[0])
Z = Z.reshape(X.shape)
CS = pl.contour(X, Y, Z)
CB = plt.colorbar(CS, shrink=0.8, extend='both')
pl.scatter(X_train[:, 0], X_train[:, 1], .8)
pl.axis('tight')
pl.show()
| Add an example with probability distribution estimates using GMM. | Add an example with probability distribution estimates using GMM.
This is a work in progress. Also, the .eval() function from GMM
might very likely change it's return type in the future.
| Python | bsd-3-clause | mayblue9/scikit-learn,qifeigit/scikit-learn,rrohan/scikit-learn,shusenl/scikit-learn,jseabold/scikit-learn,huobaowangxi/scikit-learn,frank-tancf/scikit-learn,shangwuhencc/scikit-learn,Srisai85/scikit-learn,RPGOne/scikit-learn,appapantula/scikit-learn,raghavrv/scikit-learn,hdmetor/scikit-learn,yask123/scikit-learn,xyguo/scikit-learn,costypetrisor/scikit-learn,giorgiop/scikit-learn,nrhine1/scikit-learn,mfjb/scikit-learn,siutanwong/scikit-learn,zorroblue/scikit-learn,Jimmy-Morzaria/scikit-learn,Srisai85/scikit-learn,hainm/scikit-learn,huzq/scikit-learn,michigraber/scikit-learn,Clyde-fare/scikit-learn,ominux/scikit-learn,hainm/scikit-learn,kashif/scikit-learn,abhishekgahlot/scikit-learn,mattilyra/scikit-learn,YinongLong/scikit-learn,IshankGulati/scikit-learn,alexeyum/scikit-learn,AIML/scikit-learn,yunfeilu/scikit-learn,cdegroc/scikit-learn,manhhomienbienthuy/scikit-learn,rsivapr/scikit-learn,ishanic/scikit-learn,aewhatley/scikit-learn,anntzer/scikit-learn,lucidfrontier45/scikit-learn,quheng/scikit-learn,aetilley/scikit-learn,belltailjp/scikit-learn,roxyboy/scikit-learn,ashhher3/scikit-learn,heli522/scikit-learn,mlyundin/scikit-learn,mojoboss/scikit-learn,luo66/scikit-learn,AlexRobson/scikit-learn,kagayakidan/scikit-learn,huzq/scikit-learn,kevin-intel/scikit-learn,smartscheduling/scikit-learn-categorical-tree,manashmndl/scikit-learn,shusenl/scikit-learn,LohithBlaze/scikit-learn,Lawrence-Liu/scikit-learn,jzt5132/scikit-learn,aetilley/scikit-learn,mrshu/scikit-learn,cauchycui/scikit-learn,hitszxp/scikit-learn,xwolf12/scikit-learn,davidgbe/scikit-learn,costypetrisor/scikit-learn,Aasmi/scikit-learn,luo66/scikit-learn,devanshdalal/scikit-learn,f3r/scikit-learn,ZenDevelopmentSystems/scikit-learn,michigraber/scikit-learn,mattilyra/scikit-learn,jmetzen/scikit-learn,dsquareindia/scikit-learn,moutai/scikit-learn,vibhorag/scikit-learn,gclenaghan/scikit-learn,mattilyra/scikit-learn,yask123/scikit-learn,h2educ/scikit-learn,costypetrisor/scikit-learn,gotomypc
/scikit-learn,ephes/scikit-learn,jmschrei/scikit-learn,jorik041/scikit-learn,jorik041/scikit-learn,jm-begon/scikit-learn,B3AU/waveTree,rvraghav93/scikit-learn,arjoly/scikit-learn,petosegan/scikit-learn,potash/scikit-learn,shikhardb/scikit-learn,PatrickChrist/scikit-learn,xzh86/scikit-learn,MatthieuBizien/scikit-learn,TomDLT/scikit-learn,thientu/scikit-learn,pv/scikit-learn,heli522/scikit-learn,andaag/scikit-learn,appapantula/scikit-learn,sergeyf/scikit-learn,chrsrds/scikit-learn,sgenoud/scikit-learn,RachitKansal/scikit-learn,betatim/scikit-learn,Barmaley-exe/scikit-learn,JeanKossaifi/scikit-learn,carrillo/scikit-learn,anirudhjayaraman/scikit-learn,ElDeveloper/scikit-learn,bhargav/scikit-learn,frank-tancf/scikit-learn,Windy-Ground/scikit-learn,samuel1208/scikit-learn,jakirkham/scikit-learn,toastedcornflakes/scikit-learn,icdishb/scikit-learn,Lawrence-Liu/scikit-learn,imaculate/scikit-learn,kashif/scikit-learn,smartscheduling/scikit-learn-categorical-tree,mhdella/scikit-learn,eickenberg/scikit-learn,beepee14/scikit-learn,aflaxman/scikit-learn,ldirer/scikit-learn,stylianos-kampakis/scikit-learn,ndingwall/scikit-learn,B3AU/waveTree,Myasuka/scikit-learn,vinayak-mehta/scikit-learn,mugizico/scikit-learn,LiaoPan/scikit-learn,spallavolu/scikit-learn,DonBeo/scikit-learn,thilbern/scikit-learn,ephes/scikit-learn,scikit-learn/scikit-learn,meduz/scikit-learn,tosolveit/scikit-learn,petosegan/scikit-learn,iismd17/scikit-learn,nomadcube/scikit-learn,MechCoder/scikit-learn,pv/scikit-learn,AlexanderFabisch/scikit-learn,etkirsch/scikit-learn,NunoEdgarGub1/scikit-learn,tomlof/scikit-learn,MohammedWasim/scikit-learn,lesteve/scikit-learn,vortex-ape/scikit-learn,madjelan/scikit-learn,tawsifkhan/scikit-learn,schets/scikit-learn,JosmanPS/scikit-learn,liangz0707/scikit-learn,bikong2/scikit-learn,ElDeveloper/scikit-learn,siutanwong/scikit-learn,nesterione/scikit-learn,victorbergelin/scikit-learn,Barmaley-exe/scikit-learn,lucidfrontier45/scikit-learn,altairpearl/scikit-learn,NunoEdgarGub1/scikit
-learn,liangz0707/scikit-learn,jayflo/scikit-learn,scikit-learn/scikit-learn,JPFrancoia/scikit-learn,pianomania/scikit-learn,MohammedWasim/scikit-learn,idlead/scikit-learn,cwu2011/scikit-learn,shangwuhencc/scikit-learn,B3AU/waveTree,wazeerzulfikar/scikit-learn,Garrett-R/scikit-learn,henridwyer/scikit-learn,trungnt13/scikit-learn,3manuek/scikit-learn,abhishekkrthakur/scikit-learn,simon-pepin/scikit-learn,rajat1994/scikit-learn,evgchz/scikit-learn,larsmans/scikit-learn,florian-f/sklearn,idlead/scikit-learn,Fireblend/scikit-learn,ky822/scikit-learn,cainiaocome/scikit-learn,kylerbrown/scikit-learn,xwolf12/scikit-learn,shangwuhencc/scikit-learn,costypetrisor/scikit-learn,btabibian/scikit-learn,Achuth17/scikit-learn,PatrickOReilly/scikit-learn,anntzer/scikit-learn,russel1237/scikit-learn,zorroblue/scikit-learn,wzbozon/scikit-learn,tawsifkhan/scikit-learn,qifeigit/scikit-learn,PrashntS/scikit-learn,alexsavio/scikit-learn,AlexRobson/scikit-learn,Myasuka/scikit-learn,evgchz/scikit-learn,PatrickOReilly/scikit-learn,AnasGhrab/scikit-learn,iismd17/scikit-learn,pkruskal/scikit-learn,lenovor/scikit-learn,tosolveit/scikit-learn,massmutual/scikit-learn,anirudhjayaraman/scikit-learn,anurag313/scikit-learn,siutanwong/scikit-learn,liangz0707/scikit-learn,hrjn/scikit-learn,jpautom/scikit-learn,cainiaocome/scikit-learn,idlead/scikit-learn,samzhang111/scikit-learn,hrjn/scikit-learn,AlexanderFabisch/scikit-learn,deepesch/scikit-learn,Garrett-R/scikit-learn,JeanKossaifi/scikit-learn,kaichogami/scikit-learn,samuel1208/scikit-learn,xiaoxiamii/scikit-learn,lenovor/scikit-learn,giorgiop/scikit-learn,aewhatley/scikit-learn,rohanp/scikit-learn,alexeyum/scikit-learn,bigdataelephants/scikit-learn,jm-begon/scikit-learn,yask123/scikit-learn,kylerbrown/scikit-learn,cl4rke/scikit-learn,hitszxp/scikit-learn,abimannans/scikit-learn,schets/scikit-learn,lucidfrontier45/scikit-learn,Djabbz/scikit-learn,victorbergelin/scikit-learn,belltailjp/scikit-learn,mayblue9/scikit-learn,q1ang/scikit-learn,joshloyal/sc
ikit-learn,fzalkow/scikit-learn,shikhardb/scikit-learn,betatim/scikit-learn,xavierwu/scikit-learn,vivekmishra1991/scikit-learn,yonglehou/scikit-learn,dsullivan7/scikit-learn,h2educ/scikit-learn,herilalaina/scikit-learn,xuewei4d/scikit-learn,mugizico/scikit-learn,cdegroc/scikit-learn,ltiao/scikit-learn,jakobworldpeace/scikit-learn,OshynSong/scikit-learn,yanlend/scikit-learn,fabianp/scikit-learn,nvoron23/scikit-learn,aminert/scikit-learn,nesterione/scikit-learn,jpautom/scikit-learn,ilo10/scikit-learn,anurag313/scikit-learn,RachitKansal/scikit-learn,voxlol/scikit-learn,ClimbsRocks/scikit-learn,justincassidy/scikit-learn,jjx02230808/project0223,vivekmishra1991/scikit-learn,jm-begon/scikit-learn,ogrisel/scikit-learn,thientu/scikit-learn,hugobowne/scikit-learn,sinhrks/scikit-learn,ngoix/OCRF,murali-munna/scikit-learn,pompiduskus/scikit-learn,LohithBlaze/scikit-learn,elkingtonmcb/scikit-learn,roxyboy/scikit-learn,RayMick/scikit-learn,kjung/scikit-learn,jmschrei/scikit-learn,bigdataelephants/scikit-learn,CforED/Machine-Learning,hrjn/scikit-learn,wlamond/scikit-learn,cwu2011/scikit-learn,aewhatley/scikit-learn,aminert/scikit-learn,shusenl/scikit-learn,lazywei/scikit-learn,ChanderG/scikit-learn,krez13/scikit-learn,icdishb/scikit-learn,shyamalschandra/scikit-learn,MartinDelzant/scikit-learn,f3r/scikit-learn,scikit-learn/scikit-learn,fengzhyuan/scikit-learn,JeanKossaifi/scikit-learn,yunfeilu/scikit-learn,jakirkham/scikit-learn,ankurankan/scikit-learn,gotomypc/scikit-learn,yyjiang/scikit-learn,icdishb/scikit-learn,davidgbe/scikit-learn,robbymeals/scikit-learn,potash/scikit-learn,rahuldhote/scikit-learn,mfjb/scikit-learn,q1ang/scikit-learn,nesterione/scikit-learn,raghavrv/scikit-learn,yonglehou/scikit-learn,aetilley/scikit-learn,ashhher3/scikit-learn,zorroblue/scikit-learn,smartscheduling/scikit-learn-categorical-tree,joernhees/scikit-learn,robin-lai/scikit-learn,shahankhatch/scikit-learn,glennq/scikit-learn,jmschrei/scikit-learn,maheshakya/scikit-learn,arahuja/scikit-learn,Issam
Laradji/scikit-learn,hlin117/scikit-learn,cl4rke/scikit-learn,larsmans/scikit-learn,hainm/scikit-learn,fabioticconi/scikit-learn,mehdidc/scikit-learn,YinongLong/scikit-learn,samzhang111/scikit-learn,billy-inn/scikit-learn,pypot/scikit-learn,LiaoPan/scikit-learn,arabenjamin/scikit-learn,billy-inn/scikit-learn,adamgreenhall/scikit-learn,nikitasingh981/scikit-learn,imaculate/scikit-learn,ilyes14/scikit-learn,vybstat/scikit-learn,alexsavio/scikit-learn,simon-pepin/scikit-learn,AnasGhrab/scikit-learn,terkkila/scikit-learn,espg/scikit-learn,mrshu/scikit-learn,poryfly/scikit-learn,HolgerPeters/scikit-learn,gclenaghan/scikit-learn,fzalkow/scikit-learn,Obus/scikit-learn,zhenv5/scikit-learn,mxjl620/scikit-learn,billy-inn/scikit-learn,wzbozon/scikit-learn,liberatorqjw/scikit-learn,Sentient07/scikit-learn,Adai0808/scikit-learn,TomDLT/scikit-learn,fabioticconi/scikit-learn,marcocaccin/scikit-learn,jakobworldpeace/scikit-learn,djgagne/scikit-learn,cybernet14/scikit-learn,466152112/scikit-learn,equialgo/scikit-learn,CVML/scikit-learn,ishanic/scikit-learn,vermouthmjl/scikit-learn,0asa/scikit-learn,hsiaoyi0504/scikit-learn,MartinSavc/scikit-learn,robbymeals/scikit-learn,MartinSavc/scikit-learn,xavierwu/scikit-learn,mehdidc/scikit-learn,loli/semisupervisedforests,chrisburr/scikit-learn,gotomypc/scikit-learn,vybstat/scikit-learn,dhruv13J/scikit-learn,meduz/scikit-learn,rajat1994/scikit-learn,belltailjp/scikit-learn,glouppe/scikit-learn,Titan-C/scikit-learn,RayMick/scikit-learn,btabibian/scikit-learn,ycaihua/scikit-learn,kagayakidan/scikit-learn,bthirion/scikit-learn,pkruskal/scikit-learn,krez13/scikit-learn,MechCoder/scikit-learn,0asa/scikit-learn,sonnyhu/scikit-learn,justincassidy/scikit-learn,liyu1990/sklearn,voxlol/scikit-learn,pratapvardhan/scikit-learn,mhue/scikit-learn,ky822/scikit-learn,shyamalschandra/scikit-learn,bthirion/scikit-learn,rajat1994/scikit-learn,vshtanko/scikit-learn,michigraber/scikit-learn,equialgo/scikit-learn,MatthieuBizien/scikit-learn,hsiaoyi0504/scikit-lear
n,loli/semisupervisedforests,CVML/scikit-learn,andaag/scikit-learn,pv/scikit-learn,massmutual/scikit-learn,kmike/scikit-learn,IndraVikas/scikit-learn,terkkila/scikit-learn,bnaul/scikit-learn,waterponey/scikit-learn,rishikksh20/scikit-learn,saiwing-yeung/scikit-learn,aabadie/scikit-learn,mwv/scikit-learn,TomDLT/scikit-learn,huzq/scikit-learn,zorojean/scikit-learn,MartinDelzant/scikit-learn,eg-zhang/scikit-learn,spallavolu/scikit-learn,liyu1990/sklearn,shahankhatch/scikit-learn,ngoix/OCRF,Adai0808/scikit-learn,mayblue9/scikit-learn,jereze/scikit-learn,lin-credible/scikit-learn,manhhomienbienthuy/scikit-learn,terkkila/scikit-learn,jzt5132/scikit-learn,tdhopper/scikit-learn,dsullivan7/scikit-learn,mwv/scikit-learn,LiaoPan/scikit-learn,djgagne/scikit-learn,akionakamura/scikit-learn,Vimos/scikit-learn,jorik041/scikit-learn,r-mart/scikit-learn,herilalaina/scikit-learn,hugobowne/scikit-learn,DonBeo/scikit-learn,HolgerPeters/scikit-learn,ClimbsRocks/scikit-learn,jlegendary/scikit-learn,zuku1985/scikit-learn,aetilley/scikit-learn,stylianos-kampakis/scikit-learn,dingocuster/scikit-learn,huobaowangxi/scikit-learn,yanlend/scikit-learn,espg/scikit-learn,huzq/scikit-learn,ky822/scikit-learn,chrsrds/scikit-learn,equialgo/scikit-learn,xyguo/scikit-learn,cybernet14/scikit-learn,cainiaocome/scikit-learn,loli/sklearn-ensembletrees,maheshakya/scikit-learn,CforED/Machine-Learning,hsuantien/scikit-learn,mikebenfield/scikit-learn,fabioticconi/scikit-learn,IndraVikas/scikit-learn,hdmetor/scikit-learn,mhue/scikit-learn,xyguo/scikit-learn,untom/scikit-learn,chrisburr/scikit-learn,466152112/scikit-learn,AlexandreAbraham/scikit-learn,jayflo/scikit-learn,fyffyt/scikit-learn,tmhm/scikit-learn,poryfly/scikit-learn,BiaDarkia/scikit-learn,pkruskal/scikit-learn,jakobworldpeace/scikit-learn,yunfeilu/scikit-learn,PatrickChrist/scikit-learn,justincassidy/scikit-learn,ningchi/scikit-learn,procoder317/scikit-learn,RPGOne/scikit-learn,kjung/scikit-learn,Myasuka/scikit-learn,zaxtax/scikit-learn,vshtanko/sci
kit-learn,arahuja/scikit-learn,CVML/scikit-learn,abhishekkrthakur/scikit-learn,lenovor/scikit-learn,ldirer/scikit-learn,mjudsp/Tsallis,ElDeveloper/scikit-learn,fengzhyuan/scikit-learn,Clyde-fare/scikit-learn,RachitKansal/scikit-learn,MatthieuBizien/scikit-learn,anirudhjayaraman/scikit-learn,hsuantien/scikit-learn,pypot/scikit-learn,ahoyosid/scikit-learn,nhejazi/scikit-learn,lazywei/scikit-learn,cybernet14/scikit-learn,henrykironde/scikit-learn,ndingwall/scikit-learn,Obus/scikit-learn,jpautom/scikit-learn,heli522/scikit-learn,fabianp/scikit-learn,henridwyer/scikit-learn,henrykironde/scikit-learn,samuel1208/scikit-learn,plissonf/scikit-learn,lucidfrontier45/scikit-learn,0x0all/scikit-learn,DonBeo/scikit-learn,ChanChiChoi/scikit-learn,etkirsch/scikit-learn,robbymeals/scikit-learn,cauchycui/scikit-learn,rahuldhote/scikit-learn,glennq/scikit-learn,ahoyosid/scikit-learn,MatthieuBizien/scikit-learn,jjx02230808/project0223,jblackburne/scikit-learn,ZenDevelopmentSystems/scikit-learn,jaidevd/scikit-learn,dsquareindia/scikit-learn,IshankGulati/scikit-learn,ssaeger/scikit-learn,ivannz/scikit-learn,untom/scikit-learn,ky822/scikit-learn,nrhine1/scikit-learn,nhejazi/scikit-learn,kylerbrown/scikit-learn,espg/scikit-learn,khkaminska/scikit-learn,rexshihaoren/scikit-learn,glouppe/scikit-learn,wlamond/scikit-learn,bnaul/scikit-learn,arjoly/scikit-learn,shenzebang/scikit-learn,florian-f/sklearn,cdegroc/scikit-learn,toastedcornflakes/scikit-learn,beepee14/scikit-learn,treycausey/scikit-learn,tmhm/scikit-learn,mattgiguere/scikit-learn,eickenberg/scikit-learn,Nyker510/scikit-learn,procoder317/scikit-learn,untom/scikit-learn,PrashntS/scikit-learn,AIML/scikit-learn,macks22/scikit-learn,Garrett-R/scikit-learn,etkirsch/scikit-learn,eickenberg/scikit-learn,Achuth17/scikit-learn,sanketloke/scikit-learn,amueller/scikit-learn,wanggang3333/scikit-learn,murali-munna/scikit-learn,ankurankan/scikit-learn,yanlend/scikit-learn,nikitasingh981/scikit-learn,RPGOne/scikit-learn,f3r/scikit-learn,DSLituiev/s
cikit-learn,OshynSong/scikit-learn,ahoyosid/scikit-learn,ningchi/scikit-learn,jorge2703/scikit-learn,rohanp/scikit-learn,jakobworldpeace/scikit-learn,jzt5132/scikit-learn,raghavrv/scikit-learn,rajat1994/scikit-learn,Vimos/scikit-learn,khkaminska/scikit-learn,abimannans/scikit-learn,wlamond/scikit-learn,MartinDelzant/scikit-learn,treycausey/scikit-learn,ZENGXH/scikit-learn,0asa/scikit-learn,ChanChiChoi/scikit-learn,vibhorag/scikit-learn,Barmaley-exe/scikit-learn,ogrisel/scikit-learn,bikong2/scikit-learn,chrsrds/scikit-learn,rishikksh20/scikit-learn,nikitasingh981/scikit-learn,murali-munna/scikit-learn,sergeyf/scikit-learn,rahul-c1/scikit-learn,kevin-intel/scikit-learn,bigdataelephants/scikit-learn,tosolveit/scikit-learn,nvoron23/scikit-learn,btabibian/scikit-learn,arabenjamin/scikit-learn,devanshdalal/scikit-learn,abhishekgahlot/scikit-learn,vybstat/scikit-learn,moutai/scikit-learn,hitszxp/scikit-learn,phdowling/scikit-learn,3manuek/scikit-learn,jereze/scikit-learn,pythonvietnam/scikit-learn,mattilyra/scikit-learn,arjoly/scikit-learn,jkarnows/scikit-learn,ominux/scikit-learn,JsNoNo/scikit-learn,zihua/scikit-learn,Jimmy-Morzaria/scikit-learn,schets/scikit-learn,rrohan/scikit-learn,dhruv13J/scikit-learn,saiwing-yeung/scikit-learn,loli/sklearn-ensembletrees,xwolf12/scikit-learn,AlexandreAbraham/scikit-learn,iismd17/scikit-learn,adamgreenhall/scikit-learn,zhenv5/scikit-learn,ephes/scikit-learn,nesterione/scikit-learn,ltiao/scikit-learn,bnaul/scikit-learn,altairpearl/scikit-learn,abhishekgahlot/scikit-learn,0x0all/scikit-learn,manhhomienbienthuy/scikit-learn,Sentient07/scikit-learn,depet/scikit-learn,maheshakya/scikit-learn,larsmans/scikit-learn,nomadcube/scikit-learn,rahul-c1/scikit-learn,rahul-c1/scikit-learn,hlin117/scikit-learn,AlexandreAbraham/scikit-learn,ChanChiChoi/scikit-learn,quheng/scikit-learn,eickenberg/scikit-learn,mayblue9/scikit-learn,quheng/scikit-learn,themrmax/scikit-learn,hugobowne/scikit-learn,lbishal/scikit-learn,Windy-Ground/scikit-learn,loli/sklear
n-ensembletrees,h2educ/scikit-learn,abimannans/scikit-learn,depet/scikit-learn,mjgrav2001/scikit-learn,HolgerPeters/scikit-learn,glemaitre/scikit-learn,LiaoPan/scikit-learn,theoryno3/scikit-learn,vortex-ape/scikit-learn,thientu/scikit-learn,rsivapr/scikit-learn,PrashntS/scikit-learn,ningchi/scikit-learn,Garrett-R/scikit-learn,jzt5132/scikit-learn,dingocuster/scikit-learn,olologin/scikit-learn,LohithBlaze/scikit-learn,bhargav/scikit-learn,wzbozon/scikit-learn,xuewei4d/scikit-learn,ndingwall/scikit-learn,fabioticconi/scikit-learn,pythonvietnam/scikit-learn,marcocaccin/scikit-learn,NunoEdgarGub1/scikit-learn,glennq/scikit-learn,florian-f/sklearn,mjgrav2001/scikit-learn,mikebenfield/scikit-learn,mjgrav2001/scikit-learn,Fireblend/scikit-learn,UNR-AERIAL/scikit-learn,sinhrks/scikit-learn,lin-credible/scikit-learn,arahuja/scikit-learn,arabenjamin/scikit-learn,LohithBlaze/scikit-learn,ephes/scikit-learn,liberatorqjw/scikit-learn,sarahgrogan/scikit-learn,jblackburne/scikit-learn,stylianos-kampakis/scikit-learn,jkarnows/scikit-learn,pv/scikit-learn,bthirion/scikit-learn,IssamLaradji/scikit-learn,hdmetor/scikit-learn,mehdidc/scikit-learn,mblondel/scikit-learn,mattilyra/scikit-learn,Windy-Ground/scikit-learn,ngoix/OCRF,NunoEdgarGub1/scikit-learn,pypot/scikit-learn,rsivapr/scikit-learn,lesteve/scikit-learn,yonglehou/scikit-learn,ngoix/OCRF,vybstat/scikit-learn,amueller/scikit-learn,florian-f/sklearn,robin-lai/scikit-learn,nmayorov/scikit-learn,trankmichael/scikit-learn,Akshay0724/scikit-learn,nelson-liu/scikit-learn,lazywei/scikit-learn,wlamond/scikit-learn,akionakamura/scikit-learn,mwv/scikit-learn,petosegan/scikit-learn,hlin117/scikit-learn,shenzebang/scikit-learn,clemkoa/scikit-learn,PatrickChrist/scikit-learn,CforED/Machine-Learning,ycaihua/scikit-learn,RomainBrault/scikit-learn,JosmanPS/scikit-learn,zuku1985/scikit-learn,mhdella/scikit-learn,sergeyf/scikit-learn,manashmndl/scikit-learn,djgagne/scikit-learn,eickenberg/scikit-learn,joshloyal/scikit-learn,mfjb/scikit-learn,mas
smutual/scikit-learn,russel1237/scikit-learn,bnaul/scikit-learn,pratapvardhan/scikit-learn,vigilv/scikit-learn,vshtanko/scikit-learn,saiwing-yeung/scikit-learn,pianomania/scikit-learn,hainm/scikit-learn,ashhher3/scikit-learn,frank-tancf/scikit-learn,yunfeilu/scikit-learn,kjung/scikit-learn,NelisVerhoef/scikit-learn,Barmaley-exe/scikit-learn,0x0all/scikit-learn,pnedunuri/scikit-learn,alexsavio/scikit-learn,evgchz/scikit-learn,RomainBrault/scikit-learn,Djabbz/scikit-learn,ChanderG/scikit-learn,liberatorqjw/scikit-learn,akionakamura/scikit-learn,sgenoud/scikit-learn,sumspr/scikit-learn,q1ang/scikit-learn,rexshihaoren/scikit-learn,shikhardb/scikit-learn,YinongLong/scikit-learn,sanketloke/scikit-learn,ishanic/scikit-learn,shikhardb/scikit-learn,sarahgrogan/scikit-learn,xzh86/scikit-learn,tomlof/scikit-learn,marcocaccin/scikit-learn,nelson-liu/scikit-learn,yanlend/scikit-learn,devanshdalal/scikit-learn,equialgo/scikit-learn,chrisburr/scikit-learn,ssaeger/scikit-learn,ClimbsRocks/scikit-learn,tdhopper/scikit-learn,henridwyer/scikit-learn,sumspr/scikit-learn,jlegendary/scikit-learn,frank-tancf/scikit-learn,Nyker510/scikit-learn,iismd17/scikit-learn,xyguo/scikit-learn,quheng/scikit-learn,yyjiang/scikit-learn,Myasuka/scikit-learn,466152112/scikit-learn,arahuja/scikit-learn,tawsifkhan/scikit-learn,MohammedWasim/scikit-learn,spallavolu/scikit-learn,shenzebang/scikit-learn,marcocaccin/scikit-learn,anurag313/scikit-learn,deepesch/scikit-learn,Aasmi/scikit-learn,justincassidy/scikit-learn,HolgerPeters/scikit-learn,olologin/scikit-learn,toastedcornflakes/scikit-learn,cauchycui/scikit-learn,ilyes14/scikit-learn,trungnt13/scikit-learn,cl4rke/scikit-learn,maheshakya/scikit-learn,untom/scikit-learn,xubenben/scikit-learn,jseabold/scikit-learn,aewhatley/scikit-learn,adamgreenhall/scikit-learn,macks22/scikit-learn,shahankhatch/scikit-learn,BiaDarkia/scikit-learn,Lawrence-Liu/scikit-learn,sinhrks/scikit-learn,kmike/scikit-learn,liyu1990/sklearn,pythonvietnam/scikit-learn,luo66/scikit-learn
,DSLituiev/scikit-learn,themrmax/scikit-learn,xubenben/scikit-learn,samzhang111/scikit-learn,xiaoxiamii/scikit-learn,Achuth17/scikit-learn,ominux/scikit-learn,pratapvardhan/scikit-learn,luo66/scikit-learn,NelisVerhoef/scikit-learn,sanketloke/scikit-learn,anntzer/scikit-learn,AlexandreAbraham/scikit-learn,maheshakya/scikit-learn,thilbern/scikit-learn,elkingtonmcb/scikit-learn,abhishekgahlot/scikit-learn,mjudsp/Tsallis,zhenv5/scikit-learn,vermouthmjl/scikit-learn,voxlol/scikit-learn,wanggang3333/scikit-learn,andaag/scikit-learn,hsuantien/scikit-learn,glennq/scikit-learn,sonnyhu/scikit-learn,kagayakidan/scikit-learn,kashif/scikit-learn,themrmax/scikit-learn,wanggang3333/scikit-learn,vermouthmjl/scikit-learn,abhishekkrthakur/scikit-learn,nelson-liu/scikit-learn,aabadie/scikit-learn,liyu1990/sklearn,IshankGulati/scikit-learn,evgchz/scikit-learn,anirudhjayaraman/scikit-learn,zorojean/scikit-learn,carrillo/scikit-learn,deepesch/scikit-learn,thilbern/scikit-learn,treycausey/scikit-learn,fyffyt/scikit-learn,jorge2703/scikit-learn,aflaxman/scikit-learn,manashmndl/scikit-learn,xzh86/scikit-learn,rvraghav93/scikit-learn,sarahgrogan/scikit-learn,spallavolu/scikit-learn,ivannz/scikit-learn,fredhusser/scikit-learn,trankmichael/scikit-learn,henrykironde/scikit-learn,AnasGhrab/scikit-learn,jlegendary/scikit-learn,IssamLaradji/scikit-learn,ahoyosid/scikit-learn,anurag313/scikit-learn,akionakamura/scikit-learn,poryfly/scikit-learn,nhejazi/scikit-learn,gotomypc/scikit-learn,alexeyum/scikit-learn,mrshu/scikit-learn,ssaeger/scikit-learn,yask123/scikit-learn,pratapvardhan/scikit-learn,phdowling/scikit-learn,mattgiguere/scikit-learn,elkingtonmcb/scikit-learn,ZenDevelopmentSystems/scikit-learn,fzalkow/scikit-learn,larsmans/scikit-learn,hitszxp/scikit-learn,dsquareindia/scikit-learn,henridwyer/scikit-learn,jlegendary/scikit-learn,cdegroc/scikit-learn,fredhusser/scikit-learn,rrohan/scikit-learn,Achuth17/scikit-learn,joernhees/scikit-learn,nrhine1/scikit-learn,florian-f/sklearn,sarahgrogan/sci
kit-learn,RomainBrault/scikit-learn,mwv/scikit-learn,jblackburne/scikit-learn,JeanKossaifi/scikit-learn,russel1237/scikit-learn,imaculate/scikit-learn,yyjiang/scikit-learn,sergeyf/scikit-learn,jseabold/scikit-learn,procoder317/scikit-learn,potash/scikit-learn,adamgreenhall/scikit-learn,3manuek/scikit-learn,trungnt13/scikit-learn,andaag/scikit-learn,olologin/scikit-learn,Akshay0724/scikit-learn,kashif/scikit-learn,phdowling/scikit-learn,hsuantien/scikit-learn,fredhusser/scikit-learn,hugobowne/scikit-learn,IndraVikas/scikit-learn,B3AU/waveTree,jmschrei/scikit-learn,ngoix/OCRF,anntzer/scikit-learn,JsNoNo/scikit-learn,djgagne/scikit-learn,jaidevd/scikit-learn,glemaitre/scikit-learn,kevin-intel/scikit-learn,yonglehou/scikit-learn,ycaihua/scikit-learn,treycausey/scikit-learn,jaidevd/scikit-learn,fengzhyuan/scikit-learn,themrmax/scikit-learn,Adai0808/scikit-learn,nhejazi/scikit-learn,shyamalschandra/scikit-learn,jorge2703/scikit-learn,petosegan/scikit-learn,harshaneelhg/scikit-learn,jpautom/scikit-learn,joernhees/scikit-learn,procoder317/scikit-learn,clemkoa/scikit-learn,tmhm/scikit-learn,devanshdalal/scikit-learn,kjung/scikit-learn,mlyundin/scikit-learn,wazeerzulfikar/scikit-learn,glemaitre/scikit-learn,CVML/scikit-learn,ldirer/scikit-learn,rahul-c1/scikit-learn,kaichogami/scikit-learn,thientu/scikit-learn,mxjl620/scikit-learn,Aasmi/scikit-learn,bigdataelephants/scikit-learn,mojoboss/scikit-learn,huobaowangxi/scikit-learn,mblondel/scikit-learn,mojoboss/scikit-learn,JPFrancoia/scikit-learn,Vimos/scikit-learn,joshloyal/scikit-learn,jakirkham/scikit-learn,TomDLT/scikit-learn,lenovor/scikit-learn,michigraber/scikit-learn,ashhher3/scikit-learn,BiaDarkia/scikit-learn,larsmans/scikit-learn,Obus/scikit-learn,russel1237/scikit-learn,NelisVerhoef/scikit-learn,elkingtonmcb/scikit-learn,pnedunuri/scikit-learn,cwu2011/scikit-learn,espg/scikit-learn,ZenDevelopmentSystems/scikit-learn,bikong2/scikit-learn,rishikksh20/scikit-learn,ZENGXH/scikit-learn,walterreade/scikit-learn,harshaneelhg
/scikit-learn,schets/scikit-learn,appapantula/scikit-learn,raghavrv/scikit-learn,vinayak-mehta/scikit-learn,eg-zhang/scikit-learn,ankurankan/scikit-learn,lbishal/scikit-learn,MechCoder/scikit-learn,vigilv/scikit-learn,macks22/scikit-learn,mrshu/scikit-learn,theoryno3/scikit-learn,billy-inn/scikit-learn,nomadcube/scikit-learn,kagayakidan/scikit-learn,trungnt13/scikit-learn,lin-credible/scikit-learn,rohanp/scikit-learn,lesteve/scikit-learn,voxlol/scikit-learn,Fireblend/scikit-learn,cwu2011/scikit-learn,fzalkow/scikit-learn,fredhusser/scikit-learn,zorojean/scikit-learn,plissonf/scikit-learn,Srisai85/scikit-learn,saiwing-yeung/scikit-learn,jjx02230808/project0223,lin-credible/scikit-learn,sumspr/scikit-learn,mugizico/scikit-learn,joernhees/scikit-learn,manashmndl/scikit-learn,vshtanko/scikit-learn,khkaminska/scikit-learn,tmhm/scikit-learn,Garrett-R/scikit-learn,alexsavio/scikit-learn,zihua/scikit-learn,sanketloke/scikit-learn,fbagirov/scikit-learn,nmayorov/scikit-learn,pnedunuri/scikit-learn,walterreade/scikit-learn,btabibian/scikit-learn,chrsrds/scikit-learn,rvraghav93/scikit-learn,shahankhatch/scikit-learn,jereze/scikit-learn,dsullivan7/scikit-learn,olologin/scikit-learn,victorbergelin/scikit-learn,AlexRobson/scikit-learn,xzh86/scikit-learn,plissonf/scikit-learn,massmutual/scikit-learn,xuewei4d/scikit-learn,ivannz/scikit-learn,robbymeals/scikit-learn,beepee14/scikit-learn,vermouthmjl/scikit-learn,nikitasingh981/scikit-learn,potash/scikit-learn,aabadie/scikit-learn,Titan-C/scikit-learn,gclenaghan/scikit-learn,h2educ/scikit-learn,zaxtax/scikit-learn,ilyes14/scikit-learn,mjudsp/Tsallis,jereze/scikit-learn,0x0all/scikit-learn,zuku1985/scikit-learn,vigilv/scikit-learn,tdhopper/scikit-learn,harshaneelhg/scikit-learn,sgenoud/scikit-learn,nvoron23/scikit-learn,mjudsp/Tsallis,ycaihua/scikit-learn,pompiduskus/scikit-learn,jseabold/scikit-learn,abhishekkrthakur/scikit-learn,Titan-C/scikit-learn,robin-lai/scikit-learn,r-mart/scikit-learn,ominux/scikit-learn,vigilv/scikit-learn,da
vidgbe/scikit-learn,ilyes14/scikit-learn,ldirer/scikit-learn,loli/semisupervisedforests,dhruv13J/scikit-learn,JPFrancoia/scikit-learn,dsullivan7/scikit-learn,nomadcube/scikit-learn,DSLituiev/scikit-learn,aflaxman/scikit-learn,fabianp/scikit-learn,glemaitre/scikit-learn,cl4rke/scikit-learn,poryfly/scikit-learn,ltiao/scikit-learn,mehdidc/scikit-learn,aminert/scikit-learn,glouppe/scikit-learn,zuku1985/scikit-learn,depet/scikit-learn,mattgiguere/scikit-learn,alvarofierroclavero/scikit-learn,zorojean/scikit-learn,ningchi/scikit-learn,walterreade/scikit-learn,samuel1208/scikit-learn,rexshihaoren/scikit-learn,q1ang/scikit-learn,madjelan/scikit-learn,macks22/scikit-learn,fyffyt/scikit-learn,jblackburne/scikit-learn,roxyboy/scikit-learn,loli/sklearn-ensembletrees,gclenaghan/scikit-learn,mhdella/scikit-learn,hrjn/scikit-learn,vinayak-mehta/scikit-learn,jjx02230808/project0223,ilo10/scikit-learn,lesteve/scikit-learn,DonBeo/scikit-learn,evgchz/scikit-learn,alexeyum/scikit-learn,Jimmy-Morzaria/scikit-learn,nvoron23/scikit-learn,depet/scikit-learn,jakirkham/scikit-learn,jkarnows/scikit-learn,pianomania/scikit-learn,cauchycui/scikit-learn,IssamLaradji/scikit-learn,treycausey/scikit-learn,zorroblue/scikit-learn,pnedunuri/scikit-learn,liangz0707/scikit-learn,qifeigit/scikit-learn,PatrickOReilly/scikit-learn,xiaoxiamii/scikit-learn,zaxtax/scikit-learn,MartinSavc/scikit-learn,zihua/scikit-learn,ChanderG/scikit-learn,MartinSavc/scikit-learn,jayflo/scikit-learn,ishanic/scikit-learn,phdowling/scikit-learn,vortex-ape/scikit-learn,herilalaina/scikit-learn,shangwuhencc/scikit-learn,ilo10/scikit-learn,qifeigit/scikit-learn,Sentient07/scikit-learn,xwolf12/scikit-learn,moutai/scikit-learn,shusenl/scikit-learn,0asa/scikit-learn,andrewnc/scikit-learn,dhruv13J/scikit-learn,kevin-intel/scikit-learn,jaidevd/scikit-learn,pompiduskus/scikit-learn,mrshu/scikit-learn,rsivapr/scikit-learn,moutai/scikit-learn,chrisburr/scikit-learn,JsNoNo/scikit-learn,Clyde-fare/scikit-learn,mattgiguere/scikit-learn,nrhi
ne1/scikit-learn,DSLituiev/scikit-learn,ltiao/scikit-learn,ChanderG/scikit-learn,simon-pepin/scikit-learn,Akshay0724/scikit-learn,mfjb/scikit-learn,fabianp/scikit-learn,xiaoxiamii/scikit-learn,ogrisel/scikit-learn,simon-pepin/scikit-learn,altairpearl/scikit-learn,sgenoud/scikit-learn,Titan-C/scikit-learn,smartscheduling/scikit-learn-categorical-tree,vinayak-mehta/scikit-learn,pianomania/scikit-learn,trankmichael/scikit-learn,Obus/scikit-learn,ndingwall/scikit-learn,OshynSong/scikit-learn,RPGOne/scikit-learn,tomlof/scikit-learn,JsNoNo/scikit-learn,YinongLong/scikit-learn,tawsifkhan/scikit-learn,aflaxman/scikit-learn,idlead/scikit-learn,UNR-AERIAL/scikit-learn,andrewnc/scikit-learn,Lawrence-Liu/scikit-learn,mblondel/scikit-learn,madjelan/scikit-learn,BiaDarkia/scikit-learn,trankmichael/scikit-learn,nelson-liu/scikit-learn,wzbozon/scikit-learn,robin-lai/scikit-learn,Adai0808/scikit-learn,ZENGXH/scikit-learn,bhargav/scikit-learn,AnasGhrab/scikit-learn,Nyker510/scikit-learn,PatrickChrist/scikit-learn,bthirion/scikit-learn,kaichogami/scikit-learn,eg-zhang/scikit-learn,davidgbe/scikit-learn,liberatorqjw/scikit-learn,abimannans/scikit-learn,altairpearl/scikit-learn,krez13/scikit-learn,icdishb/scikit-learn,rishikksh20/scikit-learn,Fireblend/scikit-learn,zihua/scikit-learn,fyffyt/scikit-learn,xubenben/scikit-learn,AlexanderFabisch/scikit-learn,jorik041/scikit-learn,PrashntS/scikit-learn,UNR-AERIAL/scikit-learn,loli/sklearn-ensembletrees,abhishekgahlot/scikit-learn,RayMick/scikit-learn,ClimbsRocks/scikit-learn,samzhang111/scikit-learn,krez13/scikit-learn,wanggang3333/scikit-learn,depet/scikit-learn,r-mart/scikit-learn,arjoly/scikit-learn,giorgiop/scikit-learn,roxyboy/scikit-learn,madjelan/scikit-learn,pompiduskus/scikit-learn,mojoboss/scikit-learn,zaxtax/scikit-learn,mhue/scikit-learn,terkkila/scikit-learn,IshankGulati/scikit-learn,rohanp/scikit-learn,stylianos-kampakis/scikit-learn,andrewnc/scikit-learn,dingocuster/scikit-learn,theoryno3/scikit-learn,Nyker510/scikit-learn,yyj
iang/scikit-learn,xavierwu/scikit-learn,hdmetor/scikit-learn,shenzebang/scikit-learn,ycaihua/scikit-learn,glouppe/scikit-learn,jmetzen/scikit-learn,Vimos/scikit-learn,jkarnows/scikit-learn,waterponey/scikit-learn,plissonf/scikit-learn,etkirsch/scikit-learn,alvarofierroclavero/scikit-learn,rrohan/scikit-learn,meduz/scikit-learn,mjgrav2001/scikit-learn,carrillo/scikit-learn,466152112/scikit-learn,lucidfrontier45/scikit-learn,jm-begon/scikit-learn,nmayorov/scikit-learn,xavierwu/scikit-learn,wazeerzulfikar/scikit-learn,mhdella/scikit-learn,NelisVerhoef/scikit-learn,walterreade/scikit-learn,dsquareindia/scikit-learn,ssaeger/scikit-learn,Djabbz/scikit-learn,mikebenfield/scikit-learn,lbishal/scikit-learn,nmayorov/scikit-learn,ngoix/OCRF,cybernet14/scikit-learn,xubenben/scikit-learn,bhargav/scikit-learn,Windy-Ground/scikit-learn,dingocuster/scikit-learn,beepee14/scikit-learn,ChanChiChoi/scikit-learn,mugizico/scikit-learn,Djabbz/scikit-learn,tosolveit/scikit-learn,waterponey/scikit-learn,AIML/scikit-learn,mikebenfield/scikit-learn,rexshihaoren/scikit-learn,jmetzen/scikit-learn,wazeerzulfikar/scikit-learn,lazywei/scikit-learn,sonnyhu/scikit-learn,mblondel/scikit-learn,shyamalschandra/scikit-learn,xuewei4d/scikit-learn,ivannz/scikit-learn,hsiaoyi0504/scikit-learn,aminert/scikit-learn,kmike/scikit-learn,AlexRobson/scikit-learn,vortex-ape/scikit-learn,JosmanPS/scikit-learn,alvarofierroclavero/scikit-learn,pypot/scikit-learn,giorgiop/scikit-learn,harshaneelhg/scikit-learn,RachitKansal/scikit-learn,ankurankan/scikit-learn,scikit-learn/scikit-learn,herilalaina/scikit-learn,ElDeveloper/scikit-learn,betatim/scikit-learn,AlexanderFabisch/scikit-learn,betatim/scikit-learn,CforED/Machine-Learning,0asa/scikit-learn,jayflo/scikit-learn,meduz/scikit-learn,kmike/scikit-learn,rsivapr/scikit-learn,belltailjp/scikit-learn,eg-zhang/scikit-learn,jorge2703/scikit-learn,thilbern/scikit-learn,Clyde-fare/scikit-learn,vibhorag/scikit-learn,mjudsp/Tsallis,fengzhyuan/scikit-learn,hlin117/scikit-learn,R
ayMick/scikit-learn,Srisai85/scikit-learn,fbagirov/scikit-learn,ogrisel/scikit-learn,MartinDelzant/scikit-learn,tomlof/scikit-learn,Sentient07/scikit-learn,UNR-AERIAL/scikit-learn,clemkoa/scikit-learn,sonnyhu/scikit-learn,bikong2/scikit-learn,murali-munna/scikit-learn,sgenoud/scikit-learn,heli522/scikit-learn,vibhorag/scikit-learn,Jimmy-Morzaria/scikit-learn,amueller/scikit-learn,r-mart/scikit-learn,IndraVikas/scikit-learn,vivekmishra1991/scikit-learn,clemkoa/scikit-learn,cainiaocome/scikit-learn,imaculate/scikit-learn,ZENGXH/scikit-learn,mlyundin/scikit-learn,hitszxp/scikit-learn,MohammedWasim/scikit-learn,andrewnc/scikit-learn,kylerbrown/scikit-learn,arabenjamin/scikit-learn,huobaowangxi/scikit-learn,ankurankan/scikit-learn,RomainBrault/scikit-learn,JPFrancoia/scikit-learn,zhenv5/scikit-learn,manhhomienbienthuy/scikit-learn,Aasmi/scikit-learn,fbagirov/scikit-learn,mxjl620/scikit-learn,pythonvietnam/scikit-learn,0x0all/scikit-learn,deepesch/scikit-learn,mhue/scikit-learn,ilo10/scikit-learn,MechCoder/scikit-learn,kmike/scikit-learn,siutanwong/scikit-learn,aabadie/scikit-learn,lbishal/scikit-learn,pkruskal/scikit-learn,carrillo/scikit-learn,AIML/scikit-learn,rahuldhote/scikit-learn,tdhopper/scikit-learn,rahuldhote/scikit-learn,khkaminska/scikit-learn,hsiaoyi0504/scikit-learn,JosmanPS/scikit-learn,loli/semisupervisedforests,f3r/scikit-learn,mlyundin/scikit-learn,3manuek/scikit-learn,fbagirov/scikit-learn,mxjl620/scikit-learn,OshynSong/scikit-learn,Akshay0724/scikit-learn,amueller/scikit-learn,theoryno3/scikit-learn,jmetzen/scikit-learn,sinhrks/scikit-learn,waterponey/scikit-learn,PatrickOReilly/scikit-learn,vivekmishra1991/scikit-learn,alvarofierroclavero/scikit-learn,toastedcornflakes/scikit-learn,B3AU/waveTree,sumspr/scikit-learn,victorbergelin/scikit-learn,appapantula/scikit-learn,henrykironde/scikit-learn,rvraghav93/scikit-learn,kaichogami/scikit-learn,joshloyal/scikit-learn | """
=================================
Gaussian Mixture Model Ellipsoids
=================================
Plot the confidence ellipsoids of a mixture of two gaussians.
"""
import numpy as np
from scikits.learn import gmm
import itertools
import pylab as pl
import matplotlib as mpl
import matplotlib.pyplot as plt
n, m = 300, 2
# generate random sample, two components
np.random.seed(0)
C = np.array([[0., -0.7], [3.5, .7]])
X_train = np.r_[np.dot(np.random.randn(n, 2), C),
np.random.randn(n, 2) + np.array([20, 20])]
clf = gmm.GMM(2, cvtype='full')
clf.fit(X_train)
x = np.linspace(-20.0, 30.0)
y = np.linspace(-20.0, 40.0)
X, Y = np.meshgrid(x, y)
XX = np.c_[X.ravel(), Y.ravel()]
Z = np.log(-clf.eval(XX)[0])
Z = Z.reshape(X.shape)
CS = pl.contour(X, Y, Z)
CB = plt.colorbar(CS, shrink=0.8, extend='both')
pl.scatter(X_train[:, 0], X_train[:, 1], .8)
pl.axis('tight')
pl.show()
| Add an example with probability distribution estimates using GMM.
This is a work in progress. Also, the .eval() function from GMM
might very likely change it's return type in the future.
|
|
36716fe51800a19567c49e734d320b38d441054e | zerver/migrations/0003_custom_indexes.py | zerver/migrations/0003_custom_indexes.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('zerver', '0002_django_1_8'),
]
operations = [
migrations.RunSQL("CREATE INDEX upper_subject_idx ON zerver_message ((upper(subject)));",
reverse_sql="DROP INDEX upper_subject_idx;"),
migrations.RunSQL("CREATE INDEX upper_stream_name_idx ON zerver_stream ((upper(name)));",
reverse_sql="DROP INDEX upper_stream_name_idx;")
]
| Add remaining custom indexes that were created by South migrations | Add remaining custom indexes that were created by South migrations
(imported from commit 9798afa8161af4ae6b3fa0c5f4894a3211b77cd4)
| Python | apache-2.0 | punchagan/zulip,yocome/zulip,bluesea/zulip,armooo/zulip,zacps/zulip,vabs22/zulip,Juanvulcano/zulip,hj3938/zulip,zwily/zulip,grave-w-grave/zulip,dxq-git/zulip,LeeRisk/zulip,so0k/zulip,jerryge/zulip,technicalpickles/zulip,guiquanz/zulip,alliejones/zulip,firstblade/zulip,Drooids/zulip,xuanhan863/zulip,praveenaki/zulip,ApsOps/zulip,thomasboyt/zulip,vakila/zulip,mdavid/zulip,sonali0901/zulip,hustlzp/zulip,LeeRisk/zulip,andersk/zulip,vikas-parashar/zulip,zacps/zulip,jainayush975/zulip,umkay/zulip,ahmadassaf/zulip,wavelets/zulip,Juanvulcano/zulip,SmartPeople/zulip,joshisa/zulip,ericzhou2008/zulip,nicholasbs/zulip,LeeRisk/zulip,wavelets/zulip,shaunstanislaus/zulip,xuxiao/zulip,lfranchi/zulip,jessedhillon/zulip,ahmadassaf/zulip,esander91/zulip,stamhe/zulip,johnny9/zulip,Diptanshu8/zulip,akuseru/zulip,Vallher/zulip,eeshangarg/zulip,joyhchen/zulip,karamcnair/zulip,JanzTam/zulip,dawran6/zulip,m1ssou/zulip,Jianchun1/zulip,wangdeshui/zulip,Batterfii/zulip,udxxabp/zulip,hengqujushi/zulip,bluesea/zulip,dawran6/zulip,rishig/zulip,themass/zulip,AZtheAsian/zulip,hayderimran7/zulip,schatt/zulip,atomic-labs/zulip,praveenaki/zulip,seapasulli/zulip,firstblade/zulip,jessedhillon/zulip,Frouk/zulip,rht/zulip,MariaFaBella85/zulip,Suninus/zulip,voidException/zulip,JanzTam/zulip,zulip/zulip,zofuthan/zulip,udxxabp/zulip,lfranchi/zulip,amyliu345/zulip,kokoar/zulip,peguin40/zulip,wangdeshui/zulip,noroot/zulip,Drooids/zulip,glovebx/zulip,wavelets/zulip,AZtheAsian/zulip,bitemyapp/zulip,punchagan/zulip,bowlofstew/zulip,he15his/zulip,TigorC/zulip,codeKonami/zulip,rishig/zulip,ipernet/zulip,amallia/zulip,tdr130/zulip,luyifan/zulip,amyliu345/zulip,johnnygaddarr/zulip,bssrdf/zulip,Jianchun1/zulip,dwrpayne/zulip,krtkmj/zulip,aakash-cr7/zulip,Cheppers/zulip,yuvipanda/zulip,PhilSk/zulip,johnnygaddarr/zulip,aliceriot/zulip,isht3/zulip,DazWorrall/zulip,bastianh/zulip,DazWorrall/zulip,jonesgithub/zulip,souravbadami/zulip,kokoar/zulip,jphilipsen05/zulip,Vallher/zulip,amanharitsh123/zulip
,Juanvulcano/zulip,armooo/zulip,he15his/zulip,dxq-git/zulip,Gabriel0402/zulip,KingxBanana/zulip,ikasumiwt/zulip,EasonYi/zulip,dnmfarrell/zulip,zachallaun/zulip,jonesgithub/zulip,hayderimran7/zulip,mohsenSy/zulip,huangkebo/zulip,adnanh/zulip,swinghu/zulip,so0k/zulip,hackerkid/zulip,akuseru/zulip,jerryge/zulip,PhilSk/zulip,sharmaeklavya2/zulip,hackerkid/zulip,saitodisse/zulip,dxq-git/zulip,jrowan/zulip,brainwane/zulip,brockwhittaker/zulip,Suninus/zulip,kaiyuanheshang/zulip,udxxabp/zulip,amallia/zulip,adnanh/zulip,armooo/zulip,AZtheAsian/zulip,verma-varsha/zulip,zwily/zulip,nicholasbs/zulip,susansls/zulip,xuxiao/zulip,huangkebo/zulip,rht/zulip,JanzTam/zulip,dhcrzf/zulip,ipernet/zulip,mahim97/zulip,he15his/zulip,rishig/zulip,jackrzhang/zulip,atomic-labs/zulip,akuseru/zulip,so0k/zulip,zhaoweigg/zulip,avastu/zulip,timabbott/zulip,jimmy54/zulip,technicalpickles/zulip,johnny9/zulip,eastlhu/zulip,ipernet/zulip,adnanh/zulip,calvinleenyc/zulip,wdaher/zulip,dawran6/zulip,vaidap/zulip,MariaFaBella85/zulip,glovebx/zulip,tdr130/zulip,showell/zulip,hayderimran7/zulip,gkotian/zulip,RobotCaleb/zulip,babbage/zulip,bluesea/zulip,ashwinirudrappa/zulip,mohsenSy/zulip,christi3k/zulip,souravbadami/zulip,technicalpickles/zulip,mdavid/zulip,joshisa/zulip,wavelets/zulip,jimmy54/zulip,shubhamdhama/zulip,shaunstanislaus/zulip,thomasboyt/zulip,stamhe/zulip,itnihao/zulip,vabs22/zulip,andersk/zulip,jerryge/zulip,sup95/zulip,MayB/zulip,bastianh/zulip,esander91/zulip,saitodisse/zulip,aakash-cr7/zulip,calvinleenyc/zulip,kaiyuanheshang/zulip,shubhamdhama/zulip,souravbadami/zulip,susansls/zulip,seapasulli/zulip,arpitpanwar/zulip,firstblade/zulip,eastlhu/zulip,Suninus/zulip,kaiyuanheshang/zulip,tiansiyuan/zulip,sup95/zulip,dnmfarrell/zulip,noroot/zulip,RobotCaleb/zulip,sharmaeklavya2/zulip,bitemyapp/zulip,mahim97/zulip,technicalpickles/zulip,zhaoweigg/zulip,armooo/zulip,Cheppers/zulip,xuxiao/zulip,vikas-parashar/zulip,saitodisse/zulip,thomasboyt/zulip,jonesgithub/zulip,sonali0901/zulip,kaiyuanheshang/zu
lip,noroot/zulip,nicholasbs/zulip,Diptanshu8/zulip,vakila/zulip,dxq-git/zulip,shubhamdhama/zulip,ufosky-server/zulip,dnmfarrell/zulip,hackerkid/zulip,arpitpanwar/zulip,AZtheAsian/zulip,JPJPJPOPOP/zulip,Suninus/zulip,christi3k/zulip,cosmicAsymmetry/zulip,christi3k/zulip,pradiptad/zulip,KingxBanana/zulip,proliming/zulip,adnanh/zulip,vabs22/zulip,hustlzp/zulip,praveenaki/zulip,ericzhou2008/zulip,zorojean/zulip,mansilladev/zulip,fw1121/zulip,willingc/zulip,KingxBanana/zulip,bowlofstew/zulip,avastu/zulip,dhcrzf/zulip,vikas-parashar/zulip,LeeRisk/zulip,themass/zulip,kou/zulip,developerfm/zulip,sharmaeklavya2/zulip,dxq-git/zulip,tdr130/zulip,zachallaun/zulip,adnanh/zulip,bitemyapp/zulip,rht/zulip,moria/zulip,aliceriot/zulip,jphilipsen05/zulip,moria/zulip,dotcool/zulip,ryansnowboarder/zulip,esander91/zulip,sup95/zulip,grave-w-grave/zulip,eeshangarg/zulip,gigawhitlocks/zulip,KJin99/zulip,synicalsyntax/zulip,akuseru/zulip,tbutter/zulip,Cheppers/zulip,Diptanshu8/zulip,punchagan/zulip,dotcool/zulip,samatdav/zulip,timabbott/zulip,Galexrt/zulip,fw1121/zulip,zorojean/zulip,thomasboyt/zulip,zhaoweigg/zulip,littledogboy/zulip,EasonYi/zulip,jackrzhang/zulip,hafeez3000/zulip,easyfmxu/zulip,showell/zulip,mdavid/zulip,hackerkid/zulip,amallia/zulip,dattatreya303/zulip,mohsenSy/zulip,jeffcao/zulip,suxinde2009/zulip,tiansiyuan/zulip,christi3k/zulip,gkotian/zulip,susansls/zulip,Juanvulcano/zulip,tiansiyuan/zulip,wweiradio/zulip,LAndreas/zulip,hustlzp/zulip,andersk/zulip,brainwane/zulip,dhcrzf/zulip,JPJPJPOPOP/zulip,krtkmj/zulip,ufosky-server/zulip,pradiptad/zulip,lfranchi/zulip,amanharitsh123/zulip,niftynei/zulip,technicalpickles/zulip,paxapy/zulip,kou/zulip,PaulPetring/zulip,cosmicAsymmetry/zulip,zulip/zulip,umkay/zulip,calvinleenyc/zulip,fw1121/zulip,sup95/zulip,joyhchen/zulip,glovebx/zulip,yocome/zulip,Cheppers/zulip,ericzhou2008/zulip,cosmicAsymmetry/zulip,fw1121/zulip,Frouk/zulip,sonali0901/zulip,vakila/zulip,rishig/zulip,nicholasbs/zulip,j831/zulip,ufosky-server/zulip,shrikrishnaholla
/zulip,yocome/zulip,wangdeshui/zulip,aliceriot/zulip,proliming/zulip,jainayush975/zulip,LAndreas/zulip,punchagan/zulip,ryanbackman/zulip,synicalsyntax/zulip,hustlzp/zulip,Qgap/zulip,bssrdf/zulip,levixie/zulip,easyfmxu/zulip,natanovia/zulip,peguin40/zulip,amanharitsh123/zulip,so0k/zulip,brockwhittaker/zulip,TigorC/zulip,huangkebo/zulip,MayB/zulip,glovebx/zulip,timabbott/zulip,tommyip/zulip,zofuthan/zulip,ahmadassaf/zulip,samatdav/zulip,peguin40/zulip,armooo/zulip,nicholasbs/zulip,nicholasbs/zulip,levixie/zulip,karamcnair/zulip,shaunstanislaus/zulip,zofuthan/zulip,TigorC/zulip,tbutter/zulip,paxapy/zulip,yuvipanda/zulip,zorojean/zulip,showell/zulip,kokoar/zulip,LAndreas/zulip,tommyip/zulip,littledogboy/zulip,verma-varsha/zulip,jessedhillon/zulip,alliejones/zulip,amyliu345/zulip,ericzhou2008/zulip,voidException/zulip,willingc/zulip,swinghu/zulip,jessedhillon/zulip,bssrdf/zulip,pradiptad/zulip,showell/zulip,dotcool/zulip,umkay/zulip,sharmaeklavya2/zulip,deer-hope/zulip,shrikrishnaholla/zulip,KJin99/zulip,gigawhitlocks/zulip,peiwei/zulip,tbutter/zulip,xuanhan863/zulip,jackrzhang/zulip,vikas-parashar/zulip,bluesea/zulip,joshisa/zulip,jerryge/zulip,qq1012803704/zulip,shaunstanislaus/zulip,bluesea/zulip,vikas-parashar/zulip,showell/zulip,ashwinirudrappa/zulip,natanovia/zulip,kou/zulip,vaidap/zulip,niftynei/zulip,brainwane/zulip,dnmfarrell/zulip,stamhe/zulip,tommyip/zulip,itnihao/zulip,ryansnowboarder/zulip,amallia/zulip,tommyip/zulip,hayderimran7/zulip,Cheppers/zulip,AZtheAsian/zulip,firstblade/zulip,christi3k/zulip,firstblade/zulip,atomic-labs/zulip,Jianchun1/zulip,armooo/zulip,Galexrt/zulip,dwrpayne/zulip,thomasboyt/zulip,Gabriel0402/zulip,ufosky-server/zulip,zofuthan/zulip,codeKonami/zulip,wweiradio/zulip,JPJPJPOPOP/zulip,dotcool/zulip,schatt/zulip,tommyip/zulip,reyha/zulip,moria/zulip,avastu/zulip,SmartPeople/zulip,udxxabp/zulip,tiansiyuan/zulip,SmartPeople/zulip,themass/zulip,littledogboy/zulip,dattatreya303/zulip,eastlhu/zulip,Gabriel0402/zulip,gigawhitlocks/zulip,seap
asulli/zulip,Jianchun1/zulip,fw1121/zulip,ericzhou2008/zulip,codeKonami/zulip,easyfmxu/zulip,zwily/zulip,verma-varsha/zulip,niftynei/zulip,itnihao/zulip,jerryge/zulip,alliejones/zulip,hj3938/zulip,isht3/zulip,Jianchun1/zulip,Drooids/zulip,natanovia/zulip,rht/zulip,Frouk/zulip,susansls/zulip,arpith/zulip,ryansnowboarder/zulip,Jianchun1/zulip,armooo/zulip,Batterfii/zulip,sup95/zulip,hengqujushi/zulip,eastlhu/zulip,Frouk/zulip,Gabriel0402/zulip,brockwhittaker/zulip,ahmadassaf/zulip,JPJPJPOPOP/zulip,deer-hope/zulip,Qgap/zulip,ahmadassaf/zulip,jphilipsen05/zulip,jonesgithub/zulip,bssrdf/zulip,kaiyuanheshang/zulip,bluesea/zulip,dawran6/zulip,firstblade/zulip,mahim97/zulip,wdaher/zulip,esander91/zulip,peiwei/zulip,amyliu345/zulip,EasonYi/zulip,hayderimran7/zulip,alliejones/zulip,guiquanz/zulip,lfranchi/zulip,MariaFaBella85/zulip,thomasboyt/zulip,zwily/zulip,JanzTam/zulip,guiquanz/zulip,LAndreas/zulip,grave-w-grave/zulip,levixie/zulip,deer-hope/zulip,jessedhillon/zulip,shrikrishnaholla/zulip,easyfmxu/zulip,eastlhu/zulip,jimmy54/zulip,peiwei/zulip,dattatreya303/zulip,luyifan/zulip,alliejones/zulip,aps-sids/zulip,jimmy54/zulip,jackrzhang/zulip,pradiptad/zulip,ericzhou2008/zulip,vaidap/zulip,synicalsyntax/zulip,karamcnair/zulip,blaze225/zulip,xuanhan863/zulip,eastlhu/zulip,brainwane/zulip,ashwinirudrappa/zulip,isht3/zulip,JanzTam/zulip,bitemyapp/zulip,littledogboy/zulip,mohsenSy/zulip,kou/zulip,LeeRisk/zulip,Vallher/zulip,DazWorrall/zulip,proliming/zulip,eeshangarg/zulip,moria/zulip,aps-sids/zulip,joshisa/zulip,ericzhou2008/zulip,atomic-labs/zulip,tiansiyuan/zulip,Drooids/zulip,ufosky-server/zulip,MayB/zulip,umkay/zulip,developerfm/zulip,atomic-labs/zulip,hayderimran7/zulip,pradiptad/zulip,easyfmxu/zulip,m1ssou/zulip,EasonYi/zulip,schatt/zulip,ipernet/zulip,brockwhittaker/zulip,dotcool/zulip,ikasumiwt/zulip,qq1012803704/zulip,esander91/zulip,ApsOps/zulip,sonali0901/zulip,calvinleenyc/zulip,kokoar/zulip,DazWorrall/zulip,EasonYi/zulip,arpith/zulip,tommyip/zulip,ryansnowboarder/z
ulip,zacps/zulip,qq1012803704/zulip,vakila/zulip,easyfmxu/zulip,rishig/zulip,Suninus/zulip,amanharitsh123/zulip,deer-hope/zulip,johnnygaddarr/zulip,ApsOps/zulip,stamhe/zulip,dnmfarrell/zulip,developerfm/zulip,ryanbackman/zulip,swinghu/zulip,vakila/zulip,thomasboyt/zulip,rht/zulip,bssrdf/zulip,eeshangarg/zulip,fw1121/zulip,kokoar/zulip,MayB/zulip,brainwane/zulip,littledogboy/zulip,noroot/zulip,johnny9/zulip,esander91/zulip,timabbott/zulip,seapasulli/zulip,bitemyapp/zulip,shubhamdhama/zulip,rishig/zulip,amyliu345/zulip,SmartPeople/zulip,Frouk/zulip,Gabriel0402/zulip,zorojean/zulip,ryansnowboarder/zulip,wweiradio/zulip,hafeez3000/zulip,babbage/zulip,levixie/zulip,Qgap/zulip,shaunstanislaus/zulip,krtkmj/zulip,babbage/zulip,seapasulli/zulip,wweiradio/zulip,hengqujushi/zulip,KJin99/zulip,Diptanshu8/zulip,hafeez3000/zulip,LeeRisk/zulip,samatdav/zulip,littledogboy/zulip,udxxabp/zulip,zofuthan/zulip,zulip/zulip,AZtheAsian/zulip,saitodisse/zulip,swinghu/zulip,esander91/zulip,luyifan/zulip,johnnygaddarr/zulip,vabs22/zulip,johnny9/zulip,atomic-labs/zulip,johnnygaddarr/zulip,karamcnair/zulip,zulip/zulip,shrikrishnaholla/zulip,brainwane/zulip,dattatreya303/zulip,eeshangarg/zulip,amallia/zulip,adnanh/zulip,Suninus/zulip,moria/zulip,zacps/zulip,Qgap/zulip,noroot/zulip,amallia/zulip,qq1012803704/zulip,j831/zulip,j831/zulip,jeffcao/zulip,vikas-parashar/zulip,kaiyuanheshang/zulip,KJin99/zulip,ikasumiwt/zulip,codeKonami/zulip,ApsOps/zulip,dotcool/zulip,jainayush975/zulip,bluesea/zulip,ashwinirudrappa/zulip,yocome/zulip,jrowan/zulip,jerryge/zulip,MayB/zulip,KJin99/zulip,LAndreas/zulip,wangdeshui/zulip,jimmy54/zulip,dotcool/zulip,peiwei/zulip,mdavid/zulip,ryansnowboarder/zulip,udxxabp/zulip,glovebx/zulip,bastianh/zulip,hackerkid/zulip,isht3/zulip,KJin99/zulip,itnihao/zulip,arpith/zulip,paxapy/zulip,hengqujushi/zulip,punchagan/zulip,deer-hope/zulip,souravbadami/zulip,TigorC/zulip,stamhe/zulip,amanharitsh123/zulip,levixie/zulip,yocome/zulip,zachallaun/zulip,glovebx/zulip,aps-sids/zulip,hj3
938/zulip,zhaoweigg/zulip,hafeez3000/zulip,samatdav/zulip,zulip/zulip,brockwhittaker/zulip,KJin99/zulip,akuseru/zulip,DazWorrall/zulip,aliceriot/zulip,ashwinirudrappa/zulip,arpith/zulip,peiwei/zulip,hj3938/zulip,xuanhan863/zulip,andersk/zulip,dnmfarrell/zulip,swinghu/zulip,xuxiao/zulip,calvinleenyc/zulip,kokoar/zulip,Galexrt/zulip,Gabriel0402/zulip,bssrdf/zulip,joshisa/zulip,niftynei/zulip,mahim97/zulip,karamcnair/zulip,ryanbackman/zulip,paxapy/zulip,qq1012803704/zulip,zwily/zulip,karamcnair/zulip,tommyip/zulip,grave-w-grave/zulip,itnihao/zulip,susansls/zulip,krtkmj/zulip,hj3938/zulip,joshisa/zulip,aliceriot/zulip,MayB/zulip,MariaFaBella85/zulip,andersk/zulip,avastu/zulip,rishig/zulip,vaidap/zulip,luyifan/zulip,peguin40/zulip,jphilipsen05/zulip,mansilladev/zulip,jeffcao/zulip,babbage/zulip,gigawhitlocks/zulip,kokoar/zulip,RobotCaleb/zulip,developerfm/zulip,dhcrzf/zulip,PaulPetring/zulip,arpith/zulip,wdaher/zulip,yocome/zulip,vakila/zulip,ApsOps/zulip,dattatreya303/zulip,so0k/zulip,dwrpayne/zulip,Cheppers/zulip,zacps/zulip,eastlhu/zulip,RobotCaleb/zulip,aps-sids/zulip,natanovia/zulip,hafeez3000/zulip,RobotCaleb/zulip,shrikrishnaholla/zulip,tbutter/zulip,he15his/zulip,guiquanz/zulip,Qgap/zulip,voidException/zulip,adnanh/zulip,natanovia/zulip,wangdeshui/zulip,bowlofstew/zulip,codeKonami/zulip,Batterfii/zulip,ufosky-server/zulip,shubhamdhama/zulip,wweiradio/zulip,wdaher/zulip,jackrzhang/zulip,yuvipanda/zulip,arpitpanwar/zulip,joshisa/zulip,mdavid/zulip,firstblade/zulip,huangkebo/zulip,avastu/zulip,umkay/zulip,andersk/zulip,tbutter/zulip,tiansiyuan/zulip,verma-varsha/zulip,ryanbackman/zulip,arpitpanwar/zulip,tdr130/zulip,xuanhan863/zulip,themass/zulip,samatdav/zulip,verma-varsha/zulip,niftynei/zulip,deer-hope/zulip,jainayush975/zulip,samatdav/zulip,Batterfii/zulip,synicalsyntax/zulip,aakash-cr7/zulip,JPJPJPOPOP/zulip,cosmicAsymmetry/zulip,shubhamdhama/zulip,PaulPetring/zulip,bssrdf/zulip,johnny9/zulip,akuseru/zulip,bastianh/zulip,tdr130/zulip,itnihao/zulip,guiquanz/zulip
,yuvipanda/zulip,MariaFaBella85/zulip,Juanvulcano/zulip,shrikrishnaholla/zulip,ahmadassaf/zulip,Batterfii/zulip,zwily/zulip,JPJPJPOPOP/zulip,Drooids/zulip,zorojean/zulip,mohsenSy/zulip,he15his/zulip,babbage/zulip,suxinde2009/zulip,gkotian/zulip,fw1121/zulip,seapasulli/zulip,schatt/zulip,lfranchi/zulip,RobotCaleb/zulip,JanzTam/zulip,souravbadami/zulip,shrikrishnaholla/zulip,gigawhitlocks/zulip,hengqujushi/zulip,KingxBanana/zulip,m1ssou/zulip,JanzTam/zulip,synicalsyntax/zulip,xuanhan863/zulip,hj3938/zulip,praveenaki/zulip,punchagan/zulip,Vallher/zulip,jimmy54/zulip,hayderimran7/zulip,gigawhitlocks/zulip,brainwane/zulip,gigawhitlocks/zulip,tbutter/zulip,sup95/zulip,ikasumiwt/zulip,Vallher/zulip,Vallher/zulip,christi3k/zulip,itnihao/zulip,alliejones/zulip,PaulPetring/zulip,MariaFaBella85/zulip,reyha/zulip,jerryge/zulip,noroot/zulip,developerfm/zulip,glovebx/zulip,dawran6/zulip,susansls/zulip,hustlzp/zulip,dxq-git/zulip,voidException/zulip,ipernet/zulip,avastu/zulip,mdavid/zulip,xuanhan863/zulip,atomic-labs/zulip,johnny9/zulip,joyhchen/zulip,jonesgithub/zulip,akuseru/zulip,willingc/zulip,bowlofstew/zulip,peguin40/zulip,jonesgithub/zulip,johnny9/zulip,hackerkid/zulip,rht/zulip,MariaFaBella85/zulip,PhilSk/zulip,showell/zulip,developerfm/zulip,PhilSk/zulip,jphilipsen05/zulip,m1ssou/zulip,bastianh/zulip,KingxBanana/zulip,Juanvulcano/zulip,levixie/zulip,jeffcao/zulip,SmartPeople/zulip,arpitpanwar/zulip,PaulPetring/zulip,swinghu/zulip,m1ssou/zulip,sharmaeklavya2/zulip,jrowan/zulip,jonesgithub/zulip,sonali0901/zulip,pradiptad/zulip,zhaoweigg/zulip,themass/zulip,ryanbackman/zulip,peiwei/zulip,ashwinirudrappa/zulip,swinghu/zulip,xuxiao/zulip,blaze225/zulip,zachallaun/zulip,proliming/zulip,willingc/zulip,jeffcao/zulip,eeshangarg/zulip,amallia/zulip,reyha/zulip,kaiyuanheshang/zulip,TigorC/zulip,xuxiao/zulip,stamhe/zulip,dxq-git/zulip,zofuthan/zulip,hafeez3000/zulip,isht3/zulip,krtkmj/zulip,wweiradio/zulip,codeKonami/zulip,LAndreas/zulip,showell/zulip,luyifan/zulip,jrowan/zulip,easy
fmxu/zulip,mohsenSy/zulip,so0k/zulip,dwrpayne/zulip,wdaher/zulip,schatt/zulip,zacps/zulip,aakash-cr7/zulip,mansilladev/zulip,zhaoweigg/zulip,blaze225/zulip,alliejones/zulip,zwily/zulip,zachallaun/zulip,zulip/zulip,j831/zulip,yuvipanda/zulip,johnnygaddarr/zulip,ufosky-server/zulip,he15his/zulip,dwrpayne/zulip,hengqujushi/zulip,technicalpickles/zulip,andersk/zulip,grave-w-grave/zulip,praveenaki/zulip,aps-sids/zulip,hengqujushi/zulip,ikasumiwt/zulip,wangdeshui/zulip,Suninus/zulip,Drooids/zulip,bowlofstew/zulip,peiwei/zulip,mansilladev/zulip,zhaoweigg/zulip,johnnygaddarr/zulip,jrowan/zulip,dhcrzf/zulip,seapasulli/zulip,bitemyapp/zulip,willingc/zulip,hafeez3000/zulip,voidException/zulip,themass/zulip,krtkmj/zulip,praveenaki/zulip,wangdeshui/zulip,Frouk/zulip,hackerkid/zulip,calvinleenyc/zulip,voidException/zulip,blaze225/zulip,stamhe/zulip,willingc/zulip,technicalpickles/zulip,Diptanshu8/zulip,jeffcao/zulip,peguin40/zulip,joyhchen/zulip,aps-sids/zulip,zorojean/zulip,tiansiyuan/zulip,PhilSk/zulip,yuvipanda/zulip,qq1012803704/zulip,yocome/zulip,paxapy/zulip,vabs22/zulip,EasonYi/zulip,Gabriel0402/zulip,wavelets/zulip,bitemyapp/zulip,pradiptad/zulip,reyha/zulip,tdr130/zulip,karamcnair/zulip,wavelets/zulip,m1ssou/zulip,krtkmj/zulip,timabbott/zulip,Vallher/zulip,RobotCaleb/zulip,timabbott/zulip,suxinde2009/zulip,zachallaun/zulip,grave-w-grave/zulip,jainayush975/zulip,voidException/zulip,yuvipanda/zulip,synicalsyntax/zulip,souravbadami/zulip,dhcrzf/zulip,sharmaeklavya2/zulip,dhcrzf/zulip,blaze225/zulip,zulip/zulip,rht/zulip,PhilSk/zulip,mansilladev/zulip,ahmadassaf/zulip,nicholasbs/zulip,jrowan/zulip,reyha/zulip,ApsOps/zulip,tbutter/zulip,noroot/zulip,KingxBanana/zulip,verma-varsha/zulip,kou/zulip,blaze225/zulip,m1ssou/zulip,saitodisse/zulip,jessedhillon/zulip,j831/zulip,kou/zulip,ryansnowboarder/zulip,dwrpayne/zulip,levixie/zulip,PaulPetring/zulip,wweiradio/zulip,moria/zulip,dattatreya303/zulip,amanharitsh123/zulip,jainayush975/zulip,luyifan/zulip,lfranchi/zulip,shaunstanislau
s/zulip,saitodisse/zulip,tdr130/zulip,Cheppers/zulip,gkotian/zulip,deer-hope/zulip,jackrzhang/zulip,he15his/zulip,aakash-cr7/zulip,suxinde2009/zulip,zofuthan/zulip,suxinde2009/zulip,ikasumiwt/zulip,punchagan/zulip,codeKonami/zulip,jeffcao/zulip,MayB/zulip,umkay/zulip,isht3/zulip,saitodisse/zulip,cosmicAsymmetry/zulip,themass/zulip,vaidap/zulip,DazWorrall/zulip,paxapy/zulip,dnmfarrell/zulip,willingc/zulip,developerfm/zulip,LeeRisk/zulip,proliming/zulip,EasonYi/zulip,joyhchen/zulip,bastianh/zulip,schatt/zulip,sonali0901/zulip,niftynei/zulip,qq1012803704/zulip,mansilladev/zulip,brockwhittaker/zulip,jessedhillon/zulip,ikasumiwt/zulip,so0k/zulip,natanovia/zulip,synicalsyntax/zulip,dwrpayne/zulip,amyliu345/zulip,guiquanz/zulip,mahim97/zulip,natanovia/zulip,PaulPetring/zulip,timabbott/zulip,jphilipsen05/zulip,Galexrt/zulip,ApsOps/zulip,schatt/zulip,moria/zulip,shubhamdhama/zulip,littledogboy/zulip,jackrzhang/zulip,Batterfii/zulip,j831/zulip,jimmy54/zulip,Galexrt/zulip,shaunstanislaus/zulip,mahim97/zulip,wdaher/zulip,aliceriot/zulip,umkay/zulip,kou/zulip,aps-sids/zulip,hustlzp/zulip,vaidap/zulip,bowlofstew/zulip,arpitpanwar/zulip,wdaher/zulip,arpitpanwar/zulip,mansilladev/zulip,gkotian/zulip,eeshangarg/zulip,LAndreas/zulip,DazWorrall/zulip,proliming/zulip,hustlzp/zulip,SmartPeople/zulip,joyhchen/zulip,Qgap/zulip,babbage/zulip,ipernet/zulip,huangkebo/zulip,praveenaki/zulip,lfranchi/zulip,hj3938/zulip,guiquanz/zulip,suxinde2009/zulip,Drooids/zulip,bastianh/zulip,ashwinirudrappa/zulip,suxinde2009/zulip,Galexrt/zulip,gkotian/zulip,babbage/zulip,vakila/zulip,cosmicAsymmetry/zulip,proliming/zulip,Galexrt/zulip,aliceriot/zulip,ipernet/zulip,xuxiao/zulip,Qgap/zulip,vabs22/zulip,luyifan/zulip,ryanbackman/zulip,Diptanshu8/zulip,udxxabp/zulip,wavelets/zulip,gkotian/zulip,reyha/zulip,zorojean/zulip,dawran6/zulip,Frouk/zulip,huangkebo/zulip,TigorC/zulip,bowlofstew/zulip,zachallaun/zulip,avastu/zulip,aakash-cr7/zulip,huangkebo/zulip,mdavid/zulip,arpith/zulip,Batterfii/zulip | # -*- 
coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('zerver', '0002_django_1_8'),
]
operations = [
migrations.RunSQL("CREATE INDEX upper_subject_idx ON zerver_message ((upper(subject)));",
reverse_sql="DROP INDEX upper_subject_idx;"),
migrations.RunSQL("CREATE INDEX upper_stream_name_idx ON zerver_stream ((upper(name)));",
reverse_sql="DROP INDEX upper_stream_name_idx;")
]
| Add remaining custom indexes that were created by South migrations
(imported from commit 9798afa8161af4ae6b3fa0c5f4894a3211b77cd4)
|
|
115a71995f2ceae667c05114da8e8ba21c25c402 | syncplay/__init__.py | syncplay/__init__.py | version = '1.6.5'
revision = ' release'
milestone = 'Yoitsu'
release_number = '86'
projectURL = 'https://syncplay.pl/'
| version = '1.6.6'
revision = ' development'
milestone = 'Yoitsu'
release_number = '87'
projectURL = 'https://syncplay.pl/'
| Move to 1.6.6 dev for further development | Move to 1.6.6 dev for further development | Python | apache-2.0 | alby128/syncplay,alby128/syncplay,Syncplay/syncplay,Syncplay/syncplay | version = '1.6.6'
revision = ' development'
milestone = 'Yoitsu'
release_number = '87'
projectURL = 'https://syncplay.pl/'
| Move to 1.6.6 dev for further development
version = '1.6.5'
revision = ' release'
milestone = 'Yoitsu'
release_number = '86'
projectURL = 'https://syncplay.pl/'
|
5d0be01926950fe4d693adec824aaed8495f3f65 | rnacentral/rnacentral/local_settings_default.py | rnacentral/rnacentral/local_settings_default.py | """
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.oracle',
'NAME': '(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=)(PORT=))(CONNECT_DATA=(SERVER=DEDICATED)(SERVICE_NAME=)))',
'USER': '',
'PASSWORD': '',
'OPTIONS': {
'threaded': True,
},
}
}
TEMPLATE_DIRS = (
'',
)
STATIC_ROOT = ''
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_PORT =
EMAIL_USE_TLS = True
EMAIL_RNACENTRAL_HELPDESK = ''
SECRET_KEY = ''
ADMINS = (
('', ''),
)
COMPRESS_ENABLED = False
DEBUG = False
ALLOWED_HOSTS = []
# django-debug-toolbar
INTERNAL_IPS = ('127.0.0.1',)
# django-maintenance
MAINTENANCE_MODE = False
| """
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.oracle',
'NAME': '(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=)(PORT=))(CONNECT_DATA=(SERVER=DEDICATED)(SERVICE_NAME=)))',
'USER': '',
'PASSWORD': '',
'OPTIONS': {
'threaded': True,
},
'CONN_MAX_AGE': 600,
}
}
TEMPLATE_DIRS = (
'',
)
STATIC_ROOT = ''
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_PORT =
EMAIL_USE_TLS = True
EMAIL_RNACENTRAL_HELPDESK = ''
SECRET_KEY = ''
ADMINS = (
('', ''),
)
COMPRESS_ENABLED = False
DEBUG = False
ALLOWED_HOSTS = []
# django-debug-toolbar
INTERNAL_IPS = ('127.0.0.1',)
# django-maintenance
MAINTENANCE_MODE = False
| Set a maximum database connection age, works in django >= 1.6 | Set a maximum database connection age, works in django >= 1.6
| Python | apache-2.0 | RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode | """
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.oracle',
'NAME': '(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=)(PORT=))(CONNECT_DATA=(SERVER=DEDICATED)(SERVICE_NAME=)))',
'USER': '',
'PASSWORD': '',
'OPTIONS': {
'threaded': True,
},
'CONN_MAX_AGE': 600,
}
}
TEMPLATE_DIRS = (
'',
)
STATIC_ROOT = ''
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_PORT =
EMAIL_USE_TLS = True
EMAIL_RNACENTRAL_HELPDESK = ''
SECRET_KEY = ''
ADMINS = (
('', ''),
)
COMPRESS_ENABLED = False
DEBUG = False
ALLOWED_HOSTS = []
# django-debug-toolbar
INTERNAL_IPS = ('127.0.0.1',)
# django-maintenance
MAINTENANCE_MODE = False
| Set a maximum database connection age, works in django >= 1.6
"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.oracle',
'NAME': '(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=)(PORT=))(CONNECT_DATA=(SERVER=DEDICATED)(SERVICE_NAME=)))',
'USER': '',
'PASSWORD': '',
'OPTIONS': {
'threaded': True,
},
}
}
TEMPLATE_DIRS = (
'',
)
STATIC_ROOT = ''
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_PORT =
EMAIL_USE_TLS = True
EMAIL_RNACENTRAL_HELPDESK = ''
SECRET_KEY = ''
ADMINS = (
('', ''),
)
COMPRESS_ENABLED = False
DEBUG = False
ALLOWED_HOSTS = []
# django-debug-toolbar
INTERNAL_IPS = ('127.0.0.1',)
# django-maintenance
MAINTENANCE_MODE = False
|
998586b575149ae549b755067c831f8b066c1845 | digi/migrations/0002_theme_page_add_body_and_blog_category.py | digi/migrations/0002_theme_page_add_body_and_blog_category.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2016-08-04 11:22
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import wagtail.wagtailcore.blocks
import wagtail.wagtailcore.fields
class Migration(migrations.Migration):
dependencies = [
('blog', '__latest__'),
('digi', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='themepage',
name='blog_category',
field=models.ForeignKey(blank=True, help_text='Corresponding blog category', null=True, on_delete=django.db.models.deletion.SET_NULL, to='blog.BlogCategory'),
),
migrations.AddField(
model_name='themepage',
name='body',
field=wagtail.wagtailcore.fields.StreamField((('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()),), blank=True, null=True),
),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2016-08-04 11:22
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import wagtail.wagtailcore.blocks
import wagtail.wagtailcore.fields
class Migration(migrations.Migration):
dependencies = [
('blog', '0005_auto_20151019_1121'),
('digi', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='themepage',
name='blog_category',
field=models.ForeignKey(blank=True, help_text='Corresponding blog category', null=True, on_delete=django.db.models.deletion.SET_NULL, to='blog.BlogCategory'),
),
migrations.AddField(
model_name='themepage',
name='body',
field=wagtail.wagtailcore.fields.StreamField((('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()),), blank=True, null=True),
),
]
| Use exact version instead of latest in the migration dependencies | Use exact version instead of latest in the migration dependencies
Changed to use the latest migration of wagtail-blog v1.6.9.
Refs
https://github.com/thelabnyc/wagtail_blog/blob/5147d8129127102009c9bd63b1886e7665f6ccfb/blog/migrations/0005_auto_20151019_1121.py
| Python | mit | City-of-Helsinki/digihel,City-of-Helsinki/digihel,City-of-Helsinki/digihel,City-of-Helsinki/digihel | # -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2016-08-04 11:22
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import wagtail.wagtailcore.blocks
import wagtail.wagtailcore.fields
class Migration(migrations.Migration):
dependencies = [
('blog', '0005_auto_20151019_1121'),
('digi', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='themepage',
name='blog_category',
field=models.ForeignKey(blank=True, help_text='Corresponding blog category', null=True, on_delete=django.db.models.deletion.SET_NULL, to='blog.BlogCategory'),
),
migrations.AddField(
model_name='themepage',
name='body',
field=wagtail.wagtailcore.fields.StreamField((('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()),), blank=True, null=True),
),
]
| Use exact version instead of latest in the migration dependencies
Changed to use the latest migration of wagtail-blog v1.6.9.
Refs
https://github.com/thelabnyc/wagtail_blog/blob/5147d8129127102009c9bd63b1886e7665f6ccfb/blog/migrations/0005_auto_20151019_1121.py
# -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2016-08-04 11:22
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import wagtail.wagtailcore.blocks
import wagtail.wagtailcore.fields
class Migration(migrations.Migration):
dependencies = [
('blog', '__latest__'),
('digi', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='themepage',
name='blog_category',
field=models.ForeignKey(blank=True, help_text='Corresponding blog category', null=True, on_delete=django.db.models.deletion.SET_NULL, to='blog.BlogCategory'),
),
migrations.AddField(
model_name='themepage',
name='body',
field=wagtail.wagtailcore.fields.StreamField((('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()),), blank=True, null=True),
),
]
|
71088ebbed3f6060def0455814036185c70ba194 | shopify_auth/context_processors.py | shopify_auth/context_processors.py | import shopify
def current_shop(request):
if not shopify.ShopifyResource.site:
return {'current_shop': None}
return {'current_shop': shopify.Shop.current()} | from django.conf import settings
import shopify
def shopify_context(request):
return {
'shopify_current_shop': shopify.Shop.current() if shopify.ShopifyResource.site else None,
'shopify_app_api_key': settings.SHOPIFY_APP_API_KEY,
} | Rename `current_shop` context processor to `shopify_context`, and add a little more useful Shopify information. | Rename `current_shop` context processor to `shopify_context`, and add a little more useful Shopify information. | Python | mit | funkybob/django-shopify-auth,RafaAguilar/django-shopify-auth,discolabs/django-shopify-auth,RafaAguilar/django-shopify-auth,discolabs/django-shopify-auth,funkybob/django-shopify-auth | from django.conf import settings
import shopify
def shopify_context(request):
return {
'shopify_current_shop': shopify.Shop.current() if shopify.ShopifyResource.site else None,
'shopify_app_api_key': settings.SHOPIFY_APP_API_KEY,
} | Rename `current_shop` context processor to `shopify_context`, and add a little more useful Shopify information.
import shopify
def current_shop(request):
if not shopify.ShopifyResource.site:
return {'current_shop': None}
return {'current_shop': shopify.Shop.current()} |
5b3a001af9ff992d061f880d6350292250fd8687 | apps/explorer/tests/test_views.py | apps/explorer/tests/test_views.py | from django.core.urlresolvers import reverse
from apps.core.factories import PIXELER_PASSWORD, PixelerFactory
from apps.core.tests import CoreFixturesTestCase
from apps.core.management.commands.make_development_fixtures import (
make_development_fixtures
)
class PixelSetListViewTestCase(CoreFixturesTestCase):
def setUp(self):
self.user = PixelerFactory(
is_active=True,
is_staff=True,
is_superuser=True,
)
self.client.login(
username=self.user.username,
password=PIXELER_PASSWORD,
)
self.url = reverse('explorer:pixelset_list')
def test_renders_pixelset_list_template(self):
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'explorer/pixelset_list.html')
def test_renders_empty_message(self):
response = self.client.get(self.url)
expected = (
'<td colspan="8" class="empty">'
'No pixel set has been submitted yet'
'</td>'
)
self.assertContains(response, expected, html=True)
def test_renders_pixelset_list(self):
make_development_fixtures(n_pixel_sets=12)
response = self.client.get(self.url)
self.assertContains(
response,
'<tr class="pixelset">',
count=10
)
| Add tests for the pixelset list view | Add tests for the pixelset list view
| Python | bsd-3-clause | Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel | from django.core.urlresolvers import reverse
from apps.core.factories import PIXELER_PASSWORD, PixelerFactory
from apps.core.tests import CoreFixturesTestCase
from apps.core.management.commands.make_development_fixtures import (
make_development_fixtures
)
class PixelSetListViewTestCase(CoreFixturesTestCase):
def setUp(self):
self.user = PixelerFactory(
is_active=True,
is_staff=True,
is_superuser=True,
)
self.client.login(
username=self.user.username,
password=PIXELER_PASSWORD,
)
self.url = reverse('explorer:pixelset_list')
def test_renders_pixelset_list_template(self):
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'explorer/pixelset_list.html')
def test_renders_empty_message(self):
response = self.client.get(self.url)
expected = (
'<td colspan="8" class="empty">'
'No pixel set has been submitted yet'
'</td>'
)
self.assertContains(response, expected, html=True)
def test_renders_pixelset_list(self):
make_development_fixtures(n_pixel_sets=12)
response = self.client.get(self.url)
self.assertContains(
response,
'<tr class="pixelset">',
count=10
)
| Add tests for the pixelset list view
|
|
ffbc35a0c19083a53a25aee71b74e06da0aa0a4b | timepiece/contracts/admin.py | timepiece/contracts/admin.py | from django.contrib import admin
from timepiece.contracts.models import ProjectContract, ContractHour,\
ContractAssignment, HourGroup
class ContractAssignmentInline(admin.TabularInline):
model = ContractAssignment
raw_id_fields = ('user',)
def queryset(self, request):
qs = super(ContractAssignmentInline, self).queryset(request)
return qs.select_related()
class ContractHourInline(admin.TabularInline):
model = ContractHour
class ProjectContractAdmin(admin.ModelAdmin):
model = ProjectContract
list_display = ('name', 'start_date', 'end_date', 'status',
'contracted_hours', 'pending_hours',
'hours_assigned', 'hours_unassigned',
'hours_worked',
'type')
inlines = (ContractAssignmentInline, ContractHourInline)
list_filter = ('status', 'type')
filter_horizontal = ('projects',)
list_per_page = 20
search_fields = ('name', 'projects__name', 'projects__business__name')
def hours_unassigned(self, obj):
return obj.contracted_hours() - obj.hours_assigned
class HourGroupAdmin(admin.ModelAdmin):
model = HourGroup
list_display = ('name',)
list_filter = ('activities',)
ordering = ('order', 'name')
filter_horizontal = ('activities',)
admin.site.register(ProjectContract, ProjectContractAdmin)
admin.site.register(HourGroup, HourGroupAdmin)
admin.site.register(ContractHour)
| from django.contrib import admin
from timepiece.contracts.models import ProjectContract, ContractHour,\
ContractAssignment, HourGroup
class ContractAssignmentInline(admin.TabularInline):
model = ContractAssignment
raw_id_fields = ('user',)
def get_queryset(self, request):
qs = super(ContractAssignmentInline, self).get_queryset(request)
return qs.select_related()
class ContractHourInline(admin.TabularInline):
model = ContractHour
class ProjectContractAdmin(admin.ModelAdmin):
model = ProjectContract
list_display = ('name', 'start_date', 'end_date', 'status',
'contracted_hours', 'pending_hours',
'hours_assigned', 'hours_unassigned',
'hours_worked',
'type')
inlines = (ContractAssignmentInline, ContractHourInline)
list_filter = ('status', 'type')
filter_horizontal = ('projects',)
list_per_page = 20
search_fields = ('name', 'projects__name', 'projects__business__name')
def hours_unassigned(self, obj):
return obj.contracted_hours() - obj.hours_assigned
class HourGroupAdmin(admin.ModelAdmin):
model = HourGroup
list_display = ('name',)
list_filter = ('activities',)
ordering = ('order', 'name')
filter_horizontal = ('activities',)
admin.site.register(ProjectContract, ProjectContractAdmin)
admin.site.register(HourGroup, HourGroupAdmin)
admin.site.register(ContractHour)
| Update Python/Django: Admin queryset -> get_queryset | Update Python/Django: Admin queryset -> get_queryset
| Python | mit | arbitrahj/django-timepiece,caktus/django-timepiece,caktus/django-timepiece,BocuStudio/django-timepiece,arbitrahj/django-timepiece,arbitrahj/django-timepiece,BocuStudio/django-timepiece,BocuStudio/django-timepiece,caktus/django-timepiece | from django.contrib import admin
from timepiece.contracts.models import ProjectContract, ContractHour,\
ContractAssignment, HourGroup
class ContractAssignmentInline(admin.TabularInline):
model = ContractAssignment
raw_id_fields = ('user',)
def get_queryset(self, request):
qs = super(ContractAssignmentInline, self).get_queryset(request)
return qs.select_related()
class ContractHourInline(admin.TabularInline):
model = ContractHour
class ProjectContractAdmin(admin.ModelAdmin):
model = ProjectContract
list_display = ('name', 'start_date', 'end_date', 'status',
'contracted_hours', 'pending_hours',
'hours_assigned', 'hours_unassigned',
'hours_worked',
'type')
inlines = (ContractAssignmentInline, ContractHourInline)
list_filter = ('status', 'type')
filter_horizontal = ('projects',)
list_per_page = 20
search_fields = ('name', 'projects__name', 'projects__business__name')
def hours_unassigned(self, obj):
return obj.contracted_hours() - obj.hours_assigned
class HourGroupAdmin(admin.ModelAdmin):
model = HourGroup
list_display = ('name',)
list_filter = ('activities',)
ordering = ('order', 'name')
filter_horizontal = ('activities',)
admin.site.register(ProjectContract, ProjectContractAdmin)
admin.site.register(HourGroup, HourGroupAdmin)
admin.site.register(ContractHour)
| Update Python/Django: Admin queryset -> get_queryset
from django.contrib import admin
from timepiece.contracts.models import ProjectContract, ContractHour,\
ContractAssignment, HourGroup
class ContractAssignmentInline(admin.TabularInline):
model = ContractAssignment
raw_id_fields = ('user',)
def queryset(self, request):
qs = super(ContractAssignmentInline, self).queryset(request)
return qs.select_related()
class ContractHourInline(admin.TabularInline):
model = ContractHour
class ProjectContractAdmin(admin.ModelAdmin):
model = ProjectContract
list_display = ('name', 'start_date', 'end_date', 'status',
'contracted_hours', 'pending_hours',
'hours_assigned', 'hours_unassigned',
'hours_worked',
'type')
inlines = (ContractAssignmentInline, ContractHourInline)
list_filter = ('status', 'type')
filter_horizontal = ('projects',)
list_per_page = 20
search_fields = ('name', 'projects__name', 'projects__business__name')
def hours_unassigned(self, obj):
return obj.contracted_hours() - obj.hours_assigned
class HourGroupAdmin(admin.ModelAdmin):
model = HourGroup
list_display = ('name',)
list_filter = ('activities',)
ordering = ('order', 'name')
filter_horizontal = ('activities',)
admin.site.register(ProjectContract, ProjectContractAdmin)
admin.site.register(HourGroup, HourGroupAdmin)
admin.site.register(ContractHour)
|
9310e94a1406102bba109416f781f9d6330d0028 | tests/test_itunes.py | tests/test_itunes.py | """
test_itunes.py
Copyright © 2015 Alex Danoff. All Rights Reserved.
2015-08-02
This file tests the functionality provided by the itunes module.
"""
import unittest
from datetime import datetime
from itunes.itunes import parse_value
class ITunesTests(unittest.TestCase):
"""
Test cases for iTunes functionality.
"""
def test_parse_value(self):
self.assertEquals(parse_value("10"), 10)
self.assertEquals(parse_value("1.0"), 1.0)
self.assertTrue(parse_value("true"))
self.assertFalse(parse_value("false"))
self.assertIsNone(parse_value(""))
self.assertIsNone(parse_value('""'))
self.assertIsNone(parse_value("missing value"))
self.assertEquals(parse_value('date: "Saturday, March 13, 2010 at ' \
'5:02:22 PM"'), datetime.fromtimestamp(1268517742))
| """
test_itunes.py
Copyright © 2015 Alex Danoff. All Rights Reserved.
2015-08-02
This file tests the functionality provided by the itunes module.
"""
import unittest
from datetime import datetime
from itunes.itunes import parse_value, run_applescript
from itunes.exceptions import AppleScriptError
class ITunesTests(unittest.TestCase):
"""
Test cases for iTunes functionality.
"""
def test_parse_value(self):
self.assertEquals(parse_value("10"), 10)
self.assertEquals(parse_value("1.0"), 1.0)
self.assertTrue(parse_value("true"))
self.assertFalse(parse_value("false"))
self.assertIsNone(parse_value(""))
self.assertIsNone(parse_value('""'))
self.assertIsNone(parse_value("missing value"))
self.assertEquals(parse_value('date: "Saturday, March 13, 2010 at ' \
'5:02:22 PM"'), datetime.fromtimestamp(1268517742))
def test_run_applescript(self):
self.assertRaises(AppleScriptError, run_applescript, "THIS IS INVALID" \
" APPLESCRIPT")
| Add test to make sure `run_applescript` throws on bad script | Add test to make sure `run_applescript` throws on bad script
| Python | mit | adanoff/iTunesTUI | """
test_itunes.py
Copyright © 2015 Alex Danoff. All Rights Reserved.
2015-08-02
This file tests the functionality provided by the itunes module.
"""
import unittest
from datetime import datetime
from itunes.itunes import parse_value, run_applescript
from itunes.exceptions import AppleScriptError
class ITunesTests(unittest.TestCase):
"""
Test cases for iTunes functionality.
"""
def test_parse_value(self):
self.assertEquals(parse_value("10"), 10)
self.assertEquals(parse_value("1.0"), 1.0)
self.assertTrue(parse_value("true"))
self.assertFalse(parse_value("false"))
self.assertIsNone(parse_value(""))
self.assertIsNone(parse_value('""'))
self.assertIsNone(parse_value("missing value"))
self.assertEquals(parse_value('date: "Saturday, March 13, 2010 at ' \
'5:02:22 PM"'), datetime.fromtimestamp(1268517742))
def test_run_applescript(self):
self.assertRaises(AppleScriptError, run_applescript, "THIS IS INVALID" \
" APPLESCRIPT")
| Add test to make sure `run_applescript` throws on bad script
"""
test_itunes.py
Copyright © 2015 Alex Danoff. All Rights Reserved.
2015-08-02
This file tests the functionality provided by the itunes module.
"""
import unittest
from datetime import datetime
from itunes.itunes import parse_value
class ITunesTests(unittest.TestCase):
"""
Test cases for iTunes functionality.
"""
def test_parse_value(self):
self.assertEquals(parse_value("10"), 10)
self.assertEquals(parse_value("1.0"), 1.0)
self.assertTrue(parse_value("true"))
self.assertFalse(parse_value("false"))
self.assertIsNone(parse_value(""))
self.assertIsNone(parse_value('""'))
self.assertIsNone(parse_value("missing value"))
self.assertEquals(parse_value('date: "Saturday, March 13, 2010 at ' \
'5:02:22 PM"'), datetime.fromtimestamp(1268517742))
|
45db81042b58c72da04193cdcdc6d400b5370c18 | convert_quotes_json_to_sqlite.py | convert_quotes_json_to_sqlite.py | #!/usr/bin/env python3
# Convert legacy JSON quotes format to SQLite3 format
# It non-destructively reads in "quotes.json"
# and writes out a new file "quotes.sqlite3".
# It will prompt if "quotes.sqlite3" exists
# and will ask if you want to merge the quotes.
# "quotes.sqlite3" modifications will not be committed until the process is finished,
# so don't open it in the meantime and expect new data.
import json
from pathlib import Path
import sqlite3
import sys
print("Converting quotes.json to quotes.sqlite3")
with open("quotes.json") as f:
quotes = json.load(f)
if Path("quotes.sqlite3").exists():
print("'quotes.sqlite3' exists. It could be from a previous run, and may have quotes in it.")
print("You may want to check the existing file for data, or at least back it up.")
print()
print("Do you want to continue and attempt to merge the existing and new quotes?")
answer = input('If you wish to merge them, type "yes" without quotes: ')
if answer != "yes":
print("Aborting conversion. Move, delete, or prepare to merge 'quotes.sqlite3' and rerun this script.")
sys.exit("'quotes.sqlite3' exists")
# Should create a new file if it doesn't exist
quotes_db_connection = sqlite3.connect("quotes.sqlite3")
quotes_db_cursor = quotes_db_connection.cursor()
quotes_db_cursor.execute("CREATE TABLE IF NOT EXISTS quotes "
"(author text NOT NULL, quote text NOT NULL)")
for author in quotes:
for quote in quotes[author]:
quotes_db_cursor.execute("INSERT INTO quotes VALUES "
"(?,?)", (author, quote))
quotes_db_connection.commit()
quotes_db_connection.close()
print("quotes.sqlite3 written. Should be good to go.") | Add conversion script for quotes database | Add conversion script for quotes database
Figured I'd keep this around for a while just in case. Maybe remove it
in some future cleanup and declare the old format dead.
| Python | mit | TAOTheCrab/CrabBot | #!/usr/bin/env python3
# Convert legacy JSON quotes format to SQLite3 format
# It non-destructively reads in "quotes.json"
# and writes out a new file "quotes.sqlite3".
# It will prompt if "quotes.sqlite3" exists
# and will ask if you want to merge the quotes.
# "quotes.sqlite3" modifications will not be committed until the process is finished,
# so don't open it in the meantime and expect new data.
import json
from pathlib import Path
import sqlite3
import sys
print("Converting quotes.json to quotes.sqlite3")
with open("quotes.json") as f:
quotes = json.load(f)
if Path("quotes.sqlite3").exists():
print("'quotes.sqlite3' exists. It could be from a previous run, and may have quotes in it.")
print("You may want to check the existing file for data, or at least back it up.")
print()
print("Do you want to continue and attempt to merge the existing and new quotes?")
answer = input('If you wish to merge them, type "yes" without quotes: ')
if answer != "yes":
print("Aborting conversion. Move, delete, or prepare to merge 'quotes.sqlite3' and rerun this script.")
sys.exit("'quotes.sqlite3' exists")
# Should create a new file if it doesn't exist
quotes_db_connection = sqlite3.connect("quotes.sqlite3")
quotes_db_cursor = quotes_db_connection.cursor()
quotes_db_cursor.execute("CREATE TABLE IF NOT EXISTS quotes "
"(author text NOT NULL, quote text NOT NULL)")
for author in quotes:
for quote in quotes[author]:
quotes_db_cursor.execute("INSERT INTO quotes VALUES "
"(?,?)", (author, quote))
quotes_db_connection.commit()
quotes_db_connection.close()
print("quotes.sqlite3 written. Should be good to go.") | Add conversion script for quotes database
Figured I'd keep this around for a while just in case. Maybe remove it
in some future cleanup and declare the old format dead.
|
|
1fd73a2c07ce66a8dba0ef08210612a2535538ea | jesusmtnez/python/koans/koans/about_decorating_with_functions.py | jesusmtnez/python/koans/koans/about_decorating_with_functions.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
class AboutDecoratingWithFunctions(Koan):
def addcowbell(fn):
fn.wow_factor = 'COWBELL BABY!'
return fn
@addcowbell
def mediocre_song(self):
return "o/~ We all live in a broken submarine o/~"
def test_decorators_can_modify_a_function(self):
self.assertRegex(self.mediocre_song(), __)
self.assertEqual(__, self.mediocre_song.wow_factor)
# ------------------------------------------------------------------
def xmltag(fn):
def func(*args):
return '<' + fn(*args) + '/>'
return func
@xmltag
def render_tag(self, name):
return name
def test_decorators_can_change_a_function_output(self):
self.assertEqual(__, self.render_tag('llama'))
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
class AboutDecoratingWithFunctions(Koan):
def addcowbell(fn):
fn.wow_factor = 'COWBELL BABY!'
return fn
@addcowbell
def mediocre_song(self):
return "o/~ We all live in a broken submarine o/~"
def test_decorators_can_modify_a_function(self):
self.assertRegex(self.mediocre_song(), "o/~ We all live in a broken submarine o/~")
self.assertEqual("COWBELL BABY!", self.mediocre_song.wow_factor)
# ------------------------------------------------------------------
def xmltag(fn):
def func(*args):
return '<' + fn(*args) + '/>'
return func
@xmltag
def render_tag(self, name):
return name
def test_decorators_can_change_a_function_output(self):
self.assertEqual("<llama/>", self.render_tag('llama'))
| Complete 'About Decorating with functions' koans | [Python] Complete 'About Decorating with functions' koans
| Python | mit | JesusMtnez/devexperto-challenge,JesusMtnez/devexperto-challenge | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
class AboutDecoratingWithFunctions(Koan):
def addcowbell(fn):
fn.wow_factor = 'COWBELL BABY!'
return fn
@addcowbell
def mediocre_song(self):
return "o/~ We all live in a broken submarine o/~"
def test_decorators_can_modify_a_function(self):
self.assertRegex(self.mediocre_song(), "o/~ We all live in a broken submarine o/~")
self.assertEqual("COWBELL BABY!", self.mediocre_song.wow_factor)
# ------------------------------------------------------------------
def xmltag(fn):
def func(*args):
return '<' + fn(*args) + '/>'
return func
@xmltag
def render_tag(self, name):
return name
def test_decorators_can_change_a_function_output(self):
self.assertEqual("<llama/>", self.render_tag('llama'))
| [Python] Complete 'About Decorating with functions' koans
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
class AboutDecoratingWithFunctions(Koan):
def addcowbell(fn):
fn.wow_factor = 'COWBELL BABY!'
return fn
@addcowbell
def mediocre_song(self):
return "o/~ We all live in a broken submarine o/~"
def test_decorators_can_modify_a_function(self):
self.assertRegex(self.mediocre_song(), __)
self.assertEqual(__, self.mediocre_song.wow_factor)
# ------------------------------------------------------------------
def xmltag(fn):
def func(*args):
return '<' + fn(*args) + '/>'
return func
@xmltag
def render_tag(self, name):
return name
def test_decorators_can_change_a_function_output(self):
self.assertEqual(__, self.render_tag('llama'))
|
1c231a8ef54af82d8ec03b828856ddac619fd345 | knights/compat/django.py | knights/compat/django.py | import ast
from knights.library import Library
register = Library()
@register.tag
def static(parser, token):
src = parser.parse_expression(token)
return ast.Yield(value=ast.BinOp(
left=ast.Str(s='/static/%s'),
op=ast.Mod(),
right=src,
))
@register.tag(name='include')
def do_include(parser, token):
return ast.Yield(value=ast.Str(s='{include %s}' % token))
| import ast
from knights.library import Library
register = Library()
@register.tag
def static(parser, token):
src = parser.parse_expression(token)
return ast.Yield(value=ast.BinOp(
left=ast.Str(s='/static/%s'),
op=ast.Mod(),
right=src,
))
@register.tag(name='include')
def do_include(parser, token):
return ast.Yield(value=ast.Str(s='{include %s}' % token))
@register.helper
def safe(value):
return str(value)
| Add a dummy safe filter for Django compat | Add a dummy safe filter for Django compat
| Python | mit | funkybob/knights-templater,funkybob/knights-templater | import ast
from knights.library import Library
register = Library()
@register.tag
def static(parser, token):
src = parser.parse_expression(token)
return ast.Yield(value=ast.BinOp(
left=ast.Str(s='/static/%s'),
op=ast.Mod(),
right=src,
))
@register.tag(name='include')
def do_include(parser, token):
return ast.Yield(value=ast.Str(s='{include %s}' % token))
@register.helper
def safe(value):
return str(value)
| Add a dummy safe filter for Django compat
import ast
from knights.library import Library
register = Library()
@register.tag
def static(parser, token):
src = parser.parse_expression(token)
return ast.Yield(value=ast.BinOp(
left=ast.Str(s='/static/%s'),
op=ast.Mod(),
right=src,
))
@register.tag(name='include')
def do_include(parser, token):
return ast.Yield(value=ast.Str(s='{include %s}' % token))
|
dd2d5e96672fc7870434f030ca63f6d7111642f9 | resources/launchers/alfanousDesktop.py | resources/launchers/alfanousDesktop.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import alfanousDesktop.Gui
alfanousDesktop.Gui.main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
# The paths should be generated by setup script
sys.argv.extend(
'-i', '/usr/share/alfanous-indexes/',
'-l', '/usr/locale/',
'-c', '/usr/share/alfanous-config/')
from alfanousDesktop.Gui import *
main()
| Add resource paths to python launcher script (proxy) | Add resource paths to python launcher script (proxy)
Former-commit-id: 7d20874c43637f1236442333f60a88ec653f53f2 | Python | agpl-3.0 | muslih/alfanous,muslih/alfanous,muslih/alfanous,muslih/alfanous,muslih/alfanous,muslih/alfanous,muslih/alfanous | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
# The paths should be generated by setup script
sys.argv.extend(
'-i', '/usr/share/alfanous-indexes/',
'-l', '/usr/locale/',
'-c', '/usr/share/alfanous-config/')
from alfanousDesktop.Gui import *
main()
| Add resource paths to python launcher script (proxy)
Former-commit-id: 7d20874c43637f1236442333f60a88ec653f53f2
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import alfanousDesktop.Gui
alfanousDesktop.Gui.main()
|
3a2b536f24eee711a1329daf7403bd92840a87e3 | gpxpandas/gpxreader.py | gpxpandas/gpxreader.py | __author__ = 'max'
import gpxpy
import pandas as pd
def parse_gpx(gpx_file_name):
return gpxpy.parse(gpx_file_name)
def data_frame_for_track_segment(segment):
seg_dict = {}
for point in segment.points:
seg_dict[point.time] = [point.latitude, point.longitude,
point.elevation, point.speed]
seg_frame = pd.DataFrame(data=seg_dict)
# Switch columns and rows s.t. timestamps are rows and gps data columns.
seg_frame = seg_frame.T
seg_frame.columns = ['latitude', 'longitude', 'altitude', 'speed']
return seg_frame
def track_segment_mapping(track):
segments = (data_frame_for_track_segment(segment)
for segment in track.segments)
return segments
def pandas_data_frame_for_gpx(gpx):
tracks_frames = (track_segment_mapping(track) for track in gpx.tracks)
# Create a hierarchical DataFrame by unstacking.
tracks_frame = pd.DataFrame(tracks_frames)
return tracks_frame.unstack() | __author__ = 'max'
import gpxpy
import pandas as pd
def parse_gpx(gpx_file_name):
return gpxpy.parse(gpx_file_name)
def data_frame_for_track_segment(segment):
seg_dict = {}
for point in segment.points:
seg_dict[point.time] = [point.latitude, point.longitude,
point.elevation, point.speed]
seg_frame = pd.DataFrame(data=seg_dict)
# Switch columns and rows s.t. timestamps are rows and gps data columns.
seg_frame = seg_frame.T
seg_frame.columns = ['latitude', 'longitude', 'altitude', 'speed']
return seg_frame
def track_segment_mapping(track):
segments = (data_frame_for_track_segment(segment)
for segment in track.segments)
return segments
def pandas_data_frame_for_gpx(gpx):
tracks_frames = (track_segment_mapping(track) for track in gpx.tracks)
# Create a hierarchical DataFrame by unstacking.
tracks_frame = pd.DataFrame(tracks_frames)
assert gpx.name
return pd.DataFrame({gpx.name:tracks_frame.unstack()}) | Use gpx.name as index to gpx data_frame | Use gpx.name as index to gpx data_frame
| Python | mit | komax/gpx-pandas | __author__ = 'max'
import gpxpy
import pandas as pd
def parse_gpx(gpx_file_name):
return gpxpy.parse(gpx_file_name)
def data_frame_for_track_segment(segment):
seg_dict = {}
for point in segment.points:
seg_dict[point.time] = [point.latitude, point.longitude,
point.elevation, point.speed]
seg_frame = pd.DataFrame(data=seg_dict)
# Switch columns and rows s.t. timestamps are rows and gps data columns.
seg_frame = seg_frame.T
seg_frame.columns = ['latitude', 'longitude', 'altitude', 'speed']
return seg_frame
def track_segment_mapping(track):
segments = (data_frame_for_track_segment(segment)
for segment in track.segments)
return segments
def pandas_data_frame_for_gpx(gpx):
tracks_frames = (track_segment_mapping(track) for track in gpx.tracks)
# Create a hierarchical DataFrame by unstacking.
tracks_frame = pd.DataFrame(tracks_frames)
assert gpx.name
return pd.DataFrame({gpx.name:tracks_frame.unstack()}) | Use gpx.name as index to gpx data_frame
__author__ = 'max'
import gpxpy
import pandas as pd
def parse_gpx(gpx_file_name):
return gpxpy.parse(gpx_file_name)
def data_frame_for_track_segment(segment):
seg_dict = {}
for point in segment.points:
seg_dict[point.time] = [point.latitude, point.longitude,
point.elevation, point.speed]
seg_frame = pd.DataFrame(data=seg_dict)
# Switch columns and rows s.t. timestamps are rows and gps data columns.
seg_frame = seg_frame.T
seg_frame.columns = ['latitude', 'longitude', 'altitude', 'speed']
return seg_frame
def track_segment_mapping(track):
segments = (data_frame_for_track_segment(segment)
for segment in track.segments)
return segments
def pandas_data_frame_for_gpx(gpx):
tracks_frames = (track_segment_mapping(track) for track in gpx.tracks)
# Create a hierarchical DataFrame by unstacking.
tracks_frame = pd.DataFrame(tracks_frames)
return tracks_frame.unstack() |
0f3704a73ec54f015bff9a391d3a6dabc34368cd | palette/core/palette_selection.py | palette/core/palette_selection.py | # -*- coding: utf-8 -*-
## @package palette.core.palette_selection
#
# Implementation of automatic color palette selection.
# @author tody
# @date 2015/08/20
| # -*- coding: utf-8 -*-
## @package palette.core.palette_selection
#
# Implementation of automatic color palette selection.
# @author tody
# @date 2015/08/20
import os
import numpy as np
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import cv2
from palette.datasets.google_image import dataFile
from palette.cv.image import to32F
from palette.io_util.image import loadRGB
from palette.plot.window import showMaximize
_root_dir = os.path.dirname(__file__)
## Result directory for SOM results.
def resultDir():
result_dir = os.path.abspath(os.path.join(_root_dir, "../results"))
if not os.path.exists(result_dir):
os.makedirs(result_dir)
return result_dir
def runPaletteSelectionResult(image_file):
image_name = os.path.basename(image_file)
image_name = os.path.splitext(image_name)[0]
C_8U = loadRGB(image_file)
C_32F = to32F(C_8U)
fig = plt.figure(figsize=(10, 8))
fig.subplots_adjust(left=0.05, bottom=0.05, right=0.95, top=0.95, wspace=0.05, hspace=0.05)
plt.title("Automatic Color Palette Selection")
plt.subplot(131)
plt.title("%s" % (image_name))
plt.imshow(C_32F)
plt.axis('off')
showMaximize()
def runPaletteSelectionResults(data_names, data_ids):
for data_name in data_names:
print "Palette Selection: %s" % data_name
for data_id in data_ids:
print "Data ID: %s" % data_id
image_file = dataFile(data_name, data_id)
runPaletteSelectionResult(image_file)
if __name__ == '__main__':
data_names = ["apple", "tulip", "flower"]
data_ids = [0, 1, 2]
runPaletteSelectionResults(data_names, data_ids)
| Add initial plaette selection code. | Add initial plaette selection code.
| Python | mit | tody411/PaletteSelection | # -*- coding: utf-8 -*-
## @package palette.core.palette_selection
#
# Implementation of automatic color palette selection.
# @author tody
# @date 2015/08/20
import os
import numpy as np
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import cv2
from palette.datasets.google_image import dataFile
from palette.cv.image import to32F
from palette.io_util.image import loadRGB
from palette.plot.window import showMaximize
_root_dir = os.path.dirname(__file__)
## Result directory for SOM results.
def resultDir():
result_dir = os.path.abspath(os.path.join(_root_dir, "../results"))
if not os.path.exists(result_dir):
os.makedirs(result_dir)
return result_dir
def runPaletteSelectionResult(image_file):
image_name = os.path.basename(image_file)
image_name = os.path.splitext(image_name)[0]
C_8U = loadRGB(image_file)
C_32F = to32F(C_8U)
fig = plt.figure(figsize=(10, 8))
fig.subplots_adjust(left=0.05, bottom=0.05, right=0.95, top=0.95, wspace=0.05, hspace=0.05)
plt.title("Automatic Color Palette Selection")
plt.subplot(131)
plt.title("%s" % (image_name))
plt.imshow(C_32F)
plt.axis('off')
showMaximize()
def runPaletteSelectionResults(data_names, data_ids):
for data_name in data_names:
print "Palette Selection: %s" % data_name
for data_id in data_ids:
print "Data ID: %s" % data_id
image_file = dataFile(data_name, data_id)
runPaletteSelectionResult(image_file)
if __name__ == '__main__':
data_names = ["apple", "tulip", "flower"]
data_ids = [0, 1, 2]
runPaletteSelectionResults(data_names, data_ids)
| Add initial plaette selection code.
# -*- coding: utf-8 -*-
## @package palette.core.palette_selection
#
# Implementation of automatic color palette selection.
# @author tody
# @date 2015/08/20
|
a24b2b303c1cd5e9f43353d55cc6b9d07b37b7f4 | ephemeral-cluster.py | ephemeral-cluster.py | #!/usr/bin/env python
import subprocess
import sys
import uuid
usage = """\
Run a command using a temporary docker-compose cluster, removing all containers \
and images after command completion (regardless of success or failure.)
Generally, this would be used with the ``run`` command to provide a clean room \
testing environment.
"""
if not sys.argv[1:]:
sys.stderr.write(usage)
sys.exit(1)
project = uuid.uuid1().hex
sys.stderr.write('Starting ephemeral cluster: {0}\n'.format(project))
try:
sys.exit(subprocess.check_call(['docker-compose', '-p', project] + sys.argv[1:]))
finally:
subprocess.check_call(['docker-compose', '-p', project, 'stop'])
subprocess.check_call(['docker-compose', '-p', project, 'rm', '-f', '-v'])
| #!/usr/bin/env python
import subprocess
import sys
import uuid
usage = """\
Run a command using a temporary docker-compose cluster, removing all containers \
and associated volumes after command completion (regardless of success or \
failure.)
Generally, this would be used with the ``run`` command to provide a clean room \
testing environment.
"""
if not sys.argv[1:]:
sys.stderr.write(usage)
sys.exit(1)
project = uuid.uuid1().hex
sys.stderr.write('Setting up ephemeral cluster ({0})...\n'.format(project))
try:
subprocess.check_call(['docker-compose', '-p', project] + sys.argv[1:])
except subprocess.CalledProcessError as error:
raise SystemExit(error.returncode)
finally:
sys.stderr.write('\nCleaning up ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker-compose', '-p', project, 'stop'])
subprocess.check_call(['docker-compose', '-p', project, 'rm', '-f', '-v'])
| Fix forwarding ephemeral cluster exit code. | Fix forwarding ephemeral cluster exit code.
Summary: Also improves logging a little bit.
Test Plan:
$ python ephemeral-cluster.py run --rm --entrypoint=bash pgshovel -c "exit 10"
$ test $? -eq 10
Reviewers: jeff, tail
Reviewed By: tail
Differential Revision: http://phabricator.local.disqus.net/D19564
| Python | apache-2.0 | fuziontech/pgshovel,disqus/pgshovel,fuziontech/pgshovel,fuziontech/pgshovel,disqus/pgshovel | #!/usr/bin/env python
import subprocess
import sys
import uuid
usage = """\
Run a command using a temporary docker-compose cluster, removing all containers \
and associated volumes after command completion (regardless of success or \
failure.)
Generally, this would be used with the ``run`` command to provide a clean room \
testing environment.
"""
if not sys.argv[1:]:
sys.stderr.write(usage)
sys.exit(1)
project = uuid.uuid1().hex
sys.stderr.write('Setting up ephemeral cluster ({0})...\n'.format(project))
try:
subprocess.check_call(['docker-compose', '-p', project] + sys.argv[1:])
except subprocess.CalledProcessError as error:
raise SystemExit(error.returncode)
finally:
sys.stderr.write('\nCleaning up ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker-compose', '-p', project, 'stop'])
subprocess.check_call(['docker-compose', '-p', project, 'rm', '-f', '-v'])
| Fix forwarding ephemeral cluster exit code.
Summary: Also improves logging a little bit.
Test Plan:
$ python ephemeral-cluster.py run --rm --entrypoint=bash pgshovel -c "exit 10"
$ test $? -eq 10
Reviewers: jeff, tail
Reviewed By: tail
Differential Revision: http://phabricator.local.disqus.net/D19564
#!/usr/bin/env python
import subprocess
import sys
import uuid
usage = """\
Run a command using a temporary docker-compose cluster, removing all containers \
and images after command completion (regardless of success or failure.)
Generally, this would be used with the ``run`` command to provide a clean room \
testing environment.
"""
if not sys.argv[1:]:
sys.stderr.write(usage)
sys.exit(1)
project = uuid.uuid1().hex
sys.stderr.write('Starting ephemeral cluster: {0}\n'.format(project))
try:
sys.exit(subprocess.check_call(['docker-compose', '-p', project] + sys.argv[1:]))
finally:
subprocess.check_call(['docker-compose', '-p', project, 'stop'])
subprocess.check_call(['docker-compose', '-p', project, 'rm', '-f', '-v'])
|
f69a2dc9530fef44e5b67d64496bcec9eceaf0e4 | config.py | config.py | import os
import datetime
register_title_api = os.environ['REGISTER_TITLE_API']
login_api = os.environ['LOGIN_API']
logging_config_file_path = os.environ['LOGGING_CONFIG_FILE_PATH']
google_analytics_api_key = os.environ['GOOGLE_ANALYTICS_API_KEY']
secret_key = os.environ['APPLICATION_SECRET_KEY']
session_cookie_secure = os.environ['SESSION_COOKIE_SECURE'] != 'False'
CONFIG_DICT = {
'DEBUG': False,
'LOGGING': True,
'REGISTER_TITLE_API': register_title_api,
'LOGGING_CONFIG_FILE_PATH': logging_config_file_path,
'GOOGLE_ANALYTICS_API_KEY': google_analytics_api_key,
'LOGIN_API': login_api,
'PERMANENT_SESSION_LIFETIME': datetime.timedelta(minutes=15),
'SECRET_KEY': secret_key,
'SESSION_COOKIE_SECURE': session_cookie_secure,
}
settings = os.environ.get('SETTINGS')
if settings == 'dev':
CONFIG_DICT['DEBUG'] = True
elif settings == 'test':
# We do NOT set TESTING to True here as it turns off authentication, and we
# want to make sure the app behaves the same when running tests locally
# as it does in production.
CONFIG_DICT['LOGGING'] = False
CONFIG_DICT['DEBUG'] = True
CONFIG_DICT['SLEEP_BETWEEN_LOGINS'] = False
CONFIG_DICT['DISABLE_CSRF_PREVENTION'] = True
| import os
import datetime
register_title_api = os.environ['REGISTER_TITLE_API']
login_api = os.environ['LOGIN_API']
logging_config_file_path = os.environ['LOGGING_CONFIG_FILE_PATH']
google_analytics_api_key = os.environ['GOOGLE_ANALYTICS_API_KEY']
secret_key = os.environ['APPLICATION_SECRET_KEY']
session_cookie_secure = os.environ['SESSION_COOKIE_SECURE'].lower() != 'false'
CONFIG_DICT = {
'DEBUG': False,
'LOGGING': True,
'REGISTER_TITLE_API': register_title_api,
'LOGGING_CONFIG_FILE_PATH': logging_config_file_path,
'GOOGLE_ANALYTICS_API_KEY': google_analytics_api_key,
'LOGIN_API': login_api,
'PERMANENT_SESSION_LIFETIME': datetime.timedelta(minutes=15),
'SECRET_KEY': secret_key,
'SESSION_COOKIE_SECURE': session_cookie_secure,
}
settings = os.environ.get('SETTINGS')
if settings == 'dev':
CONFIG_DICT['DEBUG'] = True
elif settings == 'test':
# We do NOT set TESTING to True here as it turns off authentication, and we
# want to make sure the app behaves the same when running tests locally
# as it does in production.
CONFIG_DICT['LOGGING'] = False
CONFIG_DICT['DEBUG'] = True
CONFIG_DICT['SLEEP_BETWEEN_LOGINS'] = False
CONFIG_DICT['DISABLE_CSRF_PREVENTION'] = True
| Make the secure session cookie setting case-insensitive | Make the secure session cookie setting case-insensitive
| Python | mit | LandRegistry/digital-register-frontend,LandRegistry/digital-register-frontend,LandRegistry/digital-register-frontend,LandRegistry/digital-register-frontend | import os
import datetime
register_title_api = os.environ['REGISTER_TITLE_API']
login_api = os.environ['LOGIN_API']
logging_config_file_path = os.environ['LOGGING_CONFIG_FILE_PATH']
google_analytics_api_key = os.environ['GOOGLE_ANALYTICS_API_KEY']
secret_key = os.environ['APPLICATION_SECRET_KEY']
session_cookie_secure = os.environ['SESSION_COOKIE_SECURE'].lower() != 'false'
CONFIG_DICT = {
'DEBUG': False,
'LOGGING': True,
'REGISTER_TITLE_API': register_title_api,
'LOGGING_CONFIG_FILE_PATH': logging_config_file_path,
'GOOGLE_ANALYTICS_API_KEY': google_analytics_api_key,
'LOGIN_API': login_api,
'PERMANENT_SESSION_LIFETIME': datetime.timedelta(minutes=15),
'SECRET_KEY': secret_key,
'SESSION_COOKIE_SECURE': session_cookie_secure,
}
settings = os.environ.get('SETTINGS')
if settings == 'dev':
CONFIG_DICT['DEBUG'] = True
elif settings == 'test':
# We do NOT set TESTING to True here as it turns off authentication, and we
# want to make sure the app behaves the same when running tests locally
# as it does in production.
CONFIG_DICT['LOGGING'] = False
CONFIG_DICT['DEBUG'] = True
CONFIG_DICT['SLEEP_BETWEEN_LOGINS'] = False
CONFIG_DICT['DISABLE_CSRF_PREVENTION'] = True
| Make the secure session cookie setting case-insensitive
import os
import datetime
register_title_api = os.environ['REGISTER_TITLE_API']
login_api = os.environ['LOGIN_API']
logging_config_file_path = os.environ['LOGGING_CONFIG_FILE_PATH']
google_analytics_api_key = os.environ['GOOGLE_ANALYTICS_API_KEY']
secret_key = os.environ['APPLICATION_SECRET_KEY']
session_cookie_secure = os.environ['SESSION_COOKIE_SECURE'] != 'False'
CONFIG_DICT = {
'DEBUG': False,
'LOGGING': True,
'REGISTER_TITLE_API': register_title_api,
'LOGGING_CONFIG_FILE_PATH': logging_config_file_path,
'GOOGLE_ANALYTICS_API_KEY': google_analytics_api_key,
'LOGIN_API': login_api,
'PERMANENT_SESSION_LIFETIME': datetime.timedelta(minutes=15),
'SECRET_KEY': secret_key,
'SESSION_COOKIE_SECURE': session_cookie_secure,
}
settings = os.environ.get('SETTINGS')
if settings == 'dev':
CONFIG_DICT['DEBUG'] = True
elif settings == 'test':
# We do NOT set TESTING to True here as it turns off authentication, and we
# want to make sure the app behaves the same when running tests locally
# as it does in production.
CONFIG_DICT['LOGGING'] = False
CONFIG_DICT['DEBUG'] = True
CONFIG_DICT['SLEEP_BETWEEN_LOGINS'] = False
CONFIG_DICT['DISABLE_CSRF_PREVENTION'] = True
|
6adfd6ee8f673a601a3d118a45d21d2941b1e0aa | buildlet/utils/hashutils.py | buildlet/utils/hashutils.py | import hashlib
def hexdigest(strings):
m = hashlib.md5()
for s in strings:
m.update(s)
return m.hexdigest()
| import hashlib
def hexdigest(strings):
m = hashlib.md5()
for s in strings:
m.update(s.encode())
return m.hexdigest()
| Fix TypeError in Python 3 | Fix TypeError in Python 3
| Python | bsd-3-clause | tkf/buildlet | import hashlib
def hexdigest(strings):
m = hashlib.md5()
for s in strings:
m.update(s.encode())
return m.hexdigest()
| Fix TypeError in Python 3
import hashlib
def hexdigest(strings):
m = hashlib.md5()
for s in strings:
m.update(s)
return m.hexdigest()
|
5adaa6ec7b1c379d0fcbf7b488cc48e183739f0e | timed/tests/test_serializers.py | timed/tests/test_serializers.py | from datetime import timedelta
import pytest
from rest_framework_json_api.serializers import DurationField, IntegerField
from timed.serializers import PkDictSerializer
class MyPkDictSerializer(PkDictSerializer):
test_duration = DurationField()
test_nr = IntegerField()
class Meta:
pk_key = 'test_nr'
resource_name = 'my-resource'
@pytest.fixture
def data():
return {
'test_nr': 123,
'test_duration': timedelta(hours=1),
'invalid_field': '1234'
}
def test_pk_dict_serializer_single(data):
serializer = MyPkDictSerializer(data)
expected_data = {
'test_duration': '01:00:00',
'test_nr': 123,
}
assert expected_data == serializer.data
def test_pk_dict_serializer_many(data):
list_data = [
data,
data
]
serializer = MyPkDictSerializer(list_data, many=True)
expected_data = [
{
'test_duration': '01:00:00',
'test_nr': 123,
},
{
'test_duration': '01:00:00',
'test_nr': 123,
},
]
assert expected_data == serializer.data
| Add unit test for pk dict serializer | Add unit test for pk dict serializer
| Python | agpl-3.0 | adfinis-sygroup/timed-backend,adfinis-sygroup/timed-backend,adfinis-sygroup/timed-backend | from datetime import timedelta
import pytest
from rest_framework_json_api.serializers import DurationField, IntegerField
from timed.serializers import PkDictSerializer
class MyPkDictSerializer(PkDictSerializer):
test_duration = DurationField()
test_nr = IntegerField()
class Meta:
pk_key = 'test_nr'
resource_name = 'my-resource'
@pytest.fixture
def data():
return {
'test_nr': 123,
'test_duration': timedelta(hours=1),
'invalid_field': '1234'
}
def test_pk_dict_serializer_single(data):
serializer = MyPkDictSerializer(data)
expected_data = {
'test_duration': '01:00:00',
'test_nr': 123,
}
assert expected_data == serializer.data
def test_pk_dict_serializer_many(data):
list_data = [
data,
data
]
serializer = MyPkDictSerializer(list_data, many=True)
expected_data = [
{
'test_duration': '01:00:00',
'test_nr': 123,
},
{
'test_duration': '01:00:00',
'test_nr': 123,
},
]
assert expected_data == serializer.data
| Add unit test for pk dict serializer
|
|
cbd90060410108877d068913a4dfc681b81d6956 | galera_consistency.py | galera_consistency.py | import optparse
import subprocess
def table_checksum(user, password, host):
args = ['/usr/bin/pt-table-checksum', '-u', user, '-p', password]
if host:
args.extend(['-h', host])
proc = subprocess.Popen(args, stderr=subprocess.PIPE)
(out, err) = proc.communicate()
return (proc.return_code, out, err)
def main():
usage = "Usage: %prog [-h] [-H] username password"
parser = optparse.OptionParser(usage=usage)
parser.add_option('-H', '--host', action='store', dest='host',
default=None)
(options, args) = parser.parse_args()
if len(args) != 2:
parser.print_help()
raise SystemExit(True)
(status, _, err) = table_checksum(args[0], args[1], options.host)
if status != 0:
print "status err %s" % err
raise SystemExit(True)
print "status ok"
if __name__ == '__main__':
main()
| Use pt-table-checksum to check for galera consistency | Use pt-table-checksum to check for galera consistency
| Python | apache-2.0 | jpmontez/rpc-openstack,cfarquhar/rpc-maas,miguelgrinberg/rpc-openstack,xeregin/rpc-openstack,mancdaz/rpc-openstack,nrb/rpc-openstack,stevelle/rpc-openstack,claco/rpc-openstack,BjoernT/rpc-openstack,npawelek/rpc-maas,shannonmitchell/rpc-openstack,rcbops/rpc-openstack,robb-romans/rpc-openstack,mattt416/rpc-openstack,andymcc/rpc-openstack,cloudnull/rpc-maas,busterswt/rpc-openstack,stevelle/rpc-openstack,robb-romans/rpc-openstack,stevelle/rpc-openstack,sigmavirus24/rpc-openstack,galstrom21/rpc-openstack,briancurtin/rpc-maas,hughsaunders/rpc-openstack,byronmccollum/rpc-openstack,andymcc/rpc-openstack,miguelgrinberg/rpc-openstack,cfarquhar/rpc-openstack,jacobwagner/rpc-openstack,cloudnull/rpc-openstack,npawelek/rpc-maas,cloudnull/rpc-openstack,mancdaz/rpc-openstack,prometheanfire/rpc-openstack,claco/rpc-openstack,busterswt/rpc-openstack,cloudnull/rpc-maas,claco/rpc-openstack,cfarquhar/rpc-openstack,shannonmitchell/rpc-openstack,nrb/rpc-openstack,darrenchan/rpc-openstack,cfarquhar/rpc-maas,busterswt/rpc-openstack,briancurtin/rpc-maas,sigmavirus24/rpc-openstack,andymcc/rpc-openstack,jpmontez/rpc-openstack,prometheanfire/rpc-openstack,cloudnull/rpc-maas,jpmontez/rpc-openstack,nrb/rpc-openstack,git-harry/rpc-openstack,npawelek/rpc-maas,xeregin/rpc-openstack,xeregin/rpc-openstack,major/rpc-openstack,darrenchan/rpc-openstack,miguelgrinberg/rpc-openstack,sigmavirus24/rpc-openstack,byronmccollum/rpc-openstack,BjoernT/rpc-openstack,darrenchan/rpc-openstack,byronmccollum/rpc-openstack,xeregin/rpc-openstack,mattt416/rpc-openstack,sigmavirus24/rpc-openstack,mattt416/rpc-openstack,major/rpc-openstack,jacobwagner/rpc-openstack,galstrom21/rpc-openstack,briancurtin/rpc-maas,rcbops/rpc-openstack,darrenchan/rpc-openstack,hughsaunders/rpc-openstack,git-harry/rpc-openstack,cfarquhar/rpc-maas | import optparse
import subprocess
def table_checksum(user, password, host):
args = ['/usr/bin/pt-table-checksum', '-u', user, '-p', password]
if host:
args.extend(['-h', host])
proc = subprocess.Popen(args, stderr=subprocess.PIPE)
(out, err) = proc.communicate()
return (proc.return_code, out, err)
def main():
usage = "Usage: %prog [-h] [-H] username password"
parser = optparse.OptionParser(usage=usage)
parser.add_option('-H', '--host', action='store', dest='host',
default=None)
(options, args) = parser.parse_args()
if len(args) != 2:
parser.print_help()
raise SystemExit(True)
(status, _, err) = table_checksum(args[0], args[1], options.host)
if status != 0:
print "status err %s" % err
raise SystemExit(True)
print "status ok"
if __name__ == '__main__':
main()
| Use pt-table-checksum to check for galera consistency
|
|
3d2f19ff097cf144efd9135c52e4d584193f9ddb | tohu/v7/custom_generator/tohu_items_class.py | tohu/v7/custom_generator/tohu_items_class.py | import attr
__all__ = ["make_tohu_items_class"]
def make_tohu_items_class(clsname, field_names):
"""
Parameters
----------
clsname: string
Name of the class to be created.
field_names: list of strings
Names of the field attributes of the class to be created.
"""
item_cls = attr.make_class(clsname, {name: attr.ib() for name in field_names}, repr=True, cmp=True, frozen=True)
func_eq_orig = item_cls.__eq__
def func_eq_new(self, other):
"""
Custom __eq__() method which also allows comparisons with
tuples and dictionaries. This is mostly for convenience
during testing.
"""
if isinstance(other, self.__class__):
return func_eq_orig(self, other)
else:
if isinstance(other, tuple):
return attr.astuple(self) == other
elif isinstance(other, dict):
return attr.asdict(self) == other
else:
raise TypeError(
f"Tohu items have types that cannot be compared: "
"{self.__class__.__name__}, {other.__class__.__name__}"
)
item_cls.__eq__ = func_eq_new
item_cls.field_names = field_names
item_cls.as_dict = lambda self: attr.asdict(self)
item_cls.as_tuple = lambda self: attr.astuple(self)
return item_cls
| import attr
__all__ = ["make_tohu_items_class"]
def make_tohu_items_class(clsname, field_names):
"""
Parameters
----------
clsname: string
Name of the class to be created.
field_names: list of strings
Names of the field attributes of the class to be created.
"""
item_cls = attr.make_class(clsname, {name: attr.ib() for name in field_names}, repr=True, cmp=True, frozen=True)
func_eq_orig = item_cls.__eq__
def func_eq_new(self, other):
"""
Custom __eq__() method which also allows comparisons with
tuples and dictionaries. This is mostly for convenience
during testing.
"""
if isinstance(other, self.__class__):
return func_eq_orig(self, other)
else:
if isinstance(other, tuple):
return attr.astuple(self) == other
elif isinstance(other, dict):
return attr.asdict(self) == other
else:
raise TypeError(
f"Tohu items have types that cannot be compared: "
"{self.__class__.__name__}, {other.__class__.__name__}"
)
item_cls.__eq__ = func_eq_new
item_cls.field_names = field_names
item_cls.as_dict = lambda self: attr.asdict(self)
item_cls.as_tuple = lambda self: attr.astuple(self)
item_cls.is_unset = False
return item_cls
| Add attribute 'is_unset' so that the interface is consistent with MissingTohuItemsCls | Add attribute 'is_unset' so that the interface is consistent with MissingTohuItemsCls
| Python | mit | maxalbert/tohu | import attr
__all__ = ["make_tohu_items_class"]
def make_tohu_items_class(clsname, field_names):
"""
Parameters
----------
clsname: string
Name of the class to be created.
field_names: list of strings
Names of the field attributes of the class to be created.
"""
item_cls = attr.make_class(clsname, {name: attr.ib() for name in field_names}, repr=True, cmp=True, frozen=True)
func_eq_orig = item_cls.__eq__
def func_eq_new(self, other):
"""
Custom __eq__() method which also allows comparisons with
tuples and dictionaries. This is mostly for convenience
during testing.
"""
if isinstance(other, self.__class__):
return func_eq_orig(self, other)
else:
if isinstance(other, tuple):
return attr.astuple(self) == other
elif isinstance(other, dict):
return attr.asdict(self) == other
else:
raise TypeError(
f"Tohu items have types that cannot be compared: "
"{self.__class__.__name__}, {other.__class__.__name__}"
)
item_cls.__eq__ = func_eq_new
item_cls.field_names = field_names
item_cls.as_dict = lambda self: attr.asdict(self)
item_cls.as_tuple = lambda self: attr.astuple(self)
item_cls.is_unset = False
return item_cls
| Add attribute 'is_unset' so that the interface is consistent with MissingTohuItemsCls
import attr
__all__ = ["make_tohu_items_class"]
def make_tohu_items_class(clsname, field_names):
"""
Parameters
----------
clsname: string
Name of the class to be created.
field_names: list of strings
Names of the field attributes of the class to be created.
"""
item_cls = attr.make_class(clsname, {name: attr.ib() for name in field_names}, repr=True, cmp=True, frozen=True)
func_eq_orig = item_cls.__eq__
def func_eq_new(self, other):
"""
Custom __eq__() method which also allows comparisons with
tuples and dictionaries. This is mostly for convenience
during testing.
"""
if isinstance(other, self.__class__):
return func_eq_orig(self, other)
else:
if isinstance(other, tuple):
return attr.astuple(self) == other
elif isinstance(other, dict):
return attr.asdict(self) == other
else:
raise TypeError(
f"Tohu items have types that cannot be compared: "
"{self.__class__.__name__}, {other.__class__.__name__}"
)
item_cls.__eq__ = func_eq_new
item_cls.field_names = field_names
item_cls.as_dict = lambda self: attr.asdict(self)
item_cls.as_tuple = lambda self: attr.astuple(self)
return item_cls
|
60edf2f1534e02a6da9aa715662a0e4ea8922191 | mk/get_config_dir.py | mk/get_config_dir.py | #!/usr/bin/env python
import os
component = os.getenv("COMPONENT")
if component == "ocaml":
print "/repos/xen-dist-ocaml.hg"
if component == "api-libs":
print "/repos/xen-api-libs-rpm-buildroot"
| #!/usr/bin/env python
import os
component = os.getenv("COMPONENT")
if component == "ocaml":
print "/repos/xen-dist-ocaml.hg"
if component == "api-libs":
print "/repos/xen-api-libs-specs"
| Change name of config repo for api-libs component | Change name of config repo for api-libs component
Signed-off-by: Jon Ludlam <e7e3380887a8f95cc9dc4f0d51dedc7e849a287a@eu.citrix.com>
| Python | lgpl-2.1 | simonjbeaumont/planex,jonludlam/planex,euanh/planex-cleanhistory,jonludlam/planex,djs55/planex,djs55/planex,djs55/planex,simonjbeaumont/planex,jonludlam/planex,euanh/planex-cleanhistory,euanh/planex-cleanhistory,simonjbeaumont/planex | #!/usr/bin/env python
import os
component = os.getenv("COMPONENT")
if component == "ocaml":
print "/repos/xen-dist-ocaml.hg"
if component == "api-libs":
print "/repos/xen-api-libs-specs"
| Change name of config repo for api-libs component
Signed-off-by: Jon Ludlam <e7e3380887a8f95cc9dc4f0d51dedc7e849a287a@eu.citrix.com>
#!/usr/bin/env python
import os
component = os.getenv("COMPONENT")
if component == "ocaml":
print "/repos/xen-dist-ocaml.hg"
if component == "api-libs":
print "/repos/xen-api-libs-rpm-buildroot"
|
2560ca287e81cbefb6037e5688bfa4ef74d85149 | clock.py | clock.py | from __future__ import print_function
from apscheduler.schedulers.blocking import BlockingScheduler
import logging
import subprocess
logging.basicConfig()
scheduler = BlockingScheduler()
@scheduler.scheduled_job('interval', minutes=1)
def timed_job_min1():
print("Run notifier")
subprocess.run(
"notifier -concurrency=5 -fetcher-cache=true -notification-interval=1 && curl -sS https://nosnch.in/c411a3a685",
shell=True,
check=True)
# @scheduler.scheduled_job('interval', minutes=10)
# def timed_job_min10():
# print("Run notifier")
# subprocess.run(
# "notifier -concurrency=5 -fetcher-cache=true -notification-interval=10 && curl -sS https://nosnch.in/c411a3a685",
# shell=True,
# check=True)
scheduler.start()
| from __future__ import print_function
from apscheduler.schedulers.blocking import BlockingScheduler
import logging
import subprocess
logging.basicConfig()
scheduler = BlockingScheduler()
@scheduler.scheduled_job('interval', minutes=1)
def timed_job_min1():
print("Run notifier")
subprocess.check_call(
"notifier -concurrency=5 -fetcher-cache=true -notification-interval=1 && curl -sS https://nosnch.in/c411a3a685",
shell=True)
# @scheduler.scheduled_job('interval', minutes=10)
# def timed_job_min10():
# print("Run notifier")
# subprocess.run(
# "notifier -concurrency=5 -fetcher-cache=true -notification-interval=10 && curl -sS https://nosnch.in/c411a3a685",
# shell=True,
# check=True)
scheduler.start()
| Change call method for Python2.7 | Change call method for Python2.7
| Python | mit | oinume/lekcije,oinume/dmm-eikaiwa-fft,oinume/lekcije,oinume/dmm-eikaiwa-fft,oinume/lekcije,oinume/dmm-eikaiwa-fft,oinume/lekcije,oinume/lekcije,oinume/lekcije,oinume/dmm-eikaiwa-fft | from __future__ import print_function
from apscheduler.schedulers.blocking import BlockingScheduler
import logging
import subprocess
logging.basicConfig()
scheduler = BlockingScheduler()
@scheduler.scheduled_job('interval', minutes=1)
def timed_job_min1():
print("Run notifier")
subprocess.check_call(
"notifier -concurrency=5 -fetcher-cache=true -notification-interval=1 && curl -sS https://nosnch.in/c411a3a685",
shell=True)
# @scheduler.scheduled_job('interval', minutes=10)
# def timed_job_min10():
# print("Run notifier")
# subprocess.run(
# "notifier -concurrency=5 -fetcher-cache=true -notification-interval=10 && curl -sS https://nosnch.in/c411a3a685",
# shell=True,
# check=True)
scheduler.start()
| Change call method for Python2.7
from __future__ import print_function
from apscheduler.schedulers.blocking import BlockingScheduler
import logging
import subprocess
logging.basicConfig()
scheduler = BlockingScheduler()
@scheduler.scheduled_job('interval', minutes=1)
def timed_job_min1():
print("Run notifier")
subprocess.run(
"notifier -concurrency=5 -fetcher-cache=true -notification-interval=1 && curl -sS https://nosnch.in/c411a3a685",
shell=True,
check=True)
# @scheduler.scheduled_job('interval', minutes=10)
# def timed_job_min10():
# print("Run notifier")
# subprocess.run(
# "notifier -concurrency=5 -fetcher-cache=true -notification-interval=10 && curl -sS https://nosnch.in/c411a3a685",
# shell=True,
# check=True)
scheduler.start()
|
149c1257f4af4c6962c61e74bddbdddfcc741524 | cbagent/collectors/libstats/psstats.py | cbagent/collectors/libstats/psstats.py | from cbagent.collectors.libstats.remotestats import RemoteStats, parallel_task
class PSStats(RemoteStats):
METRICS = (
("rss", 1024), # kB -> B
("vsize", 1024),
)
PS_CMD = "ps -eo pid,rss,vsize,comm | " \
"grep {} | grep -v grep | sort -n -k 2 | tail -n 1"
TOP_CMD = "top -b n2 -d1 -p {0} | grep {0}"
@parallel_task(server_side=True)
def get_server_samples(self, process):
return self.get_samples(process)
@parallel_task(server_side=False)
def get_client_samples(self, process):
return self.get_samples(process)
def get_samples(self, process):
samples = {}
stdout = self.run(self.PS_CMD.format(process))
if stdout:
for i, value in enumerate(stdout.split()[1:1 + len(self.METRICS)]):
metric, multiplier = self.METRICS[i]
title = "{}_{}".format(process, metric)
samples[title] = float(value) * multiplier
pid = stdout.split()[0]
else:
return samples
stdout = self.run(self.TOP_CMD.format(pid))
if stdout:
title = "{}_cpu".format(process)
samples[title] = float(stdout.split()[8])
return samples
| from cbagent.collectors.libstats.remotestats import RemoteStats, parallel_task
class PSStats(RemoteStats):
METRICS = (
("rss", 1024), # kB -> B
("vsize", 1024),
)
PS_CMD = "ps -eo pid,rss,vsize,comm | " \
"grep {} | grep -v grep | sort -n -k 2 | tail -n 1"
TOP_CMD = "top -b n2 -d1 -p {0} | grep ^{0}"
@parallel_task(server_side=True)
def get_server_samples(self, process):
return self.get_samples(process)
@parallel_task(server_side=False)
def get_client_samples(self, process):
return self.get_samples(process)
def get_samples(self, process):
samples = {}
stdout = self.run(self.PS_CMD.format(process))
if stdout:
for i, value in enumerate(stdout.split()[1:1 + len(self.METRICS)]):
metric, multiplier = self.METRICS[i]
title = "{}_{}".format(process, metric)
samples[title] = float(value) * multiplier
pid = stdout.split()[0]
else:
return samples
stdout = self.run(self.TOP_CMD.format(pid))
if stdout:
title = "{}_cpu".format(process)
samples[title] = float(stdout.split()[8])
return samples
| Use more precise grep expression | Use more precise grep expression
Otherwise we match wrong lines when memory stats contain PID.
Change-Id: I924c1b151ddaad8209445a514bf02a7af5d2e0e0
Reviewed-on: http://review.couchbase.org/79848
Reviewed-by: Pavel Paulau <dd88eded64e90046a680e3a6c0828ceb8fe8a0e7@gmail.com>
Tested-by: Pavel Paulau <dd88eded64e90046a680e3a6c0828ceb8fe8a0e7@gmail.com>
| Python | apache-2.0 | couchbase/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,couchbase/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,pavel-paulau/perfrunner | from cbagent.collectors.libstats.remotestats import RemoteStats, parallel_task
class PSStats(RemoteStats):
METRICS = (
("rss", 1024), # kB -> B
("vsize", 1024),
)
PS_CMD = "ps -eo pid,rss,vsize,comm | " \
"grep {} | grep -v grep | sort -n -k 2 | tail -n 1"
TOP_CMD = "top -b n2 -d1 -p {0} | grep ^{0}"
@parallel_task(server_side=True)
def get_server_samples(self, process):
return self.get_samples(process)
@parallel_task(server_side=False)
def get_client_samples(self, process):
return self.get_samples(process)
def get_samples(self, process):
samples = {}
stdout = self.run(self.PS_CMD.format(process))
if stdout:
for i, value in enumerate(stdout.split()[1:1 + len(self.METRICS)]):
metric, multiplier = self.METRICS[i]
title = "{}_{}".format(process, metric)
samples[title] = float(value) * multiplier
pid = stdout.split()[0]
else:
return samples
stdout = self.run(self.TOP_CMD.format(pid))
if stdout:
title = "{}_cpu".format(process)
samples[title] = float(stdout.split()[8])
return samples
| Use more precise grep expression
Otherwise we match wrong lines when memory stats contain PID.
Change-Id: I924c1b151ddaad8209445a514bf02a7af5d2e0e0
Reviewed-on: http://review.couchbase.org/79848
Reviewed-by: Pavel Paulau <dd88eded64e90046a680e3a6c0828ceb8fe8a0e7@gmail.com>
Tested-by: Pavel Paulau <dd88eded64e90046a680e3a6c0828ceb8fe8a0e7@gmail.com>
from cbagent.collectors.libstats.remotestats import RemoteStats, parallel_task
class PSStats(RemoteStats):
METRICS = (
("rss", 1024), # kB -> B
("vsize", 1024),
)
PS_CMD = "ps -eo pid,rss,vsize,comm | " \
"grep {} | grep -v grep | sort -n -k 2 | tail -n 1"
TOP_CMD = "top -b n2 -d1 -p {0} | grep {0}"
@parallel_task(server_side=True)
def get_server_samples(self, process):
return self.get_samples(process)
@parallel_task(server_side=False)
def get_client_samples(self, process):
return self.get_samples(process)
def get_samples(self, process):
samples = {}
stdout = self.run(self.PS_CMD.format(process))
if stdout:
for i, value in enumerate(stdout.split()[1:1 + len(self.METRICS)]):
metric, multiplier = self.METRICS[i]
title = "{}_{}".format(process, metric)
samples[title] = float(value) * multiplier
pid = stdout.split()[0]
else:
return samples
stdout = self.run(self.TOP_CMD.format(pid))
if stdout:
title = "{}_cpu".format(process)
samples[title] = float(stdout.split()[8])
return samples
|
3ceb39e4bbc4c5de7cbcce9c1ecfe94daa57266e | zhihudaily/models.py | zhihudaily/models.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from peewee import Model, IntegerField, CharField
from zhihudaily.configs import Config
class BaseModel(Model):
class Meta:
database = Config.database
class Zhihudaily(BaseModel):
date = IntegerField()
json_news = CharField()
display_date = CharField()
def create_tables():
database.connect()
database.create_tables([Zhihudaily])
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from peewee import Model, IntegerField, CharField
from zhihudaily.configs import Config
class BaseModel(Model):
class Meta:
database = Config.database
class Zhihudaily(BaseModel):
date = IntegerField()
json_news = CharField()
display_date = CharField()
def create_tables():
Config.database.connect()
Config.database.create_tables([Zhihudaily])
| Fix bug when create the datebase table | Fix bug when create the datebase table
| Python | mit | lord63/zhihudaily,lord63/zhihudaily,lord63/zhihudaily | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from peewee import Model, IntegerField, CharField
from zhihudaily.configs import Config
class BaseModel(Model):
class Meta:
database = Config.database
class Zhihudaily(BaseModel):
date = IntegerField()
json_news = CharField()
display_date = CharField()
def create_tables():
Config.database.connect()
Config.database.create_tables([Zhihudaily])
| Fix bug when create the datebase table
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from peewee import Model, IntegerField, CharField
from zhihudaily.configs import Config
class BaseModel(Model):
class Meta:
database = Config.database
class Zhihudaily(BaseModel):
date = IntegerField()
json_news = CharField()
display_date = CharField()
def create_tables():
database.connect()
database.create_tables([Zhihudaily])
|
ab41dfa53325ee90032c4ed1b2e6e3c90b808ecf | contact/views.py | contact/views.py | import binascii
from django.shortcuts import render, redirect, get_object_or_404
from django.contrib import messages
from django.db import IntegrityError, transaction
from django.conf import settings
from django.template.loader import render_to_string
from django.core.mail import send_mail
from django.core.urlresolvers import reverse
from django.http import Http404
from contact.forms import ContactForm
# Create your views here.
def contact(request):
form_init = {
'username': request.user.username,
'ip_address': request.META.get('REMOTE_ADDR'),
}
if request.method == 'POST':
form = ContactForm(request.POST)
if form.is_valid():
messages.success('Success!')
else:
messages.error('Boo! Hiss!')
else:
form = ContactForm(initial=form_init)
return render(request, 'contact/contact.html', {'form': form})
| import binascii
from django.shortcuts import render
from django.contrib import messages
from django.core.mail import send_mail
from contact.forms import ContactForm
# Create your views here.
def contact(request):
form_init = {
'username': request.user.username,
'ip_address': request.META.get('REMOTE_ADDR'),
'email': request.user.email,
}
if request.method == 'POST':
form = ContactForm(request.POST)
if form.is_valid():
messages.success(request, 'Success!')
else:
messages.error(request, 'Boo! Hiss!')
else:
form = ContactForm(initial=form_init)
return render(request, 'contact/contact.html', {'form': form})
| Clean up import, pre-fill email address | Clean up import, pre-fill email address
If user is logged in, their email address is automatically filled in for them.
Also fixed an error with the messages usage, namely I forgot to pass in the request object.
| Python | mit | Kromey/akwriters,Kromey/akwriters,Kromey/fbxnano,Kromey/fbxnano,Kromey/fbxnano,Kromey/fbxnano,Kromey/akwriters,Kromey/akwriters | import binascii
from django.shortcuts import render
from django.contrib import messages
from django.core.mail import send_mail
from contact.forms import ContactForm
# Create your views here.
def contact(request):
form_init = {
'username': request.user.username,
'ip_address': request.META.get('REMOTE_ADDR'),
'email': request.user.email,
}
if request.method == 'POST':
form = ContactForm(request.POST)
if form.is_valid():
messages.success(request, 'Success!')
else:
messages.error(request, 'Boo! Hiss!')
else:
form = ContactForm(initial=form_init)
return render(request, 'contact/contact.html', {'form': form})
| Clean up import, pre-fill email address
If user is logged in, their email address is automatically filled in for them.
Also fixed an error with the messages usage, namely I forgot to pass in the request object.
import binascii
from django.shortcuts import render, redirect, get_object_or_404
from django.contrib import messages
from django.db import IntegrityError, transaction
from django.conf import settings
from django.template.loader import render_to_string
from django.core.mail import send_mail
from django.core.urlresolvers import reverse
from django.http import Http404
from contact.forms import ContactForm
# Create your views here.
def contact(request):
form_init = {
'username': request.user.username,
'ip_address': request.META.get('REMOTE_ADDR'),
}
if request.method == 'POST':
form = ContactForm(request.POST)
if form.is_valid():
messages.success('Success!')
else:
messages.error('Boo! Hiss!')
else:
form = ContactForm(initial=form_init)
return render(request, 'contact/contact.html', {'form': form})
|
3c0ce6a3e4e16ff3991a838009c42efa2f5b237d | tviit/admin.py | tviit/admin.py | from django.contrib import admin
from .models import Tviit
admin.site.register(Tviit) | from django.contrib import admin
from .models import Tviit
class TviitAdmin(admin.ModelAdmin):
readonly_fields=('uuid',)
admin.site.register(Tviit, TviitAdmin) | Add uuid to be readable in Admin-panel | Add uuid to be readable in Admin-panel
| Python | mit | DeWaster/Tviserrys,DeWaster/Tviserrys | from django.contrib import admin
from .models import Tviit
class TviitAdmin(admin.ModelAdmin):
readonly_fields=('uuid',)
admin.site.register(Tviit, TviitAdmin) | Add uuid to be readable in Admin-panel
from django.contrib import admin
from .models import Tviit
admin.site.register(Tviit) |
d47abe23bf1c88520ee6ecf954dadaae42142366 | src/engine/SCons/Tool/javacTests.py | src/engine/SCons/Tool/javacTests.py | #
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import os
import unittest
import SCons.Tool.javac
class pathoptTestCase(unittest.TestCase):
def assert_pathopt(self, expect, path):
popt = SCons.Tool.javac.pathopt('-foopath', 'FOOPATH')
env = {'FOOPATH': path}
actual = popt(None, None, env, None)
self.assertEquals(expect, actual)
def test_unset(self):
self.assert_pathopt([], None)
self.assert_pathopt([], '')
def test_str(self):
self.assert_pathopt(['-foopath', '/foo/bar'],
'/foo/bar')
def test_list_str(self):
self.assert_pathopt(['-foopath', '/foo%s/bar' % os.pathsep],
['/foo', '/bar'])
def test_uses_pathsep(self):
save = os.pathsep
try:
os.pathsep = '!'
self.assert_pathopt(['-foopath', 'foo!bar'],
['foo', 'bar'])
finally:
os.pathsep = save
def test_default(self):
popt = SCons.Tool.javac.pathopt('-foopath', 'FOOPATH', default='DPATH')
env = {'FOOPATH': ['/foo', '/bar'],
'DPATH': '/baz'}
expect = ['-foopath', os.pathsep.join(['/foo', '/bar', '/baz'])]
actual = popt(None, None, env, None)
self.assertEquals(expect, actual)
if __name__ == "__main__":
unittest.main()
| Add unit tests for SCons.Tool.javac (pathopt class only). This carefully avoids known bugs in order to have passing tests. | Add unit tests for SCons.Tool.javac (pathopt class only).
This carefully avoids known bugs in order to have passing tests.
| Python | mit | Distrotech/scons,Distrotech/scons,Distrotech/scons,Distrotech/scons,Distrotech/scons | #
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import os
import unittest
import SCons.Tool.javac
class pathoptTestCase(unittest.TestCase):
def assert_pathopt(self, expect, path):
popt = SCons.Tool.javac.pathopt('-foopath', 'FOOPATH')
env = {'FOOPATH': path}
actual = popt(None, None, env, None)
self.assertEquals(expect, actual)
def test_unset(self):
self.assert_pathopt([], None)
self.assert_pathopt([], '')
def test_str(self):
self.assert_pathopt(['-foopath', '/foo/bar'],
'/foo/bar')
def test_list_str(self):
self.assert_pathopt(['-foopath', '/foo%s/bar' % os.pathsep],
['/foo', '/bar'])
def test_uses_pathsep(self):
save = os.pathsep
try:
os.pathsep = '!'
self.assert_pathopt(['-foopath', 'foo!bar'],
['foo', 'bar'])
finally:
os.pathsep = save
def test_default(self):
popt = SCons.Tool.javac.pathopt('-foopath', 'FOOPATH', default='DPATH')
env = {'FOOPATH': ['/foo', '/bar'],
'DPATH': '/baz'}
expect = ['-foopath', os.pathsep.join(['/foo', '/bar', '/baz'])]
actual = popt(None, None, env, None)
self.assertEquals(expect, actual)
if __name__ == "__main__":
unittest.main()
| Add unit tests for SCons.Tool.javac (pathopt class only).
This carefully avoids known bugs in order to have passing tests.
|
|
f9c7a911411429972929bb4372b370192bd4cf8a | altair/examples/interactive_layered_crossfilter.py | altair/examples/interactive_layered_crossfilter.py | """
Interactive Crossfilter
=======================
This example shows a multi-panel view of the same data, where you can interactively
select a portion of the data in any of the panels to highlight that portion in any
of the other panels.
"""
# category: interactive charts
import altair as alt
from vega_datasets import data
source = alt.UrlData(
data.flights_2k.url,
format={'parse': {'date': 'date'}}
)
brush = alt.selection(type='interval', encodings=['x'])
# Define the base chart, with the common parts of the
# background and highlights
base = alt.Chart().mark_bar().encode(
x=alt.X(alt.repeat('column'), type='quantitative', bin=alt.Bin(maxbins=20)),
y='count()'
).properties(
width=160,
height=130
)
# blue background with selection
background = base.add_selection(brush)
# yellow highlights on the transformed data
highlight = base.encode(
color=alt.value('goldenrod')
).transform_filter(brush)
# layer the two charts & repeat
alt.layer(
background,
highlight,
data=source
).transform_calculate(
"time",
"hours(datum.date)"
).repeat(column=["distance", "delay", "time"])
| """
Interactive Crossfilter
=======================
This example shows a multi-panel view of the same data, where you can interactively
select a portion of the data in any of the panels to highlight that portion in any
of the other panels.
"""
# category: interactive charts
import altair as alt
from vega_datasets import data
source = alt.UrlData(
data.flights_2k.url,
format={'parse': {'date': 'date'}}
)
brush = alt.selection(type='interval', encodings=['x'])
# Define the base chart, with the common parts of the
# background and highlights
base = alt.Chart().mark_bar().encode(
x=alt.X(alt.repeat('column'), type='quantitative', bin=alt.Bin(maxbins=20)),
y='count()'
).properties(
width=160,
height=130
)
# gray background with selection
background = base.encode(
color=alt.value('#ddd')
).add_selection(brush)
# blue highlights on the transformed data
highlight = base.transform_filter(brush)
# layer the two charts & repeat
alt.layer(
background,
highlight,
data=source
).transform_calculate(
"time",
"hours(datum.date)"
).repeat(column=["distance", "delay", "time"])
| Update crossfilter to gray/blue scheme | Update crossfilter to gray/blue scheme
Same as in https://vega.github.io/editor/#/examples/vega-lite/interactive_layered_crossfilter | Python | bsd-3-clause | altair-viz/altair,jakevdp/altair | """
Interactive Crossfilter
=======================
This example shows a multi-panel view of the same data, where you can interactively
select a portion of the data in any of the panels to highlight that portion in any
of the other panels.
"""
# category: interactive charts
import altair as alt
from vega_datasets import data
source = alt.UrlData(
data.flights_2k.url,
format={'parse': {'date': 'date'}}
)
brush = alt.selection(type='interval', encodings=['x'])
# Define the base chart, with the common parts of the
# background and highlights
base = alt.Chart().mark_bar().encode(
x=alt.X(alt.repeat('column'), type='quantitative', bin=alt.Bin(maxbins=20)),
y='count()'
).properties(
width=160,
height=130
)
# gray background with selection
background = base.encode(
color=alt.value('#ddd')
).add_selection(brush)
# blue highlights on the transformed data
highlight = base.transform_filter(brush)
# layer the two charts & repeat
alt.layer(
background,
highlight,
data=source
).transform_calculate(
"time",
"hours(datum.date)"
).repeat(column=["distance", "delay", "time"])
| Update crossfilter to gray/blue scheme
Same as in https://vega.github.io/editor/#/examples/vega-lite/interactive_layered_crossfilter
"""
Interactive Crossfilter
=======================
This example shows a multi-panel view of the same data, where you can interactively
select a portion of the data in any of the panels to highlight that portion in any
of the other panels.
"""
# category: interactive charts
import altair as alt
from vega_datasets import data
source = alt.UrlData(
data.flights_2k.url,
format={'parse': {'date': 'date'}}
)
brush = alt.selection(type='interval', encodings=['x'])
# Define the base chart, with the common parts of the
# background and highlights
base = alt.Chart().mark_bar().encode(
x=alt.X(alt.repeat('column'), type='quantitative', bin=alt.Bin(maxbins=20)),
y='count()'
).properties(
width=160,
height=130
)
# blue background with selection
background = base.add_selection(brush)
# yellow highlights on the transformed data
highlight = base.encode(
color=alt.value('goldenrod')
).transform_filter(brush)
# layer the two charts & repeat
alt.layer(
background,
highlight,
data=source
).transform_calculate(
"time",
"hours(datum.date)"
).repeat(column=["distance", "delay", "time"])
|
2efe1364a1e37f06dc26f1a3a122c544437d914e | collector/classes/service.py | collector/classes/service.py | # -*- coding: utf-8 -*-
import string
def sanitise_string(messy_str):
"""Whitelist characters in a string"""
valid_chars = ' {0}{1}'.format(string.ascii_letters, string.digits)
return u''.join(char for char in messy_str if char in valid_chars).strip()
class Service(object):
def __init__(self, numeric_id, detailed_data):
self.numeric_id = numeric_id
self.detailed_data = detailed_data
def attribute_exists(self, key):
return key in self.detailed_data
def get_datum(self, key):
datum = self.handle_bad_data(self.get(key))
return datum
def get(self, key):
return self.detailed_data[key]
def identifier(self):
"""Return a unique identifier for the service"""
return self.get('Slug')
def service_title(self):
return self.get('Name of service')
def abbreviated_department(self):
return self.get('Abbr')
def handle_bad_data(self, datum):
# TODO: Should we be more explicit about non-requested (***) data?
if datum == '' or datum == '-' or datum == '***':
return None
elif not isinstance(datum, (int, long, float, complex)):
# If the value we get from the spreadsheet is not numeric, send
# that to Backdrop as a null data point
return None
else:
return datum
| # -*- coding: utf-8 -*-
import string
def sanitise_string(messy_str):
"""Whitelist characters in a string"""
valid_chars = ' {0}{1}'.format(string.ascii_letters, string.digits)
return u''.join(char for char in messy_str if char in valid_chars).strip()
class Service(object):
def __init__(self, numeric_id, detailed_data):
self.numeric_id = numeric_id
self.detailed_data = detailed_data
def attribute_exists(self, key):
return key in self.detailed_data
def get_datum(self, key):
datum = self.handle_bad_data(self.get(key))
return datum
def get(self, key):
return self.detailed_data[key]
def identifier(self):
"""Return a unique identifier for the service"""
return self.get('Slug')
def service_title(self):
return self.get('Name of service')
def abbreviated_department(self):
return self.get('Abbr')
def handle_bad_data(self, datum):
# TODO: Should we be more explicit about non-requested (***) data?
if datum == '' or datum == '-' or datum == '***':
return None
elif not isinstance(datum, (int, long, float, complex)):
# If the value we get from the spreadsheet is not numeric, send
# that to Backdrop as a null data point
print "Data from the spreadsheet doesn't look numeric: <{0}> (from {1})".format(datum, self.identifier())
return None
else:
return datum
| Add log message if data we can't parse appears | Add log message if data we can't parse appears
If the datum isn't numeric and doesn't match the 'no data' pattern,
something has gone horribly wrong.
| Python | mit | alphagov/backdrop-transactions-explorer-collector,alphagov/backdrop-transactions-explorer-collector | # -*- coding: utf-8 -*-
import string
def sanitise_string(messy_str):
"""Whitelist characters in a string"""
valid_chars = ' {0}{1}'.format(string.ascii_letters, string.digits)
return u''.join(char for char in messy_str if char in valid_chars).strip()
class Service(object):
def __init__(self, numeric_id, detailed_data):
self.numeric_id = numeric_id
self.detailed_data = detailed_data
def attribute_exists(self, key):
return key in self.detailed_data
def get_datum(self, key):
datum = self.handle_bad_data(self.get(key))
return datum
def get(self, key):
return self.detailed_data[key]
def identifier(self):
"""Return a unique identifier for the service"""
return self.get('Slug')
def service_title(self):
return self.get('Name of service')
def abbreviated_department(self):
return self.get('Abbr')
def handle_bad_data(self, datum):
# TODO: Should we be more explicit about non-requested (***) data?
if datum == '' or datum == '-' or datum == '***':
return None
elif not isinstance(datum, (int, long, float, complex)):
# If the value we get from the spreadsheet is not numeric, send
# that to Backdrop as a null data point
print "Data from the spreadsheet doesn't look numeric: <{0}> (from {1})".format(datum, self.identifier())
return None
else:
return datum
| Add log message if data we can't parse appears
If the datum isn't numeric and doesn't match the 'no data' pattern,
something has gone horribly wrong.
# -*- coding: utf-8 -*-
import string
def sanitise_string(messy_str):
"""Whitelist characters in a string"""
valid_chars = ' {0}{1}'.format(string.ascii_letters, string.digits)
return u''.join(char for char in messy_str if char in valid_chars).strip()
class Service(object):
def __init__(self, numeric_id, detailed_data):
self.numeric_id = numeric_id
self.detailed_data = detailed_data
def attribute_exists(self, key):
return key in self.detailed_data
def get_datum(self, key):
datum = self.handle_bad_data(self.get(key))
return datum
def get(self, key):
return self.detailed_data[key]
def identifier(self):
"""Return a unique identifier for the service"""
return self.get('Slug')
def service_title(self):
return self.get('Name of service')
def abbreviated_department(self):
return self.get('Abbr')
def handle_bad_data(self, datum):
# TODO: Should we be more explicit about non-requested (***) data?
if datum == '' or datum == '-' or datum == '***':
return None
elif not isinstance(datum, (int, long, float, complex)):
# If the value we get from the spreadsheet is not numeric, send
# that to Backdrop as a null data point
return None
else:
return datum
|
85f8d0662901047115f2d852489a3a5be1a01226 | datafilters/views.py | datafilters/views.py | try:
from django.views.generic.base import ContextMixin as mixin_base
except ImportError:
mixin_base = object
__all__ = ('FilterFormMixin',)
class FilterFormMixin(mixin_base):
"""
Mixin that adds filtering behaviour for Class Based Views.
Changed in a way that can play nicely with other CBV simply by overriding the get_queryset(self) and
get_context_data(self, **kwargs) method.
"""
filter_form_cls = None
use_filter_chaining = False
def get_filter(self):
return self.filter_form_cls(self.request.GET,
runtime_context=self.get_runtime_context(),
use_filter_chaining=self.use_filter_chaining)
def get_queryset(self):
qs = super(FilterFormMixin, self).get_queryset()
qs = self.get_filter().filter(qs).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(FilterFormMixin, self).get_context_data(**kwargs)
context['filterform'] = self.get_filter()
return context
def get_runtime_context(self):
return {'user': self.request.user}
| from django.views.generic.list import MultipleObjectMixin
__all__ = ('FilterFormMixin',)
class FilterFormMixin(MultipleObjectMixin):
"""
Mixin that adds filtering behaviour for Class Based Views.
Changed in a way that can play nicely with other CBV simply by overriding the get_queryset(self) and
get_context_data(self, **kwargs) method.
"""
filter_form_cls = None
use_filter_chaining = False
def get_filter(self):
return self.filter_form_cls(self.request.GET,
runtime_context=self.get_runtime_context(),
use_filter_chaining=self.use_filter_chaining)
def get_queryset(self):
qs = super(FilterFormMixin, self).get_queryset()
qs = self.get_filter().filter(qs).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(FilterFormMixin, self).get_context_data(**kwargs)
context['filterform'] = self.get_filter()
return context
def get_runtime_context(self):
return {'user': self.request.user}
| Set base class for view mixin to MultipleObjectMixin | Set base class for view mixin to MultipleObjectMixin
| Python | mit | freevoid/django-datafilters,zorainc/django-datafilters,zorainc/django-datafilters | from django.views.generic.list import MultipleObjectMixin
__all__ = ('FilterFormMixin',)
class FilterFormMixin(MultipleObjectMixin):
"""
Mixin that adds filtering behaviour for Class Based Views.
Changed in a way that can play nicely with other CBV simply by overriding the get_queryset(self) and
get_context_data(self, **kwargs) method.
"""
filter_form_cls = None
use_filter_chaining = False
def get_filter(self):
return self.filter_form_cls(self.request.GET,
runtime_context=self.get_runtime_context(),
use_filter_chaining=self.use_filter_chaining)
def get_queryset(self):
qs = super(FilterFormMixin, self).get_queryset()
qs = self.get_filter().filter(qs).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(FilterFormMixin, self).get_context_data(**kwargs)
context['filterform'] = self.get_filter()
return context
def get_runtime_context(self):
return {'user': self.request.user}
| Set base class for view mixin to MultipleObjectMixin
try:
from django.views.generic.base import ContextMixin as mixin_base
except ImportError:
mixin_base = object
__all__ = ('FilterFormMixin',)
class FilterFormMixin(mixin_base):
"""
Mixin that adds filtering behaviour for Class Based Views.
Changed in a way that can play nicely with other CBV simply by overriding the get_queryset(self) and
get_context_data(self, **kwargs) method.
"""
filter_form_cls = None
use_filter_chaining = False
def get_filter(self):
return self.filter_form_cls(self.request.GET,
runtime_context=self.get_runtime_context(),
use_filter_chaining=self.use_filter_chaining)
def get_queryset(self):
qs = super(FilterFormMixin, self).get_queryset()
qs = self.get_filter().filter(qs).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(FilterFormMixin, self).get_context_data(**kwargs)
context['filterform'] = self.get_filter()
return context
def get_runtime_context(self):
return {'user': self.request.user}
|
8fb8a77dcad4aa657b7ddb637459a6279a21755e | alexandria/drivers.py | alexandria/drivers.py | # coding=utf-8
import types
import config
class Driver(object):
def __init__(self):
self.driver_type = self.__class__.__name__
# Get credentials from conf files for CMDB
pass
def get_driver_type(self):
return self.driver_type
def get_ci(self,ci):
pass
def push_ci(self,ci):
pass
class Itop(Driver):
def get_ci(self,ci):
print "Get from itop"
return True
def push_ci(self):
pass
class Redfish(Driver):
def get_ci(self,ci):
print "Get from redfish"
return True
class Ironic(Driver):
pass
class Mondorescue(Driver):
pass
class Fakecmdb(Driver):
pass
class Fakeprovider(Driver):
def get_ci(self,ci):
import app
# Simulate a driver that will provide Manager data.
# TODO a connect method must be implemented as
# Assuming the connection is ok.
# Now create a manager model from reference model.
ci.ci_type = "Manager"
ci.data = config.alexandria.model.Manager
class DriverCollection(list):
pass
| # coding=utf-8
import types
import pprint
import config
class Driver(object):
def __init__(self):
self.driver_type = self.__class__.__name__
# Get credentials from conf files for CMDB
pass
def get_driver_type(self):
return self.driver_type
def get_ci(self,ci):
pass
def push_ci(self,ci):
pass
class Itop(Driver):
def get_ci(self,ci):
print "Get from itop"
return True
def push_ci(self):
pass
class Redfish(Driver):
def get_ci(self,ci):
print "Get from redfish"
return True
class Ironic(Driver):
pass
class Mondorescue(Driver):
pass
class Fakecmdb(Driver):
pass
class Fakeprovider(Driver):
def get_ci(self,ci):
import app
# Simulate a driver that will provide Manager data.
# TODO a connect method must be implemented as
# Assuming the connection is ok.
# Now create a manager model from reference model.
ci.ci_type = "Manager"
ci.data = config.alexandria.model.Manager.copy()
if ci.data is config.alexandria.model.Manager:
print "identical"
pp = pprint.PrettyPrinter(indent=4)
pp.pprint(ci.data)
class DriverCollection(list):
pass
| Copy reference object example. This commit is just implemented to validate the medhod. But this is not conveniant, because reference models must not be changed. So next step is to make them private attributes. | Copy reference object example.
This commit is just implemented to validate the medhod.
But this is not conveniant, because reference models must not be
changed.
So next step is to make them private attributes.
| Python | apache-2.0 | sl4shme/alexandria,sl4shme/alexandria,sl4shme/alexandria,uggla/alexandria | # coding=utf-8
import types
import pprint
import config
class Driver(object):
def __init__(self):
self.driver_type = self.__class__.__name__
# Get credentials from conf files for CMDB
pass
def get_driver_type(self):
return self.driver_type
def get_ci(self,ci):
pass
def push_ci(self,ci):
pass
class Itop(Driver):
def get_ci(self,ci):
print "Get from itop"
return True
def push_ci(self):
pass
class Redfish(Driver):
def get_ci(self,ci):
print "Get from redfish"
return True
class Ironic(Driver):
pass
class Mondorescue(Driver):
pass
class Fakecmdb(Driver):
pass
class Fakeprovider(Driver):
def get_ci(self,ci):
import app
# Simulate a driver that will provide Manager data.
# TODO a connect method must be implemented as
# Assuming the connection is ok.
# Now create a manager model from reference model.
ci.ci_type = "Manager"
ci.data = config.alexandria.model.Manager.copy()
if ci.data is config.alexandria.model.Manager:
print "identical"
pp = pprint.PrettyPrinter(indent=4)
pp.pprint(ci.data)
class DriverCollection(list):
pass
| Copy reference object example.
This commit is just implemented to validate the medhod.
But this is not conveniant, because reference models must not be
changed.
So next step is to make them private attributes.
# coding=utf-8
import types
import config
class Driver(object):
def __init__(self):
self.driver_type = self.__class__.__name__
# Get credentials from conf files for CMDB
pass
def get_driver_type(self):
return self.driver_type
def get_ci(self,ci):
pass
def push_ci(self,ci):
pass
class Itop(Driver):
def get_ci(self,ci):
print "Get from itop"
return True
def push_ci(self):
pass
class Redfish(Driver):
def get_ci(self,ci):
print "Get from redfish"
return True
class Ironic(Driver):
pass
class Mondorescue(Driver):
pass
class Fakecmdb(Driver):
pass
class Fakeprovider(Driver):
def get_ci(self,ci):
import app
# Simulate a driver that will provide Manager data.
# TODO a connect method must be implemented as
# Assuming the connection is ok.
# Now create a manager model from reference model.
ci.ci_type = "Manager"
ci.data = config.alexandria.model.Manager
class DriverCollection(list):
pass
|
dfcac1268a46a879cb1c387fa8b33f450860038c | setup.py | setup.py | import os
import sys
import setuptools
if sys.version_info[0] < 3:
from codecs import open
def local_file(name):
return os.path.relpath(os.path.join(os.path.dirname(__file__), name))
README = local_file("README.rst")
with open(local_file("src/stratis_cli/_version.py")) as o:
exec(o.read())
setuptools.setup(
name='stratis-cli',
version=__version__,
author='Anne Mulhern',
author_email='amulhern@redhat.com',
description='prototype stratis cli',
long_description=open(README, encoding='utf-8').read(),
platforms=['Linux'],
license='GPL 2+',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries',
],
install_requires = [
'dbus-python'
],
package_dir={"": "src"},
packages=setuptools.find_packages("src"),
)
| import os
import sys
import setuptools
if sys.version_info[0] < 3:
from codecs import open
def local_file(name):
return os.path.relpath(os.path.join(os.path.dirname(__file__), name))
README = local_file("README.rst")
with open(local_file("src/stratis_cli/_version.py")) as o:
exec(o.read())
setuptools.setup(
name='stratis-cli',
version=__version__,
author='Anne Mulhern',
author_email='amulhern@redhat.com',
description='prototype stratis cli',
long_description=open(README, encoding='utf-8').read(),
platforms=['Linux'],
license='GPL 2+',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries',
],
install_requires = [
'dbus-python'
],
package_dir={"": "src"},
packages=setuptools.find_packages("src"),
scripts=['bin/stratis']
)
| Make stratis an installable script. | Make stratis an installable script.
Signed-off-by: mulhern <7b51bcf507bcd7afb72bf8663752c0ddbeb517f6@redhat.com>
| Python | apache-2.0 | stratis-storage/stratis-cli,stratis-storage/stratis-cli | import os
import sys
import setuptools
if sys.version_info[0] < 3:
from codecs import open
def local_file(name):
return os.path.relpath(os.path.join(os.path.dirname(__file__), name))
README = local_file("README.rst")
with open(local_file("src/stratis_cli/_version.py")) as o:
exec(o.read())
setuptools.setup(
name='stratis-cli',
version=__version__,
author='Anne Mulhern',
author_email='amulhern@redhat.com',
description='prototype stratis cli',
long_description=open(README, encoding='utf-8').read(),
platforms=['Linux'],
license='GPL 2+',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries',
],
install_requires = [
'dbus-python'
],
package_dir={"": "src"},
packages=setuptools.find_packages("src"),
scripts=['bin/stratis']
)
| Make stratis an installable script.
Signed-off-by: mulhern <7b51bcf507bcd7afb72bf8663752c0ddbeb517f6@redhat.com>
import os
import sys
import setuptools
if sys.version_info[0] < 3:
from codecs import open
def local_file(name):
return os.path.relpath(os.path.join(os.path.dirname(__file__), name))
README = local_file("README.rst")
with open(local_file("src/stratis_cli/_version.py")) as o:
exec(o.read())
setuptools.setup(
name='stratis-cli',
version=__version__,
author='Anne Mulhern',
author_email='amulhern@redhat.com',
description='prototype stratis cli',
long_description=open(README, encoding='utf-8').read(),
platforms=['Linux'],
license='GPL 2+',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries',
],
install_requires = [
'dbus-python'
],
package_dir={"": "src"},
packages=setuptools.find_packages("src"),
)
|
054b0bf9cacef4e55fb8167fb5f2611e2ce39b43 | hw3/hw3_2a.py | hw3/hw3_2a.py | import sympy
x1, x2 = sympy.symbols('x1 x2')
f = 100*(x2 - x1**2)**2 + (1-x1)**2
df_dx1 = sympy.diff(f,x1)
df_dx2 = sympy.diff(f,x2)
H = sympy.hessian(f, (x1, x2))
xs = sympy.solve([df_dx1, df_dx2], [x1, x2])
H_xs = H.subs([(x1,xs[0][0]), (x2,xs[0][1])])
flag = True
for i in H_xs.eigenvals().keys():
if i.evalf() <= 0:
flag = False
break
if flag:
print 'Stationary point'
else:
print 'Saddle point'
| import sympy
x1, x2 = sympy.symbols('x1 x2')
f = 100*(x2 - x1**2)**2 + (1-x1)**2
df_dx1 = sympy.diff(f,x1)
df_dx2 = sympy.diff(f,x2)
H = sympy.hessian(f, (x1, x2))
xs = sympy.solve([df_dx1, df_dx2], [x1, x2])
H_xs = H.subs([(x1,xs[0][0]), (x2,xs[0][1])])
lambda_xs = H_xs.eigenvals()
count = 0
for i in lambda_xs.keys():
if i.evalf() <= 0:
count += 1
if count == 0:
print 'Local minima'
elif count == len(lambda_xs.keys()):
print 'Lacal maxima'
else:
print 'Saddle point'
| Fix decision about minima, maxima and saddle point | Fix decision about minima, maxima and saddle point
| Python | bsd-2-clause | escorciav/amcs211,escorciav/amcs211 | import sympy
x1, x2 = sympy.symbols('x1 x2')
f = 100*(x2 - x1**2)**2 + (1-x1)**2
df_dx1 = sympy.diff(f,x1)
df_dx2 = sympy.diff(f,x2)
H = sympy.hessian(f, (x1, x2))
xs = sympy.solve([df_dx1, df_dx2], [x1, x2])
H_xs = H.subs([(x1,xs[0][0]), (x2,xs[0][1])])
lambda_xs = H_xs.eigenvals()
count = 0
for i in lambda_xs.keys():
if i.evalf() <= 0:
count += 1
if count == 0:
print 'Local minima'
elif count == len(lambda_xs.keys()):
print 'Lacal maxima'
else:
print 'Saddle point'
| Fix decision about minima, maxima and saddle point
import sympy
x1, x2 = sympy.symbols('x1 x2')
f = 100*(x2 - x1**2)**2 + (1-x1)**2
df_dx1 = sympy.diff(f,x1)
df_dx2 = sympy.diff(f,x2)
H = sympy.hessian(f, (x1, x2))
xs = sympy.solve([df_dx1, df_dx2], [x1, x2])
H_xs = H.subs([(x1,xs[0][0]), (x2,xs[0][1])])
flag = True
for i in H_xs.eigenvals().keys():
if i.evalf() <= 0:
flag = False
break
if flag:
print 'Stationary point'
else:
print 'Saddle point'
|
ea48f0fbe09fbcce843b6d380743ee65a31aa8f8 | app/evolver.py | app/evolver.py | import app.selector as selector
import app.applier as applier
from app.rules import rules
def rule_representation(rule):
'''Takes a Rule and returns a list of strings which represent it, in the
form [name, target, replacement, environment]'''
return [rule.name, rule.target, rule.replacement, rule.environments[0][1]]
def evolve(words, generations=5, rewrite_rules=[]):
'''Evolves the language specified by:
words: list [strings]
for the given number of generations. One sound change is applied per
generation.'''
changes = []
for _ in range(generations):
# Try to select a valid rule
try:
sound_change = selector.select_rule(words, rules)
# If there aren't any, finish early by breaking from the loop.
except ValueError:
break
changes.append(rule_representation(sound_change))
print(sound_change)
words = applier.apply_rule(words, sound_change)
return words, changes
| import app.selector as selector
import app.applier as applier
from app.rules import rules
def rule_representation(rule):
'''Takes a Rule and returns a list of strings which represent it, in the
form [name, target, replacement, environment]'''
return [rule.name, rule.target, rule.replacement, rule.environments[0][1]]
def rewrite(words, rewrite_rules, to='ipa'):
'''Rewrite a list of words according to a list of tuple rules of form
(plain, ipa), in direction given by target.'''
modified = []
for word in words:
for rule in rewrite_rules:
if to == 'ipa':
word = word.replace(rule[0], rule[1])
elif to == 'plain':
word = word.replace(rule[1], rule[0])
modified.append(word)
return modified
def evolve(words, generations=5, rewrite_rules=[]):
'''Evolves the language specified by:
words: list [strings]
for the given number of generations. One sound change is applied per
generation.'''
# Apply the given transcription rules
words = rewrite(words, rewrite_rules, to='ipa')
changes = []
for _ in range(generations):
# Try to select a valid rule
try:
sound_change = selector.select_rule(words, rules)
# If there aren't any, finish early by breaking from the loop.
except ValueError:
break
changes.append(rule_representation(sound_change))
print(sound_change)
words = applier.apply_rule(words, sound_change)
# Convert back to orthographic representation using the given transcription
# rules
words = rewrite(words, rewrite_rules, to='plain')
return words, changes
| Add transcription to and from IPA | Add transcription to and from IPA
| Python | mit | kdelwat/LangEvolve,kdelwat/LangEvolve,kdelwat/LangEvolve | import app.selector as selector
import app.applier as applier
from app.rules import rules
def rule_representation(rule):
'''Takes a Rule and returns a list of strings which represent it, in the
form [name, target, replacement, environment]'''
return [rule.name, rule.target, rule.replacement, rule.environments[0][1]]
def rewrite(words, rewrite_rules, to='ipa'):
'''Rewrite a list of words according to a list of tuple rules of form
(plain, ipa), in direction given by target.'''
modified = []
for word in words:
for rule in rewrite_rules:
if to == 'ipa':
word = word.replace(rule[0], rule[1])
elif to == 'plain':
word = word.replace(rule[1], rule[0])
modified.append(word)
return modified
def evolve(words, generations=5, rewrite_rules=[]):
'''Evolves the language specified by:
words: list [strings]
for the given number of generations. One sound change is applied per
generation.'''
# Apply the given transcription rules
words = rewrite(words, rewrite_rules, to='ipa')
changes = []
for _ in range(generations):
# Try to select a valid rule
try:
sound_change = selector.select_rule(words, rules)
# If there aren't any, finish early by breaking from the loop.
except ValueError:
break
changes.append(rule_representation(sound_change))
print(sound_change)
words = applier.apply_rule(words, sound_change)
# Convert back to orthographic representation using the given transcription
# rules
words = rewrite(words, rewrite_rules, to='plain')
return words, changes
| Add transcription to and from IPA
import app.selector as selector
import app.applier as applier
from app.rules import rules
def rule_representation(rule):
'''Takes a Rule and returns a list of strings which represent it, in the
form [name, target, replacement, environment]'''
return [rule.name, rule.target, rule.replacement, rule.environments[0][1]]
def evolve(words, generations=5, rewrite_rules=[]):
'''Evolves the language specified by:
words: list [strings]
for the given number of generations. One sound change is applied per
generation.'''
changes = []
for _ in range(generations):
# Try to select a valid rule
try:
sound_change = selector.select_rule(words, rules)
# If there aren't any, finish early by breaking from the loop.
except ValueError:
break
changes.append(rule_representation(sound_change))
print(sound_change)
words = applier.apply_rule(words, sound_change)
return words, changes
|
2ca3f28b4423fc8ecd19591a039b7a5c814ab25b | webserver/codemanagement/validators.py | webserver/codemanagement/validators.py | from django.core.validators import RegexValidator
sha1_validator = RegexValidator(regex="^[a-f0-9]{40}$",
message="Must be valid sha1 sum")
tag_validator = RegexValidator(regex="^[A-Za-z][\w\-\.]+[A-Za-z]$",
message="Must be letters and numbers" +
" separated by dashes, dots, or underscores")
| from django.core.validators import RegexValidator
from django.core.exceptions import ValidationError
from dulwich.repo import check_ref_format
import re
sha1_validator = RegexValidator(regex="^[a-f0-9]{40}$",
message="Must be valid sha1 sum")
tag_regex = re.compile(r'^[A-Za-z][\w\-\.]+[A-Za-z]$')
def tag_validator(value):
if not tag_regex.match(value):
msg = "Must be letters and numbers separated "
msg += "by dashes, dots, or underscores"
raise ValidationError(msg)
if not check_ref_format('refs/tags/' + value):
msg = "Invalid tag. Tags must adhere to ref formats defined here: "
msg += "https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html"
raise ValidationError(msg)
| Make dulwich check the tag. | Make dulwich check the tag.
| Python | bsd-3-clause | siggame/webserver,siggame/webserver,siggame/webserver | from django.core.validators import RegexValidator
from django.core.exceptions import ValidationError
from dulwich.repo import check_ref_format
import re
sha1_validator = RegexValidator(regex="^[a-f0-9]{40}$",
message="Must be valid sha1 sum")
tag_regex = re.compile(r'^[A-Za-z][\w\-\.]+[A-Za-z]$')
def tag_validator(value):
if not tag_regex.match(value):
msg = "Must be letters and numbers separated "
msg += "by dashes, dots, or underscores"
raise ValidationError(msg)
if not check_ref_format('refs/tags/' + value):
msg = "Invalid tag. Tags must adhere to ref formats defined here: "
msg += "https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html"
raise ValidationError(msg)
| Make dulwich check the tag.
from django.core.validators import RegexValidator
sha1_validator = RegexValidator(regex="^[a-f0-9]{40}$",
message="Must be valid sha1 sum")
tag_validator = RegexValidator(regex="^[A-Za-z][\w\-\.]+[A-Za-z]$",
message="Must be letters and numbers" +
" separated by dashes, dots, or underscores")
|
310a7fd5024e49f82504410bf40647b7c8d14207 | tricircle/tests/unit/common/test_utils.py | tricircle/tests/unit/common/test_utils.py |
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
from tricircle.common import exceptions
from tricircle.common import utils
class TricircleUtilsTestCase(unittest.TestCase):
def test_check_string_length(self):
self.assertIsNone(utils.check_string_length(
'test', 'name', max_len=255))
self.assertRaises(exceptions.InvalidInput,
utils.check_string_length,
11, 'name', max_len=255)
self.assertRaises(exceptions.InvalidInput,
utils.check_string_length,
'', 'name', min_len=1)
self.assertRaises(exceptions.InvalidInput,
utils.check_string_length,
'a' * 256, 'name', max_len=255)
| Add utils's check_string_length test case | Add utils's check_string_length test case
1. What is the problem
Tricircle does not have utils module's test case
2. What is the solution to the problem
Implement related test case
3. What the features need to be implemented to the Tricircle
No new features
Change-Id: I42e54cfe310349578ae0605789249acbc349f5e4
| Python | apache-2.0 | stackforge/tricircle,openstack/tricircle,openstack/tricircle,stackforge/tricircle |
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
from tricircle.common import exceptions
from tricircle.common import utils
class TricircleUtilsTestCase(unittest.TestCase):
def test_check_string_length(self):
self.assertIsNone(utils.check_string_length(
'test', 'name', max_len=255))
self.assertRaises(exceptions.InvalidInput,
utils.check_string_length,
11, 'name', max_len=255)
self.assertRaises(exceptions.InvalidInput,
utils.check_string_length,
'', 'name', min_len=1)
self.assertRaises(exceptions.InvalidInput,
utils.check_string_length,
'a' * 256, 'name', max_len=255)
| Add utils's check_string_length test case
1. What is the problem
Tricircle does not have utils module's test case
2. What is the solution to the problem
Implement related test case
3. What the features need to be implemented to the Tricircle
No new features
Change-Id: I42e54cfe310349578ae0605789249acbc349f5e4
|
|
d9024e4db0489b141fec9b96913c94a5d583f086 | backend/scripts/mktemplate.py | backend/scripts/mktemplate.py | #!/usr/bin/env python
import json
import rethinkdb as r
import sys
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
parser.add_option("-f", "--file", dest="filename",
help="json file", type="string")
(options, args) = parser.parse_args()
if options.filename is None:
print "You must specify json file"
sys.exit(1)
conn = r.connect('localhost', int(options.port), db='materialscommons')
json_data = open(options.filename)
data = json.load(json_data)
existing = r.table('templates').get(data['id']).run(conn)
if existing:
r.table('templates').get(data['id']).delete().run(conn)
r.table('templates').insert(data).run(conn)
print 'template deleted and re-inserted into the database'
else:
r.table('templates').insert(data).run(conn)
print 'template inserted into the database'
| #!/usr/bin/env python
import json
import rethinkdb as r
import sys
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
parser.add_option("-f", "--file", dest="filename",
help="json file", type="string")
(options, args) = parser.parse_args()
if options.filename is None:
print "You must specify json file"
sys.exit(1)
conn = r.connect('localhost', int(options.port), db='materialscommons')
json_data = open(options.filename)
print "Loading template file: %s" % (options.filename)
data = json.load(json_data)
existing = r.table('templates').get(data['id']).run(conn)
if existing:
r.table('templates').get(data['id']).delete().run(conn)
r.table('templates').insert(data).run(conn)
print 'template deleted and re-inserted into the database'
else:
r.table('templates').insert(data).run(conn)
print 'template inserted into the database'
| Update script to show which file it is loading. | Update script to show which file it is loading.
| Python | mit | materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org | #!/usr/bin/env python
import json
import rethinkdb as r
import sys
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
parser.add_option("-f", "--file", dest="filename",
help="json file", type="string")
(options, args) = parser.parse_args()
if options.filename is None:
print "You must specify json file"
sys.exit(1)
conn = r.connect('localhost', int(options.port), db='materialscommons')
json_data = open(options.filename)
print "Loading template file: %s" % (options.filename)
data = json.load(json_data)
existing = r.table('templates').get(data['id']).run(conn)
if existing:
r.table('templates').get(data['id']).delete().run(conn)
r.table('templates').insert(data).run(conn)
print 'template deleted and re-inserted into the database'
else:
r.table('templates').insert(data).run(conn)
print 'template inserted into the database'
| Update script to show which file it is loading.
#!/usr/bin/env python
import json
import rethinkdb as r
import sys
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
parser.add_option("-f", "--file", dest="filename",
help="json file", type="string")
(options, args) = parser.parse_args()
if options.filename is None:
print "You must specify json file"
sys.exit(1)
conn = r.connect('localhost', int(options.port), db='materialscommons')
json_data = open(options.filename)
data = json.load(json_data)
existing = r.table('templates').get(data['id']).run(conn)
if existing:
r.table('templates').get(data['id']).delete().run(conn)
r.table('templates').insert(data).run(conn)
print 'template deleted and re-inserted into the database'
else:
r.table('templates').insert(data).run(conn)
print 'template inserted into the database'
|
f325b02c66810cff9e3ace8b31e7f3a7b410342f | awx/wsgi.py | awx/wsgi.py | # Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
import logging
from django.core.wsgi import get_wsgi_application
from awx import prepare_env
from awx import __version__ as tower_version
"""
WSGI config for AWX project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
# Prepare the AWX environment.
prepare_env()
logger = logging.getLogger('awx.main.models.jobs')
try:
fd = open("/var/lib/awx/.tower_version", "r")
if fd.read().strip() != tower_version:
raise Exception()
except Exception:
logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
# Return the default Django WSGI application.
application = get_wsgi_application()
| # Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
import logging
from awx import __version__ as tower_version
# Prepare the AWX environment.
from awx import prepare_env
prepare_env()
from django.core.wsgi import get_wsgi_application
"""
WSGI config for AWX project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
logger = logging.getLogger('awx.main.models.jobs')
try:
fd = open("/var/lib/awx/.tower_version", "r")
if fd.read().strip() != tower_version:
raise Exception()
except Exception:
logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
# Return the default Django WSGI application.
application = get_wsgi_application()
| Fix import error by calling prepare_env first | Fix import error by calling prepare_env first
| Python | apache-2.0 | wwitzel3/awx,wwitzel3/awx,wwitzel3/awx,snahelou/awx,snahelou/awx,snahelou/awx,wwitzel3/awx,snahelou/awx | # Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
import logging
from awx import __version__ as tower_version
# Prepare the AWX environment.
from awx import prepare_env
prepare_env()
from django.core.wsgi import get_wsgi_application
"""
WSGI config for AWX project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
logger = logging.getLogger('awx.main.models.jobs')
try:
fd = open("/var/lib/awx/.tower_version", "r")
if fd.read().strip() != tower_version:
raise Exception()
except Exception:
logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
# Return the default Django WSGI application.
application = get_wsgi_application()
| Fix import error by calling prepare_env first
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
import logging
from django.core.wsgi import get_wsgi_application
from awx import prepare_env
from awx import __version__ as tower_version
"""
WSGI config for AWX project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
# Prepare the AWX environment.
prepare_env()
logger = logging.getLogger('awx.main.models.jobs')
try:
fd = open("/var/lib/awx/.tower_version", "r")
if fd.read().strip() != tower_version:
raise Exception()
except Exception:
logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
# Return the default Django WSGI application.
application = get_wsgi_application()
|
5dd5d4e6ac93ad3867f98bd35412de11dd6b1dc2 | tests/test_generalwords.py | tests/test_generalwords.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_generalwords
----------------------------------
All the tests for the generalword module. Simple module, simple tests.
"""
import unittest
from generalwords import get_word
class TestGeneralwords(unittest.TestCase):
def setUp(self):
pass
def test_get_word(self):
self.assertIsNotNone(get_word)
def test_get_word_is_somewhat_random(self):
sample_size = 100
words = {get_word() for i in range(sample_size)}
self.assertAlmostEqual(len(words), sample_size,
delta=int((sample_size * 0.1)))
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_generalwords
----------------------------------
All the tests for the generalword module. Simple module, simple tests.
"""
import unittest
from generalwords import get_word
class TestGeneralwords(unittest.TestCase):
def setUp(self):
pass
def test_get_word(self):
self.assertIsNotNone(get_word)
def test_get_word_is_somewhat_random(self):
sample_size = 100
words = set(get_word() for i in range(sample_size))
self.assertAlmostEqual(len(words), sample_size,
delta=int((sample_size * 0.1)))
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
| Remove set-comprehensions so that tests will pass on 2.6 | Remove set-comprehensions so that tests will pass on 2.6
| Python | bsd-3-clause | petrilli/generalwords | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_generalwords
----------------------------------
All the tests for the generalword module. Simple module, simple tests.
"""
import unittest
from generalwords import get_word
class TestGeneralwords(unittest.TestCase):
def setUp(self):
pass
def test_get_word(self):
self.assertIsNotNone(get_word)
def test_get_word_is_somewhat_random(self):
sample_size = 100
words = set(get_word() for i in range(sample_size))
self.assertAlmostEqual(len(words), sample_size,
delta=int((sample_size * 0.1)))
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
| Remove set-comprehensions so that tests will pass on 2.6
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_generalwords
----------------------------------
All the tests for the generalword module. Simple module, simple tests.
"""
import unittest
from generalwords import get_word
class TestGeneralwords(unittest.TestCase):
def setUp(self):
pass
def test_get_word(self):
self.assertIsNotNone(get_word)
def test_get_word_is_somewhat_random(self):
sample_size = 100
words = {get_word() for i in range(sample_size)}
self.assertAlmostEqual(len(words), sample_size,
delta=int((sample_size * 0.1)))
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
7e166ba983bd1470f6eac6776107a64539c38581 | animal_spharm/test/test_animal_spharm.py | animal_spharm/test/test_animal_spharm.py | import numpy as np
import pytest
import xray
from aospy_user import SpharmInterface
@pytest.fixture
def compute_vrtdiv(u, v):
sphint = SpharmInterface(u, v)
sphint.make_vectorwind()
sphint.make_spharmt()
vort, divg = sphint.vectorwind.vrtdiv()
return sphint.to_xray(vort), sphint.to_xray(divg)
def test_vrtdiv():
path = ('/archive/Spencer.Hill/am2/am2clim_reyoi/gfdl.ncrc2-default-prod/'
'pp/atmos_level/ts/monthly/1yr/atmos_level.198301-198312.')
# Vertically defined, sigma levels.
u_arr = xray.open_dataset(path + 'ucomp.nc').ucomp
v_arr = xray.open_dataset(path + 'vcomp.nc').vcomp
vort, divg = compute_vrtdiv(u_arr, v_arr)
assert vort.shape == u_arr.shape
assert divg.shape == u_arr.shape
np.testing.assert_array_equal(u_arr.lat, vort.lat)
np.testing.assert_array_equal(u_arr.lon, vort.lon)
np.testing.assert_array_equal(u_arr.time, vort.time)
np.testing.assert_array_equal(u_arr.pfull, vort.pfull)
# Not vertically defined.
u0 = u_arr[:,0]
v0 = v_arr[:,0]
vort0, divg0 = compute_vrtdiv(u0, v0)
assert vort0.shape == u0.shape
assert divg0.shape == u0.shape
# Dummy case: zeros everywhere
u_arr_zeros = xray.DataArray(np.zeros_like(u_arr.values), dims=u_arr.dims,
coords=u_arr.coords)
v_arr_zeros = u_arr_zeros.copy()
vort_zeros, divg_zeros = compute_vrtdiv(u_arr_zeros, v_arr_zeros)
assert not vort_zeros.any()
assert not divg_zeros.any()
| Copy over tests from aospy-obj-lib | Copy over tests from aospy-obj-lib
| Python | apache-2.0 | spencerahill/animal-spharm | import numpy as np
import pytest
import xray
from aospy_user import SpharmInterface
@pytest.fixture
def compute_vrtdiv(u, v):
sphint = SpharmInterface(u, v)
sphint.make_vectorwind()
sphint.make_spharmt()
vort, divg = sphint.vectorwind.vrtdiv()
return sphint.to_xray(vort), sphint.to_xray(divg)
def test_vrtdiv():
path = ('/archive/Spencer.Hill/am2/am2clim_reyoi/gfdl.ncrc2-default-prod/'
'pp/atmos_level/ts/monthly/1yr/atmos_level.198301-198312.')
# Vertically defined, sigma levels.
u_arr = xray.open_dataset(path + 'ucomp.nc').ucomp
v_arr = xray.open_dataset(path + 'vcomp.nc').vcomp
vort, divg = compute_vrtdiv(u_arr, v_arr)
assert vort.shape == u_arr.shape
assert divg.shape == u_arr.shape
np.testing.assert_array_equal(u_arr.lat, vort.lat)
np.testing.assert_array_equal(u_arr.lon, vort.lon)
np.testing.assert_array_equal(u_arr.time, vort.time)
np.testing.assert_array_equal(u_arr.pfull, vort.pfull)
# Not vertically defined.
u0 = u_arr[:,0]
v0 = v_arr[:,0]
vort0, divg0 = compute_vrtdiv(u0, v0)
assert vort0.shape == u0.shape
assert divg0.shape == u0.shape
# Dummy case: zeros everywhere
u_arr_zeros = xray.DataArray(np.zeros_like(u_arr.values), dims=u_arr.dims,
coords=u_arr.coords)
v_arr_zeros = u_arr_zeros.copy()
vort_zeros, divg_zeros = compute_vrtdiv(u_arr_zeros, v_arr_zeros)
assert not vort_zeros.any()
assert not divg_zeros.any()
| Copy over tests from aospy-obj-lib
|
|
a0392d693c238cb4548fa6aa2b7f10b6c818b648 | currencies/utils.py | currencies/utils.py | from decimal import *
from django.conf import settings
from currencies.models import Currency
def calculate_price(price, currency):
try:
factor = Currency.objects.get(code__exact=currency).factor
except Currency.DoesNotExist:
if settings.DEBUG:
raise Currency.DoesNotExist
else:
factor = Decimal('0.0')
new_price = Decimal(price) * factor
return new_price.quantize(Decimal('.01'), rounding=ROUND_UP)
| from decimal import *
from django.conf import settings
from currencies.models import Currency
def calculate_price(price, currency):
try:
factor = Currency.objects.get(code__exact=currency).factor
except Currency.DoesNotExist:
if settings.DEBUG:
raise
else:
factor = Decimal('0.0')
new_price = Decimal(price) * factor
return new_price.quantize(Decimal('.01'), rounding=ROUND_UP)
| Simplify a raise in debug mode | Simplify a raise in debug mode
| Python | bsd-3-clause | marcosalcazar/django-currencies,jmp0xf/django-currencies,bashu/django-simple-currencies,mysociety/django-currencies,mysociety/django-currencies,racitup/django-currencies,panosl/django-currencies,ydaniv/django-currencies,pathakamit88/django-currencies,marcosalcazar/django-currencies,racitup/django-currencies,bashu/django-simple-currencies,ydaniv/django-currencies,pathakamit88/django-currencies,panosl/django-currencies,barseghyanartur/django-currencies | from decimal import *
from django.conf import settings
from currencies.models import Currency
def calculate_price(price, currency):
try:
factor = Currency.objects.get(code__exact=currency).factor
except Currency.DoesNotExist:
if settings.DEBUG:
raise
else:
factor = Decimal('0.0')
new_price = Decimal(price) * factor
return new_price.quantize(Decimal('.01'), rounding=ROUND_UP)
| Simplify a raise in debug mode
from decimal import *
from django.conf import settings
from currencies.models import Currency
def calculate_price(price, currency):
try:
factor = Currency.objects.get(code__exact=currency).factor
except Currency.DoesNotExist:
if settings.DEBUG:
raise Currency.DoesNotExist
else:
factor = Decimal('0.0')
new_price = Decimal(price) * factor
return new_price.quantize(Decimal('.01'), rounding=ROUND_UP)
|
cbf4d85092232051cd7643d74e003b86f24ba571 | feincms/templatetags/feincms_admin_tags.py | feincms/templatetags/feincms_admin_tags.py | from django import template
register = template.Library()
@register.filter
def post_process_fieldsets(fieldset):
"""
Removes a few fields from FeinCMS admin inlines, those being
``id``, ``DELETE`` and ``ORDER`` currently.
"""
process = fieldset.model_admin.verbose_name_plural.startswith('Feincms_Inline:')
if process:
# Exclude special fields and the primary key
excluded_fields = ('id', 'DELETE', 'ORDER')
fieldset.fields = [f for f in fieldset.form.fields.keys() if f not in excluded_fields]
for line in fieldset:
yield line
| from django import template
register = template.Library()
@register.filter
def post_process_fieldsets(fieldset):
"""
Removes a few fields from FeinCMS admin inlines, those being
``id``, ``DELETE`` and ``ORDER`` currently.
"""
excluded_fields = ('id', 'DELETE', 'ORDER')
fieldset.fields = [f for f in fieldset.form.fields.keys() if f not in excluded_fields]
for line in fieldset:
yield line
| Fix post_process_fieldsets: This filter is only called for FeinCMS inlines anyway | Fix post_process_fieldsets: This filter is only called for FeinCMS inlines anyway
Thanks to mjl for the report and help in fixing the issue.
| Python | bsd-3-clause | matthiask/django-content-editor,matthiask/feincms2-content,joshuajonah/feincms,matthiask/feincms2-content,matthiask/django-content-editor,mjl/feincms,feincms/feincms,nickburlett/feincms,mjl/feincms,nickburlett/feincms,feincms/feincms,pjdelport/feincms,pjdelport/feincms,mjl/feincms,nickburlett/feincms,michaelkuty/feincms,feincms/feincms,pjdelport/feincms,michaelkuty/feincms,michaelkuty/feincms,michaelkuty/feincms,joshuajonah/feincms,matthiask/django-content-editor,matthiask/feincms2-content,joshuajonah/feincms,matthiask/django-content-editor,nickburlett/feincms,joshuajonah/feincms | from django import template
register = template.Library()
@register.filter
def post_process_fieldsets(fieldset):
"""
Removes a few fields from FeinCMS admin inlines, those being
``id``, ``DELETE`` and ``ORDER`` currently.
"""
excluded_fields = ('id', 'DELETE', 'ORDER')
fieldset.fields = [f for f in fieldset.form.fields.keys() if f not in excluded_fields]
for line in fieldset:
yield line
| Fix post_process_fieldsets: This filter is only called for FeinCMS inlines anyway
Thanks to mjl for the report and help in fixing the issue.
from django import template
register = template.Library()
@register.filter
def post_process_fieldsets(fieldset):
"""
Removes a few fields from FeinCMS admin inlines, those being
``id``, ``DELETE`` and ``ORDER`` currently.
"""
process = fieldset.model_admin.verbose_name_plural.startswith('Feincms_Inline:')
if process:
# Exclude special fields and the primary key
excluded_fields = ('id', 'DELETE', 'ORDER')
fieldset.fields = [f for f in fieldset.form.fields.keys() if f not in excluded_fields]
for line in fieldset:
yield line
|
223b58cb0f9c63543a4d23f75db4450ce93ab86d | readthedocs/builds/forms.py | readthedocs/builds/forms.py | import logging
from django import forms
from readthedocs.builds.models import VersionAlias, Version
from readthedocs.core.utils import trigger_build
from readthedocs.projects.models import Project
from readthedocs.projects.tasks import clear_artifacts
log = logging.getLogger(__name__)
class AliasForm(forms.ModelForm):
class Meta:
model = VersionAlias
fields = (
'project',
'from_slug',
'to_slug',
'largest',
)
def __init__(self, instance=None, *args, **kwargs):
super(AliasForm, self).__init__(instance=instance, *args, **kwargs)
if instance:
self.fields['project'].queryset = (Project.objects
.filter(pk=instance.project.pk))
class VersionForm(forms.ModelForm):
class Meta:
model = Version
fields = ['active', 'privacy_level', 'tags']
def save(self, *args, **kwargs):
obj = super(VersionForm, self).save(*args, **kwargs)
if obj.active and not obj.built and not obj.uploaded:
trigger_build(project=obj.project, version=obj)
def clean(self):
cleaned_data = super(VersionForm, self).clean()
if self.instance.pk is not None: # new instance only
if self.instance.active is True and cleaned_data['active'] is False:
log.info('Removing files for version %s' % self.instance.slug)
clear_artifacts.delay(version_pk=[self.instance.pk])
return cleaned_data
| import logging
from django import forms
from readthedocs.builds.models import VersionAlias, Version
from readthedocs.core.utils import trigger_build
from readthedocs.projects.models import Project
from readthedocs.projects.tasks import clear_artifacts
log = logging.getLogger(__name__)
class AliasForm(forms.ModelForm):
class Meta:
model = VersionAlias
fields = (
'project',
'from_slug',
'to_slug',
'largest',
)
def __init__(self, instance=None, *args, **kwargs):
super(AliasForm, self).__init__(instance=instance, *args, **kwargs)
if instance:
self.fields['project'].queryset = (Project.objects
.filter(pk=instance.project.pk))
class VersionForm(forms.ModelForm):
class Meta:
model = Version
fields = ['active', 'privacy_level', 'tags']
def save(self, *args, **kwargs):
obj = super(VersionForm, self).save(*args, **kwargs)
if obj.active and not obj.built and not obj.uploaded:
trigger_build(project=obj.project, version=obj)
def clean(self):
cleaned_data = super(VersionForm, self).clean()
if self.instance.pk is not None: # new instance only
if self.instance.active is True and cleaned_data['active'] is False:
log.info('Removing files for version %s' % self.instance.slug)
clear_artifacts.delay(version_pk=self.instance.pk)
self.instance.built = False
return cleaned_data
| Handle built state tracking on versions | Handle built state tracking on versions | Python | mit | espdev/readthedocs.org,pombredanne/readthedocs.org,espdev/readthedocs.org,stevepiercy/readthedocs.org,rtfd/readthedocs.org,davidfischer/readthedocs.org,istresearch/readthedocs.org,davidfischer/readthedocs.org,rtfd/readthedocs.org,safwanrahman/readthedocs.org,davidfischer/readthedocs.org,rtfd/readthedocs.org,tddv/readthedocs.org,istresearch/readthedocs.org,pombredanne/readthedocs.org,stevepiercy/readthedocs.org,espdev/readthedocs.org,safwanrahman/readthedocs.org,rtfd/readthedocs.org,istresearch/readthedocs.org,safwanrahman/readthedocs.org,tddv/readthedocs.org,espdev/readthedocs.org,tddv/readthedocs.org,davidfischer/readthedocs.org,stevepiercy/readthedocs.org,pombredanne/readthedocs.org,safwanrahman/readthedocs.org,istresearch/readthedocs.org,stevepiercy/readthedocs.org,espdev/readthedocs.org | import logging
from django import forms
from readthedocs.builds.models import VersionAlias, Version
from readthedocs.core.utils import trigger_build
from readthedocs.projects.models import Project
from readthedocs.projects.tasks import clear_artifacts
log = logging.getLogger(__name__)
class AliasForm(forms.ModelForm):
class Meta:
model = VersionAlias
fields = (
'project',
'from_slug',
'to_slug',
'largest',
)
def __init__(self, instance=None, *args, **kwargs):
super(AliasForm, self).__init__(instance=instance, *args, **kwargs)
if instance:
self.fields['project'].queryset = (Project.objects
.filter(pk=instance.project.pk))
class VersionForm(forms.ModelForm):
class Meta:
model = Version
fields = ['active', 'privacy_level', 'tags']
def save(self, *args, **kwargs):
obj = super(VersionForm, self).save(*args, **kwargs)
if obj.active and not obj.built and not obj.uploaded:
trigger_build(project=obj.project, version=obj)
def clean(self):
cleaned_data = super(VersionForm, self).clean()
if self.instance.pk is not None: # new instance only
if self.instance.active is True and cleaned_data['active'] is False:
log.info('Removing files for version %s' % self.instance.slug)
clear_artifacts.delay(version_pk=self.instance.pk)
self.instance.built = False
return cleaned_data
| Handle built state tracking on versions
import logging
from django import forms
from readthedocs.builds.models import VersionAlias, Version
from readthedocs.core.utils import trigger_build
from readthedocs.projects.models import Project
from readthedocs.projects.tasks import clear_artifacts
log = logging.getLogger(__name__)
class AliasForm(forms.ModelForm):
class Meta:
model = VersionAlias
fields = (
'project',
'from_slug',
'to_slug',
'largest',
)
def __init__(self, instance=None, *args, **kwargs):
super(AliasForm, self).__init__(instance=instance, *args, **kwargs)
if instance:
self.fields['project'].queryset = (Project.objects
.filter(pk=instance.project.pk))
class VersionForm(forms.ModelForm):
class Meta:
model = Version
fields = ['active', 'privacy_level', 'tags']
def save(self, *args, **kwargs):
obj = super(VersionForm, self).save(*args, **kwargs)
if obj.active and not obj.built and not obj.uploaded:
trigger_build(project=obj.project, version=obj)
def clean(self):
cleaned_data = super(VersionForm, self).clean()
if self.instance.pk is not None: # new instance only
if self.instance.active is True and cleaned_data['active'] is False:
log.info('Removing files for version %s' % self.instance.slug)
clear_artifacts.delay(version_pk=[self.instance.pk])
return cleaned_data
|
0527cea9db518b5b8fb63fe2bb3792a806fa421d | src/python/setup.py | src/python/setup.py | __author__ = 'tom'
from setuptools import setup
# Makes use of the sphinx and sphinx-pypi-upload packages. To build for local development
# use 'python setup.py develop'. To upload a version to pypi use 'python setup.py clean sdist upload'.
# To build docs use 'python setup.py build_sphinx' and to upload docs to pythonhosted.org use
# 'python setup.py upload_sphinx'. Both uploads require 'python setup.py register' to be run, and will
# only work for Tom as they need the pypi account credentials.
setup(
name='approxeng.input',
version='0.6',
description='Python game controller support using evDev for Raspberry Pi and other Linux systems',
classifiers=['Programming Language :: Python :: 2.7'],
url='https://github.com/ApproxEng/approxeng.input/',
author='Tom Oinn',
author_email='tomoinn@gmail.com',
license='ASL2.0',
packages=['approxeng.input'],
install_requires=['evdev==0.5.0'],
include_package_data=True,
test_suite='nose.collector',
tests_require=['nose'],
dependency_links=[],
zip_safe=False)
| __author__ = 'tom'
from setuptools import setup
# Makes use of the sphinx and sphinx-pypi-upload packages. To build for local development
# use 'python setup.py develop'. To upload a version to pypi use 'python setup.py clean sdist upload'.
# To build docs use 'python setup.py build_sphinx' and to upload docs to pythonhosted.org use
# 'python setup.py upload_sphinx'. Both uploads require 'python setup.py register' to be run, and will
# only work for Tom as they need the pypi account credentials.
setup(
name='approxeng.input',
version='0.6',
description='Python game controller support using evDev for Raspberry Pi and other Linux systems',
classifiers=['Programming Language :: Python :: 2.7'],
url='https://github.com/ApproxEng/approxeng.input/',
author='Tom Oinn',
author_email='tomoinn@gmail.com',
license='ASL2.0',
packages=['approxeng.input'],
install_requires=['evdev==0.6.4'],
include_package_data=True,
test_suite='nose.collector',
tests_require=['nose'],
dependency_links=[],
zip_safe=False)
| Change to use evdev 0.6.4 | Change to use evdev 0.6.4
Signed-off-by: tom <3abfbc22eec6ecd173d744487905db1fa6a502d5@gmail.com>
| Python | apache-2.0 | ApproxEng/approxeng.input | __author__ = 'tom'
from setuptools import setup
# Makes use of the sphinx and sphinx-pypi-upload packages. To build for local development
# use 'python setup.py develop'. To upload a version to pypi use 'python setup.py clean sdist upload'.
# To build docs use 'python setup.py build_sphinx' and to upload docs to pythonhosted.org use
# 'python setup.py upload_sphinx'. Both uploads require 'python setup.py register' to be run, and will
# only work for Tom as they need the pypi account credentials.
setup(
name='approxeng.input',
version='0.6',
description='Python game controller support using evDev for Raspberry Pi and other Linux systems',
classifiers=['Programming Language :: Python :: 2.7'],
url='https://github.com/ApproxEng/approxeng.input/',
author='Tom Oinn',
author_email='tomoinn@gmail.com',
license='ASL2.0',
packages=['approxeng.input'],
install_requires=['evdev==0.6.4'],
include_package_data=True,
test_suite='nose.collector',
tests_require=['nose'],
dependency_links=[],
zip_safe=False)
| Change to use evdev 0.6.4
Signed-off-by: tom <3abfbc22eec6ecd173d744487905db1fa6a502d5@gmail.com>
__author__ = 'tom'
from setuptools import setup
# Makes use of the sphinx and sphinx-pypi-upload packages. To build for local development
# use 'python setup.py develop'. To upload a version to pypi use 'python setup.py clean sdist upload'.
# To build docs use 'python setup.py build_sphinx' and to upload docs to pythonhosted.org use
# 'python setup.py upload_sphinx'. Both uploads require 'python setup.py register' to be run, and will
# only work for Tom as they need the pypi account credentials.
setup(
name='approxeng.input',
version='0.6',
description='Python game controller support using evDev for Raspberry Pi and other Linux systems',
classifiers=['Programming Language :: Python :: 2.7'],
url='https://github.com/ApproxEng/approxeng.input/',
author='Tom Oinn',
author_email='tomoinn@gmail.com',
license='ASL2.0',
packages=['approxeng.input'],
install_requires=['evdev==0.5.0'],
include_package_data=True,
test_suite='nose.collector',
tests_require=['nose'],
dependency_links=[],
zip_safe=False)
|
ae600fdf602d12f1a2f8082df49693117fba2596 | test/test_cxx_imports.py | test/test_cxx_imports.py | def test_cxx_import():
from microscopes.mixture.model import \
state, fixed_state, \
bind, bind_fixed, \
initialize, initialize_fixed, \
deserialize, deserialize_fixed
assert state and fixed_state
assert bind and bind_fixed
assert initialize and initialize_fixed
assert deserialize and deserialize_fixed
| def test_cxx_import():
from microscopes.mixture.model import \
state, \
bind, \
initialize, \
deserialize
assert state
assert bind
assert initialize
assert deserialize
| Remove fixed references from test_cxx.py | Remove fixed references from test_cxx.py
| Python | bsd-3-clause | datamicroscopes/mixturemodel,datamicroscopes/mixturemodel,datamicroscopes/mixturemodel | def test_cxx_import():
from microscopes.mixture.model import \
state, \
bind, \
initialize, \
deserialize
assert state
assert bind
assert initialize
assert deserialize
| Remove fixed references from test_cxx.py
def test_cxx_import():
from microscopes.mixture.model import \
state, fixed_state, \
bind, bind_fixed, \
initialize, initialize_fixed, \
deserialize, deserialize_fixed
assert state and fixed_state
assert bind and bind_fixed
assert initialize and initialize_fixed
assert deserialize and deserialize_fixed
|
0f40869157ef56df0ff306fb510be4401b5cbe5d | test/low_level/test_frame_identifiers.py | test/low_level/test_frame_identifiers.py | import inspect
from pyinstrument.low_level import stat_profile as stat_profile_c
from pyinstrument.low_level import stat_profile_python
class AClass:
def get_frame_identfier_for_a_method(self, getter_function):
frame = inspect.currentframe()
assert frame
return getter_function(frame)
@classmethod
def get_frame_identfier_for_a_class_method(cls, getter_function):
frame = inspect.currentframe()
assert frame
return getter_function(frame)
def test_frame_identifier():
frame = inspect.currentframe()
assert frame
assert stat_profile_c.get_frame_identifier(frame) == stat_profile_python.get_frame_identifier(
frame
)
def test_frame_identifier_for_method():
instance = AClass()
assert instance.get_frame_identfier_for_a_method(
stat_profile_c.get_frame_identifier
) == instance.get_frame_identfier_for_a_method(stat_profile_python.get_frame_identifier)
| import inspect
from pyinstrument.low_level import stat_profile as stat_profile_c
from pyinstrument.low_level import stat_profile_python
class AClass:
def get_frame_identifier_for_a_method(self, getter_function):
frame = inspect.currentframe()
assert frame
return getter_function(frame)
def get_frame_identifier_with_cell_variable(self, getter_function):
frame = inspect.currentframe()
assert frame
def an_inner_function():
# reference self to make it a cell variable
if self:
pass
return getter_function(frame)
@classmethod
def get_frame_identifier_for_a_class_method(cls, getter_function):
frame = inspect.currentframe()
assert frame
return getter_function(frame)
def test_frame_identifier():
frame = inspect.currentframe()
assert frame
assert stat_profile_c.get_frame_identifier(frame) == stat_profile_python.get_frame_identifier(
frame
)
def test_frame_identifiers():
instance = AClass()
test_functions = [
instance.get_frame_identifier_for_a_method,
AClass.get_frame_identifier_for_a_class_method,
instance.get_frame_identifier_with_cell_variable,
]
for test_function in test_functions:
assert test_function(stat_profile_c.get_frame_identifier) == test_function(
stat_profile_python.get_frame_identifier
)
| Add test for a cell variable | Add test for a cell variable
| Python | bsd-3-clause | joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument | import inspect
from pyinstrument.low_level import stat_profile as stat_profile_c
from pyinstrument.low_level import stat_profile_python
class AClass:
def get_frame_identifier_for_a_method(self, getter_function):
frame = inspect.currentframe()
assert frame
return getter_function(frame)
def get_frame_identifier_with_cell_variable(self, getter_function):
frame = inspect.currentframe()
assert frame
def an_inner_function():
# reference self to make it a cell variable
if self:
pass
return getter_function(frame)
@classmethod
def get_frame_identifier_for_a_class_method(cls, getter_function):
frame = inspect.currentframe()
assert frame
return getter_function(frame)
def test_frame_identifier():
frame = inspect.currentframe()
assert frame
assert stat_profile_c.get_frame_identifier(frame) == stat_profile_python.get_frame_identifier(
frame
)
def test_frame_identifiers():
instance = AClass()
test_functions = [
instance.get_frame_identifier_for_a_method,
AClass.get_frame_identifier_for_a_class_method,
instance.get_frame_identifier_with_cell_variable,
]
for test_function in test_functions:
assert test_function(stat_profile_c.get_frame_identifier) == test_function(
stat_profile_python.get_frame_identifier
)
| Add test for a cell variable
import inspect
from pyinstrument.low_level import stat_profile as stat_profile_c
from pyinstrument.low_level import stat_profile_python
class AClass:
def get_frame_identfier_for_a_method(self, getter_function):
frame = inspect.currentframe()
assert frame
return getter_function(frame)
@classmethod
def get_frame_identfier_for_a_class_method(cls, getter_function):
frame = inspect.currentframe()
assert frame
return getter_function(frame)
def test_frame_identifier():
frame = inspect.currentframe()
assert frame
assert stat_profile_c.get_frame_identifier(frame) == stat_profile_python.get_frame_identifier(
frame
)
def test_frame_identifier_for_method():
instance = AClass()
assert instance.get_frame_identfier_for_a_method(
stat_profile_c.get_frame_identifier
) == instance.get_frame_identfier_for_a_method(stat_profile_python.get_frame_identifier)
|
0983715cd2ee4eb3ac411e1ff24fa2e49df54eb5 | src/manage.py | src/manage.py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
# Browsers doesn't use content negotiation using ETags with HTTP 1.0 servers
# Force Django to use HTTP 1.1 when using the runserver command
from wsgiref import simple_server
simple_server.ServerHandler.http_version = "1.1"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| Allow to tests ETags when using the runserver command | Allow to tests ETags when using the runserver command
| Python | agpl-3.0 | jpajuelo/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
# Browsers doesn't use content negotiation using ETags with HTTP 1.0 servers
# Force Django to use HTTP 1.1 when using the runserver command
from wsgiref import simple_server
simple_server.ServerHandler.http_version = "1.1"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| Allow to tests ETags when using the runserver command
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
10ddda3e230aa72889c81cd69792122b265010fe | rental/views/rental_state_view.py | rental/views/rental_state_view.py | from django.http import HttpResponseForbidden
from django.shortcuts import redirect, get_object_or_404
from django.views import View
from rental.state_transitions import allowed_transitions
from rental.models import Rental
class RentalStateView(View):
"""
Change the state of a given rental
If given an invalid state, this shows a 403 Forbidden response.
:author: Florian Stamer
"""
def post(self, request, rental_uuid):
rental = get_object_or_404(Rental, pk=rental_uuid)
managed_by_user = rental.depot.managed_by(request.user)
data = request.POST
state = data.get('state')
old_state = data.get('old_state')
# message = data.get('message')
if old_state != rental.state:
return HttpResponseForbidden('The state of the rental request has changed')
if state not in allowed_transitions(managed_by_user, rental.state):
return HttpResponseForbidden('Invalid state transition')
rental.state = state
rental.save()
return redirect('rental:detail', rental_uuid=rental.uuid)
| from django.http import HttpResponseForbidden
from django.shortcuts import redirect, get_object_or_404
from django.views import View
from rental.availability import Availability
from rental.state_transitions import allowed_transitions
from rental.models import Rental
class RentalStateView(View):
"""
Change the state of a given rental
If given an invalid state, this shows a 403 Forbidden response.
:author: Florian Stamer
"""
def check_availability(self, rental):
availability = Availability(rental.start_date, rental.return_date, rental.depot_id)
for item_rental in rental.itemrental_set:
intervals = availability.get_availability_intervals(item_rental.item)
available = availability.get_minimum_availability(intervals)
if item_rental.quantity > available:
raise ValidationError({
'quantity': 'The quantity must not exceed the availability '
'of the item in the requested time frame.'
})
def post(self, request, rental_uuid):
rental = get_object_or_404(Rental, pk=rental_uuid)
managed_by_user = rental.depot.managed_by(request.user)
data = request.POST
state = data.get('state')
old_state = data.get('old_state')
# message = data.get('message')
if old_state != rental.state:
return HttpResponseForbidden('The state of the rental request has changed')
if state not in allowed_transitions(managed_by_user, rental.state):
return HttpResponseForbidden('Invalid state transition')
if state == Rental.STATE_APPROVED:
self.check_availability(rental)
rental.state = state
rental.save()
return redirect('rental:detail', rental_uuid=rental.uuid)
| Check availability when approving rental request | Check availability when approving rental request
| Python | agpl-3.0 | verleihtool/verleihtool,verleihtool/verleihtool,verleihtool/verleihtool,verleihtool/verleihtool | from django.http import HttpResponseForbidden
from django.shortcuts import redirect, get_object_or_404
from django.views import View
from rental.availability import Availability
from rental.state_transitions import allowed_transitions
from rental.models import Rental
class RentalStateView(View):
"""
Change the state of a given rental
If given an invalid state, this shows a 403 Forbidden response.
:author: Florian Stamer
"""
def check_availability(self, rental):
availability = Availability(rental.start_date, rental.return_date, rental.depot_id)
for item_rental in rental.itemrental_set:
intervals = availability.get_availability_intervals(item_rental.item)
available = availability.get_minimum_availability(intervals)
if item_rental.quantity > available:
raise ValidationError({
'quantity': 'The quantity must not exceed the availability '
'of the item in the requested time frame.'
})
def post(self, request, rental_uuid):
rental = get_object_or_404(Rental, pk=rental_uuid)
managed_by_user = rental.depot.managed_by(request.user)
data = request.POST
state = data.get('state')
old_state = data.get('old_state')
# message = data.get('message')
if old_state != rental.state:
return HttpResponseForbidden('The state of the rental request has changed')
if state not in allowed_transitions(managed_by_user, rental.state):
return HttpResponseForbidden('Invalid state transition')
if state == Rental.STATE_APPROVED:
self.check_availability(rental)
rental.state = state
rental.save()
return redirect('rental:detail', rental_uuid=rental.uuid)
| Check availability when approving rental request
from django.http import HttpResponseForbidden
from django.shortcuts import redirect, get_object_or_404
from django.views import View
from rental.state_transitions import allowed_transitions
from rental.models import Rental
class RentalStateView(View):
"""
Change the state of a given rental
If given an invalid state, this shows a 403 Forbidden response.
:author: Florian Stamer
"""
def post(self, request, rental_uuid):
rental = get_object_or_404(Rental, pk=rental_uuid)
managed_by_user = rental.depot.managed_by(request.user)
data = request.POST
state = data.get('state')
old_state = data.get('old_state')
# message = data.get('message')
if old_state != rental.state:
return HttpResponseForbidden('The state of the rental request has changed')
if state not in allowed_transitions(managed_by_user, rental.state):
return HttpResponseForbidden('Invalid state transition')
rental.state = state
rental.save()
return redirect('rental:detail', rental_uuid=rental.uuid)
|
fda634ca2457716c33842cd0d285c20a0478601a | bugle_project/configs/development/settings.py | bugle_project/configs/development/settings.py | from bugle_project.configs.settings import *
FAYE_URL = None
DATABASE_ENGINE = 'postgresql_psycopg2'
DATABASE_NAME = 'bugle'
DATABASE_USER = 'bugle'
| from bugle_project.configs.settings import *
FAYE_ENABLED = False
FAYE_URL = None
DATABASE_ENGINE = 'postgresql_psycopg2'
DATABASE_NAME = 'bugle'
DATABASE_USER = 'bugle'
| Disable Faye on development, for now. | Disable Faye on development, for now.
| Python | bsd-2-clause | devfort/bugle,devfort/bugle,devfort/bugle | from bugle_project.configs.settings import *
FAYE_ENABLED = False
FAYE_URL = None
DATABASE_ENGINE = 'postgresql_psycopg2'
DATABASE_NAME = 'bugle'
DATABASE_USER = 'bugle'
| Disable Faye on development, for now.
from bugle_project.configs.settings import *
FAYE_URL = None
DATABASE_ENGINE = 'postgresql_psycopg2'
DATABASE_NAME = 'bugle'
DATABASE_USER = 'bugle'
|
3a3cb923babfbba4234e646dc40c0a9b6364d207 | apps/announcements/management/commands/tweetannouncements.py | apps/announcements/management/commands/tweetannouncements.py | """
Management command to cross-publish announcements on Twitter.
"""
from django.core.management.base import NoArgsCommand
from ...models import AnnouncementTwitterCrossPublication
class Command(NoArgsCommand):
"""
A management command which cross-publish on Twitter any pending announcements
currently published on the site but not on Twitter yet. Simply call the ``publish_pending_announcements``of
the ``AnnouncementTwitterCrossPublication`` class to do the job.
"""
help = "Cross-publish pending announcements on Twitter"
def handle_noargs(self, **options):
"""
Command handler.
:param options: Not used.
:return: None.
"""
AnnouncementTwitterCrossPublication.objects.publish_pending_announcements()
| """
Management command to cross-publish announcements on Twitter.
"""
from django.core.management.base import NoArgsCommand
from apps.dbmutex import MutexLock,AlreadyLockedError, LockTimeoutError
from ...models import AnnouncementTwitterCrossPublication
class Command(NoArgsCommand):
"""
A management command which cross-publish on Twitter any pending announcements
currently published on the site but not on Twitter yet. Simply call the ``publish_pending_announcements``of
the ``AnnouncementTwitterCrossPublication`` class to do the job. Use the ``dbmutex`` app to avoid concurrent
execution of the code.
"""
help = "Cross-publish pending announcements on Twitter"
def handle_noargs(self, **options):
"""
Command handler.
:param options: Not used.
:return: None.
"""
# Lock a critical section of code
try:
with MutexLock('twitter_announcements'):
# Do the job
AnnouncementTwitterCrossPublication.objects.publish_pending_announcements()
except AlreadyLockedError:
print('Could not obtain lock (another instance of the script running?)')
except LockTimeoutError:
print('Task completed but the lock timed out')
| Add mutex to the "tweetannouncement" management command. | Add mutex to the "tweetannouncement" management command.
| Python | agpl-3.0 | TamiaLab/carnetdumaker,TamiaLab/carnetdumaker,TamiaLab/carnetdumaker,TamiaLab/carnetdumaker | """
Management command to cross-publish announcements on Twitter.
"""
from django.core.management.base import NoArgsCommand
from apps.dbmutex import MutexLock,AlreadyLockedError, LockTimeoutError
from ...models import AnnouncementTwitterCrossPublication
class Command(NoArgsCommand):
"""
A management command which cross-publish on Twitter any pending announcements
currently published on the site but not on Twitter yet. Simply call the ``publish_pending_announcements``of
the ``AnnouncementTwitterCrossPublication`` class to do the job. Use the ``dbmutex`` app to avoid concurrent
execution of the code.
"""
help = "Cross-publish pending announcements on Twitter"
def handle_noargs(self, **options):
"""
Command handler.
:param options: Not used.
:return: None.
"""
# Lock a critical section of code
try:
with MutexLock('twitter_announcements'):
# Do the job
AnnouncementTwitterCrossPublication.objects.publish_pending_announcements()
except AlreadyLockedError:
print('Could not obtain lock (another instance of the script running?)')
except LockTimeoutError:
print('Task completed but the lock timed out')
| Add mutex to the "tweetannouncement" management command.
"""
Management command to cross-publish announcements on Twitter.
"""
from django.core.management.base import NoArgsCommand
from ...models import AnnouncementTwitterCrossPublication
class Command(NoArgsCommand):
"""
A management command which cross-publish on Twitter any pending announcements
currently published on the site but not on Twitter yet. Simply call the ``publish_pending_announcements``of
the ``AnnouncementTwitterCrossPublication`` class to do the job.
"""
help = "Cross-publish pending announcements on Twitter"
def handle_noargs(self, **options):
"""
Command handler.
:param options: Not used.
:return: None.
"""
AnnouncementTwitterCrossPublication.objects.publish_pending_announcements()
|
a4cffc0e74f9dd972357eb9dc49a57e10f1fe944 | core/forms.py | core/forms.py | from collections import namedtuple
from django import forms
IMAGE = "img"
UploadType = namedtuple("UploadType", ["directory", "label"])
FILE_TYPE_CHOICES = (
UploadType(directory=IMAGE, label="Image"),
UploadType(directory="thumb", label="Thumbnail"),
UploadType(directory="doc", label="Document"),
UploadType(directory="code", label="Code"),
UploadType(directory="pres", label="Presentation"),
)
class UploadForm(forms.Form):
upload_file = forms.FileField()
upload_type = forms.ChoiceField(choices=FILE_TYPE_CHOICES, initial=IMAGE)
| from collections import namedtuple
from django import forms
IMAGE = "img"
UploadType = namedtuple("UploadType", ["directory", "label"])
FILE_TYPE_CHOICES = (
UploadType(directory=IMAGE, label="Image"),
UploadType(directory="thumb", label="Thumbnail"),
UploadType(directory="doc", label="Document"),
UploadType(directory="code", label="Code"),
UploadType(directory="pres", label="Presentation"),
)
class UploadForm(forms.Form):
upload_file = forms.FileField()
upload_type = forms.ChoiceField(choices=FILE_TYPE_CHOICES, initial=IMAGE)
def clean_upload_file(self):
data = self.cleaned_data["upload_file"]
if " " in data.name:
raise forms.ValidationError("Spaces in filename not allowed")
return data
| Check names of files for spaces | Check names of files for spaces
| Python | bsd-3-clause | ahernp/DMCM,ahernp/DMCM,ahernp/DMCM | from collections import namedtuple
from django import forms
IMAGE = "img"
UploadType = namedtuple("UploadType", ["directory", "label"])
FILE_TYPE_CHOICES = (
UploadType(directory=IMAGE, label="Image"),
UploadType(directory="thumb", label="Thumbnail"),
UploadType(directory="doc", label="Document"),
UploadType(directory="code", label="Code"),
UploadType(directory="pres", label="Presentation"),
)
class UploadForm(forms.Form):
upload_file = forms.FileField()
upload_type = forms.ChoiceField(choices=FILE_TYPE_CHOICES, initial=IMAGE)
def clean_upload_file(self):
data = self.cleaned_data["upload_file"]
if " " in data.name:
raise forms.ValidationError("Spaces in filename not allowed")
return data
| Check names of files for spaces
from collections import namedtuple
from django import forms
IMAGE = "img"
UploadType = namedtuple("UploadType", ["directory", "label"])
FILE_TYPE_CHOICES = (
UploadType(directory=IMAGE, label="Image"),
UploadType(directory="thumb", label="Thumbnail"),
UploadType(directory="doc", label="Document"),
UploadType(directory="code", label="Code"),
UploadType(directory="pres", label="Presentation"),
)
class UploadForm(forms.Form):
upload_file = forms.FileField()
upload_type = forms.ChoiceField(choices=FILE_TYPE_CHOICES, initial=IMAGE)
|
c0d2550e4cfb647b21e6fdde9705fe6e4a76a2df | Lambda/Functions/TagEC2Dependencies/tag_ec2_dependencies.py | Lambda/Functions/TagEC2Dependencies/tag_ec2_dependencies.py | '''
Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file
except in compliance with the License. A copy of the License is located at
http://aws.amazon.com/apache2.0/
or in the "license" file accompanying this file. This file is distributed on an "AS IS"
BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under the License.
'''
from __future__ import print_function
print('Loading function')
import json, boto3, re
def lambda_handler(event, context):
# print("Received event: \n" + json.dumps(event))
# If CreateTags failed nothing to do
if 'errorCode' in event['detail']:
print('CreateTags failed with error code {} and error message "{}", nothing to do.'
.format(event['detail']['errorCode'], event['detail']['errorMessage']))
return
region = event['detail']['awsRegion']
ec2 = boto3.client('ec2', region_name=region)
instance_ids = []
is_instance = re.compile('i-[0-9a-f]+')
# Run instances may create several instances, then the event will contain
# several instances
for item in event['detail']['requestParameters']['resourcesSet']['items']:
if is_instance.match(item['resourceId']):
instance_ids.append(item['resourceId'])
# check if we were tagging any instances
if len(instance_ids) == 0:
return
tags = []
for tag in event['detail']['requestParameters']['tagSet']['items']:
tags.append({
'Key': tag['key'],
'Value': tag['value']
})
# If the number of created instances then describe instances may be paginated
paginator = ec2.get_paginator('describe_instances')
instances_iterator = paginator.paginate(
DryRun=False,
InstanceIds=instance_ids
)
for page in instances_iterator:
resources = []
for reservation in page['Reservations']:
for instance in reservation['Instances']:
for eni in instance['NetworkInterfaces']:
resources.append(eni['NetworkInterfaceId'])
for volume in instance['BlockDeviceMappings']:
if 'Ebs' in volume:
resources.append(volume['Ebs']['VolumeId'])
print("Tagging resorces for instance ids:\n[{}]".format(', '.join(instance_ids)))
print("Resources to be tagged:\n[{}]".format(', '.join(resources)))
ec2.create_tags(
DryRun=False,
Resources=resources,
Tags=tags
)
return
| Add tag EC2 dependencies Lambda Python code. | Add tag EC2 dependencies Lambda Python code.
| Python | apache-2.0 | rsavordelli/aws-support-tools,rsavordelli/aws-support-tools,rsavordelli/aws-support-tools,rsavordelli/aws-support-tools,rsavordelli/aws-support-tools,rsavordelli/aws-support-tools | '''
Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file
except in compliance with the License. A copy of the License is located at
http://aws.amazon.com/apache2.0/
or in the "license" file accompanying this file. This file is distributed on an "AS IS"
BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under the License.
'''
from __future__ import print_function
print('Loading function')
import json, boto3, re
def lambda_handler(event, context):
# print("Received event: \n" + json.dumps(event))
# If CreateTags failed nothing to do
if 'errorCode' in event['detail']:
print('CreateTags failed with error code {} and error message "{}", nothing to do.'
.format(event['detail']['errorCode'], event['detail']['errorMessage']))
return
region = event['detail']['awsRegion']
ec2 = boto3.client('ec2', region_name=region)
instance_ids = []
is_instance = re.compile('i-[0-9a-f]+')
# Run instances may create several instances, then the event will contain
# several instances
for item in event['detail']['requestParameters']['resourcesSet']['items']:
if is_instance.match(item['resourceId']):
instance_ids.append(item['resourceId'])
# check if we were tagging any instances
if len(instance_ids) == 0:
return
tags = []
for tag in event['detail']['requestParameters']['tagSet']['items']:
tags.append({
'Key': tag['key'],
'Value': tag['value']
})
# If the number of created instances then describe instances may be paginated
paginator = ec2.get_paginator('describe_instances')
instances_iterator = paginator.paginate(
DryRun=False,
InstanceIds=instance_ids
)
for page in instances_iterator:
resources = []
for reservation in page['Reservations']:
for instance in reservation['Instances']:
for eni in instance['NetworkInterfaces']:
resources.append(eni['NetworkInterfaceId'])
for volume in instance['BlockDeviceMappings']:
if 'Ebs' in volume:
resources.append(volume['Ebs']['VolumeId'])
print("Tagging resorces for instance ids:\n[{}]".format(', '.join(instance_ids)))
print("Resources to be tagged:\n[{}]".format(', '.join(resources)))
ec2.create_tags(
DryRun=False,
Resources=resources,
Tags=tags
)
return
| Add tag EC2 dependencies Lambda Python code.
|
|
e01ec3b6c877bc76ffa2e93d97d706036a90405c | test/on_yubikey/cli_piv/test_misc.py | test/on_yubikey/cli_piv/test_misc.py | import unittest
from ..framework import cli_test_suite
from .util import DEFAULT_MANAGEMENT_KEY
@cli_test_suite
def additional_tests(ykman_cli):
class Misc(unittest.TestCase):
def setUp(self):
ykman_cli('piv', 'reset', '-f')
def test_info(self):
output = ykman_cli('piv', 'info')
self.assertIn('PIV version:', output)
def test_reset(self):
output = ykman_cli('piv', 'reset', '-f')
self.assertIn('Success!', output)
def test_write_read_object(self):
ykman_cli(
'piv', 'write-object',
'-m', DEFAULT_MANAGEMENT_KEY, '0x5f0001',
'-', input='test data')
output = ykman_cli('piv', 'read-object', '0x5f0001')
self.assertEqual('test data\n', output)
return [Misc]
| import unittest
from ..framework import cli_test_suite
from .util import DEFAULT_MANAGEMENT_KEY
@cli_test_suite
def additional_tests(ykman_cli):
class Misc(unittest.TestCase):
def setUp(self):
ykman_cli('piv', 'reset', '-f')
def test_info(self):
output = ykman_cli('piv', 'info')
self.assertIn('PIV version:', output)
def test_reset(self):
output = ykman_cli('piv', 'reset', '-f')
self.assertIn('Success!', output)
def test_write_read_object(self):
data = 'test data'
for i in range(0, 3):
ykman_cli(
'piv', 'write-object',
'-m', DEFAULT_MANAGEMENT_KEY, '0x5f0001',
'-', input=data)
data = ykman_cli('piv', 'read-object', '0x5f0001')
self.assertEqual(data, 'test data')
return [Misc]
| Test that repeated read/write-object cycles do not change the data | Test that repeated read/write-object cycles do not change the data
| Python | bsd-2-clause | Yubico/yubikey-manager,Yubico/yubikey-manager | import unittest
from ..framework import cli_test_suite
from .util import DEFAULT_MANAGEMENT_KEY
@cli_test_suite
def additional_tests(ykman_cli):
class Misc(unittest.TestCase):
def setUp(self):
ykman_cli('piv', 'reset', '-f')
def test_info(self):
output = ykman_cli('piv', 'info')
self.assertIn('PIV version:', output)
def test_reset(self):
output = ykman_cli('piv', 'reset', '-f')
self.assertIn('Success!', output)
def test_write_read_object(self):
data = 'test data'
for i in range(0, 3):
ykman_cli(
'piv', 'write-object',
'-m', DEFAULT_MANAGEMENT_KEY, '0x5f0001',
'-', input=data)
data = ykman_cli('piv', 'read-object', '0x5f0001')
self.assertEqual(data, 'test data')
return [Misc]
| Test that repeated read/write-object cycles do not change the data
import unittest
from ..framework import cli_test_suite
from .util import DEFAULT_MANAGEMENT_KEY
@cli_test_suite
def additional_tests(ykman_cli):
class Misc(unittest.TestCase):
def setUp(self):
ykman_cli('piv', 'reset', '-f')
def test_info(self):
output = ykman_cli('piv', 'info')
self.assertIn('PIV version:', output)
def test_reset(self):
output = ykman_cli('piv', 'reset', '-f')
self.assertIn('Success!', output)
def test_write_read_object(self):
ykman_cli(
'piv', 'write-object',
'-m', DEFAULT_MANAGEMENT_KEY, '0x5f0001',
'-', input='test data')
output = ykman_cli('piv', 'read-object', '0x5f0001')
self.assertEqual('test data\n', output)
return [Misc]
|
53c9b988d2ccda253080deaa35b70d814309a4aa | src/algorithms/simple_hybrid.py | src/algorithms/simple_hybrid.py | def predict(predictions_vector_0, predictions_vector_1, mixing_variable=0.5, num_partitions=30):
"""Apply a simple linear hybrid recommender.
This function implements the simple linear hybrid recommender Zhou et. al:
"Solving the apparent diversity-accuracy dilemma of recommender systems"
http://arxiv.org/pdf/0808.2670.pdf Equation 5
It takes the weighted linear average of two previous recommendations and
uses the new average as its own predicted rating:
new_rating = mixing_variable * rating_a + (1 - mixing_variable) * rating_b
Args:
predictions_vector_0 (rdd): Contains prediction tuples of the form
(user_id, item_id, predicted_rating) generated from a recommender
algorithm.
predictions_vector_1 (rdd): Contains prediction tuples of the form
(user_id, item_id, predicted_rating) generated from a second
recommender algorithm.
mixing_variable (float): A float in the range [0., 1.] which determines
how to weight the two predictions. If `mixing_variable` is 0 then
`predictions_vector_0` is given all the weight (and
`predictions_vector_1` is ignored). If `mixing_variable` is 1 then
`predictions_vector_1` is given all the weight (and
`predictions_vector_0` is ignored). Defaults to 0.5.
num_partitions (int): The number of partitions to use for the returned
data. Defaults to 30.
Returns:
rdd: An rdd containing prediction tuples of the form
(user_id, item_id, rating)
Raises:
ValueError: If `mixing_variable` is not within the range [0, 1]
"""
# Check the mixing_variable is set to an acceptable value
if not 0 <= mixing_variable <= 1:
raise ValueError('mixing_variable must be within the range [0, 1]')
# Short-circuit in the trivial cases
if mixing_variable == 0:
return predictions_vector_0
elif mixing_variable == 1:
return predictions_vector_1
# Otherwise calculate the linear average
keyed_vector_0 = predictions_vector_0\
.map(lambda (u, i, r): ((u, i), r))
keyed_vector_1 = predictions_vector_1\
.map(lambda (u, i, r): ((u, i), r))
predictions = keyed_vector_0.join(keyed_vector_1)\
.map(lambda ((u, i), (r0, r1)): (u, i, (1. - mixing_variable) * r0 + mixing_variable * r1))\
.coalesce(num_partitions)
return predictions
| Add a simple hybrid recommender from Zhou et. al | Add a simple hybrid recommender from Zhou et. al
| Python | apache-2.0 | tiffanyj41/hermes,tiffanyj41/hermes,tiffanyj41/hermes,tiffanyj41/hermes | def predict(predictions_vector_0, predictions_vector_1, mixing_variable=0.5, num_partitions=30):
"""Apply a simple linear hybrid recommender.
This function implements the simple linear hybrid recommender Zhou et. al:
"Solving the apparent diversity-accuracy dilemma of recommender systems"
http://arxiv.org/pdf/0808.2670.pdf Equation 5
It takes the weighted linear average of two previous recommendations and
uses the new average as its own predicted rating:
new_rating = mixing_variable * rating_a + (1 - mixing_variable) * rating_b
Args:
predictions_vector_0 (rdd): Contains prediction tuples of the form
(user_id, item_id, predicted_rating) generated from a recommender
algorithm.
predictions_vector_1 (rdd): Contains prediction tuples of the form
(user_id, item_id, predicted_rating) generated from a second
recommender algorithm.
mixing_variable (float): A float in the range [0., 1.] which determines
how to weight the two predictions. If `mixing_variable` is 0 then
`predictions_vector_0` is given all the weight (and
`predictions_vector_1` is ignored). If `mixing_variable` is 1 then
`predictions_vector_1` is given all the weight (and
`predictions_vector_0` is ignored). Defaults to 0.5.
num_partitions (int): The number of partitions to use for the returned
data. Defaults to 30.
Returns:
rdd: An rdd containing prediction tuples of the form
(user_id, item_id, rating)
Raises:
ValueError: If `mixing_variable` is not within the range [0, 1]
"""
# Check the mixing_variable is set to an acceptable value
if not 0 <= mixing_variable <= 1:
raise ValueError('mixing_variable must be within the range [0, 1]')
# Short-circuit in the trivial cases
if mixing_variable == 0:
return predictions_vector_0
elif mixing_variable == 1:
return predictions_vector_1
# Otherwise calculate the linear average
keyed_vector_0 = predictions_vector_0\
.map(lambda (u, i, r): ((u, i), r))
keyed_vector_1 = predictions_vector_1\
.map(lambda (u, i, r): ((u, i), r))
predictions = keyed_vector_0.join(keyed_vector_1)\
.map(lambda ((u, i), (r0, r1)): (u, i, (1. - mixing_variable) * r0 + mixing_variable * r1))\
.coalesce(num_partitions)
return predictions
| Add a simple hybrid recommender from Zhou et. al
|
|
9bda75b5200790bb2c68e256207d8fc5d45a76c6 | setup.py | setup.py | from setuptools import setup
from setuptools import find_packages
__author__ = 'Ryan McGrath <ryan@venodesigns.net>'
__version__ = '2.5.5'
setup(
# Basic package information.
name='twython',
version=__version__,
packages=find_packages(),
# Packaging options.
include_package_data=True,
# Package dependencies.
install_requires=['simplejson', 'requests==1.1.0', 'requests_oauthlib==0.3.0'],
# Metadata for PyPI.
author='Ryan McGrath',
author_email='ryan@venodesigns.net',
license='MIT License',
url='http://github.com/ryanmcgrath/twython/tree/master',
keywords='twitter search api tweet twython',
description='An easy (and up to date) way to access Twitter data with Python.',
long_description=open('README.rst').read(),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Communications :: Chat',
'Topic :: Internet'
]
)
| from setuptools import setup
from setuptools import find_packages
__author__ = 'Ryan McGrath <ryan@venodesigns.net>'
__version__ = '2.5.5'
setup(
# Basic package information.
name='twython',
version=__version__,
packages=find_packages(),
# Packaging options.
include_package_data=True,
# Package dependencies.
install_requires=['simplejson', 'requests>=1.0.0, <2.0.0', 'requests_oauthlib==0.3.0'],
# Metadata for PyPI.
author='Ryan McGrath',
author_email='ryan@venodesigns.net',
license='MIT License',
url='http://github.com/ryanmcgrath/twython/tree/master',
keywords='twitter search api tweet twython',
description='An easy (and up to date) way to access Twitter data with Python.',
long_description=open('README.rst').read(),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Communications :: Chat',
'Topic :: Internet'
]
)
| Allow versions of requests between 1.0.0 and 2.0.0 | Allow versions of requests between 1.0.0 and 2.0.0
Requests is semantically versioned, so minor version changes are expected to be compatible.
| Python | mit | Oire/twython,joebos/twython,akarambir/twython,Fueled/twython,fibears/twython,ryanmcgrath/twython,vivek8943/twython,Hasimir/twython,Devyani-Divs/twython,ping/twython | from setuptools import setup
from setuptools import find_packages
__author__ = 'Ryan McGrath <ryan@venodesigns.net>'
__version__ = '2.5.5'
setup(
# Basic package information.
name='twython',
version=__version__,
packages=find_packages(),
# Packaging options.
include_package_data=True,
# Package dependencies.
install_requires=['simplejson', 'requests>=1.0.0, <2.0.0', 'requests_oauthlib==0.3.0'],
# Metadata for PyPI.
author='Ryan McGrath',
author_email='ryan@venodesigns.net',
license='MIT License',
url='http://github.com/ryanmcgrath/twython/tree/master',
keywords='twitter search api tweet twython',
description='An easy (and up to date) way to access Twitter data with Python.',
long_description=open('README.rst').read(),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Communications :: Chat',
'Topic :: Internet'
]
)
| Allow versions of requests between 1.0.0 and 2.0.0
Requests is semantically versioned, so minor version changes are expected to be compatible.
from setuptools import setup
from setuptools import find_packages
__author__ = 'Ryan McGrath <ryan@venodesigns.net>'
__version__ = '2.5.5'
setup(
# Basic package information.
name='twython',
version=__version__,
packages=find_packages(),
# Packaging options.
include_package_data=True,
# Package dependencies.
install_requires=['simplejson', 'requests==1.1.0', 'requests_oauthlib==0.3.0'],
# Metadata for PyPI.
author='Ryan McGrath',
author_email='ryan@venodesigns.net',
license='MIT License',
url='http://github.com/ryanmcgrath/twython/tree/master',
keywords='twitter search api tweet twython',
description='An easy (and up to date) way to access Twitter data with Python.',
long_description=open('README.rst').read(),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Communications :: Chat',
'Topic :: Internet'
]
)
|
20ef91b51230811cab0e4edb426993cfb1c63a1a | osf/management/commands/checkmigrations.py | osf/management/commands/checkmigrations.py | """
Return a non-zero exit code if there are unapplied migrations.
"""
import sys
from django.db import connections, DEFAULT_DB_ALIAS
from django.db.migrations.executor import MigrationExecutor
from django.core.management.base import BaseCommand
class Command(BaseCommand):
def handle(self, *args, **options):
connection = connections[DEFAULT_DB_ALIAS]
connection.prepare_database()
executor = MigrationExecutor(connection)
targets = executor.loader.graph.leaf_nodes()
unapplied_migrations = executor.migration_plan(targets)
if unapplied_migrations:
self.stdout.write('The following migrations are unapplied:', self.style.ERROR)
for migration in unapplied_migrations:
self.stdout.write(' {}.{}'.format(migration[0].app_label, migration[0].name), self.style.MIGRATE_LABEL)
sys.exit(1)
self.stdout.write('All migrations have been applied. Have a nice day!', self.style.SUCCESS)
| Add a management command to check for unapplied migrations. | Add a management command to check for unapplied migrations.
[PLAT-972]
| Python | apache-2.0 | brianjgeiger/osf.io,erinspace/osf.io,CenterForOpenScience/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,aaxelb/osf.io,erinspace/osf.io,Johnetordoff/osf.io,adlius/osf.io,HalcyonChimera/osf.io,erinspace/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,baylee-d/osf.io,felliott/osf.io,aaxelb/osf.io,baylee-d/osf.io,HalcyonChimera/osf.io,adlius/osf.io,aaxelb/osf.io,aaxelb/osf.io,felliott/osf.io,felliott/osf.io,mfraezz/osf.io,pattisdr/osf.io,HalcyonChimera/osf.io,mattclark/osf.io,caseyrollins/osf.io,brianjgeiger/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,pattisdr/osf.io,saradbowman/osf.io,cslzchen/osf.io,mattclark/osf.io,Johnetordoff/osf.io,baylee-d/osf.io,adlius/osf.io,mfraezz/osf.io,adlius/osf.io,pattisdr/osf.io,mfraezz/osf.io,mattclark/osf.io,cslzchen/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,felliott/osf.io,brianjgeiger/osf.io,saradbowman/osf.io | """
Return a non-zero exit code if there are unapplied migrations.
"""
import sys
from django.db import connections, DEFAULT_DB_ALIAS
from django.db.migrations.executor import MigrationExecutor
from django.core.management.base import BaseCommand
class Command(BaseCommand):
def handle(self, *args, **options):
connection = connections[DEFAULT_DB_ALIAS]
connection.prepare_database()
executor = MigrationExecutor(connection)
targets = executor.loader.graph.leaf_nodes()
unapplied_migrations = executor.migration_plan(targets)
if unapplied_migrations:
self.stdout.write('The following migrations are unapplied:', self.style.ERROR)
for migration in unapplied_migrations:
self.stdout.write(' {}.{}'.format(migration[0].app_label, migration[0].name), self.style.MIGRATE_LABEL)
sys.exit(1)
self.stdout.write('All migrations have been applied. Have a nice day!', self.style.SUCCESS)
| Add a management command to check for unapplied migrations.
[PLAT-972]
|
|
871ec80a78ef2caaaea8882e9c2846b064eb7b96 | trytond_nereid/tests/__init__.py | trytond_nereid/tests/__init__.py | # -*- coding: utf-8 -*-
"""
__init__
Nereid Tryton module test cases
:copyright: (c) 2011 by Openlabs Technologies & Consulting (P) Limited
:license: BSD, see LICENSE for more details.
"""
from unittest import TestSuite
from .configuration import suite as configuration_test_suite
from .test_currency import suite as currency_test_suite
from .test_language import suite as language_test_suite
suite = TestSuite()
suite.addTests([
configuration_test_suite,
currency_test_suite,
language_test_suite
])
| Add a consolidated test suite which could be imported by the tryton test suite | Add a consolidated test suite which could be imported by the tryton test suite
| Python | bsd-3-clause | fulfilio/nereid,usudaysingh/nereid,prakashpp/nereid,riteshshrv/nereid,riteshshrv/nereid,usudaysingh/nereid,fulfilio/nereid,prakashpp/nereid | # -*- coding: utf-8 -*-
"""
__init__
Nereid Tryton module test cases
:copyright: (c) 2011 by Openlabs Technologies & Consulting (P) Limited
:license: BSD, see LICENSE for more details.
"""
from unittest import TestSuite
from .configuration import suite as configuration_test_suite
from .test_currency import suite as currency_test_suite
from .test_language import suite as language_test_suite
suite = TestSuite()
suite.addTests([
configuration_test_suite,
currency_test_suite,
language_test_suite
])
| Add a consolidated test suite which could be imported by the tryton test suite
|
|
418b1e1176f4b4d286983f69cf4e5c1deacd4afb | sympy/core/tests/test_cache.py | sympy/core/tests/test_cache.py | from sympy.core.cache import cacheit
def test_cacheit_doc():
    # A trivial function wrapped by the caching decorator; the decorator
    # must copy over the wrapped function's metadata.
    @cacheit
    def testfn():
        "test docstring"
        pass
    # Both the name and the docstring must survive the wrapping.
    assert testfn.__name__ == "testfn"
    assert testfn.__doc__ == "test docstring"
| Add a test for the @cachit decorator. | Add a test for the @cachit decorator.
Make sure that the caching decorator correctly
copies over the function docstring and function name.
This fixes issue #744 from the issue tracker.
Signed-off-by: Jochen Voss <1dcd5c846f3eb4984f0655fb5407be7c9e0c9079@seehuhn.de>
Signed-off-by: Ondrej Certik <b816faa87b7d35274d2e545c5be11ed4376f3ccf@certik.cz>
| Python | bsd-3-clause | madan96/sympy,hargup/sympy,Gadal/sympy,cswiercz/sympy,sampadsaha5/sympy,Shaswat27/sympy,fperez/sympy,atreyv/sympy,lidavidm/sympy,Davidjohnwilson/sympy,AkademieOlympia/sympy,beni55/sympy,Mitchkoens/sympy,beni55/sympy,jbbskinny/sympy,pbrady/sympy,shikil/sympy,kmacinnis/sympy,shikil/sympy,Curious72/sympy,toolforger/sympy,yashsharan/sympy,jaimahajan1997/sympy,asm666/sympy,Titan-C/sympy,asm666/sympy,sahmed95/sympy,hrashk/sympy,yukoba/sympy,wyom/sympy,meghana1995/sympy,yukoba/sympy,oliverlee/sympy,madan96/sympy,saurabhjn76/sympy,cccfran/sympy,grevutiu-gabriel/sympy,bukzor/sympy,garvitr/sympy,kumarkrishna/sympy,AunShiLord/sympy,kmacinnis/sympy,emon10005/sympy,ahhda/sympy,jamesblunt/sympy,ahhda/sympy,saurabhjn76/sympy,sunny94/temp,atsao72/sympy,maniteja123/sympy,drufat/sympy,mattpap/sympy-polys,hazelnusse/sympy-old,MridulS/sympy,Shaswat27/sympy,iamutkarshtiwari/sympy,jerli/sympy,bukzor/sympy,MechCoder/sympy,grevutiu-gabriel/sympy,srjoglekar246/sympy,VaibhavAgarwalVA/sympy,wyom/sympy,meghana1995/sympy,farhaanbukhsh/sympy,dqnykamp/sympy,drufat/sympy,mcdaniel67/sympy,grevutiu-gabriel/sympy,kumarkrishna/sympy,jamesblunt/sympy,postvakje/sympy,souravsingh/sympy,shipci/sympy,shikil/sympy,Designist/sympy,yashsharan/sympy,atsao72/sympy,farhaanbukhsh/sympy,wanglongqi/sympy,toolforger/sympy,vipulroxx/sympy,rahuldan/sympy,jaimahajan1997/sympy,abloomston/sympy,Sumith1896/sympy,liangjiaxing/sympy,pernici/sympy,Arafatk/sympy,wyom/sympy,aktech/sympy,lindsayad/sympy,amitjamadagni/sympy,tovrstra/sympy,lidavidm/sympy,diofant/diofant,AkademieOlympia/sympy,kaichogami/sympy,Vishluck/sympy,yashsharan/sympy,Titan-C/sympy,liangjiaxing/sympy,jamesblunt/sympy,flacjacket/sympy,ga7g08/sympy,Shaswat27/sympy,moble/sympy,Vishluck/sympy,atreyv/sympy,ga7g08/sympy,MechCoder/sympy,pandeyadarsh/sympy,kmacinnis/sympy,kevalds51/sympy,lindsayad/sympy,mcdaniel67/sympy,moble/sympy,sampadsaha5/sympy,mafiya69/sympy,hrashk/sympy,Davidjohnwilson/sympy,kaichogami/sympy,Designist/sympy,hargup/sy
mpy,kaichogami/sympy,hargup/sympy,ChristinaZografou/sympy,emon10005/sympy,jbaayen/sympy,hazelnusse/sympy-old,sahmed95/sympy,jerli/sympy,pandeyadarsh/sympy,Gadal/sympy,ga7g08/sympy,abhiii5459/sympy,mcdaniel67/sympy,MridulS/sympy,emon10005/sympy,aktech/sympy,rahuldan/sympy,meghana1995/sympy,skidzo/sympy,postvakje/sympy,sahilshekhawat/sympy,shipci/sympy,beni55/sympy,skidzo/sympy,bukzor/sympy,ChristinaZografou/sympy,AunShiLord/sympy,Arafatk/sympy,ryanGT/sympy,drufat/sympy,AunShiLord/sympy,atsao72/sympy,abhiii5459/sympy,wanglongqi/sympy,dqnykamp/sympy,pbrady/sympy,kevalds51/sympy,madan96/sympy,liangjiaxing/sympy,souravsingh/sympy,wanglongqi/sympy,VaibhavAgarwalVA/sympy,Gadal/sympy,sahilshekhawat/sympy,asm666/sympy,skidzo/sympy,cswiercz/sympy,yukoba/sympy,lindsayad/sympy,sampadsaha5/sympy,chaffra/sympy,moble/sympy,MridulS/sympy,maniteja123/sympy,debugger22/sympy,jbbskinny/sympy,kevalds51/sympy,Titan-C/sympy,Curious72/sympy,shipci/sympy,dqnykamp/sympy,kumarkrishna/sympy,AkademieOlympia/sympy,skirpichev/omg,vipulroxx/sympy,iamutkarshtiwari/sympy,atreyv/sympy,pbrady/sympy,debugger22/sympy,hrashk/sympy,KevinGoodsell/sympy,sahilshekhawat/sympy,mafiya69/sympy,minrk/sympy,pandeyadarsh/sympy,vipulroxx/sympy,Sumith1896/sympy,oliverlee/sympy,cccfran/sympy,Designist/sympy,chaffra/sympy,cccfran/sympy,sahmed95/sympy,debugger22/sympy,Vishluck/sympy,iamutkarshtiwari/sympy,saurabhjn76/sympy,cswiercz/sympy,kaushik94/sympy,garvitr/sympy,Sumith1896/sympy,amitjamadagni/sympy,toolforger/sympy,Davidjohnwilson/sympy,chaffra/sympy,kaushik94/sympy,abhiii5459/sympy,VaibhavAgarwalVA/sympy,souravsingh/sympy,sunny94/temp,Mitchkoens/sympy,rahuldan/sympy,Mitchkoens/sympy,ahhda/sympy,lidavidm/sympy,sunny94/temp,Arafatk/sympy,abloomston/sympy,maniteja123/sympy,garvitr/sympy,kaushik94/sympy,Curious72/sympy,abloomston/sympy,jerli/sympy,jaimahajan1997/sympy,minrk/sympy,ChristinaZografou/sympy,MechCoder/sympy,jbbskinny/sympy,aktech/sympy,farhaanbukhsh/sympy,postvakje/sympy,mafiya69/sympy,oliverlee/sympy | 
from sympy.core.cache import cacheit
def test_cacheit_doc():
    # A trivial function wrapped by the caching decorator; the decorator
    # must copy over the wrapped function's metadata.
    @cacheit
    def testfn():
        "test docstring"
        pass
    # Both the name and the docstring must survive the wrapping.
    assert testfn.__name__ == "testfn"
    assert testfn.__doc__ == "test docstring"
| Add a test for the @cachit decorator.
Make sure that the caching decorator correctly
copies over the function docstring and function name.
This fixes issue #744 from the issue tracker.
Signed-off-by: Jochen Voss <1dcd5c846f3eb4984f0655fb5407be7c9e0c9079@seehuhn.de>
Signed-off-by: Ondrej Certik <b816faa87b7d35274d2e545c5be11ed4376f3ccf@certik.cz>
|
|
a013af88adad469782d2f05a0b882c2f5500b6b8 | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup
setup(
name='gallerize',
version='0.3.1',
description='Create a static HTML/CSS image gallery from a bunch of images.',
author='Jochen Kupperschmidt',
author_email='homework@nwsnet.de',
url='http://homework.nwsnet.de/releases/cc0e/#gallerize',
)
| # -*- coding: utf-8 -*-
from setuptools import setup
def read_readme():
    """Return the contents of the README file (used as the long description)."""
    with open('README.rst') as readme_file:
        contents = readme_file.read()
    return contents
setup(
name='gallerize',
version='0.3.1',
description='Create a static HTML/CSS image gallery from a bunch of images.',
long_description=read_readme(),
license='MIT',
author='Jochen Kupperschmidt',
author_email='homework@nwsnet.de',
url='http://homework.nwsnet.de/releases/cc0e/#gallerize',
)
| Include README as long package description. Specified license. | Include README as long package description. Specified license.
| Python | mit | TheLady/gallerize,TheLady/gallerize | # -*- coding: utf-8 -*-
from setuptools import setup
def read_readme():
    """Return the contents of the README file (used as the long description)."""
    with open('README.rst') as readme_file:
        contents = readme_file.read()
    return contents
setup(
name='gallerize',
version='0.3.1',
description='Create a static HTML/CSS image gallery from a bunch of images.',
long_description=read_readme(),
license='MIT',
author='Jochen Kupperschmidt',
author_email='homework@nwsnet.de',
url='http://homework.nwsnet.de/releases/cc0e/#gallerize',
)
| Include README as long package description. Specified license.
#!/usr/bin/env python
from setuptools import setup
setup(
name='gallerize',
version='0.3.1',
description='Create a static HTML/CSS image gallery from a bunch of images.',
author='Jochen Kupperschmidt',
author_email='homework@nwsnet.de',
url='http://homework.nwsnet.de/releases/cc0e/#gallerize',
)
|
b16016994f20945a8a2bbb63b9cb920d856ab66f | web/attempts/migrations/0008_add_submission_date.py | web/attempts/migrations/0008_add_submission_date.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2017-05-09 09:24
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('attempts', '0007_auto_20161004_0927'),
]
operations = [
migrations.AddField(
model_name='attempt',
name='submission_date',
field=models.DateTimeField(null=True),
),
migrations.AddField(
model_name='historicalattempt',
name='submission_date',
field=models.DateTimeField(null=True),
),
migrations.RunSQL(
'UPDATE attempts_historicalattempt SET submission_date = history_date'
),
migrations.RunSQL(
'''UPDATE attempts_attempt
SET submission_date = (
SELECT max(history_date)
FROM attempts_historicalattempt
WHERE attempts_attempt.user_id = user_id
AND attempts_attempt.part_id = part_id
)
'''
),
migrations.AlterField(
model_name='attempt',
name='submission_date',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='historicalattempt',
name='submission_date',
field=models.DateTimeField(blank=True, editable=False),
),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2017-05-09 09:24
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds a ``submission_date`` column to Attempt and to its historical
    # (audit) shadow table, backfilled from the recorded history timestamps.
    # NOTE(review): the bulk UPDATE below uses ``UPDATE ... FROM`` syntax,
    # which is PostgreSQL-specific -- this is the deliberate revert of the
    # SQLite-compatible variant (see commit message).
    dependencies = [
        ('attempts', '0007_auto_20161004_0927'),
    ]
    operations = [
        # Add the columns as nullable first so existing rows remain valid
        # while the backfill runs.
        migrations.AddField(
            model_name='attempt',
            name='submission_date',
            field=models.DateTimeField(null=True),
        ),
        migrations.AddField(
            model_name='historicalattempt',
            name='submission_date',
            field=models.DateTimeField(null=True),
        ),
        # Each history row was submitted exactly when it was recorded.
        migrations.RunSQL(
            'UPDATE attempts_historicalattempt SET submission_date = history_date'
        ),
        # The current row's submission date is the newest history entry
        # for the same (user, part) pair.
        migrations.RunSQL(
            '''UPDATE attempts_attempt
            SET submission_date = subquery.submission_date
            FROM (
                SELECT user_id, part_id, max(history_date) AS submission_date
                FROM attempts_historicalattempt
                GROUP BY user_id, part_id
            ) AS subquery
            WHERE attempts_attempt.user_id = subquery.user_id
            AND attempts_attempt.part_id = subquery.part_id
            '''
        ),
        # Tighten the field options now that every row has a value.
        migrations.AlterField(
            model_name='attempt',
            name='submission_date',
            field=models.DateTimeField(auto_now=True),
        ),
        migrations.AlterField(
            model_name='historicalattempt',
            name='submission_date',
            field=models.DateTimeField(blank=True, editable=False),
        ),
    ]
| Revert "Make migration SQLite compatible" | Revert "Make migration SQLite compatible"
This reverts commit 768d85cccb17c8757dd8d14dad220d0b87568264.
| Python | agpl-3.0 | ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,matijapretnar/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo | # -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2017-05-09 09:24
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds a ``submission_date`` column to Attempt and to its historical
    # (audit) shadow table, backfilled from the recorded history timestamps.
    # NOTE(review): the bulk UPDATE below uses ``UPDATE ... FROM`` syntax,
    # which is PostgreSQL-specific -- this is the deliberate revert of the
    # SQLite-compatible variant (see commit message).
    dependencies = [
        ('attempts', '0007_auto_20161004_0927'),
    ]
    operations = [
        # Add the columns as nullable first so existing rows remain valid
        # while the backfill runs.
        migrations.AddField(
            model_name='attempt',
            name='submission_date',
            field=models.DateTimeField(null=True),
        ),
        migrations.AddField(
            model_name='historicalattempt',
            name='submission_date',
            field=models.DateTimeField(null=True),
        ),
        # Each history row was submitted exactly when it was recorded.
        migrations.RunSQL(
            'UPDATE attempts_historicalattempt SET submission_date = history_date'
        ),
        # The current row's submission date is the newest history entry
        # for the same (user, part) pair.
        migrations.RunSQL(
            '''UPDATE attempts_attempt
            SET submission_date = subquery.submission_date
            FROM (
                SELECT user_id, part_id, max(history_date) AS submission_date
                FROM attempts_historicalattempt
                GROUP BY user_id, part_id
            ) AS subquery
            WHERE attempts_attempt.user_id = subquery.user_id
            AND attempts_attempt.part_id = subquery.part_id
            '''
        ),
        # Tighten the field options now that every row has a value.
        migrations.AlterField(
            model_name='attempt',
            name='submission_date',
            field=models.DateTimeField(auto_now=True),
        ),
        migrations.AlterField(
            model_name='historicalattempt',
            name='submission_date',
            field=models.DateTimeField(blank=True, editable=False),
        ),
    ]
| Revert "Make migration SQLite compatible"
This reverts commit 768d85cccb17c8757dd8d14dad220d0b87568264.
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2017-05-09 09:24
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('attempts', '0007_auto_20161004_0927'),
]
operations = [
migrations.AddField(
model_name='attempt',
name='submission_date',
field=models.DateTimeField(null=True),
),
migrations.AddField(
model_name='historicalattempt',
name='submission_date',
field=models.DateTimeField(null=True),
),
migrations.RunSQL(
'UPDATE attempts_historicalattempt SET submission_date = history_date'
),
migrations.RunSQL(
'''UPDATE attempts_attempt
SET submission_date = (
SELECT max(history_date)
FROM attempts_historicalattempt
WHERE attempts_attempt.user_id = user_id
AND attempts_attempt.part_id = part_id
)
'''
),
migrations.AlterField(
model_name='attempt',
name='submission_date',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='historicalattempt',
name='submission_date',
field=models.DateTimeField(blank=True, editable=False),
),
]
|
d565786278eaf32761957dd1e064a5d549ef3ab4 | praw/models/reddit/mixins/savable.py | praw/models/reddit/mixins/savable.py | """Provide the SavableMixin class."""
from ....const import API_PATH
class SavableMixin(object):
"""Interface for RedditBase classes that can be saved."""
def save(self, category=None):
"""Save the object.
:param category: The category to save to (Default: None).
"""
self._reddit.post(API_PATH['save'], data={'category': category,
'id': self.fullname})
def unsave(self):
"""Unsave the object."""
self._reddit.post(API_PATH['unsave'], data={'id': self.fullname})
| """Provide the SavableMixin class."""
from ....const import API_PATH
class SavableMixin(object):
    """Interface for RedditBase classes that can be saved."""

    def save(self, category=None):
        """Save the object.

        :param category: (Gold) The category to save to (Default:
            None). If your user does not have gold this value is ignored by
            Reddit.

        """
        data = {'category': category, 'id': self.fullname}
        self._reddit.post(API_PATH['save'], data=data)

    def unsave(self):
        """Unsave the object."""
        data = {'id': self.fullname}
        self._reddit.post(API_PATH['unsave'], data=data)
| Clarify that category is a gold feature for saving an item | Clarify that category is a gold feature for saving an item
| Python | bsd-2-clause | 13steinj/praw,RGood/praw,RGood/praw,darthkedrik/praw,darthkedrik/praw,leviroth/praw,gschizas/praw,leviroth/praw,gschizas/praw,praw-dev/praw,nmtake/praw,praw-dev/praw,nmtake/praw,13steinj/praw | """Provide the SavableMixin class."""
from ....const import API_PATH
class SavableMixin(object):
    """Interface for RedditBase classes that can be saved."""

    def save(self, category=None):
        """Save the object.

        :param category: (Gold) The category to save to (Default:
            None). If your user does not have gold this value is ignored by
            Reddit.

        """
        data = {'category': category, 'id': self.fullname}
        self._reddit.post(API_PATH['save'], data=data)

    def unsave(self):
        """Unsave the object."""
        data = {'id': self.fullname}
        self._reddit.post(API_PATH['unsave'], data=data)
| Clarify that category is a gold feature for saving an item
"""Provide the SavableMixin class."""
from ....const import API_PATH
class SavableMixin(object):
"""Interface for RedditBase classes that can be saved."""
def save(self, category=None):
"""Save the object.
:param category: The category to save to (Default: None).
"""
self._reddit.post(API_PATH['save'], data={'category': category,
'id': self.fullname})
def unsave(self):
"""Unsave the object."""
self._reddit.post(API_PATH['unsave'], data={'id': self.fullname})
|
146832fe1eba0bc22125ade183f34621de5625fa | apps/bluebottle_utils/fields.py | apps/bluebottle_utils/fields.py | from decimal import Decimal
from django.db import models
class MoneyField(models.DecimalField):
def __init__(self, *args, **kwargs):
kwargs.setdefault('max_digits', 9)
kwargs.setdefault('decimal_places', 2)
super(MoneyField, self).__init__(*args, default=Decimal('0.00'), **kwargs)
| from decimal import Decimal
from django.db import models
class MoneyField(models.DecimalField):
    """A DecimalField tuned for monetary amounts.

    Defaults to 9 max digits, 2 decimal places and ``Decimal('0.00')``;
    each option can still be overridden by the caller.
    """

    def __init__(self, *args, **kwargs):
        defaults = (
            ('max_digits', 9),
            ('decimal_places', 2),
            ('default', Decimal('0.00')),
        )
        for option, value in defaults:
            kwargs.setdefault(option, value)
        super(MoneyField, self).__init__(*args, **kwargs)
# If south is installed, ensure that MoneyField will be introspected just like a normal DecimalField
try:
    from south.modelsinspector import add_introspection_rules
except ImportError:
    # South is not installed; plain Django migrations need no extra rule.
    pass
else:
    # Raw string: "\." in a normal literal is an invalid escape sequence
    # (DeprecationWarning since Python 3.6); the regex value is unchanged.
    add_introspection_rules([], [r"^apps\.bluebottle_utils\.fields\.MoneyField"])
| Add south introspection rule for MoneyField. | Add south introspection rule for MoneyField.
| Python | bsd-3-clause | onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site | from decimal import Decimal
from django.db import models
class MoneyField(models.DecimalField):
    """A DecimalField tuned for monetary amounts.

    Defaults to 9 max digits, 2 decimal places and ``Decimal('0.00')``;
    each option can still be overridden by the caller.
    """

    def __init__(self, *args, **kwargs):
        defaults = (
            ('max_digits', 9),
            ('decimal_places', 2),
            ('default', Decimal('0.00')),
        )
        for option, value in defaults:
            kwargs.setdefault(option, value)
        super(MoneyField, self).__init__(*args, **kwargs)
# If south is installed, ensure that MoneyField will be introspected just like a normal DecimalField
try:
    from south.modelsinspector import add_introspection_rules
except ImportError:
    # South is not installed; plain Django migrations need no extra rule.
    pass
else:
    # Raw string: "\." in a normal literal is an invalid escape sequence
    # (DeprecationWarning since Python 3.6); the regex value is unchanged.
    add_introspection_rules([], [r"^apps\.bluebottle_utils\.fields\.MoneyField"])
| Add south introspection rule for MoneyField.
from decimal import Decimal
from django.db import models
class MoneyField(models.DecimalField):
def __init__(self, *args, **kwargs):
kwargs.setdefault('max_digits', 9)
kwargs.setdefault('decimal_places', 2)
super(MoneyField, self).__init__(*args, default=Decimal('0.00'), **kwargs)
|
fae9990c2cd12ebc65abb9cbabe1b53fde9b4eec | wtforms/ext/i18n/form.py | wtforms/ext/i18n/form.py | import warnings
from wtforms import form
from wtforms.ext.i18n.utils import get_translations
translations_cache = {}
class Form(form.Form):
"""
Base form for a simple localized WTForms form.
This will use the stdlib gettext library to retrieve an appropriate
translations object for the language, by default using the locale
information from the environment.
If the LANGUAGES class variable is overridden and set to a sequence of
strings, this will be a list of languages by priority to use instead, e.g::
LANGUAGES = ['en_GB', 'en']
One can also provide the languages by passing `LANGUAGES=` to the
constructor of the form.
Translations objects are cached to prevent having to get a new one for the
same languages every instantiation.
"""
LANGUAGES = None
def __init__(self, *args, **kwargs):
warnings.warn('wtforms.ext.i18n will be removed in WTForms 1.2', DeprecationWarning)
super(Form, self).__init__(*args, **kwargs)
def _get_translations(self):
languages = tuple(self.LANGUAGES) if self.LANGUAGES else None
if languages not in translations_cache:
translations_cache[languages] = get_translations(languages)
return translations_cache[languages]
| import warnings
from wtforms import form
from wtforms.ext.i18n.utils import get_translations
translations_cache = {}
class Form(form.Form):
    """
    Base form for a simple localized WTForms form.
    **NOTE** this class is now un-necessary as the i18n features have
    been moved into the core of WTForms, but it will be kept for
    compatibility reasons until WTForms 1.2.
    This will use the stdlib gettext library to retrieve an appropriate
    translations object for the language, by default using the locale
    information from the environment.
    If the LANGUAGES class variable is overridden and set to a sequence of
    strings, this will be a list of languages by priority to use instead, e.g::
        LANGUAGES = ['en_GB', 'en']
    One can also provide the languages by passing `LANGUAGES=` to the
    constructor of the form.
    Translations objects are cached to prevent having to get a new one for the
    same languages every instantiation.
    """
    LANGUAGES = None

    def __init__(self, *args, **kwargs):
        warnings.warn('i18n is now in core, wtforms.ext.i18n will be removed in WTForms 1.2', DeprecationWarning)
        super(Form, self).__init__(*args, **kwargs)

    def _get_translations(self):
        # Cache key: a hashable snapshot of the configured languages.
        cache_key = tuple(self.LANGUAGES) if self.LANGUAGES else None
        try:
            return translations_cache[cache_key]
        except KeyError:
            translations_cache[cache_key] = get_translations(cache_key)
            return translations_cache[cache_key]
| Make documentation more explicit for WTForms deprecation. | Make documentation more explicit for WTForms deprecation.
| Python | bsd-3-clause | cklein/wtforms,jmagnusson/wtforms,crast/wtforms,pawl/wtforms,subyraman/wtforms,Aaron1992/wtforms,hsum/wtforms,wtforms/wtforms,Xender/wtforms,skytreader/wtforms,pawl/wtforms,Aaron1992/wtforms | import warnings
from wtforms import form
from wtforms.ext.i18n.utils import get_translations
translations_cache = {}
class Form(form.Form):
    """
    Base form for a simple localized WTForms form.
    **NOTE** this class is now un-necessary as the i18n features have
    been moved into the core of WTForms, but it will be kept for
    compatibility reasons until WTForms 1.2.
    This will use the stdlib gettext library to retrieve an appropriate
    translations object for the language, by default using the locale
    information from the environment.
    If the LANGUAGES class variable is overridden and set to a sequence of
    strings, this will be a list of languages by priority to use instead, e.g::
        LANGUAGES = ['en_GB', 'en']
    One can also provide the languages by passing `LANGUAGES=` to the
    constructor of the form.
    Translations objects are cached to prevent having to get a new one for the
    same languages every instantiation.
    """
    LANGUAGES = None

    def __init__(self, *args, **kwargs):
        warnings.warn('i18n is now in core, wtforms.ext.i18n will be removed in WTForms 1.2', DeprecationWarning)
        super(Form, self).__init__(*args, **kwargs)

    def _get_translations(self):
        # Cache key: a hashable snapshot of the configured languages.
        cache_key = tuple(self.LANGUAGES) if self.LANGUAGES else None
        try:
            return translations_cache[cache_key]
        except KeyError:
            translations_cache[cache_key] = get_translations(cache_key)
            return translations_cache[cache_key]
| Make documentation more explicit for WTForms deprecation.
import warnings
from wtforms import form
from wtforms.ext.i18n.utils import get_translations
translations_cache = {}
class Form(form.Form):
"""
Base form for a simple localized WTForms form.
This will use the stdlib gettext library to retrieve an appropriate
translations object for the language, by default using the locale
information from the environment.
If the LANGUAGES class variable is overridden and set to a sequence of
strings, this will be a list of languages by priority to use instead, e.g::
LANGUAGES = ['en_GB', 'en']
One can also provide the languages by passing `LANGUAGES=` to the
constructor of the form.
Translations objects are cached to prevent having to get a new one for the
same languages every instantiation.
"""
LANGUAGES = None
def __init__(self, *args, **kwargs):
warnings.warn('wtforms.ext.i18n will be removed in WTForms 1.2', DeprecationWarning)
super(Form, self).__init__(*args, **kwargs)
def _get_translations(self):
languages = tuple(self.LANGUAGES) if self.LANGUAGES else None
if languages not in translations_cache:
translations_cache[languages] = get_translations(languages)
return translations_cache[languages]
|
2e40bccd158d0dd3a8e741704f055dbe7a04e3a5 | heat/db/sqlalchemy/migrate_repo/manage.py | heat/db/sqlalchemy/migrate_repo/manage.py | #!/usr/bin/env python
from migrate.versioning.shell import main
if __name__ == '__main__':
main(debug='False')
| #!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from migrate.versioning.shell import main
if __name__ == '__main__':
main(debug='False')
| Add Apache 2.0 license to source file | Add Apache 2.0 license to source file
As per OpenStack licensing guide lines [1]:
[H102 H103] Newly contributed Source Code should be licensed under
the Apache 2.0 license.
[H104] Files with no code shouldn't contain any license header nor
comments, and must be left completely empty.
[1] http://docs.openstack.org/developer/hacking/#openstack-licensing
Change-Id: I82387fec7ac94001a6c2379321ebf1f2e3f40c12
| Python | apache-2.0 | openstack/heat,noironetworks/heat,openstack/heat,noironetworks/heat | #!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from migrate.versioning.shell import main
# Entry point for sqlalchemy-migrate's repository-management CLI.
if __name__ == '__main__':
    # NOTE(review): ``debug`` is passed as the *string* 'False' -- this
    # mirrors the stock sqlalchemy-migrate manage.py template, which
    # presumably coerces string flags itself; confirm before changing it
    # to a boolean.
    main(debug='False')
| Add Apache 2.0 license to source file
As per OpenStack licensing guide lines [1]:
[H102 H103] Newly contributed Source Code should be licensed under
the Apache 2.0 license.
[H104] Files with no code shouldn't contain any license header nor
comments, and must be left completely empty.
[1] http://docs.openstack.org/developer/hacking/#openstack-licensing
Change-Id: I82387fec7ac94001a6c2379321ebf1f2e3f40c12
#!/usr/bin/env python
from migrate.versioning.shell import main
if __name__ == '__main__':
main(debug='False')
|
91c620e228ad73e2e34efbd60813ed35b3f9ef46 | tests/test_dtool_dataset_freeze.py | tests/test_dtool_dataset_freeze.py | """Test the ``dtool dataset create`` command."""
import os
import shutil
from click.testing import CliRunner
from dtoolcore import DataSet
from . import chdir_fixture, tmp_dir_fixture # NOQA
from . import SAMPLE_FILES_DIR
def test_dataset_freeze_functional(chdir_fixture): # NOQA
from dtool_create.dataset import freeze
runner = CliRunner()
# Create an empty dataset
dataset_name = "my_dataset"
dataset = DataSet(dataset_name, data_directory="data")
dataset.persist_to_path(".")
# Add some files to it.
dest_dir = os.path.join(".", dataset.data_directory, "sample_files")
shutil.copytree(SAMPLE_FILES_DIR, dest_dir)
# At this point the manifest has not been updated.
assert len(dataset.identifiers) == 0
result = runner.invoke(freeze, ["."])
assert result.exit_code == 0
# Manifest has been updated.
assert len(dataset.identifiers) == 2
| """Test the ``dtool dataset create`` command."""
import os
import shutil
from click.testing import CliRunner
from dtoolcore import DataSet, ProtoDataSet
from . import chdir_fixture, tmp_dir_fixture # NOQA
from . import SAMPLE_FILES_DIR
def test_dataset_freeze_functional(chdir_fixture): # NOQA
from dtool_create.dataset import create, freeze
runner = CliRunner()
dataset_name = "my_dataset"
result = runner.invoke(create, [dataset_name])
assert result.exit_code == 0
# At this point we have a proto dataset
dataset_abspath = os.path.abspath(dataset_name)
dataset_uri = "disk:{}".format(dataset_abspath)
dataset = ProtoDataSet.from_uri(dataset_uri)
# Add a file to the proto dataset.
sample_file_abspath = os.path.join(dataset_abspath, "data", "hello.txt")
with open(sample_file_abspath, "w") as fh:
fh.write("hello world")
result = runner.invoke(freeze, [dataset_uri])
assert result.exit_code == 0
# Now we have a dataset.
dataset = DataSet.from_uri(dataset_uri)
# Manifest has been updated.
assert len(dataset.identifiers) == 1
| Fix the freeze functional test | Fix the freeze functional test
| Python | mit | jic-dtool/dtool-create | """Test the ``dtool dataset create`` command."""
import os
import shutil
from click.testing import CliRunner
from dtoolcore import DataSet, ProtoDataSet
from . import chdir_fixture, tmp_dir_fixture # NOQA
from . import SAMPLE_FILES_DIR
def test_dataset_freeze_functional(chdir_fixture):  # NOQA
    """End-to-end check: ``dtool create`` followed by ``dtool freeze``.

    ``chdir_fixture`` presumably switches into a scratch working directory
    so files created here do not leak -- confirm in tests/__init__.py.
    """
    from dtool_create.dataset import create, freeze
    runner = CliRunner()
    dataset_name = "my_dataset"
    result = runner.invoke(create, [dataset_name])
    assert result.exit_code == 0
    # At this point we have a proto dataset
    dataset_abspath = os.path.abspath(dataset_name)
    dataset_uri = "disk:{}".format(dataset_abspath)
    dataset = ProtoDataSet.from_uri(dataset_uri)
    # Add a file to the proto dataset.
    sample_file_abspath = os.path.join(dataset_abspath, "data", "hello.txt")
    with open(sample_file_abspath, "w") as fh:
        fh.write("hello world")
    result = runner.invoke(freeze, [dataset_uri])
    assert result.exit_code == 0
    # Now we have a dataset.
    dataset = DataSet.from_uri(dataset_uri)
    # Manifest has been updated: exactly one item (hello.txt added above).
    assert len(dataset.identifiers) == 1
| Fix the freeze functional test
"""Test the ``dtool dataset create`` command."""
import os
import shutil
from click.testing import CliRunner
from dtoolcore import DataSet
from . import chdir_fixture, tmp_dir_fixture # NOQA
from . import SAMPLE_FILES_DIR
def test_dataset_freeze_functional(chdir_fixture): # NOQA
from dtool_create.dataset import freeze
runner = CliRunner()
# Create an empty dataset
dataset_name = "my_dataset"
dataset = DataSet(dataset_name, data_directory="data")
dataset.persist_to_path(".")
# Add some files to it.
dest_dir = os.path.join(".", dataset.data_directory, "sample_files")
shutil.copytree(SAMPLE_FILES_DIR, dest_dir)
# At this point the manifest has not been updated.
assert len(dataset.identifiers) == 0
result = runner.invoke(freeze, ["."])
assert result.exit_code == 0
# Manifest has been updated.
assert len(dataset.identifiers) == 2
|
5a641736faf6bb3ce335480848464a1f22fab040 | fabfile.py | fabfile.py | # -*- coding: utf-8 -*-
from contextlib import nested
from fabric.api import *
def prepare_project():
u"""
Enters the directory and sources environment configuration.
I know ``nested`` is deprecated, but what a nice shortcut it is here ;)
"""
return nested(
cd(PROJECT_PATH),
prefix("source ../.virtualenvs/variablestars3/bin/activate")
)
PROJECT_PATH = "$HOME/variablestars.net"
env.roledefs = {
'web': ["variablestars2@variablestars.net"],
}
env.color = True
env.forward_agent = True
@task
@roles("web")
def git_pull():
with cd(PROJECT_PATH):
run("git pull origin master")
@task
@roles("web")
def update_requirements():
with prepare_project():
run("pip install -r requirements.txt")
run("source ~/.nvm/nvm.sh && npm install")
@task
@roles("web")
def migrate():
with prepare_project():
run("python manage.py syncdb")
run("python manage.py migrate")
@task
@roles("web")
def collect_static():
with prepare_project():
run("python manage.py collectstatic --noinput")
@task
@roles("web")
def restart():
run("appctl restart variablestars2")
@task
@roles("web")
def deploy():
git_pull()
update_requirements()
migrate()
collect_static()
restart()
| # -*- coding: utf-8 -*-
from contextlib import nested
from fabric.api import *
def prepare_project():
u"""
Enters the directory and sources environment configuration.
I know ``nested`` is deprecated, but what a nice shortcut it is here ;)
"""
return nested(
cd(PROJECT_PATH),
prefix("source ../.virtualenvs/variablestars3/bin/activate")
)
PROJECT_PATH = "$HOME/variablestars.net"
env.roledefs = {
'web': ["variablestars2@variablestars.net"],
}
env.color = True
env.forward_agent = True
env.use_ssh_config = True
@task
@roles("web")
def git_pull():
with cd(PROJECT_PATH):
run("git pull origin master")
@task
@roles("web")
def update_requirements():
with prepare_project():
run("pip install -r requirements.txt")
run("source ~/.nvm/nvm.sh && npm install")
@task
@roles("web")
def migrate():
with prepare_project():
run("python manage.py syncdb")
run("python manage.py migrate")
@task
@roles("web")
def collect_static():
with prepare_project():
run("python manage.py collectstatic --noinput")
@task
@roles("web")
def restart():
run("appctl restart variablestars2")
@task
@roles("web")
def deploy():
git_pull()
update_requirements()
migrate()
collect_static()
restart()
| Make Fabric honor .ssh/config settings | Make Fabric honor .ssh/config settings
| Python | mit | zsiciarz/variablestars.net,zsiciarz/variablestars.net,zsiciarz/variablestars.net | # -*- coding: utf-8 -*-
from contextlib import nested
from fabric.api import *
def prepare_project():
    u"""
    Enters the directory and sources environment configuration.
    I know ``nested`` is deprecated, but what a nice shortcut it is here ;)
    """
    # NOTE: ``contextlib.nested`` was removed in Python 3, so this fabfile
    # only runs under Python 2.
    return nested(
        cd(PROJECT_PATH),
        prefix("source ../.virtualenvs/variablestars3/bin/activate")
    )
# Remote checkout location; $HOME is expanded by the remote shell.
PROJECT_PATH = "$HOME/variablestars.net"
env.roledefs = {
    'web': ["variablestars2@variablestars.net"],
}
env.color = True
# Forward the local SSH agent so git operations on the server can
# authenticate with the developer's key.
env.forward_agent = True
# Honor the user's ~/.ssh/config host settings.
env.use_ssh_config = True
@task
@roles("web")
def git_pull():
    # Update the remote checkout from the main branch.
    with cd(PROJECT_PATH):
        run("git pull origin master")
@task
@roles("web")
def update_requirements():
    # Install Python and Node dependencies inside the project's virtualenv.
    with prepare_project():
        run("pip install -r requirements.txt")
        run("source ~/.nvm/nvm.sh && npm install")
@task
@roles("web")
def migrate():
    # Apply database schema changes.
    with prepare_project():
        run("python manage.py syncdb")
        run("python manage.py migrate")
@task
@roles("web")
def collect_static():
    # Gather static assets for the web server to serve.
    with prepare_project():
        run("python manage.py collectstatic --noinput")
@task
@roles("web")
def restart():
    # Restart the application process via the host's app controller.
    run("appctl restart variablestars2")
@task
@roles("web")
def deploy():
    # Full deployment pipeline: code, deps, schema, assets, restart.
    git_pull()
    update_requirements()
    migrate()
    collect_static()
    restart()
| Make Fabric honor .ssh/config settings
# -*- coding: utf-8 -*-
from contextlib import nested
from fabric.api import *
def prepare_project():
u"""
Enters the directory and sources environment configuration.
I know ``nested`` is deprecated, but what a nice shortcut it is here ;)
"""
return nested(
cd(PROJECT_PATH),
prefix("source ../.virtualenvs/variablestars3/bin/activate")
)
PROJECT_PATH = "$HOME/variablestars.net"
env.roledefs = {
'web': ["variablestars2@variablestars.net"],
}
env.color = True
env.forward_agent = True
@task
@roles("web")
def git_pull():
with cd(PROJECT_PATH):
run("git pull origin master")
@task
@roles("web")
def update_requirements():
with prepare_project():
run("pip install -r requirements.txt")
run("source ~/.nvm/nvm.sh && npm install")
@task
@roles("web")
def migrate():
with prepare_project():
run("python manage.py syncdb")
run("python manage.py migrate")
@task
@roles("web")
def collect_static():
with prepare_project():
run("python manage.py collectstatic --noinput")
@task
@roles("web")
def restart():
run("appctl restart variablestars2")
@task
@roles("web")
def deploy():
git_pull()
update_requirements()
migrate()
collect_static()
restart()
|
923ae01ab8beadfd73c5275f0c954510d3a13832 | coherence/__init__.py | coherence/__init__.py | import platform
import sys
__version_info__ = (0, 6, 7)
__version__ = '.'.join(map(str, __version_info__))
SERVER_ID = ','.join([platform.system(),
platform.release(),
'UPnP/1.0,Coherence UPnP framework',
__version__])
try:
from twisted import version as twisted_version
from twisted.web import version as twisted_web_version
from twisted.python.versions import Version
except ImportError, exc:
# log error to stderr, might be useful for debugging purpose
sys.stderr.write("Twisted >= 2.5 and Twisted.Web >= 2.5 are required. " \
"Please install them.\n")
raise
try:
if twisted_version < Version("twisted", 2, 5, 0):
raise ImportError("Twisted >= 2.5 is required. Please install it.")
except ImportError, exc:
# log error to stderr, might be useful for debugging purpose
for arg in exc.args:
sys.stderr.write("%s\n" % arg)
raise
| import platform
import sys
__version__ = "0.6.7.dev0"
SERVER_ID = ','.join([platform.system(),
platform.release(),
'UPnP/1.0,Coherence UPnP framework',
__version__])
try:
from twisted import version as twisted_version
from twisted.web import version as twisted_web_version
from twisted.python.versions import Version
except ImportError, exc:
# log error to stderr, might be useful for debugging purpose
sys.stderr.write("Twisted >= 2.5 and Twisted.Web >= 2.5 are required. " \
"Please install them.\n")
raise
try:
if twisted_version < Version("twisted", 2, 5, 0):
raise ImportError("Twisted >= 2.5 is required. Please install it.")
except ImportError, exc:
# log error to stderr, might be useful for debugging purpose
for arg in exc.args:
sys.stderr.write("%s\n" % arg)
raise
| Switch to PEP 440 compliant version string and bump to 0.6.7.dev0. | Switch to PEP 440 compliant version string and bump to 0.6.7.dev0.
| Python | mit | coherence-project/Coherence,coherence-project/Coherence | import platform
import sys
__version__ = "0.6.7.dev0"
SERVER_ID = ','.join([platform.system(),
platform.release(),
'UPnP/1.0,Coherence UPnP framework',
__version__])
try:
from twisted import version as twisted_version
from twisted.web import version as twisted_web_version
from twisted.python.versions import Version
except ImportError, exc:
# log error to stderr, might be useful for debugging purpose
sys.stderr.write("Twisted >= 2.5 and Twisted.Web >= 2.5 are required. " \
"Please install them.\n")
raise
try:
if twisted_version < Version("twisted", 2, 5, 0):
raise ImportError("Twisted >= 2.5 is required. Please install it.")
except ImportError, exc:
# log error to stderr, might be useful for debugging purpose
for arg in exc.args:
sys.stderr.write("%s\n" % arg)
raise
| Switch to PEP 440 compliant version string and bump to 0.6.7.dev0.
import platform
import sys
__version_info__ = (0, 6, 7)
__version__ = '.'.join(map(str, __version_info__))
SERVER_ID = ','.join([platform.system(),
platform.release(),
'UPnP/1.0,Coherence UPnP framework',
__version__])
try:
from twisted import version as twisted_version
from twisted.web import version as twisted_web_version
from twisted.python.versions import Version
except ImportError, exc:
# log error to stderr, might be useful for debugging purpose
sys.stderr.write("Twisted >= 2.5 and Twisted.Web >= 2.5 are required. " \
"Please install them.\n")
raise
try:
if twisted_version < Version("twisted", 2, 5, 0):
raise ImportError("Twisted >= 2.5 is required. Please install it.")
except ImportError, exc:
# log error to stderr, might be useful for debugging purpose
for arg in exc.args:
sys.stderr.write("%s\n" % arg)
raise
|
92622b83b1b191fec22655fa727fbd87c5af211f | spelling_ru.py | spelling_ru.py |
def pl_1(order):
"""2, 3, 4"""
return (order == 'тысяча') and 'тысячи' or order + 'а'
def pl_2(order):
"""5 и больше"""
return (order == 'тысяча') and 'тысяч' or order + 'ов'
RU_PASSES = """
^ 1 <order> = <order>
1 <thousand> = одна тысяча
2 <thousand> = две тысячи
<2_to_4> <order> = <order, pl_1>
<not_1> <order> = <order, pl_2>
"""
| Add preliminary Russian spelling definitions | Add preliminary Russian spelling definitions | Python | mit | alco/numspell,alco/numspell |
def pl_1(order):
"""2, 3, 4"""
return (order == 'тысяча') and 'тысячи' or order + 'а'
def pl_2(order):
"""5 и больше"""
return (order == 'тысяча') and 'тысяч' or order + 'ов'
RU_PASSES = """
^ 1 <order> = <order>
1 <thousand> = одна тысяча
2 <thousand> = две тысячи
<2_to_4> <order> = <order, pl_1>
<not_1> <order> = <order, pl_2>
"""
| Add preliminary Russian spelling definitions
|
|
a1e18385c2c5df9db8390b2da4d5baa2465f150e | webcomix/tests/test_comic_availability.py | webcomix/tests/test_comic_availability.py | import pytest
from webcomix.comic import Comic
from webcomix.supported_comics import supported_comics
from webcomix.util import check_first_pages
@pytest.mark.slow
def test_supported_comics():
for comic_name, comic_info in supported_comics.items():
comic = Comic(comic_name, *comic_info)
first_pages = comic.verify_xpath()
check_first_pages(first_pages)
| import pytest
from webcomix.comic import Comic
from webcomix.supported_comics import supported_comics
from webcomix.util import check_first_pages
@pytest.mark.slow
@pytest.mark.parametrize("comic_name", list(supported_comics.keys()))
def test_supported_comics(comic_name):
comic = Comic(comic_name, *supported_comics[comic_name])
first_pages = comic.verify_xpath()
check_first_pages(first_pages)
| Test comic availability of all supported comics independently through parametrization | Test comic availability of all supported comics independently through parametrization
| Python | mit | J-CPelletier/webcomix,J-CPelletier/WebComicToCBZ,J-CPelletier/webcomix | import pytest
from webcomix.comic import Comic
from webcomix.supported_comics import supported_comics
from webcomix.util import check_first_pages
@pytest.mark.slow
@pytest.mark.parametrize("comic_name", list(supported_comics.keys()))
def test_supported_comics(comic_name):
comic = Comic(comic_name, *supported_comics[comic_name])
first_pages = comic.verify_xpath()
check_first_pages(first_pages)
| Test comic availability of all supported comics independently through parametrization
import pytest
from webcomix.comic import Comic
from webcomix.supported_comics import supported_comics
from webcomix.util import check_first_pages
@pytest.mark.slow
def test_supported_comics():
for comic_name, comic_info in supported_comics.items():
comic = Comic(comic_name, *comic_info)
first_pages = comic.verify_xpath()
check_first_pages(first_pages)
|
9249f1f2fce010cb0378080b5b7fef55235eefea | rnacentral/portal/migrations/0010_add_precomputed_rna_type.py | rnacentral/portal/migrations/0010_add_precomputed_rna_type.py | from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('portal', '0007_add_precomputed_rna_table'),
]
operations = [
# rna_type is a / seperated field that represents the set of rna_types
# for a given sequence.
migrations.AddField("RnaPrecomputed", "rna_type", models.CharField(max_length=250))
]
| from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('portal', '0007_add_precomputed_rna_table'),
]
operations = [
# rna_type is a / seperated field that represents the set of rna_types
# for a given sequence.
migrations.AddField("RnaPrecomputed", "rna_type", models.CharField(max_length=40))
]
| Drop size of rna_type column to 40 | Drop size of rna_type column to 40
Largest rna_type is 32 so 40 should give us plenty of buffer space as
per Anton's suggestion.
| Python | apache-2.0 | RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode | from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('portal', '0007_add_precomputed_rna_table'),
]
operations = [
# rna_type is a / seperated field that represents the set of rna_types
# for a given sequence.
migrations.AddField("RnaPrecomputed", "rna_type", models.CharField(max_length=40))
]
| Drop size of rna_type column to 40
Largest rna_type is 32 so 40 should give us plenty of buffer space as
per Anton's suggestion.
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('portal', '0007_add_precomputed_rna_table'),
]
operations = [
# rna_type is a / seperated field that represents the set of rna_types
# for a given sequence.
migrations.AddField("RnaPrecomputed", "rna_type", models.CharField(max_length=250))
]
|
a506d8d45d0824b9b70af35831c1bde69906617e | test/test_soft.py | test/test_soft.py |
import os
import base64
import struct
import tempfile
import unittest
from u2flib_host.soft import SoftU2FDevice
from u2flib_host.constants import INS_ENROLL, INS_SIGN
CLIENT_PARAM = b'clientABCDEFGHIJKLMNOPQRSTUVWXYZ' # 32 bytes
APP_PARAM = b'test_SoftU2FDevice0123456789ABCD' # 32 bytes
class TestSoftU2FDevice(unittest.TestCase):
def setUp(self):
with tempfile.NamedTemporaryFile(delete=False) as f:
f.write(b'{"counter": 0, "keys": {}}')
self.device_path = f.name
def tearDown(self):
os.unlink(self.device_path)
def test_init(self):
dev = SoftU2FDevice(self.device_path)
self.assertEqual(dev.data['counter'], 0)
self.assertEqual(dev.data['keys'], {})
def test_get_supported_versions(self):
dev = SoftU2FDevice(self.device_path)
self.assertEqual(dev.get_supported_versions(), ['U2F_V2'])
def test_registeration(self):
dev = SoftU2FDevice(self.device_path)
request = struct.pack('32s 32s', CLIENT_PARAM, APP_PARAM)
response = dev.send_apdu(INS_ENROLL, data=request)
self.assertEqual(dev.data['counter'], 0)
self.assertTrue(len(dev.data['keys']), 1)
pub_key, key_handle_len, key_handle, cert, signature = struct.unpack('x 65s B 64s %is 32s' % (len(response)-(1+65+1+64+32),), response)
self.assertEqual(len(key_handle), key_handle_len)
kh_hex = base64.b16encode(key_handle).decode('ascii')
self.assertIn(kh_hex, dev.data['keys'])
self.assertEqual(base64.b16decode(dev.data['keys'][kh_hex]['app_param']), APP_PARAM)
self.assertEqual(dev.data['keys'][kh_hex]['priv_key'].split('\n')[0],
'-----BEGIN PRIVATE KEY-----')
request = struct.pack('32s 32s B %is' % key_handle_len,
CLIENT_PARAM, APP_PARAM, key_handle_len, key_handle)
response = dev.send_apdu(INS_SIGN, data=request)
self.assertEqual(dev.data['counter'], 1)
touch, counter, signature = struct.unpack('>? I %is' % (len(response)-(1+4),), response)
self.assertTrue(touch)
self.assertEqual(counter, 1)
| Add rudimentary unit test for SoftU2FDevice | Add rudimentary unit test for SoftU2FDevice
| Python | bsd-2-clause | Yubico/python-u2flib-host |
import os
import base64
import struct
import tempfile
import unittest
from u2flib_host.soft import SoftU2FDevice
from u2flib_host.constants import INS_ENROLL, INS_SIGN
CLIENT_PARAM = b'clientABCDEFGHIJKLMNOPQRSTUVWXYZ' # 32 bytes
APP_PARAM = b'test_SoftU2FDevice0123456789ABCD' # 32 bytes
class TestSoftU2FDevice(unittest.TestCase):
def setUp(self):
with tempfile.NamedTemporaryFile(delete=False) as f:
f.write(b'{"counter": 0, "keys": {}}')
self.device_path = f.name
def tearDown(self):
os.unlink(self.device_path)
def test_init(self):
dev = SoftU2FDevice(self.device_path)
self.assertEqual(dev.data['counter'], 0)
self.assertEqual(dev.data['keys'], {})
def test_get_supported_versions(self):
dev = SoftU2FDevice(self.device_path)
self.assertEqual(dev.get_supported_versions(), ['U2F_V2'])
def test_registeration(self):
dev = SoftU2FDevice(self.device_path)
request = struct.pack('32s 32s', CLIENT_PARAM, APP_PARAM)
response = dev.send_apdu(INS_ENROLL, data=request)
self.assertEqual(dev.data['counter'], 0)
self.assertTrue(len(dev.data['keys']), 1)
pub_key, key_handle_len, key_handle, cert, signature = struct.unpack('x 65s B 64s %is 32s' % (len(response)-(1+65+1+64+32),), response)
self.assertEqual(len(key_handle), key_handle_len)
kh_hex = base64.b16encode(key_handle).decode('ascii')
self.assertIn(kh_hex, dev.data['keys'])
self.assertEqual(base64.b16decode(dev.data['keys'][kh_hex]['app_param']), APP_PARAM)
self.assertEqual(dev.data['keys'][kh_hex]['priv_key'].split('\n')[0],
'-----BEGIN PRIVATE KEY-----')
request = struct.pack('32s 32s B %is' % key_handle_len,
CLIENT_PARAM, APP_PARAM, key_handle_len, key_handle)
response = dev.send_apdu(INS_SIGN, data=request)
self.assertEqual(dev.data['counter'], 1)
touch, counter, signature = struct.unpack('>? I %is' % (len(response)-(1+4),), response)
self.assertTrue(touch)
self.assertEqual(counter, 1)
| Add rudimentary unit test for SoftU2FDevice
|
|
fcffabef406cd0d983e4754c58c76760f0204357 | pywikibot/families/commons_family.py | pywikibot/families/commons_family.py | # -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikimedia Commons family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'commons'
self.langs = {
'commons': 'commons.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
self.category_redirect_templates = {
'commons': (u'Category redirect',
u'Categoryredirect',
u'Synonym taxon category redirect',
u'Invalid taxon category redirect',
u'Monotypic taxon category redirect',
u'See cat',
u'Seecat',
u'See category',
u'Catredirect',
u'Cat redirect',
u'Cat-red',
u'Catredir',
u'Redirect category'),
}
self.disambcatname = {
'commons': u'Disambiguation'
}
def ssl_pathprefix(self, code):
return "/wikipedia/commons"
| # -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikimedia Commons family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'commons'
self.langs = {
'commons': 'commons.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
self.category_redirect_templates = {
'commons': (u'Category redirect',
u'Categoryredirect',
u'Synonym taxon category redirect',
u'Invalid taxon category redirect',
u'Monotypic taxon category redirect',
u'See cat',
u'Seecat',
u'See category',
u'Catredirect',
u'Cat redirect',
u'Cat-red',
u'Catredir',
u'Redirect category'),
}
self.disambcatname = {
'commons': u'Disambiguation'
}
def ssl_pathprefix(self, code):
return "/wikipedia/commons"
def shared_data_repository(self, code, transcluded=False):
return ('wikidata', 'wikidata')
| Enable Wikidata for Wikimedia Commons | Enable Wikidata for Wikimedia Commons
Change-Id: Ibc8734f65dcd97dc7af9674efe8655fe01dc61d3
| Python | mit | smalyshev/pywikibot-core,Darkdadaah/pywikibot-core,npdoty/pywikibot,jayvdb/pywikibot-core,magul/pywikibot-core,VcamX/pywikibot-core,h4ck3rm1k3/pywikibot-core,wikimedia/pywikibot-core,hasteur/g13bot_tools_new,darthbhyrava/pywikibot-local,wikimedia/pywikibot-core,hasteur/g13bot_tools_new,Darkdadaah/pywikibot-core,emijrp/pywikibot-core,valhallasw/pywikibot-core,PersianWikipedia/pywikibot-core,icyflame/batman,hasteur/g13bot_tools_new,xZise/pywikibot-core,trishnaguha/pywikibot-core,jayvdb/pywikibot-core,h4ck3rm1k3/pywikibot-core,happy5214/pywikibot-core,TridevGuha/pywikibot-core,magul/pywikibot-core,npdoty/pywikibot,happy5214/pywikibot-core | # -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikimedia Commons family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'commons'
self.langs = {
'commons': 'commons.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
self.category_redirect_templates = {
'commons': (u'Category redirect',
u'Categoryredirect',
u'Synonym taxon category redirect',
u'Invalid taxon category redirect',
u'Monotypic taxon category redirect',
u'See cat',
u'Seecat',
u'See category',
u'Catredirect',
u'Cat redirect',
u'Cat-red',
u'Catredir',
u'Redirect category'),
}
self.disambcatname = {
'commons': u'Disambiguation'
}
def ssl_pathprefix(self, code):
return "/wikipedia/commons"
def shared_data_repository(self, code, transcluded=False):
return ('wikidata', 'wikidata')
| Enable Wikidata for Wikimedia Commons
Change-Id: Ibc8734f65dcd97dc7af9674efe8655fe01dc61d3
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikimedia Commons family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'commons'
self.langs = {
'commons': 'commons.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
self.category_redirect_templates = {
'commons': (u'Category redirect',
u'Categoryredirect',
u'Synonym taxon category redirect',
u'Invalid taxon category redirect',
u'Monotypic taxon category redirect',
u'See cat',
u'Seecat',
u'See category',
u'Catredirect',
u'Cat redirect',
u'Cat-red',
u'Catredir',
u'Redirect category'),
}
self.disambcatname = {
'commons': u'Disambiguation'
}
def ssl_pathprefix(self, code):
return "/wikipedia/commons"
|
8fa1cae882c0ff020c0b9c3c2fac9e4248d46ce4 | deploy/common/sqlite_wrapper.py | deploy/common/sqlite_wrapper.py | import sqlite3
class SQLiteWrapper:
def __init__(self, db):
self.conn = sqlite3.connect(db)
self.cursor = self.conn.cursor()
self.cursor.execute("PRAGMA cache_size=-16000")
self.cursor.execute("PRAGMA synchronous=OFF")
self.conn.commit()
def query(self, sql, params=None, iterator=False, fetch_one=False, multi=False, many_rows=None):
try:
with self.conn as conn:
cursor = conn.cursor()
if many_rows:
cursor.executemany(sql, many_rows)
return
if multi:
cursor.executescript(sql)
if params is None and not multi:
cursor.execute(sql)
if params and not multi:
cursor.execute(sql, params)
if iterator:
return cursor
if fetch_one:
return cursor.fetchone()[0]
if not multi:
return cursor.fetchall()
except Exception as e:
raise Exception('Error in executing query ' + sql + ': ' + format(e))
def close(self):
self.conn.close()
| import sqlite3
class SQLiteWrapper:
def __init__(self, db):
self.conn = sqlite3.connect(db)
self.cursor = self.conn.cursor()
self.cursor.execute("PRAGMA page_size=4096")
self.cursor.execute("PRAGMA cache_size=-16000")
self.cursor.execute("PRAGMA synchronous=NORMAL")
self.conn.commit()
def query(self, sql, params=None, iterator=False, fetch_one=False, multi=False, many_rows=None):
try:
with self.conn as conn:
cursor = conn.cursor()
if many_rows:
cursor.executemany(sql, many_rows)
return
if multi:
cursor.executescript(sql)
if params is None and not multi:
cursor.execute(sql)
if params and not multi:
cursor.execute(sql, params)
if iterator:
return cursor
if fetch_one:
return cursor.fetchone()[0]
if not multi:
return cursor.fetchall()
except Exception as e:
raise Exception('Error in executing query ' + sql + ': ' + format(e))
def close(self):
self.conn.close()
| Use PRAGMA synchronous=NORMAL instead of OFF, and set page_size to 4096. | Use PRAGMA synchronous=NORMAL instead of OFF, and set page_size to 4096.
| Python | mit | mikispag/bitiodine | import sqlite3
class SQLiteWrapper:
def __init__(self, db):
self.conn = sqlite3.connect(db)
self.cursor = self.conn.cursor()
self.cursor.execute("PRAGMA page_size=4096")
self.cursor.execute("PRAGMA cache_size=-16000")
self.cursor.execute("PRAGMA synchronous=NORMAL")
self.conn.commit()
def query(self, sql, params=None, iterator=False, fetch_one=False, multi=False, many_rows=None):
try:
with self.conn as conn:
cursor = conn.cursor()
if many_rows:
cursor.executemany(sql, many_rows)
return
if multi:
cursor.executescript(sql)
if params is None and not multi:
cursor.execute(sql)
if params and not multi:
cursor.execute(sql, params)
if iterator:
return cursor
if fetch_one:
return cursor.fetchone()[0]
if not multi:
return cursor.fetchall()
except Exception as e:
raise Exception('Error in executing query ' + sql + ': ' + format(e))
def close(self):
self.conn.close()
| Use PRAGMA synchronous=NORMAL instead of OFF, and set page_size to 4096.
import sqlite3
class SQLiteWrapper:
def __init__(self, db):
self.conn = sqlite3.connect(db)
self.cursor = self.conn.cursor()
self.cursor.execute("PRAGMA cache_size=-16000")
self.cursor.execute("PRAGMA synchronous=OFF")
self.conn.commit()
def query(self, sql, params=None, iterator=False, fetch_one=False, multi=False, many_rows=None):
try:
with self.conn as conn:
cursor = conn.cursor()
if many_rows:
cursor.executemany(sql, many_rows)
return
if multi:
cursor.executescript(sql)
if params is None and not multi:
cursor.execute(sql)
if params and not multi:
cursor.execute(sql, params)
if iterator:
return cursor
if fetch_one:
return cursor.fetchone()[0]
if not multi:
return cursor.fetchall()
except Exception as e:
raise Exception('Error in executing query ' + sql + ': ' + format(e))
def close(self):
self.conn.close()
|
1d555c184a10ae4fd84d758105e19b10828543c2 | q2_feature_classifier/tests/__init__.py | q2_feature_classifier/tests/__init__.py | # ----------------------------------------------------------------------------
# Copyright (c) 2016--, Ben Kaehler
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
import tempfile
import shutil
from q2_types.testing import TestPluginBase
class FeatureClassifierTestPluginBase(TestPluginBase):
def setUp(self):
try:
from q2_feature_classifier.plugin_setup import plugin
except ImportError:
self.fail("Could not import plugin object.")
self.plugin = plugin
self.temp_dir = tempfile.TemporaryDirectory(
prefix='q2-feature-classifier-test-temp-')
def _setup_dir(self, filenames, dirfmt):
for filename in filenames:
filepath = self.get_data_path(filename)
shutil.copy(filepath, self.temp_dir.name)
return dirfmt(self.temp_dir.name, mode='r')
| # ----------------------------------------------------------------------------
# Copyright (c) 2016--, Ben Kaehler
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
import tempfile
import shutil
from qiime.plugin.testing import TestPluginBase
class FeatureClassifierTestPluginBase(TestPluginBase):
def setUp(self):
try:
from q2_feature_classifier.plugin_setup import plugin
except ImportError:
self.fail("Could not import plugin object.")
self.plugin = plugin
self.temp_dir = tempfile.TemporaryDirectory(
prefix='q2-feature-classifier-test-temp-')
def _setup_dir(self, filenames, dirfmt):
for filename in filenames:
filepath = self.get_data_path(filename)
shutil.copy(filepath, self.temp_dir.name)
return dirfmt(self.temp_dir.name, mode='r')
| Update import location of TestPluginBase | TST: Update import location of TestPluginBase
| Python | bsd-3-clause | BenKaehler/q2-feature-classifier | # ----------------------------------------------------------------------------
# Copyright (c) 2016--, Ben Kaehler
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
import tempfile
import shutil
from qiime.plugin.testing import TestPluginBase
class FeatureClassifierTestPluginBase(TestPluginBase):
def setUp(self):
try:
from q2_feature_classifier.plugin_setup import plugin
except ImportError:
self.fail("Could not import plugin object.")
self.plugin = plugin
self.temp_dir = tempfile.TemporaryDirectory(
prefix='q2-feature-classifier-test-temp-')
def _setup_dir(self, filenames, dirfmt):
for filename in filenames:
filepath = self.get_data_path(filename)
shutil.copy(filepath, self.temp_dir.name)
return dirfmt(self.temp_dir.name, mode='r')
| TST: Update import location of TestPluginBase
# ----------------------------------------------------------------------------
# Copyright (c) 2016--, Ben Kaehler
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
import tempfile
import shutil
from q2_types.testing import TestPluginBase
class FeatureClassifierTestPluginBase(TestPluginBase):
def setUp(self):
try:
from q2_feature_classifier.plugin_setup import plugin
except ImportError:
self.fail("Could not import plugin object.")
self.plugin = plugin
self.temp_dir = tempfile.TemporaryDirectory(
prefix='q2-feature-classifier-test-temp-')
def _setup_dir(self, filenames, dirfmt):
for filename in filenames:
filepath = self.get_data_path(filename)
shutil.copy(filepath, self.temp_dir.name)
return dirfmt(self.temp_dir.name, mode='r')
|
594869bca15b93a8e583e29b1a65496bd6c058f2 | config.py | config.py | import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
SECRET_KEY = 'f63f65a3f7274455bfd49edf9c6b36bd'
SQLALCHEMY_COMMIT_ON_TEARDOWN = True
@staticmethod
def init_app(app):
pass
class DevelopmentConfig(Config):
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir,
'weather.sqlite')
config = {
'default': DevelopmentConfig
}
| import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
SECRET_KEY = 'f63f65a3f7274455bfd49edf9c6b36bd'
SQLALCHEMY_COMMIT_ON_TEARDOWN = True
SQLALCHEMY_TRACK_MODIFICATIONS = False
@staticmethod
def init_app(app):
pass
class DevelopmentConfig(Config):
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir,
'weather.sqlite')
config = {
'default': DevelopmentConfig
}
| Add SQLALCHEMY_TRACK_MODIFICATION to supress warnings | Add SQLALCHEMY_TRACK_MODIFICATION to supress warnings
| Python | mit | fernando24164/flask_api,fernando24164/flask_api | import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
SECRET_KEY = 'f63f65a3f7274455bfd49edf9c6b36bd'
SQLALCHEMY_COMMIT_ON_TEARDOWN = True
SQLALCHEMY_TRACK_MODIFICATIONS = False
@staticmethod
def init_app(app):
pass
class DevelopmentConfig(Config):
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir,
'weather.sqlite')
config = {
'default': DevelopmentConfig
}
| Add SQLALCHEMY_TRACK_MODIFICATION to supress warnings
import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
SECRET_KEY = 'f63f65a3f7274455bfd49edf9c6b36bd'
SQLALCHEMY_COMMIT_ON_TEARDOWN = True
@staticmethod
def init_app(app):
pass
class DevelopmentConfig(Config):
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir,
'weather.sqlite')
config = {
'default': DevelopmentConfig
}
|
e75aecc3e8606559fa722f7ffb081e2df258b60f | py/search-in-rotated-sorted-array.py | py/search-in-rotated-sorted-array.py | class Solution(object):
def search(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: int
"""
if len(nums) == 1:
return 0 if nums[0] == target else -1
if target >= nums[0]:
L, U = -1, len(nums)
while L + 1 < U:
mid = L + (U - L) / 2
if nums[mid] > target or nums[mid] < nums[0]:
U = mid
else:
L = mid
return L if nums[L] == target else -1
elif target <= nums[-1]:
L, U = -1, len(nums)
while L + 1 < U:
mid = L + (U - L) / 2
if nums[mid] <= target or nums[mid] > nums[-1]:
L = mid
else:
U = mid
return L if nums[L] == target else -1
else:
return -1
| Add py solution for Search in Rotated Sorted Array | Add py solution for Search in Rotated Sorted Array
Search in Rotated Sorted Array: https://leetcode.com/problems/search-in-rotated-sorted-array/
| Python | apache-2.0 | ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode | class Solution(object):
def search(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: int
"""
if len(nums) == 1:
return 0 if nums[0] == target else -1
if target >= nums[0]:
L, U = -1, len(nums)
while L + 1 < U:
mid = L + (U - L) / 2
if nums[mid] > target or nums[mid] < nums[0]:
U = mid
else:
L = mid
return L if nums[L] == target else -1
elif target <= nums[-1]:
L, U = -1, len(nums)
while L + 1 < U:
mid = L + (U - L) / 2
if nums[mid] <= target or nums[mid] > nums[-1]:
L = mid
else:
U = mid
return L if nums[L] == target else -1
else:
return -1
| Add py solution for Search in Rotated Sorted Array
Search in Rotated Sorted Array: https://leetcode.com/problems/search-in-rotated-sorted-array/
|
|
8514d379ac3a9d75722b3ccccd0a9da40d2c5819 | __openerp__.py | __openerp__.py | # -*- coding: utf-8 -*-
{
'name': "l10n_fr_ebics",
'summary': """Implementation of the EBICS banking protocol""",
'description': """
This module provides an interface to echanges files with banks. It's curently a beta version.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
""",
'author': "Aurélien DUMAINE - aurelien.dumaine@free.fr",
'website': "http://www.dumaine.me",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Uncategorized',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
'templates.xml',
'views/ebics.xml',
],
# only loaded in demonstration mode
'demo': [
'demo.xml',
],
}
| # -*- coding: utf-8 -*-
{
'name': "l10n_fr_ebics",
'summary': """Implementation of the EBICS banking protocol""",
'description': """
This module provides an interface to echanges files with banks. It's curently a beta version.
This module isbased on the library ebicsPy. It maps Odoo with the ebicspy API.
https://code.launchpad.net/~aurelien-dumaine/+junk/ebicspy
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
""",
'author': "Aurélien DUMAINE - aurelien.dumaine@free.fr",
'website': "http://www.dumaine.me",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Uncategorized',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
'templates.xml',
'views/ebics.xml',
],
# only loaded in demonstration mode
'demo': [
'demo.xml',
],
}
| Add EbicsPy Launchpad repository url | Add EbicsPy Launchpad repository url
| Python | agpl-3.0 | yuntux/l10n_fr_ebics | # -*- coding: utf-8 -*-
{
'name': "l10n_fr_ebics",
'summary': """Implementation of the EBICS banking protocol""",
'description': """
This module provides an interface to echanges files with banks. It's curently a beta version.
This module isbased on the library ebicsPy. It maps Odoo with the ebicspy API.
https://code.launchpad.net/~aurelien-dumaine/+junk/ebicspy
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
""",
'author': "Aurélien DUMAINE - aurelien.dumaine@free.fr",
'website': "http://www.dumaine.me",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Uncategorized',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
'templates.xml',
'views/ebics.xml',
],
# only loaded in demonstration mode
'demo': [
'demo.xml',
],
}
| Add EbicsPy Launchpad repository url
# -*- coding: utf-8 -*-
{
'name': "l10n_fr_ebics",
'summary': """Implementation of the EBICS banking protocol""",
'description': """
This module provides an interface to echanges files with banks. It's curently a beta version.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
""",
'author': "Aurélien DUMAINE - aurelien.dumaine@free.fr",
'website': "http://www.dumaine.me",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Uncategorized',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
'templates.xml',
'views/ebics.xml',
],
# only loaded in demonstration mode
'demo': [
'demo.xml',
],
}
|
4e84dc31d52412a9d58d5f0c54f5514c0eac5137 | console.py | console.py | from dumpster import Dumpster
import os
i = input('\r>')
if i == 'list':
cwd = os.getcwd()
lcd = os.listdir()
dump = ''
for file in lcd:
if '.dmp' in file:
dump+= ' '+file
print(dump)
| from dumpster import Dumpster
import os
running = True
selected = ''
while running:
#cwd = os.getcwd()
i = input('\r%s>'%(selected))
if i == 'exit':
running = False
if i[0:6] == 'create':
name = i[7:]
Dumpster(name).write_to_dump()
if i == 'list':
if selected is 'none': #list currrent working directory
dirs = ''
lcd = os.listdir()
for file in lcd:
if '.dmp' in file:
dirs+= ' '+file.strip('.dmp')
print(dirs)
else: #list selected dump
#.......................................
if i[0:6] == 'select':
name = i[7:]
selected = name
| Select and Create and List | Select and Create and List
| Python | apache-2.0 | SirGuyOfGibson/source-dump | from dumpster import Dumpster
import os
running = True
selected = ''
while running:
#cwd = os.getcwd()
i = input('\r%s>'%(selected))
if i == 'exit':
running = False
if i[0:6] == 'create':
name = i[7:]
Dumpster(name).write_to_dump()
if i == 'list':
if selected is 'none': #list currrent working directory
dirs = ''
lcd = os.listdir()
for file in lcd:
if '.dmp' in file:
dirs+= ' '+file.strip('.dmp')
print(dirs)
else: #list selected dump
#.......................................
if i[0:6] == 'select':
name = i[7:]
selected = name
| Select and Create and List
from dumpster import Dumpster
import os
i = input('\r>')
if i == 'list':
cwd = os.getcwd()
lcd = os.listdir()
dump = ''
for file in lcd:
if '.dmp' in file:
dump+= ' '+file
print(dump)
|
Subsets and Splits