Columns: input (string, 0–2.17k chars); instruction (string, 18–2.94k chars); output (string, 47–3.36k chars)
""" ---------------- Flask-Mustache ---------------- `Mustache`__ integration for Flask. __ http://mustache.github.com/ Flask-Mustache adds template helpers and context processors to assist Flask developers with integrating the Mustache library into their development process. """ from setuptools import setup setup( name='Flask-MustacheJS', version='0.4.2', url='https://github.com/bradleywright/flask-mustache', license='BSD', author='Bradley Wright', author_email='brad@intranation.com', description='Mustache integration in Flask, with Jinja and client-side libraries.', long_description=__doc__, packages=['flask_mustache'], zip_safe=False, include_package_data=True, platforms='any', install_requires=[ 'Flask', 'pystache' ], classifiers=[ 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development :: Libraries :: Python Modules' ] )
Include *.js and *.jinja files in sdist packages
""" ---------------- Flask-Mustache ---------------- `Mustache`__ integration for Flask. __ http://mustache.github.com/ Flask-Mustache adds template helpers and context processors to assist Flask developers with integrating the Mustache library into their development process. """ from setuptools import setup setup( name='Flask-MustacheJS', version='0.4.3', url='https://github.com/bradleywright/flask-mustache', license='BSD', author='Bradley Wright', author_email='brad@intranation.com', description='Mustache integration in Flask, with Jinja and client-side libraries.', long_description=__doc__, packages=['flask_mustache'], zip_safe=False, include_package_data=True, # include static assets package_data = { '': ['*.jinja', '*.js'] }, platforms='any', install_requires=[ 'Flask', 'pystache' ], classifiers=[ 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development :: Libraries :: Python Modules' ] )
input:
import setuptools

setuptools.setup(
    name="Mongothon",
    version="0.7.13",
    author="Tom Leach",
    author_email="tom@gc.io",
    description="A MongoDB object-document mapping layer for Python",
    license="BSD",
    keywords="mongo mongodb database pymongo odm validation",
    url="http://github.com/gamechanger/mongothon",
    packages=["mongothon"],
    long_description="Mongothon is a MongoDB object-document mapping " +
                     "API for Python, loosely based on the awesome " +
                     "mongoose.js library.",
    install_requires=['pymongo>=2.5.0', 'inflection==0.2.0', 'schemer==0.2.2'],
    tests_require=['mock', 'nose']
)

instruction:
Use version 0.2.3 of schemer and bump the version number to 0.7.14 in the process

output:
import setuptools

setuptools.setup(
    name="Mongothon",
    version="0.7.14",
    author="Tom Leach",
    author_email="tom@gc.io",
    description="A MongoDB object-document mapping layer for Python",
    license="BSD",
    keywords="mongo mongodb database pymongo odm validation",
    url="http://github.com/gamechanger/mongothon",
    packages=["mongothon"],
    long_description="Mongothon is a MongoDB object-document mapping " +
                     "API for Python, loosely based on the awesome " +
                     "mongoose.js library.",
    install_requires=['pymongo>=2.5.0', 'inflection==0.2.0', 'schemer==0.2.3'],
    tests_require=['mock', 'nose']
)
input:
#!/usr/bin/env python

from __future__ import with_statement

import sys

from setuptools import setup, find_packages

long_description = """
Pypimirror - A Pypi mirror script that uses threading and requests
"""

install_requires = [
    'beautifulsoup4==4.4.1',
    'requests==2.9.1',
]

setup(
    name='pypimirror',
    version='0.1.0a',
    description='pypimirror',
    long_description=long_description,
    author='wilypomegranate',
    author_email='wilypomegranate@users.noreply.github.com>',
    packages=find_packages(),
    test_suite='py.test',
    tests_require=['pytest'],
    install_requires=install_requires,
    entry_points={
        'console_scripts': [
            'pypimirror = pypimirror.__main__:main',
        ]
    },
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'Operating System :: Unix',
        'Operating System :: POSIX',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7',
        'Topic :: System :: Systems Administration',
    ],
)

instruction:
Change project name to avoid pypi conflict

output:
#!/usr/bin/env python

from __future__ import with_statement

import sys

from setuptools import setup, find_packages

long_description = """
Pypimirror - A Pypi mirror script that uses threading and requests
"""

install_requires = [
    'beautifulsoup4==4.4.1',
    'requests==2.9.1',
]

setup(
    name='pypimirror-simple',
    version='0.1.0a0',
    description='A simple pypimirror',
    long_description=long_description,
    author='wilypomegranate',
    author_email='wilypomegranate@users.noreply.github.com>',
    packages=find_packages(),
    test_suite='py.test',
    tests_require=['pytest'],
    install_requires=install_requires,
    entry_points={
        'console_scripts': [
            'pypimirror = pypimirror.__main__:main',
        ]
    },
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'Operating System :: Unix',
        'Operating System :: POSIX',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7',
        'Topic :: System :: Systems Administration',
    ],
)
input:
from setuptools import setup, find_packages

setup(
    name = 'CorrelatedVariants',
    version = '0.1.0',
    author = 'Pacific Biosciences',
    author_email = 'devnet@pacificbiosciences.com',
    license = open('LICENSE.txt').read(),
    packages = find_packages('.'),
    package_dir = {'':'.'},
    zip_safe = False,
    scripts=[
        'bin/correlatedVariants',
        'bin/rareCaller'
    ],
    install_requires = [
        'pbcore >= 0.2',
        'numpy >= 1.6.0',
        'scipy >= 0.9.0',
        'h5py >= 1.3.0'
    ]
)

instruction:
Add undeclared dependency on GenomicConsensus

output:
from setuptools import setup, find_packages

setup(
    name = 'CorrelatedVariants',
    version = '0.1.0',
    author = 'Pacific Biosciences',
    author_email = 'devnet@pacificbiosciences.com',
    license = open('LICENSE.txt').read(),
    packages = find_packages('.'),
    package_dir = {'':'.'},
    zip_safe = False,
    scripts=[
        'bin/correlatedVariants',
        'bin/rareCaller'
    ],
    install_requires = [
        'pbcore >= 0.2',
        'GenomicConsensus',
        'numpy >= 1.6.0',
        'scipy >= 0.9.0',
        'h5py >= 1.3.0'
    ]
)
input:
import setuptools


def content_of(fpath):
    with open(fpath, 'r') as fd:
        return fd.read()


setuptools.setup(
    name='tox-battery',
    description='Additional functionality for tox',
    long_description=content_of("README.rst"),
    license='http://opensource.org/licenses/MIT',
    version='0.0.1',
    author='Volodymyr Vitvitskyi',
    author_email='contact.volodymyr@gmail.com',
    packages=setuptools.find_packages(),
    entry_points={'tox': [
        'toxbat-requirements = toxbat.requirements',
    ]},
    install_requires=['tox',],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Operating System :: POSIX',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: MacOS :: MacOS X',
        'Topic :: Software Development :: Testing',
        'Topic :: Software Development :: Libraries',
        'Topic :: Utilities',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3'],
)

instruction:
Add missing project URL to the project meta

output:
import setuptools


def content_of(fpath):
    with open(fpath, 'r') as fd:
        return fd.read()


setuptools.setup(
    name='tox-battery',
    description='Additional functionality for tox',
    long_description=content_of("README.rst"),
    license='http://opensource.org/licenses/MIT',
    version='0.0.1',
    author='Volodymyr Vitvitskyi',
    author_email='contact.volodymyr@gmail.com',
    url='https://github.com/signalpillar/tox-battery',
    packages=setuptools.find_packages(),
    entry_points={'tox': [
        'toxbat-requirements = toxbat.requirements',
    ]},
    install_requires=['tox',],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Operating System :: POSIX',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: MacOS :: MacOS X',
        'Topic :: Software Development :: Testing',
        'Topic :: Software Development :: Libraries',
        'Topic :: Utilities',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3'],
)
input:
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup, find_packages

requires = ['cornice', 'mozsvc', 'powerhose', 'circus', 'wimms',
            'PyBrowserID', 'metlog-py']

setup(name='tokenserver',
      version='1.2',
      packages=find_packages(),
      include_package_data=True,
      zip_safe=False,
      entry_points="""\
      [paste.app_factory]
      main = tokenserver:main
      """,
      install_requires=requires,
      tests_require=requires,
      test_suite='tokenserver.tests')

instruction:
Add third place in version number, why not

output:
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup, find_packages

requires = ['cornice', 'mozsvc', 'powerhose', 'circus', 'wimms',
            'PyBrowserID', 'metlog-py']

setup(name='tokenserver',
      version='1.2.0',
      packages=find_packages(),
      include_package_data=True,
      zip_safe=False,
      entry_points="""\
      [paste.app_factory]
      main = tokenserver:main
      """,
      install_requires=requires,
      tests_require=requires,
      test_suite='tokenserver.tests')
instruction:
Add Dijkstra implementation of shortest path.

output:
from simple_graph.weighted_graph import Wgraph


def dijkstra(weighted_graph, start, end):
    list_of_tuples_node_totalweight = []
    list_of_tuples_node_totalweight.append((start, 0))
    # weight_dict[start] = 0  # total weight/distance
    prev = []  # previous node
    # unvisited = []
    for node in weighted_graph.nodes():
        if node is not start:
            list_of_tuples_node_totalweight.append(node, float("inf"))
    unvisited = list_of_tuples_node_totalweight
    while unvisited:
        sorted_list = sorted(unvisited, key=lambda x: x[1])
        temp = sorted_list[0]
        unvisited = sorted_list[1:]
        for neighbor in temp.neighbors():
            alt = temp[1] + weighted_graph[temp[0]][neighbor]
            if alt < list_of_tuples_node_totalweight[neighbor][1]:
                list_of_tuples_node_totalweight[neighbor][1] = alt
                prev.append(neighbor)
        if temp == end:
            break
    return list_of_tuples_node_totalweight, prev

# already_visited = [start]
# for node in weighted_graph:
#     if node is not start:
#         weight = 100
#         # previous = undefined
#     pq = Pq.insert(node, weight)
# while Pq:
#     temp = pq.pop()
#     for neighbor in weighted_graph.neighbors(temp):
#         alt =
input:
from header_filter.matchers import Header  # noqa: F401
from header_filter.middleware import HeaderFilterMiddleware  # noqa: F401
from header_filter.rules import Enforce, Forbid  # noqa: F401

instruction:
Allow HeaderRegexp to be imported directly from header_filter package.

output:
from header_filter.matchers import Header, HeaderRegexp  # noqa: F401
from header_filter.middleware import HeaderFilterMiddleware  # noqa: F401
from header_filter.rules import Enforce, Forbid  # noqa: F401
"""Vulk 3D engine Cross-plateform 3D engine """ __version__ = "0.2.0"
Add Path to Vulk package
"""Vulk 3D engine Cross-plateform 3D engine """ from os import path as p __version__ = "0.2.0" PATH_VULK = p.dirname(p.abspath(__file__)) PATH_VULK_ASSET = p.join(PATH_VULK, 'asset') PATH_VULK_SHADER = p.join(PATH_VULK_ASSET, 'shader')
input:
#!/usr/bin/env python

# Copyright (c) 2012 The Dart Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# This script is wrapper for Dart that adds some support for how GYP
# is invoked by Dart beyond what can be done in the gclient hooks.

import os
import subprocess
import sys


def Execute(args):
    process = subprocess.Popen(args)
    process.wait()
    return process.returncode


if __name__ == '__main__':
    args = ['python', "dart/third_party/gyp/gyp", "--depth=dart",
            "-Idart/tools/gyp/all.gypi", "dart/dart.gyp"]

    if sys.platform == 'win32':
        # Generate Visual Studio 2008 compatible files by default.
        if not os.environ.get('GYP_MSVS_VERSION'):
            args.extend(['-G', 'msvs_version=2008'])

    sys.exit(Execute(args))

instruction:
Make code follow the Python style guidelines
+ Use a doc string for the whole file.
+ Lower case function names.
+ Consistently use single-quotes for quoted strings.
+ align wrapped elements with opening delimiter.
+ use a main() function

Review URL: https://chromiumcodereview.appspot.com//10837127

git-svn-id: c93d8a2297af3b929165606efe145742a534bc71@10307 260f80e4-7a28-3924-810f-c04153c831b5

output:
#!/usr/bin/env python

# Copyright (c) 2012 The Dart Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Invoke gyp to generate build files for building the Dart VM.
"""

import os
import subprocess
import sys


def execute(args):
    process = subprocess.Popen(args)
    process.wait()
    return process.returncode


def main():
    args = ['python', 'dart/third_party/gyp/gyp',
            '--depth=dart',
            '-Idart/tools/gyp/all.gypi',
            'dart/dart.gyp']

    if sys.platform == 'win32':
        # Generate Visual Studio 2008 compatible files by default.
        if not os.environ.get('GYP_MSVS_VERSION'):
            args.extend(['-G', 'msvs_version=2008'])

    sys.exit(execute(args))


if __name__ == '__main__':
    main()
input:
from types import UnicodeType, StringType


class PropertyMappingFailedException(Exception):
    pass


def get_transformed_properties(source_properties, prop_map):
    results = {}
    for key, value in prop_map.iteritems():
        if type(value) in (StringType, UnicodeType):
            if value in source_properties:
                results[key] = source_properties[value]
            else:
                raise PropertyMappingFailedException("property %s not found in source feature" % (value))
        elif type(value) == dict:
            if "static" in value:
                results[key] = value["static"]
        else:
            raise PropertyMappingFailedException("Unhandled mapping for key:%s value type:%s" % (key, type(value)))

    return results

instruction:
Raise exception if unable to find a usable key in property mapping dict

output:
from types import UnicodeType, StringType


class PropertyMappingFailedException(Exception):
    pass


def get_transformed_properties(source_properties, prop_map):
    results = {}
    for key, value in prop_map.iteritems():
        if type(value) in (StringType, UnicodeType):
            if value in source_properties:
                results[key] = source_properties[value]
            else:
                raise PropertyMappingFailedException("property %s not found in source feature" % (value))
        elif type(value) == dict:
            if "static" in value:
                results[key] = value["static"]
            else:
                raise PropertyMappingFailedException(
                    "Failed to find key for mapping in dict for field:%s" % (key,))
        else:
            raise PropertyMappingFailedException("Unhandled mapping for key:%s value type:%s" % (key, type(value)))

    return results
input:
from django import forms
from django.db import models


class RichTextFormField(forms.fields.CharField):
    def __init__(self, *args, **kwargs):
        super(RichTextFormField, self).__init__(*args, **kwargs)

        css_class = self.widget.attrs.get('class', '')
        css_class += ' item-richtext'
        self.widget.attrs['class'] = css_class

    def clean(self, value):
        # TODO add cleansing here?
        return super(RichTextFormField, self).clean(value)


class RichTextField(models.TextField):
    """
    Drop-in replacement for Django's ``models.TextField`` which allows editing
    rich text instead of plain text in the item editor.
    """

    formfield = RichTextFormField


try:
    from south.modelsinspector import add_introspection_rules
    RichTextField_introspection_rule = ( (RichTextField,), [], {}, )
    add_introspection_rules(rules=[RichTextField_introspection_rule], patterns=["^feincms\.contrib\.richtext"])
except ImportError:
    pass

instruction:
Fix RichTextField form field generation. All standard properties that would affect formfield were being ignored (such as blank=True).

output:
from django import forms
from django.db import models


class RichTextFormField(forms.fields.CharField):
    def __init__(self, *args, **kwargs):
        super(RichTextFormField, self).__init__(*args, **kwargs)

        css_class = self.widget.attrs.get('class', '')
        css_class += ' item-richtext'
        self.widget.attrs['class'] = css_class

    def clean(self, value):
        # TODO add cleansing here?
        return super(RichTextFormField, self).clean(value)


class RichTextField(models.TextField):
    """
    Drop-in replacement for Django's ``models.TextField`` which allows editing
    rich text instead of plain text in the item editor.
    """

    def formfield(self, form_class=RichTextFormField, **kwargs):
        return super(RichTextField, self).formfield(form_class=form_class, **kwargs)


try:
    from south.modelsinspector import add_introspection_rules
    RichTextField_introspection_rule = ( (RichTextField,), [], {}, )
    add_introspection_rules(rules=[RichTextField_introspection_rule], patterns=["^feincms\.contrib\.richtext"])
except ImportError:
    pass
input:
from ragendja.settings_pre import *

import environment

MEDIA_VERSION = environment.MAJOR_VERSION
DEBUG = environment.IS_DEVELOPMENT
TEMPLATE_DEBUG = environment.IS_DEVELOPMENT

DATABASE_ENGINE = 'appengine'

USE_I18N = False

TEMPLATE_LOADERS = (
    # Load basic template files in the normal way
    'django.template.loaders.filesystem.load_template_source',
)

TEMPLATE_CONTEXT_PROCESSORS = (
)

MIDDLEWARE_CLASSES = (
    # does things like APPEND_SLASH for URLs
    'django.middleware.common.CommonMiddleware',
)

ROOT_URLCONF = 'urls'

import os
ROOT_PATH = os.path.dirname(__file__)
TEMPLATE_DIRS = (
    ROOT_PATH + '/resources/templates'
)

INSTALLED_APPS = (
    'appenginepatcher',
    'tasks',
    'public',
    'admin',
)

from ragendja.settings_post import *

instruction:
Switch off weird AEP-1.0 model-renaming bullshit.

git-svn-id: 062a66634e56759c7c3cc44955c32d2ce0012d25@295 c02d1e6f-6a35-45f2-ab14-3b6f79a691ff

output:
from ragendja.settings_pre import *

import environment

MEDIA_VERSION = environment.MAJOR_VERSION
DEBUG = environment.IS_DEVELOPMENT
TEMPLATE_DEBUG = environment.IS_DEVELOPMENT

DATABASE_ENGINE = 'appengine'

USE_I18N = False

TEMPLATE_LOADERS = (
    # Load basic template files in the normal way
    'django.template.loaders.filesystem.load_template_source',
)

TEMPLATE_CONTEXT_PROCESSORS = (
)

MIDDLEWARE_CLASSES = (
    # does things like APPEND_SLASH for URLs
    'django.middleware.common.CommonMiddleware',
)

ROOT_URLCONF = 'urls'

import os
ROOT_PATH = os.path.dirname(__file__)
TEMPLATE_DIRS = (
    ROOT_PATH + '/resources/templates'
)

INSTALLED_APPS = (
    'appenginepatcher',
    'tasks',
    'public',
    'admin',
)

DJANGO_STYLE_MODEL_KIND = False

from ragendja.settings_post import *
input:
class Settings(object):
    def __init__(self):
        self._settings = {}

    def __setattr__(self, name, value):
        if name != '_settings':
            self._settings[name] = value
        else:
            super(Settings, self).__setattr__(name, value)

    def write(self):
        for name, value in self._settings.iteritems():
            print '%s = %s' % (name.upper(), value)

instruction:
Use __repr__ instead of __str__ to print setting's value

output:
class Settings(object):
    def __init__(self):
        self._settings = {}

    def __setattr__(self, name, value):
        if name != '_settings':
            self._settings[name] = value
        else:
            super(Settings, self).__setattr__(name, value)

    def write(self):
        for name, value in self._settings.iteritems():
            print '%s = %s' % (name.upper(), value.__repr__())
input:
# Copyright (c) 2016, Daniele Venzano
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from zoe_lib.predefined_apps.copier import copier_app
from zoe_lib.predefined_frameworks.jupyter_spark import spark_jupyter_notebook_app
from zoe_lib.predefined_apps.eurecom_aml_lab import spark_jupyter_notebook_lab_app
from zoe_lib.predefined_apps.hdfs import hdfs_app
from zoe_lib.predefined_apps.openmpi import openmpi_app
from zoe_lib.predefined_apps.spark_submit import spark_submit_app
from zoe_lib.predefined_apps.test_sleep import sleeper_app

PREDEFINED_APPS = [
    copier_app,
    spark_jupyter_notebook_app,
    spark_jupyter_notebook_lab_app,
    hdfs_app,
    openmpi_app,
    spark_submit_app,
    sleeper_app
]

instruction:
Fix import error due to wrong import line

output:
# Copyright (c) 2016, Daniele Venzano
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from zoe_lib.predefined_apps.copier import copier_app
from zoe_lib.predefined_apps.spark_interactive import spark_jupyter_notebook_app
from zoe_lib.predefined_apps.eurecom_aml_lab import spark_jupyter_notebook_lab_app
from zoe_lib.predefined_apps.hdfs import hdfs_app
from zoe_lib.predefined_apps.openmpi import openmpi_app
from zoe_lib.predefined_apps.spark_submit import spark_submit_app
from zoe_lib.predefined_apps.test_sleep import sleeper_app

PREDEFINED_APPS = [
    copier_app,
    spark_jupyter_notebook_app,
    spark_jupyter_notebook_lab_app,
    hdfs_app,
    openmpi_app,
    spark_submit_app,
    sleeper_app
]
input:
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

instruction:
[LEGAL] Use global LICENSE/COPYRIGHT files, remove boilerplate text

- Preserved explicit 3rd-party copyright notices
- Explicit boilerplate should not be necessary - copyright law applies automatically in all countries thanks to Berne Convention + WTO rules, and a reference to the applicable license is clear enough.

output:
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
input:
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""Run the Better Bomb Defusal Manual

:Copyright: 2015 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""

from importlib import import_module

from bombdefusalmanual.ui.console import ConsoleUI
from bombdefusalmanual.ui.models import Answer


ANSWERS = [
    Answer('thebutton', 'The Button'),
    Answer('complicatedwires', 'Complicated Wires'),
    Answer('morsecode', 'Morse Code'),
    Answer('passwords', 'Passwords'),
    Answer('whosonfirst', 'Who\'s on First'),
]


def ask_for_subject(ui):
    return ui.ask_for_choice('Which subject?', ANSWERS)


def import_subject_module(name):
    return import_module('bombdefusalmanual.subjects.{}'.format(name))


if __name__ == '__main__':
    ui = ConsoleUI()
    subject_name = ask_for_subject(ui)
    module = import_subject_module(subject_name)
    module.execute(ui)

instruction:
Allow to enable graphical UI via command line option.

output:
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""Run the Better Bomb Defusal Manual

:Copyright: 2015 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""

from argparse import ArgumentParser
from importlib import import_module

from bombdefusalmanual.ui.console import ConsoleUI
from bombdefusalmanual.ui.models import Answer


ANSWERS = [
    Answer('thebutton', 'The Button'),
    Answer('complicatedwires', 'Complicated Wires'),
    Answer('morsecode', 'Morse Code'),
    Answer('passwords', 'Passwords'),
    Answer('whosonfirst', 'Who\'s on First'),
]


def parse_args():
    parser = ArgumentParser()

    parser.add_argument(
        '--gui',
        action='store_true',
        default=False,
        dest='use_gui',
        help='use graphical user interface')

    return parser.parse_args()


def get_ui(use_gui):
    if use_gui:
        from bombdefusalmanual.ui.tk import TkGUI
        return TkGUI()
    else:
        return ConsoleUI()


def ask_for_subject(ui):
    return ui.ask_for_choice('Which subject?', ANSWERS)


def import_subject_module(name):
    return import_module('bombdefusalmanual.subjects.{}'.format(name))


if __name__ == '__main__':
    args = parse_args()
    ui = get_ui(args.use_gui)
    subject_name = ask_for_subject(ui)
    module = import_subject_module(subject_name)
    module.execute(ui)
input:
from hamcrest import *
from nose.tools import nottest

from test.features import BrowserTest


class test_create_pages(BrowserTest):

    def test_about_page(self):
        self.browser.visit("http://0.0.0.0:8000/high-volume-services/by-transactions-per-year/descending")

        assert_that(self.browser.find_by_css('h1').text, is_('High-volume services'))

    def test_home_page(self):
        self.browser.visit("http://0.0.0.0:8000/home")

        headlines = self.browser.find_by_css('.headline')

        departments = headlines[0].text
        services = headlines[1].text
        transactions = headlines[2].text

        assert_that(departments, contains_string('16'))
        assert_that(services, contains_string('654'))
        assert_that(transactions, contains_string('1.31bn'))

    @nottest
    def test_all_services(self):
        self.browser.visit("http://0.0.0.0:8000/all-services")

        assert_that(self.browser.find_by_css('h1').text, is_("All Services"))
        assert_that(self.browser.find_by_css('#navigation .current').text, is_("All services"))

instruction:
Change services number in test

output:
from hamcrest import *
from nose.tools import nottest

from test.features import BrowserTest


class test_create_pages(BrowserTest):

    def test_about_page(self):
        self.browser.visit("http://0.0.0.0:8000/high-volume-services/"
                           "by-transactions-per-year/descending")

        assert_that(self.browser.find_by_css('h1').text, is_('High-volume services'))

    def test_home_page(self):
        self.browser.visit("http://0.0.0.0:8000/home")

        headlines = self.browser.find_by_css('.headline')

        departments = headlines[0].text
        services = headlines[1].text
        transactions = headlines[2].text

        assert_that(departments, contains_string('16'))
        assert_that(services, contains_string('658'))
        assert_that(transactions, contains_string('1.31bn'))

    @nottest
    def test_all_services(self):
        self.browser.visit("http://0.0.0.0:8000/all-services")

        assert_that(self.browser.find_by_css('h1').text, is_("All Services"))
        assert_that(self.browser.find_by_css('#navigation .current').text, is_("All services"))
"""Stupid simple Python configuration management""" from __future__ import absolute_import import os as _os __version__ = '0.5.0' # Attempt to auto-load a default configuration from files in <cwd>/.marten/ based on the MARTEN_ENV env variable # MARTEN_ENV defaults to 'default' config = None _marten_dir = _os.path.join(_os.getcwd(), '.marten') _os.environ.setdefault('MARTEN_ENV', 'default') if _os.path.isdir(_marten_dir): from .configurations import parse_directory as _parse_directory config = _parse_directory(_marten_dir, _os.environ['MARTEN_ENV'])
Return empty Configuration instance when .marten/ directory is missing
"""Stupid simple Python configuration management""" from __future__ import absolute_import import os as _os __version__ = '0.5.1' # Attempt to auto-load a default configuration from files in <cwd>/.marten/ based on the MARTEN_ENV env variable # MARTEN_ENV defaults to 'default' _marten_dir = _os.path.join(_os.getcwd(), '.marten') _os.environ.setdefault('MARTEN_ENV', 'default') if _os.path.isdir(_marten_dir): from .configurations import parse_directory as _parse_directory config = _parse_directory(_marten_dir, _os.environ['MARTEN_ENV']) else: from .configurations import Configuration as _Configuration config = _Configuration({})
input:
from random import randint


def sort(arr, start, length):
    if length <= 1:
        return arr
    pivot = choose_pivot(arr, length)
    i = j = start + 1
    while j < length:
        if arr[j] < pivot:
            swap(arr, j, i)
            i += 1
        j += 1
    swap(arr, start, i-1)
    return (arr, length, pivot)


def swap(arr, x, y):
    temp = arr[x]
    arr[x] = arr[y]
    arr[y] = temp


def choose_pivot(arr, length):
    return arr[0]


if __name__ == '__main__':
    unsorted = [randint(0, 100) for n in range(100)]
    print sort(unsorted, 0, len(unsorted)-1)

instruction:
Sort list by recursing through both parts

The array is split into two parts: everything up to and including the pivot, and everything after the pivot. Sort() is called on each part and the resulting arrays are combined and returned. This sorts the array.

output:
from random import randint


def sort(arr, start, length):
    if length <= 1:
        return arr
    pivot = choose_pivot(arr, length)
    i = j = start + 1
    while j < length:
        if arr[j] < pivot:
            swap(arr, j, i)
            i += 1
        j += 1
    swap(arr, start, i-1)
    first_part = sort(arr[start:i], start, i)
    second_part = sort(arr[i:length], start, length - i - 1)
    return first_part + second_part


def swap(arr, x, y):
    temp = arr[x]
    arr[x] = arr[y]
    arr[y] = temp


def choose_pivot(arr, length):
    return arr[0]


if __name__ == '__main__':
    unsorted = [randint(0, 100) for n in range(100)]
    print sort(unsorted, 0, len(unsorted)-1)
input:
from django.core.urlresolvers import reverse
from django.utils.encoding import smart_unicode


class LookupBase(object):

    def _name(cls):
        app_name = cls.__module__.split('.')[-2].lower()
        class_name = cls.__name__.lower()
        name = u'%s-%s' % (app_name, class_name)
        return name
    name = classmethod(_name)

    def _url(cls):
        return reverse('selectable-lookup', args=[cls.name()])
    url = classmethod(_url)

    def get_query(self, request):
        return []

    def get_item_name(self, item):
        return smart_unicode(item)

    def get_item_id(self, item):
        return smart_unicode(item)

    def get_item_value(self, item):
        return smart_unicode(item)

    def format_item(self, item):
        return {
            'id': self.get_item_id(item),
            'value': self.get_item_value(item),
            'name': self.get_item_name(item)
        }

instruction:
Change to return 'label' not 'name'.

output:
from django.core.urlresolvers import reverse
from django.utils.encoding import smart_unicode


class LookupBase(object):

    def _name(cls):
        app_name = cls.__module__.split('.')[-2].lower()
        class_name = cls.__name__.lower()
        name = u'%s-%s' % (app_name, class_name)
        return name
    name = classmethod(_name)

    def _url(cls):
        return reverse('selectable-lookup', args=[cls.name()])
    url = classmethod(_url)

    def get_query(self, request):
        return []

    def get_item_label(self, item):
        return smart_unicode(item)

    def get_item_id(self, item):
        return smart_unicode(item)

    def get_item_value(self, item):
        return smart_unicode(item)

    def format_item(self, item):
        return {
            'id': self.get_item_id(item),
            'value': self.get_item_value(item),
            'label': self.get_item_label(item)
        }
input:
from django.conf.urls import patterns, include, url
from django.contrib import admin

urlpatterns = patterns('',
    url(r'^', include('imagr.urls', namespace='imagr')),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^accounts/', include('registration.backends.default.urls'))
)

instruction:
Change back the url so site root is at /imagr again

output:
from django.conf.urls import patterns, include, url
from django.contrib import admin

urlpatterns = patterns('',
    url(r'^imagr/', include('imagr.urls', namespace='imagr')),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^accounts/', include('registration.backends.default.urls'))
)
input:
has_qt = True

try:
    from matplotlib.backends.qt_compat import QtGui, QtCore, QtWidgets
except ImportError:
    try:
        from matplotlib.backends.qt4_compat import QtGui, QtCore
        QtWidgets = QtGui
    except ImportError:
        # Mock objects
        class QtGui(object):
            QMainWindow = object
            QDialog = object
            QWidget = object

        class QtCore_cls(object):
            class Qt(object):
                TopDockWidgetArea = None
                BottomDockWidgetArea = None
                LeftDockWidgetArea = None
                RightDockWidgetArea = None

            def Signal(self, *args, **kwargs):
                pass

            QWidget = object

        QtCore = QtWidgets = QtCore_cls()

        has_qt = False

Qt = QtCore.Qt
Signal = QtCore.Signal

instruction:
Fix mock Qt objects again

output:
has_qt = True

try:
    from matplotlib.backends.qt_compat import QtGui, QtCore, QtWidgets
except ImportError:
    try:
        from matplotlib.backends.qt4_compat import QtGui, QtCore
        QtWidgets = QtGui
    except ImportError:
        # Mock objects
        class QtGui_cls(object):
            QMainWindow = object
            QDialog = object
            QWidget = object

        class QtCore_cls(object):
            class Qt(object):
                TopDockWidgetArea = None
                BottomDockWidgetArea = None
                LeftDockWidgetArea = None
                RightDockWidgetArea = None

            def Signal(self, *args, **kwargs):
                pass

        QtGui = QtWidgets = QtGui_cls()
        QtCore = QtCore_cls()

        has_qt = False

Qt = QtCore.Qt
Signal = QtCore.Signal
input:
from conans import ConanFile
from conans.tools import download, unzip
import os

VERSION = "0.0.2"


class VeraPPTargetCmakeConan(ConanFile):
    name = "verapp-target-cmake"
    version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
    generators = "cmake"
    requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
                "tooling-find-pkg-util/master@smspillaz/tooling-find-pkg-util",
                "tooling-cmake-util/master@smspillaz/tooling-cmake-util",
                "cmake-unit/master@smspillaz/cmake-unit")
    url = "http://github.com/polysquare/verapp-target-cmake"
    license = "MIT"

    def source(self):
        zip_name = "verapp-target-cmake.zip"
        download("https://github.com/polysquare/"
                 "verapp-target-cmake/archive/{version}.zip"
                 "".format(version="v" + VERSION),
                 zip_name)
        unzip(zip_name)
        os.unlink(zip_name)

    def package(self):
        self.copy(pattern="*.cmake",
                  dst="cmake/verapp-target-cmake",
                  src="verapp-target-cmake-" + VERSION,
                  keep_path=True)

instruction:
conan: Copy find modules to root of module path

output:
from conans import ConanFile
from conans.tools import download, unzip
import os

VERSION = "0.0.2"


class VeraPPTargetCmakeConan(ConanFile):
    name = "verapp-target-cmake"
    version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
    generators = "cmake"
    requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
                "tooling-find-pkg-util/master@smspillaz/tooling-find-pkg-util",
                "tooling-cmake-util/master@smspillaz/tooling-cmake-util",
                "cmake-unit/master@smspillaz/cmake-unit")
    url = "http://github.com/polysquare/verapp-target-cmake"
    license = "MIT"

    def source(self):
        zip_name = "verapp-target-cmake.zip"
        download("https://github.com/polysquare/"
                 "verapp-target-cmake/archive/{version}.zip"
                 "".format(version="v" + VERSION),
                 zip_name)
        unzip(zip_name)
        os.unlink(zip_name)

    def package(self):
        self.copy(pattern="Find*.cmake",
                  dst="",
                  src="verapp-target-cmake-" + VERSION,
                  keep_path=True)
        self.copy(pattern="*.cmake",
                  dst="cmake/verapp-target-cmake",
                  src="verapp-target-cmake-" + VERSION,
                  keep_path=True)
input:
# -*- coding: utf-8 -*-

# Copyright (C) 2015 Michael Hogg

# This file is part of pyoctree - See LICENSE.txt for information on usage and redistribution

# Version
import version
__version__ = version.__version__

instruction:
Fix import bug in Python 3

output:
# -*- coding: utf-8 -*-

# Copyright (C) 2017 Michael Hogg

# This file is part of pyoctree - See LICENSE.txt for information on usage and redistribution

# Version
from .version import __version__
__version__ = version.__version__
"""Provide a (g)dbm-compatible interface to bsdhash.hashopen.""" import bsddb error = bsddb.error def open(file, flag, mode=0666): return bsddb.hashopen(file, flag, mode)
Clarify why we define error. Suggested by Andrew Dalke.
"""Provide a (g)dbm-compatible interface to bsdhash.hashopen.""" import bsddb error = bsddb.error # Exported for anydbm def open(file, flag, mode=0666): return bsddb.hashopen(file, flag, mode)
input:
import click
import sys


@click.command()
@click.option('--acme',
              help='The address for the ACME Directory Resource',
              default='https://acme-v01.api.letsencrypt.org/directory',
              show_default=True)
@click.option('--email',
              help=("Email address for Let's Encrypt certificate registration "
                    "and recovery contact"),
              required=True)
@click.option('--storage-dir',
              help='Path to directory for storing certificates')
@click.option('--marathon', default='http://marathon.mesos:8080',
              help='The address for the Marathon HTTP API',
              show_default=True)
@click.option('--listen',
              help=("The address of the interface and port to bind to to "
                    "receive Marathon's event stream"),
              default='0.0.0.0:7000',
              show_default=True)
@click.option('--advertise', default='http://marathon-acme.marathon.mesos',
              help=('The address to advertise to Marathon when registering '
                    'for the event stream'),
              show_default=True)
@click.option('--poll',
              help=("Periodically check Marathon's state every _n_ seconds "
                    "[default: disabled]"),
              type=int)
@click.option('--logfile',
              help='Where to log output to [default: stdout]',
              type=click.File('a'),
              default=sys.stdout)
@click.option('--debug',
              help='Log debug output',
              is_flag=True)
def main(acme, email, storage_dir,  # ACME
         marathon, listen, advertise, poll,  # Marathon
         logfile, debug):  # Logging
    """
    A tool to automatically request, renew and distribute Let's Encrypt
    certificates for apps running on Seed Stack.
    """

instruction:
Remove imaginary event server CLI options

output:
import click
import sys


@click.command()
@click.option('--acme',
              help='The address for the ACME Directory Resource',
              default='https://acme-v01.api.letsencrypt.org/directory',
              show_default=True)
@click.option('--email',
              help=("Email address for Let's Encrypt certificate registration "
                    "and recovery contact"),
              required=True)
@click.option('--storage-dir',
              help='Path to directory for storing certificates')
@click.option('--marathon', default='http://marathon.mesos:8080',
              help='The address for the Marathon HTTP API',
              show_default=True)
@click.option('--poll',
              help=("Periodically check Marathon's state every _n_ seconds "
                    "[default: disabled]"),
              type=int)
@click.option('--logfile',
              help='Where to log output to [default: stdout]',
              type=click.File('a'),
              default=sys.stdout)
@click.option('--debug',
              help='Log debug output',
              is_flag=True)
def main(acme, email, storage_dir,  # ACME/certificates
         marathon, poll,  # Marathon
         logfile, debug):  # Logging
    """
    A tool to automatically request, renew and distribute Let's Encrypt
    certificates for apps running on Seed Stack.
    """
input:
from devbot import chat


def call(message: str, name, protocol, cfg, commands):
    if ' ' in message:
        chat.say('/msg {} Sorry, that was not a valid player name: It contains spaces.'.format(name))
        return
    chat.say_wrap('/msg {}',
                  'You have been added to global chat. Use /g GlobalChat to speak in the group, and /e to exit.'.format(
                      message))
    chat.say('/nlip GlobalChat {}'.format(message))

instruction:
Fix formatting issues with gadd

output:
from devbot import chat


def call(message: str, name, protocol, cfg, commands):
    if ' ' in message:
        chat.say('/msg {} Sorry, that was not a valid player name: It contains spaces.'.format(name))
        return
    chat.say('/msg {} Invited {} to GlobalChat'.format(name, message))
    chat.say_wrap('/msg {}'.format(message),
                  'You have been added to global chat. Use /g GlobalChat to speak in the group, and /e to exit.')
    chat.say('/nlip GlobalChat {}'.format(message))
input:
import boto3

def await_volume(client, volumeId, waitingState, finishedState):
    while True:
        volumes = client.describe_volumes(VolumeIds=[volumeId])
        state = volumes['Volumes'][0]['State']
        if state != waitingState:
            break
    if state != finishedState:
        print 'Unexpected volume state (expected {}): {}'.format(finishedState, volumes)
        sys.exit(1)

def await_instance(client, instanceId, waitingState, finishedState):
    while True:
        instances = client.describe_instances(InstanceIds=[instanceId])
        state = instances['Reservations'][0]['Instances'][0]['State']['Name']
        if waitingState and state != waitingState:
            break
        if state == finishedState:
            break
    if state != finishedState:
        print 'Unexpected instance state (expected {}): {}'.format(finishedState, instances)
        sys.exit(1)

instruction:
Add sleep calls for AWS loops

output:
import boto3
import time

def await_volume(client, volumeId, waitingState, finishedState):
    while True:
        volumes = client.describe_volumes(VolumeIds=[volumeId])
        state = volumes['Volumes'][0]['State']
        if state != waitingState:
            break
        time.sleep(1)
    if state != finishedState:
        print 'Unexpected volume state (expected {}): {}'.format(finishedState, volumes)
        sys.exit(1)

def await_instance(client, instanceId, waitingState, finishedState):
    while True:
        instances = client.describe_instances(InstanceIds=[instanceId])
        state = instances['Reservations'][0]['Instances'][0]['State']['Name']
        if waitingState and state != waitingState:
            break
        if state == finishedState:
            break
        time.sleep(1)
    if state != finishedState:
        print 'Unexpected instance state (expected {}): {}'.format(finishedState, instances)
        sys.exit(1)
input:
import sublime, sublime_plugin


class SumCommand(sublime_plugin.TextCommand):
    def run(self, edit):
        sum_view = self.view.window().new_file()
        sum_view.set_name('Sum')

        file_text = self.view.substr(sublime.Region(0, self.view.size()))
        numbers = []
        for s in file_text.split():
            try:
                numbers.append(int(s))
            except ValueError:
                try:
                    numbers.append(float(s))
                except ValueError:
                    pass
        result = sum(numbers)

        sum_view.insert(edit, 0, str(result))
        sum_view.set_read_only(True)
        sum_view.set_scratch(True)

instruction:
Refactor using functions and a list comprehension

output:
import sublime, sublime_plugin


class SumCommand(sublime_plugin.TextCommand):
    def run(self, edit):
        sum_view = self.view.window().new_file()
        sum_view.set_name('Sum')

        file_text = self.view.substr(sublime.Region(0, self.view.size()))
        numbers = [to_number(s) for s in file_text.split() if is_number(s)]
        result = sum(numbers)

        sum_view.insert(edit, 0, str(result))
        sum_view.set_read_only(True)
        sum_view.set_scratch(True)


def is_int(s):
    """Return boolean indicating whether a string can be parsed to an int."""
    try:
        int(s)
        return True
    except ValueError:
        return False


def is_float(s):
    """Return boolean indicating whether a string can be parsed to an float."""
    try:
        float(s)
        return True
    except ValueError:
        return False


def is_number(s):
    """Return boolean indicating whether a string can be parsed to an int or float."""
    return is_int(s) or is_float(s)


def to_number(s):
    """
    Parse and return number from string.

    Return float only if number is not an int.
    Assume number can be parsed from string.
    """
    try:
        return int(s)
    except ValueError:
        return float(s)
input:
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at http://mozilla.org/MPL/2.0/.
from io import StringIO

import pytest
from django.core.management import call_command


def test_for_missing_migrations():
    output = StringIO()
    try:
        call_command(
            'makemigrations',
            interactive=False,
            dry_run=True,
            exit_code=True,
            stdout=output)
    except SystemExit as exc:
        # The exit code will be 1 when there are no missing migrations
        assert exc.code == 1
    else:
        pytest.fail("There are missing migrations:\n %s" % output.getvalue())

instruction:
Use new command line option for checking if all migrations have been applied.

output:
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at http://mozilla.org/MPL/2.0/.
from io import StringIO

import pytest
from django.core.management import call_command


def test_for_missing_migrations():
    output = StringIO()
    try:
        call_command(
            'makemigrations',
            interactive=False,
            check=True,
            stdout=output,
        )
    except SystemExit as exc:
        # The exit code will be 0 when there are no missing migrations
        assert exc.code == 1
        pytest.fail("There are missing migrations:\n %s" % output.getvalue())
input:
__version_info__ = (1, 3, 1, None)

# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
    __version__ += ('-%s' % (__version_info__[-1],))


# context processor to add version to the template environment
def context_extras(request):
    return {
        # software version
        'SW_VERSION': __version__
    }

instruction:
Update version number for hotfix 1.3.2

output:
__version_info__ = (1, 3, 2, None)

# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
    __version__ += ('-%s' % (__version_info__[-1],))


# context processor to add version to the template environment
def context_extras(request):
    return {
        # software version
        'SW_VERSION': __version__
    }
input:
# -*- coding: utf-8 -*-
#
# Copyright 2016 - 2018 Ternaris.
# SPDX-License-Identifier: AGPL-3.0-only

from __future__ import absolute_import, division, print_function

from .auth import auth
from .comment import comment
from .dataset import dataset
from .delete import delete
from .tag import tag
from .collection import collection, meta
from .tooling import api_group as marv_api_group


@marv_api_group()
def webapi(app):
    pass


# Groups and endpoints are all the same for now
webapi.add_endpoint(auth)
webapi.add_endpoint(comment)
webapi.add_endpoint(dataset)
webapi.add_endpoint(delete)
webapi.add_endpoint(collection)
webapi.add_endpoint(meta)
webapi.add_endpoint(tag)

from marv_robotics.webapi import robotics
webapi.add_endpoint(robotics)

instruction:
[marv] Support webapi extension via entry points

output:
# -*- coding: utf-8 -*-
#
# Copyright 2016 - 2018 Ternaris.
# SPDX-License-Identifier: AGPL-3.0-only

from __future__ import absolute_import, division, print_function

from pkg_resources import iter_entry_points

from .auth import auth
from .comment import comment
from .dataset import dataset
from .delete import delete
from .tag import tag
from .collection import collection, meta
from .tooling import api_group as marv_api_group


@marv_api_group()
def webapi(app):
    pass


# Groups and endpoints are all the same for now
webapi.add_endpoint(auth)
webapi.add_endpoint(comment)
webapi.add_endpoint(dataset)
webapi.add_endpoint(delete)
webapi.add_endpoint(collection)
webapi.add_endpoint(meta)
webapi.add_endpoint(tag)

from marv_robotics.webapi import robotics
webapi.add_endpoint(robotics)

for ep in iter_entry_points(group='marv_webapi'):
    endpoint = ep.load()
    webapi.add_endpoint(endpoint)
input:
#!/usr/bin/env python

import redis
import os.path
import shutil
import glob

""" ***** CONFIG START ***** """
REDIS_SOCKET = '/tmp/redis.sock'
MAILDIR = '/var/mail'
""" ***** CONFIG END ***** """

os.chdir(MAILDIR)
r = redis.Redis(unix_socket_path=REDIS_SOCKET)

filesDepth = glob.glob('*/*')
dirsDepth = filter(lambda f: os.path.isdir(f), filesDepth)

for dirDepth in dirsDepth:
    domain, user = dirDepth.split('/')
    mail = user + '@' + domain
    exists = r.exists('mail.account:obj:' + mail)
    if not exists:
        mdir = MAILDIR + "/" + domain + "/" + user
        shutil.rmtree(mdir)
        print '%s,%s' % (mail, mdir)

instruction:
Rebuild mailbox remove script with some verify checks

We should not remove a mailbox and only archive it. Also it should be good to be sure the mailbox doesn't exists in redis anymore after two runs.

output:
#!/usr/bin/env python
# Thomas Merkel <tm@core.io>

import redis
import os
import shutil
import glob
import time

""" ***** CONFIG START ***** """
REDIS_SOCKET = '/tmp/redis.sock'
MAILDIR = '/var/mail'
ARCHIVE = '/var/mail/.archive'
RM_FILE = 'core-remove-mailbox'
""" ***** CONFIG END ***** """

os.chdir(MAILDIR)
r = redis.Redis(unix_socket_path=REDIS_SOCKET)

filesDepth = glob.glob('*/*')
dirsDepth = filter(lambda f: os.path.isdir(f), filesDepth)
today = str(time.time())

for dirDepth in dirsDepth:
    # get all information
    domain, user = dirDepth.split('/')
    mail = user + '@' + domain
    mdir = MAILDIR + "/" + domain + "/" + user
    rm_file = mdir + "/" + RM_FILE

    # check if redis account exists
    exists = r.exists('mail.account:obj:' + mail)
    if not exists:
        if os.path.exists(rm_file):
            # RM_FILE file exists archive maildir
            shutil.move(mdir, ARCHIVE + "/" + mail + "_" + today)
            print 'Archived: %s,%s' % (mail, mdir)
        else:
            # create RM_FILE file in maildir
            open(rm_file, 'a').close()
            print 'Tagged: %s,%s' % (mail, mdir)
    else:
        # remove RM_FILE because account still exists in redis
        if os.path.exists(rm_file):
            os.remove(rm_file)
input:
# Only .py files at the top-level of a Sublime package are considered "plugins".
# Make Sublime aware of our *{Command,Listener,Handler} classes by importing them:
from .src.blame import *  # noqa: F401,F403
from .src.blame_all import *  # noqa: F401,F403
from .src.blame_inline import *  # noqa: F401,F403
from .src.blame_instadiff import *  # noqa: F401,F403


def plugin_loaded():
    pass


def plugin_unloaded():
    pass

instruction:
Stop defining plugin_{,un}loaded functions that aren't doing anything

output:
# Only .py files at the top-level of a Sublime package are considered "plugins".
# Make Sublime aware of our *{Command,Listener,Handler} classes by importing them:
from .src.blame import *  # noqa: F401,F403
from .src.blame_all import *  # noqa: F401,F403
from .src.blame_inline import *  # noqa: F401,F403
from .src.blame_instadiff import *  # noqa: F401,F403


# def plugin_loaded():
#     pass


# def plugin_unloaded():
#     pass
input:
from kismetclient.utils import csv
from kismetclient.exceptions import ServerError


def kismet(server, version, starttime, servername, dumpfiles, uid):
    """ Handle server startup string. """
    print version, servername, uid


def capability(server, CAPABILITY, capabilities):
    """ Register a server's capability. """
    server.capabilities[CAPABILITY] = csv(capabilities)


def protocols(server, protocols):
    """ Enumerate capabilities so they can be registered. """
    for protocol in csv(protocols):
        server.cmd('CAPABILITY', protocol)


def ack(server, cmdid, text):
    """ Handle ack messages in response to commands. """
    # Simply remove from the in_progress queue
    server.in_progress.pop(cmdid)


def error(server, cmdid, text):
    """ Handle error messages in response to commands. """
    cmd = server.in_progress.pop(cmdid)
    raise ServerError(cmd, text)


def print_fields(server, **fields):
    """ A generic handler which prints all the fields. """
    for k, v in fields.items():
        print '%s: %s' % (k, v)
    print '-' * 80

instruction:
Switch first handler arg from "server" to "client".

output:
from kismetclient.utils import csv
from kismetclient.exceptions import ServerError


def kismet(client, version, starttime, servername, dumpfiles, uid):
    """ Handle server startup string. """
    print version, servername, uid


def capability(client, CAPABILITY, capabilities):
    """ Register a server capability. """
    client.capabilities[CAPABILITY] = csv(capabilities)


def protocols(client, protocols):
    """ Enumerate capabilities so they can be registered. """
    for protocol in csv(protocols):
        client.cmd('CAPABILITY', protocol)


def ack(client, cmdid, text):
    """ Handle ack messages in response to commands. """
    # Simply remove from the in_progress queue
    client.in_progress.pop(cmdid)


def error(client, cmdid, text):
    """ Handle error messages in response to commands. """
    cmd = client.in_progress.pop(cmdid)
    raise ServerError(cmd, text)


def print_fields(client, **fields):
    """ A generic handler which prints all the fields. """
    for k, v in fields.items():
        print '%s: %s' % (k, v)
    print '-' * 80
input:
import boto3, json

sdb = boto3.client('sdb')

def lambda_handler(data, context):
    """ Handler for posting data to SimpleDB.

    Args:
        data -- Data to be stored (Dictionary).
        context -- AWS context for the request (Object).
    """
    if data['Password'] and data['Password'] == 'INSERT PASSWORD':
        try:
            for person in ['Sharon', 'Ryan']:
                old_slice_count = int(
                    sdb.get_attributes(
                        DomainName='pizza',
                        ItemName=person,
                        AttributeNames=['Slices']
                    )['Attributes'][0]['Value']
                )
                new_slice_count = old_slice_count + data[person]
                sdb.put_attributes(
                    DomainName='pizza',
                    ItemName=person,
                    Attributes=[{
                        'Name': 'Slices',
                        'Value': str(new_slice_count),
                        'Replace': True
                    }]
                )
            return json.dumps({'Success': 'Your data was submitted!'})
        except:
            return json.dumps({'Error': 'Your data was not submitted.'})
    else:
        return json.dumps({
            'Error': 'Ah ah ah, you didn\'t say the magic word.'
        })

instruction:
Remove JSON dependency in POST logic.

output:
import boto3

sdb = boto3.client('sdb')

def lambda_handler(data, context):
    """ Handler for posting data to SimpleDB.

    Args:
        data -- Data to be stored (Dictionary).
        context -- AWS context for the request (Object).
    """
    if data['Password'] and data['Password'] == 'INSERT PASSWORD':
        try:
            for person in ['Sharon', 'Ryan']:
                old_slice_count = int(
                    sdb.get_attributes(
                        DomainName='pizza',
                        ItemName=person,
                        AttributeNames=['Slices']
                    )['Attributes'][0]['Value']
                )
                new_slice_count = old_slice_count + data[person]
                sdb.put_attributes(
                    DomainName='pizza',
                    ItemName=person,
                    Attributes=[{
                        'Name': 'Slices',
                        'Value': str(new_slice_count),
                        'Replace': True
                    }]
                )
            return {'Success': 'Your data was submitted!'}
        except:
            return {'Error': 'Your data was not submitted.'}
    else:
        return {'Error': 'Ah ah ah, you didn\'t say the magic word.'}
input:
# -*- coding: utf-8 -*-
from __future__ import division
import argparse

ticks = ('▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')


def scale_data(data):
    m = min(data)
    n = (max(data) - m) / (len(ticks) - 1)
    # if every element is the same height return all lower ticks, else compute
    # the tick height
    if n == 0:
        return [ ticks[0] for t in data]
    else:
        return [ ticks[int((t - m) / n)] for t in data ]


def print_ansi_spark(d):
    print ''.join(d)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Process numbers')
    parser.add_argument('numbers', metavar='N', type=float, nargs='+',
                        help='series of data to plot')
    args = parser.parse_args()
    print_ansi_spark(scale_data(args.numbers))

instruction:
Make ticks passable argument to data

output:
# -*- coding: utf-8 -*-
from __future__ import division
import argparse

ansi_ticks = ('▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')


def scale_data(data, ticks):
    m = min(data)
    n = (max(data) - m) / (len(ticks) - 1)
    # if every element is the same height return all lower ticks, else compute
    # the tick height
    if n == 0:
        return [ ticks[0] for t in data]
    else:
        return [ ticks[int((t - m) / n)] for t in data ]


def print_ansi_spark(d):
    print ''.join(d)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Process numbers')
    parser.add_argument('numbers', metavar='N', type=float, nargs='+',
                        help='series of data to plot')
    args = parser.parse_args()
    print_ansi_spark(scale_data(args.numbers, ansi_ticks))
input:
from django.shortcuts import render
from django.http import HttpResponse

from .models import User


def homepage(request):
    return render(request, "index.html", {})


def search(request, name):
    members = User.objects.filter(first_name__icontains=name) or \
        User.objects.filter(last_name__icontains=name) or \
        User.objects.filter(username__icontains=name)
    json_data = [dict(
        id=member.id,
        full_name=' '.join([member.first_name, member.last_name]))
        for member in members]
    return HttpResponse(json_data, mimetype='application/json')

instruction:
Add view for searching users and return json format

output:
from django.shortcuts import render
from django.http import HttpResponse

from hackfmi.utils import json_view

from .models import User


def homepage(request):
    return render(request, "index.html", {})


@json_view
def search(request, name):
    members = User.objects.filter(first_name__icontains=name) or \
        User.objects.filter(last_name__icontains=name) or \
        User.objects.filter(username__icontains=name)
    json_data = [dict(
        id=member.id,
        full_name=' '.join([member.first_name, member.last_name]))
        for member in members]
    return json_data
input:
import numpy as np
from numpy.testing import assert_equal

from gala import evaluate as ev


def test_contingency_table():
    seg = np.array([0, 1, 1, 1, 2, 2, 2, 3])
    gt = np.array([1, 1, 1, 2, 2, 2, 2, 0])
    ct = ev.contingency_table(seg, gt, ignore_seg=[], ignore_gt=[])
    ct0 = ev.contingency_table(seg, gt, ignore_seg=[0], ignore_gt=[0])
    ctd = ct.todense()
    assert_equal(ctd, np.array([[0. , 0.125, 0. ],
                                [0. , 0.25 , 0.125],
                                [0. , 0. , 0.375],
                                [0.125, 0. , 0. ]]))
    assert ct.shape == ct0.shape


def test_vi():
    seg = np.array([1, 2, 3, 4])
    gt = np.array([1, 1, 8, 8])
    assert_equal(ev.vi(seg, gt), 1)


def test_are():
    seg = np.eye(3)
    gt = np.eye(3)
    seg[1][1] = 0
    assert seg.shape == gt.shape

instruction:
Add in test for ARE

output:
import numpy as np
from numpy.testing import assert_equal

from gala import evaluate as ev


def test_contingency_table():
    seg = np.array([0, 1, 1, 1, 2, 2, 2, 3])
    gt = np.array([1, 1, 1, 2, 2, 2, 2, 0])
    ct = ev.contingency_table(seg, gt, ignore_seg=[], ignore_gt=[])
    ct0 = ev.contingency_table(seg, gt, ignore_seg=[0], ignore_gt=[0])
    ctd = ct.todense()
    assert_equal(ctd, np.array([[0. , 0.125, 0. ],
                                [0. , 0.25 , 0.125],
                                [0. , 0. , 0.375],
                                [0.125, 0. , 0. ]]))
    assert ct.shape == ct0.shape


def test_vi():
    seg = np.array([1, 2, 3, 4])
    gt = np.array([1, 1, 8, 8])
    assert_equal(ev.vi(seg, gt), 1)


def test_are():
    seg = np.array([[0,1], [1,0]])
    gt = np.array([[1,2],[0,1]])
    assert_almost_equal(ev.adapted_rand_error(seg,gt),0.081)
    assert seg.shape == gt.shape
input:
from os.path import dirname, join

TEST_ROOT = dirname(__file__)

INSTALLED_APPS = ('adminfiles',
                  'tests',
                  'django.contrib.contenttypes',
                  'django.contrib.admin',
                  'django.contrib.sites',
                  'django.contrib.auth',
                  'django.contrib.sessions',
                  'sorl.thumbnail')

DATABASE_ENGINE = 'sqlite3'

SITE_ID = 1

MEDIA_URL = '/media/'
MEDIA_ROOT = join(TEST_ROOT, 'media')

STATIC_URL = '/static/'
STATIC_ROOT = MEDIA_ROOT

ROOT_URLCONF = 'tests.urls'

TEMPLATE_DIRS = (join(TEST_ROOT, 'templates'),)

instruction:
Fix deprecation warning: DATABASE_* -> DATABASES

output:
from os.path import dirname, join

TEST_ROOT = dirname(__file__)

INSTALLED_APPS = ('adminfiles',
                  'tests',
                  'django.contrib.contenttypes',
                  'django.contrib.admin',
                  'django.contrib.sites',
                  'django.contrib.auth',
                  'django.contrib.sessions',
                  'sorl.thumbnail')

DATABASES = {
    "default": {
        "ENGINE": 'django.db.backends.sqlite3',
    }
}

SITE_ID = 1

MEDIA_URL = '/media/'
MEDIA_ROOT = join(TEST_ROOT, 'media')

STATIC_URL = '/static/'
STATIC_ROOT = MEDIA_ROOT

ROOT_URLCONF = 'tests.urls'

TEMPLATE_DIRS = (join(TEST_ROOT, 'templates'),)
#! /usr/bin/env python import numpy as np def plot_elevation(avulsion): import matplotlib.pyplot as plt z = avulsion.get_value('land_surface__elevation') plt.imshow(z, origin='lower', cmap='terrain') plt.colorbar().ax.set_label('Elevation (m)') plt.show() def main(): import argparse from avulsion_bmi import BmiRiverModule parser = argparse.ArgumentParser('Run the avulsion model') parser.add_argument('file', help='YAML-formatted parameters file') parser.add_argument('--days', type=int, default=0, help='Run model for DAYS') parser.add_argument('--years', type=int, default=0, help='Run model for YEARS') parser.add_argument('--plot', action='store_true', help='Plot final elevations') args = parser.parse_args() np.random.seed(1945) avulsion = BmiRiverModule() avulsion.initialize(args.file) n_steps = int((args.days + args.years * 365.) / avulsion.get_time_step()) for _ in xrange(n_steps): avulsion.update() if args.plot: plot_elevation(avulsion) avulsion.finalize() if __name__ == '__main__': main()
Print final surface elevations to stdout.
#! /usr/bin/env python import sys import numpy as np def plot_elevation(avulsion): import matplotlib.pyplot as plt z = avulsion.get_value('land_surface__elevation') plt.imshow(z, origin='lower', cmap='terrain') plt.colorbar().ax.set_label('Elevation (m)') plt.show() def main(): import argparse from avulsion_bmi import BmiRiverModule parser = argparse.ArgumentParser('Run the avulsion model') parser.add_argument('file', help='YAML-formatted parameters file') parser.add_argument('--days', type=int, default=0, help='Run model for DAYS') parser.add_argument('--years', type=int, default=0, help='Run model for YEARS') parser.add_argument('--plot', action='store_true', help='Plot final elevations') args = parser.parse_args() np.random.seed(1945) avulsion = BmiRiverModule() avulsion.initialize(args.file) n_steps = int((args.days + args.years * 365.) / avulsion.get_time_step()) for _ in xrange(n_steps): avulsion.update() if args.plot: plot_elevation(avulsion) z = avulsion.get_value('land_surface__elevation') np.savetxt(sys.stdout, z) avulsion.finalize() if __name__ == '__main__': main()
from setuptools import setup, find_packages setup( name='tangled.auth', version='0.1a2.dev0', description='Tangled auth integration', long_description=open('README.rst').read(), url='http://tangledframework.org/', author='Wyatt Baldwin', author_email='self@wyattbaldwin.com', packages=find_packages(), install_requires=( 'tangled.web>=0.1.dev0', ), extras_require={ 'dev': ( 'tangled[dev]', ), }, entry_points=""" [tangled.scripts] auth = tangled.auth.command:Command """, classifiers=( 'Development Status :: 1 - Planning', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', ), )
Update package configuration, pt. ii
from setuptools import setup setup( name='tangled.auth', version='0.1a2.dev0', description='Tangled auth integration', long_description=open('README.rst').read(), url='http://tangledframework.org/', author='Wyatt Baldwin', author_email='self@wyattbaldwin.com', packages=[ 'tangled.auth', 'tangled.auth.tests', ], install_requires=[ 'tangled.web>=0.1.dev0', ], extras_require={ 'dev': [ 'tangled[dev]', ], }, entry_points=""" [tangled.scripts] auth = tangled.auth.command:Command """, classifiers=[ 'Development Status :: 1 - Planning', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', ], )
from setuptools import setup datafiles = [('/etc', ['general_conf/bck.conf'])] setup( name='mysql-autoxtrabackup', version='1.1', packages=['general_conf', 'backup_prepare', 'partial_recovery', 'master_backup_script'], py_modules = ['autoxtrabackup'], url='https://github.com/ShahriyarR/MySQL-AutoXtraBackup', license='GPL', author='Shahriyar Rzayev', author_email='rzayev.shahriyar@yandex.com', description='Commandline tool written in Python 3 for using Percona Xtrabackup', install_requires=[ 'click>=3.3', 'mysql-connector-python>=2.0.2', ], dependency_links = ['https://dev.mysql.com/get/Downloads/Connector-Python/mysql-connector-python-2.1.3.tar.gz'], entry_points=''' [console_scripts] autoxtrabackup=autoxtrabackup:all_procedure ''', data_files = datafiles, )
Update requirements for correct mysql-connector name
from setuptools import setup datafiles = [('/etc', ['general_conf/bck.conf'])] setup( name='mysql-autoxtrabackup', version='1.1', packages=['general_conf', 'backup_prepare', 'partial_recovery', 'master_backup_script'], py_modules = ['autoxtrabackup'], url='https://github.com/ShahriyarR/MySQL-AutoXtraBackup', license='GPL', author='Shahriyar Rzayev', author_email='rzayev.shahriyar@yandex.com', description='Commandline tool written in Python 3 for using Percona Xtrabackup', install_requires=[ 'click>=3.3', 'mysql-connector>=2.0.2', ], dependency_links = ['https://dev.mysql.com/get/Downloads/Connector-Python/mysql-connector-python-2.1.3.tar.gz'], entry_points=''' [console_scripts] autoxtrabackup=autoxtrabackup:all_procedure ''', data_files = datafiles, )
#! /usr/bin/env python from setuptools import setup, Extension pyasmjit_module = Extension( 'pyasmjit.pyasmjit', sources = [ 'pyasmjit/pyasmjit.c' ], ) setup( author = 'Christian Heitman', author_email = 'cnheitman@fundacionsadosky.org.ar', description = 'PyAsmJIT', ext_modules = [ pyasmjit_module ], license = 'BSD 2-Clause', name = 'pyasmjit', version = '0.1', )
Update README. Add global change log file. Update version number.
#! /usr/bin/env python from setuptools import setup, Extension pyasmjit_module = Extension( 'pyasmjit.pyasmjit', sources = [ 'pyasmjit/pyasmjit.c' ], ) setup( author = 'Christian Heitman', author_email = 'cnheitman@fundacionsadosky.org.ar', description = 'PyAsmJIT', ext_modules = [ pyasmjit_module ], license = 'BSD 2-Clause', name = 'pyasmjit', url = 'http://github.com/programa-stic/barf-project', version = '0.2', )
#!/usr/bin/env python from setuptools import setup, find_packages setup( name = "django-dummyimage", version = "0.1.1", description = "Dynamic Dummy Image Generator For Django!", author = "Rolando Espinoza La fuente", author_email = "darkrho@gmail.com", url = "https://github.com/darkrho/django-dummyimage", license = "BSD", packages = find_packages(), zip_safe=False, # because we're including media that Django needs include_package_data = True, install_requires = [ 'Django', 'PIL', ], classifiers=[ 'Programming Language :: Python', 'Framework :: Django', 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', ], )
Use Pillow instead of PIL
#!/usr/bin/env python from setuptools import setup, find_packages setup( name = "django-dummyimage", version = "0.1.1", description = "Dynamic Dummy Image Generator For Django!", author = "Rolando Espinoza La fuente", author_email = "darkrho@gmail.com", url = "https://github.com/darkrho/django-dummyimage", license = "BSD", packages = find_packages(), zip_safe=False, # because we're including media that Django needs include_package_data = True, install_requires = [ 'Django', 'Pillow', ], classifiers=[ 'Programming Language :: Python', 'Framework :: Django', 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', ], )
import os from invocations.docs import docs, www, sites, watch_docs from invocations.testing import test, coverage, integration, watch_tests from invocations.packaging import vendorize, release from invoke import Collection from invoke.util import LOG_FORMAT ns = Collection( test, coverage, integration, vendorize, release, www, docs, sites, watch_docs, watch_tests ) ns.configure({ 'tests': { 'logformat': LOG_FORMAT, 'package': 'invoke', }, 'packaging': { 'sign': True, 'wheel': True, # Because of PyYAML's dual source nonsense =/ 'dual_wheels': True, 'changelog_file': os.path.join( www.configuration()['sphinx']['source'], 'changelog.rst', ), }, })
Check setup.py desc when packaging
import os from invocations.docs import docs, www, sites, watch_docs from invocations.testing import test, coverage, integration, watch_tests from invocations.packaging import vendorize, release from invoke import Collection from invoke.util import LOG_FORMAT ns = Collection( test, coverage, integration, vendorize, release, www, docs, sites, watch_docs, watch_tests ) ns.configure({ 'tests': { 'logformat': LOG_FORMAT, 'package': 'invoke', }, 'packaging': { 'sign': True, 'wheel': True, 'check_desc': True, # Because of PyYAML's dual source nonsense =/ 'dual_wheels': True, 'changelog_file': os.path.join( www.configuration()['sphinx']['source'], 'changelog.rst', ), }, })
class PropDict(dict): def __init__(self, srcs): dict.__init__(self) self._sources = srcs def set_source_preference(self, sources): """ Change list of source preference This method has been deprecated and should no longer be used. """ raise DeprecationWarning("This method has been deprecated and should no longer be used. Set the sources list using the 'sources' property.") self._set_sources(sources) def has_key(self, item): try: self.__getitem__(item) return True except KeyError: return False def __contains__(self, item): return self.has_key(item) def __getitem__(self, item): if isinstance(item, basestring): for src in self._sources: if src.endswith('*'): for k,v in self.iteritems(): if k[0].startswith(src[:-1]) and k[1] == item: return v try: t = dict.__getitem__(self, (src, item)) return t except KeyError: pass raise KeyError, item return dict.__getitem__(self, item) def get(self, item, default=None): try: return self[item] except KeyError: return default def _get_sources(self): return self._sources def _set_sources(self, val): if not isinstance(val, list): raise TypeError("Need a list of sources") for i in val: if not isinstance(i, basestring): raise TypeError("Sources need to be strings") self._sources = val sources = property(_get_sources, _set_sources)
BUG(1900): Allow PropDict.sources in python bindings to be any sequence.
class PropDict(dict): def __init__(self, srcs): dict.__init__(self) self._sources = srcs def set_source_preference(self, sources): """ Change list of source preference This method has been deprecated and should no longer be used. """ raise DeprecationWarning("This method has been deprecated and should no longer be used. Set the sources list using the 'sources' property.") self._set_sources(sources) def has_key(self, item): try: self.__getitem__(item) return True except KeyError: return False def __contains__(self, item): return self.has_key(item) def __getitem__(self, item): if isinstance(item, basestring): for src in self._sources: if src.endswith('*'): for k,v in self.iteritems(): if k[0].startswith(src[:-1]) and k[1] == item: return v try: t = dict.__getitem__(self, (src, item)) return t except KeyError: pass raise KeyError, item return dict.__getitem__(self, item) def get(self, item, default=None): try: return self[item] except KeyError: return default def _get_sources(self): return self._sources def _set_sources(self, val): if isinstance(val, basestring): raise TypeError("Need a sequence of sources") for i in val: if not isinstance(i, basestring): raise TypeError("Sources need to be strings") self._sources = val sources = property(_get_sources, _set_sources)
import SimpleCV as scv from SimpleCV import Image import cv2 import time from start_camera import start_camera import threading # camera_thread = threading.Thread(target=start_camera) # camera_thread.start() # from get_images_from_pi import get_image, valid_image # time.sleep(2) # count = 0 # while (count < 50): # get_image(count) # count += 1 # exit() image = Image('images/stop') image.show() image.show() time.sleep(2) reds = image.hueDistance(color=scv.Color.RED) reds.show() reds.show() time.sleep(2) stretch = reds.stretch(20,21) stretch.show() stretch.show() time.sleep(3) # blobs = image.findBlobs() # if blobs: # for blob in blobs: # print "got a blob" # blob.draw(color=(0, 128, 0)) # image.show() # image.show() # time.sleep(4)
Add simple logic to driver code test for easy testing
import SimpleCV as scv from SimpleCV import Image import cv2 import time from start_camera import start_camera import threading def take_50_pictures(): camera_thread = threading.Thread(target=start_camera) camera_thread.start() from get_images_from_pi import get_image, valid_image time.sleep(2) count = 0 while (count < 50): get_image(count) count += 1 def detect_stop_sign(image): reds = image.hueDistance(color=scv.Color.RED) stretch = reds.stretch(20,21) invert = stretch.invert() blobs = invert.findBlobs(minsize=2000) if blobs: for blob in blobs: print blob.area() blob.draw(color=(0, 128, 0)) invert.show() invert.show() time.sleep(3) image = Image('images/0.jpg') x = 0 while (x < 40): image = Image('images/'+ str(x) + '.jpg') detect_stop_sign(image) print x x +=1 exit()
#!/usr/bin/env python """Sets a luxafor flag based on status.""" import luxafor import os import time from slackclient import SlackClient slack_token = os.environ["SLACK_API_TOKEN"] sc = SlackClient(slack_token) API = luxafor.API() while (True): presence = sc.api_call("dnd.info") if presence['snooze_enabled']: API.mode_colour(luxafor.COLOUR_RED) else: API.mode_colour(luxafor.COLOUR_GREEN) time.sleep(1) # make sure we don't flood slack
Switch to use RTM protocol There isn’t an RTM event for ‘stops being DND’ so I’ve added a simple countdown loop, it loses about a second or two, but it works. Happy to have feedback on the loops / logic, feels like it could be sharper.
#!/usr/bin/env python """Sets a luxafor flag based on status.""" import luxafor import os import time from slackclient import SlackClient slack_token = os.environ["SLACK_API_TOKEN"] slack_client = SlackClient(slack_token) lux = luxafor.API() snooze_remaining = -1 # Countdown timer user_id = 'U024G0M2L' if slack_client.rtm_connect(): while True: try: for event in slack_client.rtm_read(): if event['type'] == 'dnd_updated' and event['user'] == user_id: if event['dnd_status']['snooze_enabled'] is True: lux.mode_colour(luxafor.COLOUR_RED) snooze_remaining = event['dnd_status']['snooze_remaining'] else: lux.mode_colour(luxafor.COLOUR_GREEN) except KeyError: pass if snooze_remaining >= 1: snooze_remaining -= 1 if snooze_remaining == 0 or snooze_remaining == -1: lux.mode_colour(luxafor.COLOUR_GREEN) snooze_remaining = -1 time.sleep(1) else: print("Connection Failed, invalid token?")
from django.db import models class DenormalizeManagerMixin(object): def update_cohort(self, cohort, **kwargs): stats, created = self.get_or_create(**kwargs) stats.highest_paid = cohort.order_by('-compensation')[0] stats.lowest_paid = cohort.order_by('compensation')[0] stats.save() class OrganizationStatsManager(models.Manager): use_for_related_manager = True def denormalize(self, obj): Employee = obj._meta.concrete_model use_children = False organization = obj.position.organization # TODO: Allow organization to break and say it is top-level # Example: El Paso County Sheriff's Department instead # of going all the way to El Paso County. if organization.parent: use_children = True organization = organization.parent if use_children: kwargs = { 'parent': None, 'children__members__employee': obj, } else: kwargs = {'members__employee': obj, } cohort = Employee.objects.filter(**kwargs) self.update_cohort(cohort, organization=organization) class PositionStatsManager(models.Manager): use_for_related_manager = True def denormalize(self, obj): Employee = obj._meta.concrete_model position_cohort = Employee.objects.filter( position__organization=obj.position.organization) self.update_cohort(position_cohort, position=obj.position.post)
Fix this code so it works
from django.db import models class DenormalizeManagerMixin(object): def update_cohort(self, cohort, **kwargs): stats, created = self.get_or_create(**kwargs) stats.highest_paid = cohort.order_by('-compensation')[0] stats.lowest_paid = cohort.order_by('compensation')[0] stats.save() class OrganizationStatsManager(DenormalizeManagerMixin, models.Manager): use_for_related_manager = True def denormalize(self, obj): Employee = obj._meta.concrete_model use_children = False organization = obj.position.organization # TODO: Allow organization to break and say it is top-level # Example: El Paso County Sheriff's Department instead # of going all the way to El Paso County. if organization.parent: use_children = True organization = organization.parent if use_children: kwargs = {'position__organization__parent': organization, } else: kwargs = {'position__organization': organization, } cohort = Employee.objects.filter(**kwargs) self.update_cohort(cohort, organization=organization) class PositionStatsManager(DenormalizeManagerMixin, models.Manager): use_for_related_manager = True def denormalize(self, obj): Employee = obj._meta.concrete_model position_cohort = Employee.objects.filter( position__organization=obj.position.organization) self.update_cohort(position_cohort, position=obj.position.post)
import os import unittest class TestCython(unittest.TestCase): pass def add_test(name, runner, target): setattr(TestCython, "test_" + name, lambda s: runner(s, target)) try: import Cython import glob import sys targets = glob.glob(os.path.join(os.path.dirname(__file__), "cython", "setup_*.py")) sys.path.append(os.path.join(os.path.dirname(__file__), "cython")) for target in targets: def runner(self, target): cwd = os.getcwd() try: os.chdir(os.path.dirname(target)) exec(open(os.path.basename(target)).read()) except: raise finally: os.chdir(cwd) name, _ = os.path.splitext(os.path.basename(target)) add_test(name, runner, target) except ImportError: pass
Disable Cython test until Cython supports the new pythonic layout
import os import unittest class TestCython(unittest.TestCase): pass # Needs to wait until cython supports pythran's new builtins naming #def add_test(name, runner, target): # setattr(TestCython, "test_" + name, lambda s: runner(s, target)) # #try: # import Cython # import glob # import sys # targets = glob.glob(os.path.join(os.path.dirname(__file__), "cython", "setup_*.py")) # sys.path.append(os.path.join(os.path.dirname(__file__), "cython")) # # for target in targets: # def runner(self, target): # cwd = os.getcwd() # try: # os.chdir(os.path.dirname(target)) # exec(open(os.path.basename(target)).read()) # except: # raise # finally: # os.chdir(cwd) # name, _ = os.path.splitext(os.path.basename(target)) # add_test(name, runner, target) # # #except ImportError: # pass
#!/usr/bin/env python import sys from setuptools import setup from setuptools.command.test import test as TestCommand import nacl try: import nacl.nacl except ImportError: # installing - there is no cffi yet ext_modules = [] else: # building bdist - cffi is here! ext_modules = [nacl.nacl.ffi.verifier.get_extension()] class PyTest(TestCommand): def finalize_options(self): TestCommand.finalize_options(self) self.test_args = [] self.test_suite = True def run_tests(self): import pytest errno = pytest.main(self.test_args) sys.exit(errno) setup( name=nacl.__title__, version=nacl.__version__, description=nacl.__summary__, long_description=open("README.rst").read(), url=nacl.__uri__, license=nacl.__license__, author=nacl.__author__, author_email=nacl.__email__, install_requires=[ "cffi", ], extras_require={ "tests": ["pytest"], }, tests_require=["pytest"], packages=[ "nacl", "nacl.invoke", ], ext_package="nacl", ext_modules=ext_modules, zip_safe=False, cmdclass={"test": PyTest}, )
Add classifiers for the Python implementations and versions
#!/usr/bin/env python import sys from setuptools import setup from setuptools.command.test import test as TestCommand import nacl try: import nacl.nacl except ImportError: # installing - there is no cffi yet ext_modules = [] else: # building bdist - cffi is here! ext_modules = [nacl.nacl.ffi.verifier.get_extension()] class PyTest(TestCommand): def finalize_options(self): TestCommand.finalize_options(self) self.test_args = [] self.test_suite = True def run_tests(self): import pytest errno = pytest.main(self.test_args) sys.exit(errno) setup( name=nacl.__title__, version=nacl.__version__, description=nacl.__summary__, long_description=open("README.rst").read(), url=nacl.__uri__, license=nacl.__license__, author=nacl.__author__, author_email=nacl.__email__, install_requires=[ "cffi", ], extras_require={ "tests": ["pytest"], }, tests_require=["pytest"], packages=[ "nacl", "nacl.invoke", ], ext_package="nacl", ext_modules=ext_modules, zip_safe=False, cmdclass={"test": PyTest}, classifiers=[ "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", ] )
#!/usr/bin/env python from setuptools import setup, find_packages with open('VERSION') as version_stream: version = version_stream.read().strip() setup( name='PlayerPiano', version=version, description='Amazes your friends by running Python doctests in a fake interactive shell.', author='Peter Fein', author_email='pete@wearpants.org', url='https://github.com/wearpants/playerpiano', entry_points = { 'console_scripts': [ 'playerpiano=playerpiano.piano:main', 'recorderpiano=playerpiano.recorder:main', ]}, packages=find_packages(), include_package_data = True, install_requires=['pygments'], license="BSD", long_description=open("README.rst").read(), classifiers = [ "Development Status :: 5 - Production/Stable", "Environment :: Console", "Intended Audience :: Developers", "Operating System :: POSIX", "Topic :: Education :: Computer Aided Instruction (CAI)", "Topic :: System :: Shells", ], )
Use open context to load readme (per best practices)
#!/usr/bin/env python from setuptools import setup, find_packages with open('VERSION') as version_stream: version = version_stream.read().strip() with open('README.rst') as readme_stream: readme = readme_stream.read() setup( name='PlayerPiano', version=version, description='Amazes your friends by running Python doctests in a fake interactive shell.', author='Peter Fein', author_email='pete@wearpants.org', url='https://github.com/wearpants/playerpiano', entry_points = { 'console_scripts': [ 'playerpiano=playerpiano.piano:main', 'recorderpiano=playerpiano.recorder:main', ]}, packages=find_packages(), include_package_data = True, install_requires=['pygments'], license="BSD", long_description=readme, classifiers = [ "Development Status :: 5 - Production/Stable", "Environment :: Console", "Intended Audience :: Developers", "Operating System :: POSIX", "Topic :: Education :: Computer Aided Instruction (CAI)", "Topic :: System :: Shells", ], )
from setuptools import setup setup( name='aufmachen', version='0.1-dev', url='http://github.com/fdb/aufmachen', license='BSD', author='Frederik & Jan De Bleser', description='Turns a website\'s HTML into nice, clean objects.', packages=['aufmachen', 'aufmachen.websites'], package_data = {'aufmachen': ['*.js']}, platforms='any', classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development :: Libraries :: Python Modules' ] )
Change version number, add author email.
from setuptools import setup setup( name='aufmachen', version='0.2.1', url='http://github.com/fdb/aufmachen', license='BSD', author='Frederik & Jan De Bleser', author_email='frederik@burocrazy.com', description='Turns a website\'s HTML into nice, clean objects.', packages=['aufmachen', 'aufmachen.websites'], package_data = {'aufmachen': ['*.js']}, platforms='any', classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development :: Libraries :: Python Modules' ] )
#!/usr/bin/python # -*- coding: utf-8 -*- import json import os import sys import argparse import tmux_wrapper as tmux __version__ = 1.0 __description__ = "A tmux wrapper featuring shortcuts and session presets." def load_session_presets(): try: file_path = os.environ["TM_SESSIONS"] except KeyError: return None try: with open(file_path) as f: config = json.load(f) except IOError: print("Invalid TM_SESSIONS environmental variable: cannot open file {}".format(file_path)) def main(argv): parser = argparse.ArgumentParser(description=__description__) parser.add_argument("session", metavar="session", type=str, nargs="?", help="the name of the tmux session to start or attach") parser.add_argument("-l", "--list", action="store_true", help="list all open sessions and session presets") parser.add_argument("-k", "--kill", metavar="session", action="store", help="kill a session") args = parser.parse_args() if len(argv) == 0: parser.print_help() if args.kill: try: tmux.kill(args.kill) except tmux.ServerConnectionError, e: print(e.description) elif args.list: print tmux.list() elif args.session: tmux.create_or_attach(args.session) if __name__ == "__main__": main(sys.argv[1:])
Check for all exceptions in main method
#!/usr/bin/python # -*- coding: utf-8 -*- import json import os import sys import argparse import tmux_wrapper as tmux __version__ = 1.0 __description__ = "A tmux wrapper featuring shortcuts and session presets." def load_session_presets(): try: file_path = os.environ["TM_SESSIONS"] except KeyError: return None try: with open(file_path) as f: config = json.load(f) except IOError: print("Invalid TM_SESSIONS environmental variable: cannot open file {}".format(file_path)) def main(argv): parser = argparse.ArgumentParser(description=__description__) parser.add_argument("session", metavar="session", type=str, nargs="?", help="the name of the tmux session to start or attach") parser.add_argument("-l", "--list", action="store_true", help="list all open sessions and session presets") parser.add_argument("-k", "--kill", metavar="session", action="store", help="kill a session") args = parser.parse_args() if len(argv) == 0: parser.print_help() if args.kill: try: tmux.kill(args.kill) except (tmux.ServerConnectionError, tmux.SessionDoesNotExist), e: print(e.description) elif args.list: try: print tmux.list() except tmux.ServerConnectionError, e: print(e.description) elif args.session: tmux.create_or_attach(args.session) if __name__ == "__main__": main(sys.argv[1:])
# coding: utf-8 import sys, getopt import os import json import datetime from subprocess import call current_date = str(datetime.datetime.now()).replace(' ', '!') output_file_name = './tmp_itorch_exec-'+current_date+'.lua' if __name__ == "__main__": if len(sys.argv) > 0: input_file = open(sys.argv[1], 'r') with input_file as json_file: json_data = json.load(json_file) input_file.close() sources = [] for item in json_data['cells']: if item['cell_type'] == 'code': sources = sources + item['source'] output_file = open(output_file_name, 'w') output_file.writelines(sources) output_file.close() call(["th", output_file_name]) os.remove(output_file_name)
Fix missing new line symbol for last line in notebook cell
# coding: utf-8 import sys, getopt import os import json import datetime from subprocess import call current_date = str(datetime.datetime.now()).replace(' ', '!') output_file_name = './tmp_itorch_exec-'+current_date+'.lua' if __name__ == "__main__": if len(sys.argv) > 0: input_file = open(sys.argv[1], 'r') with input_file as json_file: json_data = json.load(json_file) input_file.close() sources = [] for item in json_data['cells']: if item['cell_type'] == 'code' and len(item['source']) > 0: item['source'][-1] = item['source'][-1]+'\n' sources = sources + item['source'] output_file = open(output_file_name, 'w') output_file.writelines(sources) output_file.close() call(["th", output_file_name]) os.remove(output_file_name)
#!/usr/bin/env python from setuptools import setup, find_packages # Get version string with open('gdx2py/version.py') as f: exec(f.read()) setup(name='GDX2py', version=__version__, author='Erkka Rinne', author_email='erkka.rinne@vtt.fi', description='Read and write GAMS Data eXchange (GDX) files using Python', python_requires='>=3.6', install_requires=[ 'gdxcc>=7', ], setup_requires=['pytest-runner'], tests_require=['pytest', 'pytest-datadir'], url='https://github.com/ererkka/GDX2py', packages=find_packages(), )
Exclude tests from built distribution
#!/usr/bin/env python from setuptools import setup, find_packages # Get version string with open('gdx2py/version.py') as f: exec(f.read()) setup(name='GDX2py', version=__version__, author='Erkka Rinne', author_email='erkka.rinne@vtt.fi', description='Read and write GAMS Data eXchange (GDX) files using Python', python_requires='>=3.6', install_requires=[ 'gdxcc>=7', ], setup_requires=['pytest-runner'], tests_require=['pytest', 'pytest-datadir'], url='https://github.com/ererkka/GDX2py', packages=find_packages(exclude=['tests']), )
from setuptools import setup setup( name='pytest-ui', description='Text User Interface for running python tests', version='0.1', license='MIT', platforms=['linux', 'osx', 'win32'], packages=['pytui'], url='https://github.com/martinsmid/pytest-ui', author_email='martin.smid@gmail.com', author='Martin Smid', entry_points={ 'pytest11': [ 'pytui = pytui.runner', ] }, install_requires=['urwid>=1.3.1,pytest>=3.0.5'], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Testing', 'Topic :: Utilities', 'Programming Language :: Python', ], )
Split requires into separate strings
from setuptools import setup setup( name='pytest-ui', description='Text User Interface for running python tests', version='0.1', license='MIT', platforms=['linux', 'osx', 'win32'], packages=['pytui'], url='https://github.com/martinsmid/pytest-ui', author_email='martin.smid@gmail.com', author='Martin Smid', entry_points={ 'pytest11': [ 'pytui = pytui.runner', ] }, install_requires=['urwid>=1.3.1', 'pytest>=3.0.5'], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Testing', 'Topic :: Utilities', 'Programming Language :: Python', ], )
from setuptools import setup from os import path here = path.abspath(path.dirname(__file__)) with open(path.join(here, 'README.md')) as f: jgo_long_description = f.read() setup( name='jgo', version='0.2.0', author='Philipp Hanslovsky, Curtis Rueden', author_email='hanslovskyp@janelia.hhmi.org', description='Launch Java code from Python and the CLI, installation-free.', long_description=jgo_long_description, long_description_content_type='text/markdown', license='Public domain', url='https://github.com/scijava/jgo', packages=['jgo'], entry_points={ 'console_scripts': [ 'jgo=jgo.jgo:jgo_main' ] }, python_requires='>=3', )
Bump to next development cycle
from setuptools import setup from os import path here = path.abspath(path.dirname(__file__)) with open(path.join(here, 'README.md')) as f: jgo_long_description = f.read() setup( name='jgo', version='0.2.1.dev0', author='Philipp Hanslovsky, Curtis Rueden', author_email='hanslovskyp@janelia.hhmi.org', description='Launch Java code from Python and the CLI, installation-free.', long_description=jgo_long_description, long_description_content_type='text/markdown', license='Public domain', url='https://github.com/scijava/jgo', packages=['jgo'], entry_points={ 'console_scripts': [ 'jgo=jgo.jgo:jgo_main' ] }, python_requires='>=3', )
# coding: utf-8 from setuptools import setup, find_packages setup( name = 'thumbor_aws', version = "1", description = 'Thumbor AWS extensions', author = 'William King', author_email = 'willtrking@gmail.com', zip_safe = False, include_package_data = True, packages=find_packages(), install_requires=['py-dateutil','thumbor','boto'] )
Use python-dateutil instead of py-dateutil
# coding: utf-8 from setuptools import setup, find_packages setup( name = 'thumbor_aws', version = "1", description = 'Thumbor AWS extensions', author = 'William King', author_email = 'willtrking@gmail.com', zip_safe = False, include_package_data = True, packages=find_packages(), install_requires=['python-dateutil','thumbor','boto'] )
from setuptools import setup setup( name='tangled.website', version='0.1.dev0', description='tangledframework.org', long_description=open('README.rst').read(), url='http://tangledframework.org/', download_url='https://github.com/TangledWeb/tangled.website/tags', author='Wyatt Baldwin', author_email='self@wyattbaldwin.com', packages=[ 'tangled', 'tangled.website', ], include_package_data=True, install_requires=[ 'tangled.auth>=0.1a3', 'tangled.session>=0.1a2', 'tangled.site>=0.1a2', 'SQLAlchemy>=0.9.7', ], extras_require={ 'dev': ['coverage'], }, classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ], )
Upgrade SQLAlchemy 0.9.7 => 1.1.6
from setuptools import setup setup( name='tangled.website', version='0.1.dev0', description='tangledframework.org', long_description=open('README.rst').read(), url='http://tangledframework.org/', download_url='https://github.com/TangledWeb/tangled.website/tags', author='Wyatt Baldwin', author_email='self@wyattbaldwin.com', packages=[ 'tangled', 'tangled.website', ], include_package_data=True, install_requires=[ 'tangled.auth>=0.1a3', 'tangled.session>=0.1a2', 'tangled.site>=0.1a2', 'SQLAlchemy>=1.1.6', ], extras_require={ 'dev': ['coverage'], }, classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ], )
#!/usr/bin/env python from setuptools import setup setup(name='tap-awin', version='0.0.3', description='Singer.io tap for extracting data from the Affiliate Window API', author='Onedox', url='https://github.com/onedox/tap-awin', classifiers=['Programming Language :: Python :: 3 :: Only'], py_modules=['tap_awin'], install_requires=[ 'zeep>=1.4.1', 'singer-python>=3.6.3', 'tzlocal>=1.3', ], entry_points=''' [console_scripts] tap-awin=tap_awin:main ''', packages=['tap_awin'], package_data = { 'tap_awin/schemas': [ "transactions.json", "merchants.json", ], }, include_package_data=True, )
Prepare for release of 0.0.4
#!/usr/bin/env python from setuptools import setup setup(name='tap-awin', version='0.0.4', description='Singer.io tap for extracting data from the Affiliate Window API', author='Onedox', url='https://github.com/onedox/tap-awin', classifiers=['Programming Language :: Python :: 3 :: Only'], py_modules=['tap_awin'], install_requires=[ 'zeep>=1.4.1', 'singer-python>=3.6.3', 'tzlocal>=1.3', ], entry_points=''' [console_scripts] tap-awin=tap_awin:main ''', packages=['tap_awin'], package_data = { 'tap_awin/schemas': [ "transactions.json", "merchants.json", ], }, include_package_data=True, )
from ado.version import ADO_VERSION from setuptools import setup, find_packages setup( name='ado', author='Ana Nelson', packages=find_packages(), version=ADO_VERSION, install_requires = [ 'python-modargs>=1.7', 'Markdown', # for reports 'dexy>=0.9.9' ], entry_points = { 'console_scripts' : [ 'ado = ado.commands:run' ] } )
Set include package data to true.
from ado.version import ADO_VERSION from setuptools import setup, find_packages setup( name='ado', author='Ana Nelson', packages=find_packages(), version=ADO_VERSION, include_package_data = True, install_requires = [ 'python-modargs>=1.7', 'Markdown', # for reports 'dexy>=0.9.9' ], entry_points = { 'console_scripts' : [ 'ado = ado.commands:run' ] } )
import sys from setuptools import find_packages, setup with open('VERSION') as version_fp: VERSION = version_fp.read().strip() install_requires = [ 'django-local-settings>=1.0a13', 'stashward', ] if sys.version_info[:2] < (3, 4): install_requires.append('enum34') setup( name='django-arcutils', version=VERSION, url='https://github.com/PSU-OIT-ARC/django-arcutils', author='PSU - OIT - ARC', author_email='consultants@pdx.edu', description='Common utilities used in ARC Django projects', packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=install_requires, extras_require={ 'ldap': [ 'certifi>=2015.11.20.1', 'ldap3>=1.0.4', ], 'dev': [ 'django>=1.7,<1.9', 'djangorestframework>3.3', 'flake8', 'ldap3', ], }, entry_points=""" [console_scripts] arcutils = arcutils.__main__:main """, classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: Django', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ], )
Upgrade certifi 2015.11.20.1 => 2016.2.28
import sys from setuptools import find_packages, setup with open('VERSION') as version_fp: VERSION = version_fp.read().strip() install_requires = [ 'django-local-settings>=1.0a13', 'stashward', ] if sys.version_info[:2] < (3, 4): install_requires.append('enum34') setup( name='django-arcutils', version=VERSION, url='https://github.com/PSU-OIT-ARC/django-arcutils', author='PSU - OIT - ARC', author_email='consultants@pdx.edu', description='Common utilities used in ARC Django projects', packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=install_requires, extras_require={ 'ldap': [ 'certifi>=2016.2.28', 'ldap3>=1.0.4', ], 'dev': [ 'django>=1.7,<1.9', 'djangorestframework>3.3', 'flake8', 'ldap3', ], }, entry_points=""" [console_scripts] arcutils = arcutils.__main__:main """, classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: Django', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ], )
from distutils.core import setup setup(name='pandocfilters', version='1.0', description='Utilities for writing pandoc filters in python', author='John MacFarlane', author_email='fiddlosopher@gmail.com', url='http://github.com/jgm/pandocfilters', py_modules=['pandocfilters'], keywords=['pandoc'], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: End Users/Desktop', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Text Processing :: Filters' ], )
Include README as long description.
import os from distutils.core import setup def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup(name='pandocfilters', version='1.0', description='Utilities for writing pandoc filters in python', long_description=read('README.rst'), author='John MacFarlane', author_email='fiddlosopher@gmail.com', url='http://github.com/jgm/pandocfilters', py_modules=['pandocfilters'], keywords=['pandoc'], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: End Users/Desktop', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Text Processing :: Filters' ], )
try: from setuptools import setup except ImportError: from distutils.core import setup setup( name='python-evrythng', version='0.1', packages=['evrythng', 'evrythng.entities'], package_dir={'': 'src'}, url='https://github.com/GooeeIOT/python-evrythng', license='MIT', author='Lyle Scott, III', author_email='lyle@digitalfoo.net', description='A Python wrapper about the Evrythng REST API.' )
Add requests as a requirement package.
try: from setuptools import setup except ImportError: from distutils.core import setup setup( name='python-evrythng', version='0.2', packages=['evrythng', 'evrythng.entities'], package_dir={'': 'src'}, url='https://github.com/GooeeIOT/python-evrythng', license='MIT', author='Gooee, Inc', author_email='lyle@gooee.com', description='A Python wrapper about the Evrythng REST API.', install_requires=[ 'requests', ], )
# Project tasks (for use with invoke task runner) import subprocess from invoke import task @task def test(cover=False): # Run tests using nose called with coverage code = subprocess.call(['coverage', 'run', '-m', 'nose', '--rednose']) # Also generate coverage reports when --cover flag is given if cover and code == 0: # Add blank line between test report and coverage report print('') subprocess.call(['coverage', 'report']) subprocess.call(['coverage', 'html'])
Improve performance of test task
# Project tasks (for use with invoke task runner) import subprocess from invoke import task @task def test(cover=False): if cover: # Run tests via coverage and generate reports if --cover flag is given code = subprocess.call(['coverage', 'run', '-m', 'nose', '--rednose']) # Only show coverage report if all tests have passed if code == 0: # Add blank line between test report and coverage report print('') subprocess.call(['coverage', 'report']) subprocess.call(['coverage', 'html']) else: # Otherwise, run tests via nose (which is faster) code = subprocess.call(['nosetests', '--rednose'])
import time class Logger(): def __init__(self, name = "defaultLogFile"): timestamp = time.strftime('%Y_%m_%d-%H_%M_%S') self.name = "Logs/" + timestamp + "_" + name + ".txt" try: self.logfile = open(self.name, 'w') self.opened = True except: self.opened = False def save_line(self,data): time_s = time.time() time_ms = int((time_s - int(time_s))*1000.0) timestamp = time.strftime(('%H_%M_%S'), time.localtime(time_s))+"_" +str(time_ms) + " : " if(self.opened): self.logfile.write(timestamp+data) self.logfile.flush() return 0,"" else: return 1,str(timestamp+data) def close(self): if(self.opened): self.logfile.flush() self.logfile.close() self.opened = False return 0 else: return 1
Add missing CRLF for new line in logs Signed-off-by: TeaPackCZ <a78d8486eff6e2cb08b2d9907449b92187b8e215@gmail.com>
import time class Logger(): def __init__(self, name = "defaultLogFile"): timestamp = time.strftime('%Y_%m_%d-%H_%M_%S') self.name = "Logs/" + timestamp + "_" + name + ".txt" try: self.logfile = open(self.name, 'w') self.opened = True except: self.opened = False def save_line(self,data): time_s = time.time() time_ms = int((time_s - int(time_s))*1000.0) timestamp = time.strftime(('%H_%M_%S'), time.localtime(time_s))+"_" +str(time_ms) + " : " if(self.opened): self.logfile.write(timestamp+data+"\r\n") self.logfile.flush() return 0,"" else: return 1,str(timestamp+data) def close(self): if(self.opened): self.logfile.flush() self.logfile.close() self.opened = False return 0 else: return 1
from django.contrib.auth.models import User from linked_accounts.utils import get_profile class LinkedAccountsBackend(object): def get_user(self, user_id): return User.objects.get(id=user_id) def authenticate(self, service=None, token=None): profile = get_profile(service, token)
Return profile from authenticate method
from django.contrib.auth.models import User from linked_accounts.utils import get_profile class LinkedAccountsBackend(object): def get_user(self, user_id): return User.objects.get(id=user_id) def authenticate(self, service=None, token=None): profile = get_profile(service, token) return profile
from .base import * # noqa DEBUG = True INTERNAL_IPS = INTERNAL_IPS + ('', ) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'app_kdl_dev', 'USER': 'app_kdl', 'PASSWORD': '', 'HOST': '' }, } LOGGING_LEVEL = logging.DEBUG LOGGING['loggers']['kdl']['level'] = LOGGING_LEVEL TEMPLATES[0]['OPTIONS']['debug'] = True # ----------------------------------------------------------------------------- # Django Extensions # http://django-extensions.readthedocs.org/en/latest/ # ----------------------------------------------------------------------------- try: import django_extensions # noqa INSTALLED_APPS = INSTALLED_APPS + ('django_extensions',) except ImportError: pass # ----------------------------------------------------------------------------- # Django Debug Toolbar # http://django-debug-toolbar.readthedocs.org/en/latest/ # ----------------------------------------------------------------------------- try: import debug_toolbar # noqa INSTALLED_APPS = INSTALLED_APPS + ('debug_toolbar',) MIDDLEWARE_CLASSES = MIDDLEWARE_CLASSES + ( 'debug_toolbar.middleware.DebugToolbarMiddleware',) DEBUG_TOOLBAR_PATCH_SETTINGS = True except ImportError: pass # ----------------------------------------------------------------------------- # Local settings # ----------------------------------------------------------------------------- try: from .local import * # noqa except ImportError: print('dev, failed to import local settings') from .test import * # noqa print('the project is running with test settings') print('please create a local settings file')
Set require debug to be the same as debug.
from .base import * # noqa DEBUG = True REQUIRE_DEBUG = DEBUG INTERNAL_IPS = INTERNAL_IPS + ('', ) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'app_kdl_dev', 'USER': 'app_kdl', 'PASSWORD': '', 'HOST': '' }, } LOGGING_LEVEL = logging.DEBUG LOGGING['loggers']['kdl']['level'] = LOGGING_LEVEL TEMPLATES[0]['OPTIONS']['debug'] = True # ----------------------------------------------------------------------------- # Django Extensions # http://django-extensions.readthedocs.org/en/latest/ # ----------------------------------------------------------------------------- try: import django_extensions # noqa INSTALLED_APPS = INSTALLED_APPS + ('django_extensions',) except ImportError: pass # ----------------------------------------------------------------------------- # Django Debug Toolbar # http://django-debug-toolbar.readthedocs.org/en/latest/ # ----------------------------------------------------------------------------- try: import debug_toolbar # noqa INSTALLED_APPS = INSTALLED_APPS + ('debug_toolbar',) MIDDLEWARE_CLASSES = MIDDLEWARE_CLASSES + ( 'debug_toolbar.middleware.DebugToolbarMiddleware',) DEBUG_TOOLBAR_PATCH_SETTINGS = True except ImportError: pass # ----------------------------------------------------------------------------- # Local settings # ----------------------------------------------------------------------------- try: from .local import * # noqa except ImportError: print('dev, failed to import local settings') from .test import * # noqa print('the project is running with test settings') print('please create a local settings file')
from .command import ( # noqa: F401 # pragma: no cover FileDownloadCommand, FileUploadCommand, FunctionCommand, MaskString, QuoteString, StringCommand, ) from .config import Config # noqa: F401 # pragma: no cover from .deploy import deploy # noqa: F401 # pragma: no cover from .exceptions import ( # noqa: F401 # pragma: no cover DeployError, InventoryError, OperationError, OperationTypeError, ) from .facts import FactBase, ShortFactBase # noqa: F401 # pragma: no cover from .inventory import Inventory # noqa: F401 # pragma: no cover from .operation import operation # noqa: F401 # pragma: no cover from .state import State # noqa: F401 # pragma: no cover
Add `Host` to `pyinfra.api` imports.
from .command import ( # noqa: F401 # pragma: no cover FileDownloadCommand, FileUploadCommand, FunctionCommand, MaskString, QuoteString, StringCommand, ) from .config import Config # noqa: F401 # pragma: no cover from .deploy import deploy # noqa: F401 # pragma: no cover from .exceptions import ( # noqa: F401 # pragma: no cover DeployError, InventoryError, OperationError, OperationTypeError, ) from .facts import FactBase, ShortFactBase # noqa: F401 # pragma: no cover from .host import Host # noqa: F401 # pragma: no cover from .inventory import Inventory # noqa: F401 # pragma: no cover from .operation import operation # noqa: F401 # pragma: no cover from .state import State # noqa: F401 # pragma: no cover
# -*- coding: utf-8 -*- from datetime import datetime from django.conf import settings from django.http import HttpResponse, Http404 from django.template import RequestContext from django.shortcuts import render_to_response, get_object_or_404 from Instanssi.tickets.models import Ticket from Instanssi.store.models import StoreTransaction # Logging related import logging logger = logging.getLogger(__name__) # Shows information about a single ticket def ticket(request, ticket_key): # Find ticket ticket = get_object_or_404(Ticket, key=ticket_key) # Render ticket return render_to_response('tickets/ticket.html', { 'ticket': ticket, }, context_instance=RequestContext(request)) # Lists all tickets def tickets(request, transaction_key): # Get transaction transaction = get_object_or_404(StoreTransaction, key=transaction_key) # Get all tickets by this transaction tickets = Ticket.objects.filter(transaction=transaction) # Render tickets return render_to_response('tickets/tickets.html', { 'transaction': transaction, 'tickets': tickets, }, context_instance=RequestContext(request))
tickets: Make sure ticket is paid before it can be viewed
# -*- coding: utf-8 -*- from datetime import datetime from django.conf import settings from django.http import HttpResponse, Http404 from django.template import RequestContext from django.shortcuts import render_to_response, get_object_or_404 from Instanssi.tickets.models import Ticket from Instanssi.store.models import StoreTransaction # Logging related import logging logger = logging.getLogger(__name__) # Shows information about a single ticket def ticket(request, ticket_key): # Find ticket ticket = get_object_or_404(Ticket, key=ticket_key) # Render ticket return render_to_response('tickets/ticket.html', { 'ticket': ticket, }, context_instance=RequestContext(request)) # Lists all tickets def tickets(request, transaction_key): # Get transaction transaction = get_object_or_404(StoreTransaction, key=transaction_key) if not transaction.paid: raise Http404 # Get all tickets by this transaction tickets = Ticket.objects.filter(transaction=transaction) # Render tickets return render_to_response('tickets/tickets.html', { 'transaction': transaction, 'tickets': tickets, }, context_instance=RequestContext(request))
#!/usr/bin/env python """Distutils setup file, used to install or test 'setuptools'""" from setuptools import setup, find_packages, Require from distutils.version import LooseVersion setup( name="setuptools", version="0.3a1", description="Distutils enhancements", author="Phillip J. Eby", author_email="peak@eby-sarna.com", license="PSF or ZPL", test_suite = 'setuptools.tests.test_suite', requires = [ Require('Distutils','1.0.3','distutils', "http://www.python.org/sigs/distutils-sig/" ), Require('PyUnit', None, 'unittest', "http://pyunit.sf.net/"), ], packages = find_packages(), py_modules = ['pkg_resources', 'easy_install'], scripts = ['easy_install.py'] )
Bump version to 0.3a2 for release
#!/usr/bin/env python """Distutils setup file, used to install or test 'setuptools'""" from setuptools import setup, find_packages, Require from distutils.version import LooseVersion setup( name="setuptools", version="0.3a2", description="Distutils enhancements", author="Phillip J. Eby", author_email="peak@eby-sarna.com", license="PSF or ZPL", test_suite = 'setuptools.tests.test_suite', requires = [ Require('Distutils','1.0.3','distutils', "http://www.python.org/sigs/distutils-sig/" ), Require('PyUnit', None, 'unittest', "http://pyunit.sf.net/"), ], packages = find_packages(), py_modules = ['pkg_resources', 'easy_install'], scripts = ['easy_install.py'] )
from distutils.core import setup # If sphinx is installed, enable the command try: from sphinx.setup_command import BuildDoc cmdclass = {'build_sphinx': BuildDoc} command_options = { 'build_sphinx': { 'version': ('setup.py', version), 'release': ('setup.py', version), } } except ImportError: cmdclass = {} command_options = {} version = '1.0.2' setup(name='simplemediawiki', version=version, description='Extremely low-level wrapper to the MediaWiki API', author='Ian Weller', author_email='iweller@redhat.com', url='https://github.com/ianweller/python-simplemediawiki', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU Library or Lesser General Public ' 'License (LGPL)', ], requires=[ 'kitchen', 'simplejson', ], py_modules=['simplemediawiki'], cmdclass=cmdclass, command_options=command_options)
Make sphinx building actually work
from distutils.core import setup version = '1.0.2' # If sphinx is installed, enable the command try: from sphinx.setup_command import BuildDoc cmdclass = {'build_sphinx': BuildDoc} command_options = { 'build_sphinx': { 'version': ('setup.py', version), 'release': ('setup.py', version), } } except ImportError: cmdclass = {} command_options = {} setup(name='simplemediawiki', version=version, description='Extremely low-level wrapper to the MediaWiki API', author='Ian Weller', author_email='iweller@redhat.com', url='https://github.com/ianweller/python-simplemediawiki', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU Library or Lesser General Public ' 'License (LGPL)', ], requires=[ 'kitchen', 'simplejson', ], py_modules=['simplemediawiki'], cmdclass=cmdclass, command_options=command_options)
from distutils.core import setup setup( name='BitstampClient', version='0.1', packages=['bitstamp'], url='', license='MIT', author='Kamil Madac', author_email='kamil.madac@gmail.com', description='Bitstamp API python implementation', requires=['requests'] )
Rename because of clash with original package.
from distutils.core import setup setup( name='bitstamp-python-client', version='0.1', packages=['bitstamp'], url='', license='MIT', author='Kamil Madac', author_email='kamil.madac@gmail.com', description='Bitstamp API python implementation', requires=['requests'] )
#!/usr/bin/env python from setuptools import setup __version__ = '0.1.0b1' setup( name='highlander-one', version=__version__, author='Christopher T. Cannon', author_email='christophertcannon@gmail.com', description='A simple decorator to ensure that your ' 'program is only running once on a system.', url='https://github.com/chriscannon/highlander', install_requires=[ 'funcy>=1.4', 'psutil>=2.2.1' ], license='MIT', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ], download_url='https://github.com/chriscannon/highlander/tarball/{0}'.format(__version__), packages=['highlander'], test_suite='tests.highlander_tests.get_suite', )
Make it not a pre-release so that it can be installed from pip.
#!/usr/bin/env python from setuptools import setup __version__ = '0.1.0' setup( name='highlander-one', version=__version__, author='Christopher T. Cannon', author_email='christophertcannon@gmail.com', description='A simple decorator to ensure that your ' 'program is only running once on a system.', url='https://github.com/chriscannon/highlander', install_requires=[ 'funcy>=1.4', 'psutil>=2.2.1' ], license='MIT', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ], download_url='https://github.com/chriscannon/highlander/tarball/{0}'.format(__version__), packages=['highlander'], test_suite='tests.highlander_tests.get_suite', )
from ez_setup import use_setuptools use_setuptools() from setuptools import setup, find_packages version = '0.0.0' try: import bugbuzz version = bugbuzz.__version__ except ImportError: pass tests_require = [ 'mock', 'pytest', 'pytest-cov', 'pytest-xdist', 'pytest-capturelog', 'pytest-mock', ] setup( name='bugbuzz', author='Victor Lin', author_email='hello@victorlin.me', url='https://github.com/victorlin/bugbuzz-python', description='Easy to use web-base online debugger', keywords='debugger debug pdb', license='MIT', version=version, packages=find_packages(), install_requires=[ 'pycrypto', ], extras_require=dict( tests=tests_require, ), tests_require=tests_require, )
Fix bug where the requests CA cert file was missing
from ez_setup import use_setuptools use_setuptools() from setuptools import setup, find_packages version = '0.0.0' try: import bugbuzz version = bugbuzz.__version__ except ImportError: pass tests_require = [ 'mock', 'pytest', 'pytest-cov', 'pytest-xdist', 'pytest-capturelog', 'pytest-mock', ] setup( name='bugbuzz', author='Victor Lin', author_email='hello@victorlin.me', url='https://github.com/victorlin/bugbuzz-python', description='Easy to use web-base online debugger', keywords='debugger debug pdb', license='MIT', version=version, packages=find_packages(), package_data={'': ['LICENSE'], 'bugbuzz/packages/requests': ['*.pem']}, include_package_data=True, zip_safe=False, install_requires=[ 'pycrypto', ], extras_require=dict( tests=tests_require, ), tests_require=tests_require, )
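The combination of include_package_data and package_data above is what ships the vendored CA bundle. Whether the file actually landed in the installed package can be checked at runtime; the sketch below does that, where the dotted package path and the cacert.pem filename are assumptions for illustration, not something taken from the record:

import pkgutil

# Read the bundled CA file back out of the installed package; a failure
# here means package_data / MANIFEST.in did not pick the .pem up.
pem_bytes = pkgutil.get_data('bugbuzz.packages.requests', 'cacert.pem')
print('%d bytes of CA certificates found' % len(pem_bytes))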
from django.contrib import admin from .models import Match from .models import Tip def delete_tips(modeladmin, request, queryset): for match in queryset: tips = Tip.object.filter(match = match) for tip in tips: tip.score = 0 tip.scoring_field = "" tip.is_score_calculated = False delete_tips.delete_tips = "Delete calculated scores for tips for these matches" class MatchAdmin(admin.ModelAdmin): actions = [delete_tips] admin.site.register(Match, MatchAdmin) admin.site.register(Tip)
Add action to zero out tips for given match
from django.contrib import admin from .models import Match from .models import Tip def delete_tips(modeladmin, request, queryset): for match in queryset: tips = Tip.objects.filter(match = match) for tip in tips: tip.score = 0 tip.scoring_field = "" tip.is_score_calculated = False delete_tips.delete_tips = "Delete calculated scores for tips for these matches" class MatchAdmin(admin.ModelAdmin): actions = [delete_tips] admin.site.register(Match, MatchAdmin) admin.site.register(Tip)
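Two things are worth noting about the action above, neither of which the commit changes: the modified Tip objects are never saved, and the Django admin reads an action's label from short_description (or the admin.action decorator), not from delete_tips.delete_tips. A sketch of the conventional shape, assuming Django 3.2+ for the decorator; on older releases one sets short_description on the function instead:

from django.contrib import admin

from .models import Tip

@admin.action(description="Delete calculated scores for tips for these matches")
def delete_tips(modeladmin, request, queryset):
    # update() persists the reset in one query instead of mutating
    # unsaved objects in memory.
    Tip.objects.filter(match__in=queryset).update(
        score=0, scoring_field="", is_score_calculated=False)

class MatchAdmin(admin.ModelAdmin):
    actions = [delete_tips]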
from django import forms from django.conf import settings from moviealert.base.widgets import CalendarWidget from .models import TaskList, RegionData class MovieForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(MovieForm, self).__init__(*args, **kwargs) self.fields['movie_date'] = forms.DateField( widget=CalendarWidget(attrs={"readonly": "readonly", "style": "background:white;"}), input_formats=settings.ALLOWED_DATE_FORMAT) self.fields["city"] = forms.CharField( widget=forms.TextInput(attrs={"id": "txtSearch"})) self.fields["city"].label = "City Name" def clean(self): cleaned_data = super(MovieForm, self).clean() cleaned_data['city'] = RegionData.objects.get( bms_city=cleaned_data['city']) class Meta: model = TaskList exclude = ("username", "task_completed", "notified", "movie_found",)
Disable entry of cities outside of the database. Add a try-except to prevent entry of a city name that is not in the database.

from django import forms from django.conf import settings from moviealert.base.widgets import CalendarWidget from .models import TaskList, RegionData class MovieForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(MovieForm, self).__init__(*args, **kwargs) self.fields['movie_date'] = forms.DateField( widget=CalendarWidget(attrs={"readonly": "readonly", "style": "background:white;"}), input_formats=settings.ALLOWED_DATE_FORMAT) self.fields["city"] = forms.CharField( widget=forms.TextInput(attrs={"id": "txtSearch"})) self.fields["city"].label = "City Name" def clean(self): cleaned_data = super(MovieForm, self).clean() try: cleaned_data['city'] = RegionData.objects.get( bms_city=cleaned_data['city']) except RegionData.DoesNotExist: self.add_error("city", "Select only values from autocomplete!") class Meta: model = TaskList exclude = ("username", "task_completed", "notified", "movie_found",)
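The add_error call above is what lets the form fail validation without raising: it attaches the message to the city field and drops that field from cleaned_data. A standalone sketch of the same mechanism with a plain Form and a hard-coded whitelist (both assumptions, for illustration only); it needs a configured Django project to run:

from django import forms

KNOWN_CITIES = {"Mumbai", "Delhi"}          # stand-in for the RegionData lookup

class CityForm(forms.Form):
    city = forms.CharField()

    def clean(self):
        cleaned_data = super(CityForm, self).clean()
        if cleaned_data.get("city") not in KNOWN_CITIES:
            # Marks the field invalid; no ValidationError needs to be raised.
            self.add_error("city", "Select only values from autocomplete!")
        return cleaned_data

# CityForm(data={"city": "Atlantis"}).is_valid()  -> False
# form.errors["city"]  -> ['Select only values from autocomplete!']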
# -*- coding: utf-8 -*- from __future__ import absolute_import from . import api from . import core from . import decoder from . import analyzer from . import grapher from . import encoder __version__ = '0.5.5' __all__ = ['api', 'core', 'decoder', 'analyzer', 'grapher', 'encoder'] def _discover_modules(): import sys import pkgutil import importlib #pkg_path = os.path.abspath() #__import__(pkg) proc_modules = ['decoder', 'analyzer', 'encoder', 'grapher'] for module in proc_modules: pkg = '.'.join([__name__, module]) importlib.import_module(pkg) package = sys.modules[pkg] prefix = pkg + "." for importer, modname, ispkg in pkgutil.walk_packages(package.__path__, prefix): try: importlib.import_module(modname) #__import__(modname) except ImportError as e: if e.message.find('yaafelib'): print 'No Yaafe' elif e.message.find('aubio'): print 'No aubio' else: raise e _discover_modules()
Fix ImportError handling for Yaafe and Aubio
# -*- coding: utf-8 -*- from __future__ import absolute_import from . import api from . import core from . import decoder from . import analyzer from . import grapher from . import encoder __version__ = '0.5.5' __all__ = ['api', 'core', 'decoder', 'analyzer', 'grapher', 'encoder'] def _discover_modules(): import sys import pkgutil import importlib #pkg_path = os.path.abspath() #__import__(pkg) proc_modules = ['decoder', 'analyzer', 'encoder', 'grapher'] for module in proc_modules: pkg = '.'.join([__name__, module]) importlib.import_module(pkg) package = sys.modules[pkg] prefix = pkg + "." for importer, modname, ispkg in pkgutil.walk_packages(package.__path__, prefix): try: importlib.import_module(modname) #__import__(modname) except ImportError as e: if e.message.count('yaafelib'): print 'No Yaafe' elif e.message.count('aubio'): print 'No Aubio' else: raise e _discover_modules()
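The switch from find() to count() above fixes the logic because count() returns 0 (falsy) when the substring is absent, whereas find() returns -1, which is truthy, so the original code reported 'No Yaafe' for every unrelated ImportError. A plain membership test says the same thing more directly, and str(exc) avoids the Python 2-only message attribute; the sketch below keeps only the module names from the record:

import importlib

def import_or_warn(modname):
    try:
        return importlib.import_module(modname)
    except ImportError as exc:
        msg = str(exc)               # exc.message exists only on Python 2
        if 'yaafelib' in msg:        # 'in' is clearer than find()/count()
            print('No Yaafe')
        elif 'aubio' in msg:
            print('No Aubio')
        else:
            raise

import_or_warn('yaafelib')           # prints 'No Yaafe' when Yaafe is absent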
#!/usr/bin/python from fabricate import * programs = ['create', 'read', 'update', 'delete', 'filter', 'nearest'] def build(): for program in programs: sources = [program, 'clustergis'] compile(sources) link(sources, program) def compile(sources): for source in sources: run('mpicc -Wall -O3 `geos-config --cflags` -c ' + source + '.c') def link(sources, program='a.out'): objects = ' '.join(s + '.o' for s in sources) run('mpicc -o ' + program + ' -Wall -O3 `geos-config --cflags` ' + objects + ' `geos-config --ldflags` -lgeos_c') def clean(): autoclean() main()
Build script for examples now works
#!/usr/bin/python from fabricate import * programs = ['create', 'read', 'update', 'delete', 'filter', 'nearest'] def build(): for program in programs: sources = [program, '../src/clustergis'] compile(sources) link(sources, program) def compile(sources): for source in sources: run('mpicc -Wall -O3 -I../src/ `geos-config --cflags` -c ' + source + '.c -o ' + source + '.o') def link(sources, program='a.out'): objects = ' '.join(s + '.o' for s in sources) run('mpicc -o ' + program + ' -Wall -O3 `geos-config --cflags` ' + objects + ' `geos-config --ldflags` -lgeos_c') def clean(): autoclean() main()
""" Object-oriented programming utilities. """ import inspect from taipan._compat import IS_PY26, IS_PY3 from taipan.functional import ensure_callable from taipan.strings import is_string __all__ = ['is_internal', 'is_magic'] def is_internal(member): """Checks whether given class/instance member, or its name, is internal.""" name = _get_member_name(member) return name.startswith('_') and not is_magic(name) def is_magic(member): """Checks whether given class/instance member, or its name, is "magic". Magic fields and methods have names that begin and end with double underscores, such ``__hash__`` or ``__eq__``. """ name = _get_member_name(member) return len(name) > 4 and name.startswith('__') and name.endswith('__') # Utility functions def _get_member_name(member): if is_string(member): return member # Python has no "field declaration" objects, so the only valid # class or instance member is actually a method ensure_method(member) return member.__name__ def _get_first_arg_name(function): argnames, _, _, _ = inspect.getargspec(function) return argnames[0] if argnames else None from .base import * from .methods import * from .modifiers import *
Fix a bug in .objective utility function
""" Object-oriented programming utilities. """ import inspect from taipan.functional import ensure_callable from taipan.strings import is_string __all__ = ['is_internal', 'is_magic'] def is_internal(member): """Checks whether given class/instance member, or its name, is internal.""" name = _get_member_name(member) return name.startswith('_') and not is_magic(name) def is_magic(member): """Checks whether given class/instance member, or its name, is "magic". Magic fields and methods have names that begin and end with double underscores, such ``__hash__`` or ``__eq__``. """ name = _get_member_name(member) return len(name) > 4 and name.startswith('__') and name.endswith('__') # Utility functions def _get_member_name(member): if is_string(member): return member # Python has no "field declaration" objects, so the only valid # class or instance member is actually a method from taipan.objective.methods import ensure_method ensure_method(member) return member.__name__ def _get_first_arg_name(function): argnames, _, _, _ = inspect.getargspec(function) return argnames[0] if argnames else None from .base import * from .methods import * from .modifiers import *
""" Problem repository management for the shell manager. """ import spur, gzip from shutil import copy2 from os.path import join def local_update(repo_path, deb_paths=[]): """ Updates a local deb repository by copying debs and running scanpackages. Args: repo_path: the path to the local repository. dep_paths: list of problem deb paths to copy. """ [copy2(deb_path, repo_path) for deb_path in deb_paths] shell = spur.LocalShell() result = shell.run(["dpkg-scanpackages", ".", "/dev/null"], cwd=repo_path) packages_path = join(repo_path, "Packages.gz") with gzip.open(packages_path, "wb") as packages: packages.write(result.output) print("Updated problem repository.")
Update repo entrypoint and remote_update stub.
""" Problem repository management for the shell manager. """ import spur, gzip from shutil import copy2 from os.path import join def update_repo(args): """ Main entrypoint for repo update operations. """ if args.repo_type == "local": local_update(args.repository, args.package_paths) else: remote_update(args.repository, args.package_paths) def remote_update(repo_ui, deb_paths=[]): """ Pushes packages to a remote deb repository. Args: repo_uri: location of the repository. deb_paths: list of problem deb paths to copy. """ pass def local_update(repo_path, deb_paths=[]): """ Updates a local deb repository by copying debs and running scanpackages. Args: repo_path: the path to the local repository. dep_paths: list of problem deb paths to copy. """ [copy2(deb_path, repo_path) for deb_path in deb_paths] shell = spur.LocalShell() result = shell.run(["dpkg-scanpackages", ".", "/dev/null"], cwd=repo_path) packages_path = join(repo_path, "Packages.gz") with gzip.open(packages_path, "wb") as packages: packages.write(result.output) print("Updated problem repository.")
# Import smtplib for the actual sending function import smtplib # Here are the email pacakge modules we'll need from email.MIMEImage import MIMEImage from email.MIMEMultipart import MIMEMultipart COMMASPACE = ', ' # Create the container (outer) email message. msg = MIMEMultipart() msg['Subject'] = 'Our family reunion' # me == the sender's email address # family = the list of all recipients' email addresses msg['From'] = me msg['To'] = COMMASPACE.join(family) msg.preamble = 'Our family reunion' # Guarantees the message ends in a newline msg.epilogue = '' # Assume we know that the image files are all in PNG format for file in pngfiles: # Open the files in binary mode. Let the MIMEImage class automatically # guess the specific image type. fp = open(file, 'rb') img = MIMEImage(fp.read()) fp.close() msg.attach(img) # Send the email via our own SMTP server. s = smtplib.SMTP() s.connect() s.sendmail(me, family, msg.as_string()) s.close()
Fix typo in comment (reported on the pydotorg mailing list).
# Import smtplib for the actual sending function import smtplib # Here are the email package modules we'll need from email.MIMEImage import MIMEImage from email.MIMEMultipart import MIMEMultipart COMMASPACE = ', ' # Create the container (outer) email message. msg = MIMEMultipart() msg['Subject'] = 'Our family reunion' # me == the sender's email address # family = the list of all recipients' email addresses msg['From'] = me msg['To'] = COMMASPACE.join(family) msg.preamble = 'Our family reunion' # Guarantees the message ends in a newline msg.epilogue = '' # Assume we know that the image files are all in PNG format for file in pngfiles: # Open the files in binary mode. Let the MIMEImage class automatically # guess the specific image type. fp = open(file, 'rb') img = MIMEImage(fp.read()) fp.close() msg.attach(img) # Send the email via our own SMTP server. s = smtplib.SMTP() s.connect() s.sendmail(me, family, msg.as_string()) s.close()
#!/usr/bin/env python """ Similar to the autocompletion example. But display all the completions in multiple columns. """ from __future__ import unicode_literals from prompt_toolkit.contrib.completers import WordCompleter from prompt_toolkit.shortcuts import prompt, CompleteStyle animal_completer = WordCompleter([ 'alligator', 'ant', 'ape', 'bat', 'bear', 'beaver', 'bee', 'bison', 'butterfly', 'cat', 'chicken', 'crocodile', 'dinosaur', 'dog', 'dolphine', 'dove', 'duck', 'eagle', 'elephant', 'fish', 'goat', 'gorilla', 'kangoroo', 'leopard', 'lion', 'mouse', 'rabbit', 'rat', 'snake', 'spider', 'turkey', 'turtle', ], ignore_case=True) def main(): text = prompt('Give some animals: ', completer=animal_completer, complete_style=CompleteStyle.MULTI_COLUMN) print('You said: %s' % text) if __name__ == '__main__': main()
Fix typos: `dolphine` -> `dolphin`, `kangoroo` -> `kangaroo`
#!/usr/bin/env python """ Similar to the autocompletion example. But display all the completions in multiple columns. """ from __future__ import unicode_literals from prompt_toolkit.contrib.completers import WordCompleter from prompt_toolkit.shortcuts import prompt, CompleteStyle animal_completer = WordCompleter([ 'alligator', 'ant', 'ape', 'bat', 'bear', 'beaver', 'bee', 'bison', 'butterfly', 'cat', 'chicken', 'crocodile', 'dinosaur', 'dog', 'dolphin', 'dove', 'duck', 'eagle', 'elephant', 'fish', 'goat', 'gorilla', 'kangaroo', 'leopard', 'lion', 'mouse', 'rabbit', 'rat', 'snake', 'spider', 'turkey', 'turtle', ], ignore_case=True) def main(): text = prompt('Give some animals: ', completer=animal_completer, complete_style=CompleteStyle.MULTI_COLUMN) print('You said: %s' % text) if __name__ == '__main__': main()
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations def rename_and_clean(apps, schema_editor): """Rename old content types if necessary, remove permissions.""" ContentType = apps.get_model("contenttypes", "ContentType") for ct in ContentType.objects.filter(app_label="admin"): try: old_ct = ContentType.objects.get( app_label="modoboa_admin", model=ct.model) except ContentType.DoesNotExist: continue old_ct.app_label = "admin" ct.delete() old_ct.save() # Remove DomainAlias permissions from DomainAdmins group Group = apps.get_model("auth", "Group") Permission = apps.get_model("auth", "Permission") group = Group.objects.get(name="DomainAdmins") ct = ContentType.objects.get(app_label="admin", model="domainalias") for permission in Permission.objects.filter(content_type=ct): group.permissions.remove(permission) class Migration(migrations.Migration): dependencies = [ ('admin', '0001_initial'), ] operations = [ migrations.RunPython(rename_and_clean), ]
Handle the fresh install case.
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations def rename_and_clean(apps, schema_editor): """Rename old content types if necessary, remove permissions.""" ContentType = apps.get_model("contenttypes", "ContentType") for ct in ContentType.objects.filter(app_label="admin"): try: old_ct = ContentType.objects.get( app_label="modoboa_admin", model=ct.model) except ContentType.DoesNotExist: continue old_ct.app_label = "admin" ct.delete() old_ct.save() # Remove DomainAlias permissions from DomainAdmins group Group = apps.get_model("auth", "Group") try: group = Group.objects.get(name="DomainAdmins") except Group.DoesNotExist: return Permission = apps.get_model("auth", "Permission") ct = ContentType.objects.get(app_label="admin", model="domainalias") for permission in Permission.objects.filter(content_type=ct): group.permissions.remove(permission) class Migration(migrations.Migration): dependencies = [ ('admin', '0001_initial'), ] operations = [ migrations.RunPython(rename_and_clean), ]
import json from django.http import HttpResponse from django.template import RequestContext, loader from django_todo.apps.core.models import Task def current_tasks(request): tasks = Task.objects.filter(is_checked=False) template = loader.get_template('core/current_tasks.html') context = RequestContext(request, { 'tasks': tasks, }) return HttpResponse(template.render(context))
Refactor view to use Django shortcuts
import json from django.shortcuts import render_to_response from django_todo.apps.core.models import Task def current_tasks(request): tasks = Task.objects.filter(is_checked=False) return render_to_response('core/current_tasks.html', {'tasks': tasks, })
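One caveat about the shortcut chosen above: render_to_response was deprecated in Django 2.0 and removed in 3.0. On any recent Django the same refactor lands on render(), which also passes the request through; a sketch that changes nothing else from the record apart from dropping the unused json import:

from django.shortcuts import render

from django_todo.apps.core.models import Task

def current_tasks(request):
    tasks = Task.objects.filter(is_checked=False)
    return render(request, 'core/current_tasks.html', {'tasks': tasks})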
# -*- coding: utf-8 -*- """Japanese rules and tables for the spellnum module""" RULES = """ 1x = 十{x} ab = {a}十{b} 1xx = {100}{x} axx = {a}{100}{x} axxx = {a}千{x} (a)xxxx = {a}{x} """ NUMBERS = { 0: '零', 1: '一', 2: '二', 3: '三', 4: '四', 5: '五', 6: '六', 7: '七', 8: '八', 9: '九', 10: '十', 100: '百', } ORDERS = [ '', '万', '億', 'billón', 'trillón', 'quadrillón', 'quintillion', 'sextillion', 'septillion', 'octillion', 'nonillion', 'decillion', 'undecillion', 'duodecillion', 'tredecillion', 'quattuordecillion', 'quindecillion' ] ORDER_SEP = ''
Add META with 'order_separator' key to Japanese spelling. This is required due to a change in how separators are loaded in spellnum.py.
# -*- coding: utf-8 -*- """Japanese rules and tables for the spellnum module""" RULES = """ 1x = 十{x} ab = {a}十{b} 1xx = {100}{x} axx = {a}{100}{x} axxx = {a}千{x} (a)xxxx = {a}{x} """ NUMBERS = { 0: '零', 1: '一', 2: '二', 3: '三', 4: '四', 5: '五', 6: '六', 7: '七', 8: '八', 9: '九', 10: '十', 100: '百', } ORDERS = [ '', '万', '億', 'billón', 'trillón', 'quadrillón', 'quintillion', 'sextillion', 'septillion', 'octillion', 'nonillion', 'decillion', 'undecillion', 'duodecillion', 'tredecillion', 'quattuordecillion', 'quindecillion' ] META = { 'order_separator': '' }
#!/usr/bin/env python from setuptools import setup setup( name='chainer-cuda-deps', version='1.1.0', description='Install dependent packages to use Chainer on CUDA', author='Seiya Tokui', author_email='tokui@preferred.jp', url='http://chainer.org/', packages=[], install_requires=[ 'chainer', 'pycuda>=2014.1', 'scikit-cuda>=0.5.0', 'Mako', 'six>=1.9.0', ], )
Update cuda_deps to 1.1.0.1 (for scikit-cuda)
#!/usr/bin/env python from setuptools import setup setup( name='chainer-cuda-deps', version='1.1.0.1', description='Install dependent packages to use Chainer on CUDA', author='Seiya Tokui', author_email='tokui@preferred.jp', url='http://chainer.org/', packages=[], install_requires=[ 'chainer', 'pycuda>=2014.1', 'scikit-cuda>=0.5.0', 'Mako', 'six>=1.9.0', ], )
"""add enqueue_job column to smtpserver table Revision ID: 849170064430 Revises: a63df077051a Create Date: 2018-11-22 10:04:00.330101 """ # revision identifiers, used by Alembic. revision = '849170064430' down_revision = 'a63df077051a' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('smtpserver', sa.Column('enqueue_job', sa.Boolean(), nullable=False, server_default=sa.false())) def downgrade(): op.drop_column('smtpserver', 'enqueue_job')
Add try-except block to queue migration script
"""add enqueue_job column to smtpserver table Revision ID: 849170064430 Revises: a63df077051a Create Date: 2018-11-22 10:04:00.330101 """ # revision identifiers, used by Alembic. revision = '849170064430' down_revision = 'a63df077051a' from alembic import op import sqlalchemy as sa def upgrade(): try: op.add_column('smtpserver', sa.Column('enqueue_job', sa.Boolean(), nullable=False, server_default=sa.false())) except Exception as exx: print("Could not add column 'smtpserver.enqueue_job'") print(exx) def downgrade(): op.drop_column('smtpserver', 'enqueue_job')
#!/usr/bin/env python # Import gevent monkey and patch everything from gevent import monkey monkey.patch_all(httplib=True) # Import the rest from django.core.handlers.wsgi import WSGIHandler as DjangoWSGIApp from django.core.management import setup_environ from gevent.wsgi import WSGIServer import sys import settings setup_environ(settings) # Configure host and port for the WSGI server host = getattr(settings, 'WSGI_HOST', '127.0.0.1') port = getattr(settings, 'WSGI_PORT', 8080) def runserver(): # Create the server application = DjangoWSGIApp() address = host, port server = WSGIServer( address, application ) # Run the server try: server.serve_forever() except KeyboardInterrupt: server.stop() sys.exit(0) if __name__ == '__main__': runserver()
Fix httplib monkey patching problem with Gevent >= 1.0 From v1.0 on, Gevent doesn't support monkey patching of httplib anymore. CATMAID's example script to run a Gevent WSGI server, however, was still expecting this to be possible. This commit fixes this. Thanks to Mikhail Kandel for reporting.
#!/usr/bin/env python # Import gevent monkey and patch everything from gevent import monkey monkey.patch_all() # Import the rest from django.core.handlers.wsgi import WSGIHandler as DjangoWSGIApp from django.core.management import setup_environ from gevent.wsgi import WSGIServer import sys import settings setup_environ(settings) # Configure host and port for the WSGI server host = getattr(settings, 'WSGI_HOST', '127.0.0.1') port = getattr(settings, 'WSGI_PORT', 8080) def runserver(): # Create the server application = DjangoWSGIApp() address = host, port server = WSGIServer( address, application ) # Run the server try: server.serve_forever() except KeyboardInterrupt: server.stop() sys.exit(0) if __name__ == '__main__': runserver()
from django.forms import ModelForm from mozcal.events.models import Event, Space class EventForm(ModelForm): class Meta: model = Event class SpaceForm(ModelForm): class Meta: model = Space
Add a fields attribute to all form Meta classes
from django.forms import ModelForm from mozcal.events.models import Event, Space class EventForm(ModelForm): class Meta: model = Event fields = ['title', 'space', 'start', 'end', 'areas', 'description', 'details'] class SpaceForm(ModelForm): class Meta: model = Space fields = ['name', 'address', 'address2', 'city', 'country', 'description', 'lat', 'lon']
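The explicit lists above are what Django has required since 1.8, when a ModelForm with neither fields nor exclude became an ImproperlyConfigured error. Where every model field really is meant to be editable, the documented catch-all below is also accepted; the explicit list remains the safer default, since new model fields then never leak into forms unreviewed. Sketch only, reusing the Event model from the record:

from django.forms import ModelForm

from mozcal.events.models import Event

class EventForm(ModelForm):
    class Meta:
        model = Event
        fields = '__all__'   # opt in to every model field explicitly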
#!/usr/bin/env python import re import urllib def build (connection, options): # FIXME: should use urllib.parse match = re.match ('([+0-9A-Za-z]+)://(?:([^#/:@]+)(?::([^#/@]+))?@)?(?:([^#/:]+)(?::([0-9]+))?)?(?:/+([^#]*))?', connection) if match is None: return None host = match.group (4) password = match.group (3) is not None and urllib.unquote_plus (match.group (3)) or match.group (3) path = match.group (6) or '.' port = match.group (5) is not None and int (match.group (5)) or None scheme = match.group (1) user = match.group (2) is not None and urllib.unquote_plus (match.group (2)) or match.group (2) if scheme == 'console': from targets.console import ConsoleTarget return ConsoleTarget () if scheme == 'file': from targets.file import FileTarget return FileTarget (path) if scheme == 'ftp': from targets.ftp import FTPTarget return FTPTarget (host, port, user, password, path, options) if scheme == 'ssh': from targets.ssh import SSHTarget return SSHTarget (host, port, user, path, options) # No known scheme recognized return None
Fix connection parsing for absolute paths.
#!/usr/bin/env python import re import urllib def build (connection, options): # FIXME: should use urllib.parse match = re.match ('([+0-9A-Za-z]+)://(?:([^#/:@]+)(?::([^#/@]+))?@)?(?:([^#/:]+)(?::([0-9]+))?)?(?:/([^#]*))?', connection) if match is None: return None host = match.group (4) password = match.group (3) is not None and urllib.unquote_plus (match.group (3)) or match.group (3) path = match.group (6) or '.' port = match.group (5) is not None and int (match.group (5)) or None scheme = match.group (1) user = match.group (2) is not None and urllib.unquote_plus (match.group (2)) or match.group (2) if scheme == 'console': from targets.console import ConsoleTarget return ConsoleTarget () if scheme == 'file': from targets.file import FileTarget return FileTarget (path) if scheme == 'ftp': from targets.ftp import FTPTarget return FTPTarget (host, port, user, password, path, options) if scheme == 'ssh': from targets.ssh import SSHTarget return SSHTarget (host, port, user, path, options) # No known scheme recognized return None
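The FIXME in the script already points at the cleaner route: the standard library's URL splitting covers scheme, credentials, host, port and path without a hand-rolled regex. A sketch using the Python 2 module names to match the script (urllib.parse offers the same on Python 3); the sample URI is made up:

from urlparse import urlsplit
import urllib

def parse_connection(connection):
    parts = urlsplit(connection)
    return {
        'scheme': parts.scheme,
        'host': parts.hostname,
        'port': parts.port,
        'user': parts.username and urllib.unquote_plus(parts.username),
        'password': parts.password and urllib.unquote_plus(parts.password),
        # urlsplit keeps the leading slash, so absolute and relative paths
        # stay distinguishable; defaulting to '.' is left to the caller.
        'path': parts.path,
    }

print(parse_connection('ssh://backup@example.com:2222/var/backups'))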
#!/usr/bin/env python from config import MongoSource from manager import PluginManager from log import LogDocGenerator def main(): # 1. load all plugins plugin_manager = PluginManager() # 2. get one or more mongodb collection ms = MongoSource() collection = ms.get_collection("net-test", "ename_access") # 3. make a log_generator log_generator = LogDocGenerator(collection) # 4. use condition to get filtered logs #condition = {"host":"192.168.1.57"} condition = {} # 5. use keywords plugins to parse logs keywords = ['ip'] for log_doc in log_generator.get_log_docs(condition): plugin_manager.call_method('process', args=log_doc, keywords=keywords) # 6. give a report plugin_manager.call_method('report', args={}, keywords=keywords) if __name__ == '__main__': main()
Add an example for querying a datetime range
#!/usr/bin/env python from config import MongoSource from manager import PluginManager from log import LogDocGenerator import datetime def main(): # 1. load all plugins plugin_manager = PluginManager() # 2. get one or more mongodb collection ms = MongoSource() collection = ms.get_collection("net-test", "ename_access") # 3. make a log_generator log_generator = LogDocGenerator(collection) # 4. use condition to get filtered logs #condition = {"host":"192.168.1.57"} now = datetime.datetime.now() start = now - datetime.timedelta(hours=8, minutes=10) end = now - datetime.timedelta(hours=8) condition = {"time":{"$gte":start, "$lt":end}} # 5. use keywords plugins to parse logs keywords = ['ip'] for log_doc in log_generator.get_log_docs(condition): plugin_manager.call_method('process', args=log_doc, keywords=keywords) # 6. give a report plugin_manager.call_method('report', args={}, keywords=keywords) if __name__ == '__main__': main()
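The eight-hour offset in the example looks like a local-clock-to-UTC adjustment; if that is the case (an assumption, not something the record states), building the window from utcnow() states the intent directly:

import datetime

# Sketch: a ten-minute window ending now, expressed in UTC.
utc_now = datetime.datetime.utcnow()
condition = {"time": {"$gte": utc_now - datetime.timedelta(minutes=10),
                      "$lt": utc_now}}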
#!/usr/bin/env python from setuptools import setup, find_packages setup( name='django-recurly', url="https://chris-lamb.co.uk/projects/django-recurly", version='3.0.1', description="Lightlight Recurly.com integration for Django", author="Chris Lamb", author_email="chris@chris-lamb.co.uk", license="BSD", packages=find_packages(), )
Update Django requirement to latest LTS
#!/usr/bin/env python from setuptools import setup, find_packages setup( name='django-recurly', url="https://chris-lamb.co.uk/projects/django-recurly", version='3.0.1', description="Lightlight Recurly.com integration for Django", author="Chris Lamb", author_email="chris@chris-lamb.co.uk", license="BSD", packages=find_packages(), install_requires=( 'Django>=1.8', ), )
#! /usr/bin/env python from setuptools import setup setup( name='django-nomination', version='1.0.0', packages=['nomination'], description='', long_description='See the home page for more information.', include_package_data=True, install_requires=[ 'simplejson>=3.8.1', 'django-markup-deprecated>=0.0.3', 'markdown>=2.6.5', 'django-widget-tweaks>=1.4.1', ], zip_safe=False, url='https://github.com/unt-libraries/django-nomination', author='University of North Texas Libraries', author_email='mark.phillips@unt.edu', license='BSD', keywords=[ 'django', 'app', 'UNT', 'url', 'surt', 'nomination', 'web archiving' ], classifiers=[ 'Natural Language :: English', 'Environment :: Web Environment', 'Framework :: Django :: 1.8', 'Framework :: Django :: 1.9', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', ] )
Fix exclusion of migrations from installed app.
#! /usr/bin/env python from setuptools import setup, find_packages setup( name='django-nomination', version='1.0.0', packages=find_packages(exclude=['tests*']), description='', long_description='See the home page for more information.', include_package_data=True, install_requires=[ 'simplejson>=3.8.1', 'django-markup-deprecated>=0.0.3', 'markdown>=2.6.5', 'django-widget-tweaks>=1.4.1', ], zip_safe=False, url='https://github.com/unt-libraries/django-nomination', author='University of North Texas Libraries', author_email='mark.phillips@unt.edu', license='BSD', keywords=[ 'django', 'app', 'UNT', 'url', 'surt', 'nomination', 'web archiving' ], classifiers=[ 'Natural Language :: English', 'Environment :: Web Environment', 'Framework :: Django :: 1.8', 'Framework :: Django :: 1.9', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', ] )
import os from setuptools import setup def read(fname1, fname2): if os.path.exists(fname1): fname = fname1 else: fname = fname2 return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name = "django-pubsubpull", version = "0.0.0.5", author = "Kirit Saelensminde", author_email = "kirit@felspar.com", url='https://github.com/KayEss/django-pubsubpull', description = ("Pub/sub and pull for Django"), long_description = read('README','README.md'), license = "Boost Software License - Version 1.0 - August 17th, 2003", keywords = "django rest data pub-sub pull", packages = [ 'pubsubpull', 'pubsubpull.operations', 'pubsubpull.tests', 'pubsubpull.migrations', 'pubsubpull.south_migrations'], install_requires = [ 'django-slumber', 'django-async'], classifiers = [ "Development Status :: 3 - Alpha", "Framework :: Django", "Intended Audience :: Developers", "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", "License :: OSI Approved", ], )
Include the SQL scripts in the installation.
import os from setuptools import setup def read(fname1, fname2): if os.path.exists(fname1): fname = fname1 else: fname = fname2 return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name = "django-pubsubpull", version = "0.0.0.5", author = "Kirit Saelensminde", author_email = "kirit@felspar.com", url='https://github.com/KayEss/django-pubsubpull', description = ("Pub/sub and pull for Django"), long_description = read('README','README.md'), license = "Boost Software License - Version 1.0 - August 17th, 2003", keywords = "django rest data pub-sub pull", packages = [ 'pubsubpull', 'pubsubpull.operations', 'pubsubpull.tests', 'pubsubpull.migrations', 'pubsubpull.south_migrations'], data_files = [ ('', ['pubsubpull/trigger-attach.sql', 'pubsubpull/trigger-function.sql'])], install_requires = [ 'django-slumber', 'django-async'], classifiers = [ "Development Status :: 3 - Alpha", "Framework :: Django", "Intended Audience :: Developers", "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", "License :: OSI Approved", ], )
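One packaging note on the change above: data_files paths are resolved relative to the installation prefix, not the package directory, so the scripts will not necessarily sit next to the pubsubpull modules after install. If the code reads them relative to __file__, package_data is the usual alternative; a sketch, trimmed to the packaging-relevant keys only:

from setuptools import setup

setup(
    name="django-pubsubpull",
    packages=["pubsubpull"],
    # Ships the SQL inside the package, next to the modules that load it.
    package_data={"pubsubpull": ["*.sql"]},
    include_package_data=True,
)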
#This file mainly exists to allow python setup.py test to work. import os, sys os.environ['DJANGO_SETTINGS_MODULE'] = 'test_project.settings' test_dir = os.path.dirname(__file__) sys.path.insert(0, test_dir) from django.test.utils import get_runner from django.conf import settings def runtests(): test_runner = get_runner(settings) failures = test_runner().run_tests([]) sys.exit(failures) if __name__ == '__main__': runtests()
Fix the running of tests. Wonder if this is a django regression.
#This file mainly exists to allow python setup.py test to work. import os, sys os.environ['DJANGO_SETTINGS_MODULE'] = 'test_project.settings' test_dir = os.path.dirname(__file__) sys.path.insert(0, test_dir) from django.test.utils import get_runner from django.conf import settings def runtests(): test_runner = get_runner(settings) failures = test_runner([]) sys.exit(failures) if __name__ == '__main__': runtests()
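Whether the object returned by get_runner needs to be instantiated has shifted across Django releases, and the one-line change above reflects whichever release the project targeted at the time. On current Django the documented standalone pattern is the one sketched below; the 'tests' label and the verbosity are assumptions:

import os
import sys

import django
from django.conf import settings
from django.test.utils import get_runner

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'test_project.settings')

def runtests():
    django.setup()                          # required on Django 1.7+
    TestRunner = get_runner(settings)       # returns a runner class
    failures = TestRunner(verbosity=1).run_tests(['tests'])
    sys.exit(bool(failures))

if __name__ == '__main__':
    runtests()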
# Copyright Hybrid Logic Ltd. See LICENSE file for details. """ Tests for :py:mod:`flocker.route`. """
Send eliot logs to trial output.
# Copyright Hybrid Logic Ltd. See LICENSE file for details. """ Tests for :py:mod:`flocker.route`. """ from eliot.twisted import redirectLogsForTrial redirectLogsForTrial() del redirectLogsForTrial