<commit_msg>Fix a bug with crafting keys <commit_before>package net.wayward_realms.waywardlocks; import org.bukkit.ChatColor; import org.bukkit.entity.Player; import org.bukkit.event.EventHandler; import org.bukkit.event.Listener; import org.bukkit.event.inventory.CraftItemEvent; import org.bukkit.inventory.ItemStack; public class CraftItemListener implements Listener { private WaywardLocks plugin; public CraftItemListener(WaywardLocks plugin) { this.plugin = plugin; } @EventHandler public void onCraftItem(CraftItemEvent event) { for (ItemStack item : event.getInventory().getContents()) { if (item != null) { if (item.hasItemMeta()) { if (item.getItemMeta().getDisplayName().equalsIgnoreCase("Key")) { event.setCancelled(true); ((Player) event.getWhoClicked()).sendMessage(plugin.getPrefix() + ChatColor.RED + "You may not use keys as a substitute for iron ingots! ;)"); } } } } } } <commit_after>package net.wayward_realms.waywardlocks; import org.bukkit.ChatColor; import org.bukkit.entity.Player; import org.bukkit.event.EventHandler; import org.bukkit.event.Listener; import org.bukkit.event.inventory.CraftItemEvent; import org.bukkit.inventory.ItemStack; public class CraftItemListener implements Listener { private WaywardLocks plugin; public CraftItemListener(WaywardLocks plugin) { this.plugin = plugin; } @EventHandler public void onCraftItem(CraftItemEvent event) { for (ItemStack item : event.getInventory().getContents()) { if (item != null) { if (item.hasItemMeta()) { if (item.getItemMeta().hasDisplayName()) { if (item.getItemMeta().getDisplayName().equalsIgnoreCase("Key")) { event.setCancelled(true); ((Player) event.getWhoClicked()).sendMessage(plugin.getPrefix() + ChatColor.RED + "You may not use keys as a substitute for iron ingots! ;)"); } } } } } } }
<commit_msg>Use LooseVersion in version check (to avoid errors for e.g. '3.7.2+') <commit_before>from distutils.version import StrictVersion from platform import python_version min_supported_python_version = '3.6' if StrictVersion(python_version()) < StrictVersion(min_supported_python_version): error_msg = ( "Tohu requires Python {min_supported_python_version} or greater to run " "(currently running under Python {python_version()})" ) raise RuntimeError(error_msg) from . import v6 from .v6.base import * from .v6.primitive_generators import * from .v6.derived_generators import * from .v6.generator_dispatch import * from .v6.custom_generator import CustomGenerator from .v6.logging import logger from .v6.utils import print_generated_sequence, print_tohu_version from .v6 import base from .v6 import primitive_generators from .v6 import derived_generators from .v6 import generator_dispatch from .v6 import custom_generator from .v6 import set_special_methods __all__ = base.__all__ \ + primitive_generators.__all__ \ + derived_generators.__all__ \ + generator_dispatch.__all__ \ + custom_generator.__all__ \ + ['tohu_logger', 'print_generated_sequence', 'print_tohu_version'] from ._version import get_versions __version__ = get_versions()['version'] del get_versions tohu_logger = logger # alias<commit_after>from distutils.version import LooseVersion from platform import python_version min_supported_python_version = '3.6' if LooseVersion(python_version()) < LooseVersion(min_supported_python_version): error_msg = ( "Tohu requires Python {min_supported_python_version} or greater to run " "(currently running under Python {python_version()})" ) raise RuntimeError(error_msg) from . import v6 from .v6.base import * from .v6.primitive_generators import * from .v6.derived_generators import * from .v6.generator_dispatch import * from .v6.custom_generator import CustomGenerator from .v6.logging import logger from .v6.utils import print_generated_sequence, print_tohu_version from .v6 import base from .v6 import primitive_generators from .v6 import derived_generators from .v6 import generator_dispatch from .v6 import custom_generator from .v6 import set_special_methods __all__ = base.__all__ \ + primitive_generators.__all__ \ + derived_generators.__all__ \ + generator_dispatch.__all__ \ + custom_generator.__all__ \ + ['tohu_logger', 'print_generated_sequence', 'print_tohu_version'] from ._version import get_versions __version__ = get_versions()['version'] del get_versions tohu_logger = logger # alias
<commit_msg>Add utf-8 support for long description <commit_before>from setuptools import setup, find_packages from os.path import join, dirname import sys if sys.version_info.major < 3: print("Sorry, currently only Python 3 is supported!") sys.exit(1) setup( name = 'CollectionBatchTool', version=__import__('collectionbatchtool').__version__, description = 'batch import and export of Specify data', long_description = open(join(dirname(__file__), 'README.rst')).read(), packages = find_packages(), py_modules = ['collectionbatchtool', 'specifymodels'], install_requires = ['pandas>=0.16', 'peewee>=2.6', 'pymysql'], author = 'Markus Englund', author_email = 'jan.markus.englund@gmail.com', url = 'https://github.com/jmenglund/CollectionBatchTool', license = 'MIT', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', ], keywords = ['Specify', 'Collection management'] ) <commit_after>from setuptools import setup, find_packages from os.path import join, dirname import sys if sys.version_info.major < 3: print("Sorry, currently only Python 3 is supported!") sys.exit(1) setup( name = 'CollectionBatchTool', version=__import__('collectionbatchtool').__version__, description = 'batch import and export of Specify data', long_description = open( join(dirname(__file__), 'README.rst'), encoding='utf-8').read(), packages = find_packages(), py_modules = ['collectionbatchtool', 'specifymodels'], install_requires = ['pandas>=0.16', 'peewee>=2.6', 'pymysql'], author = 'Markus Englund', author_email = 'jan.markus.englund@gmail.com', url = 'https://github.com/jmenglund/CollectionBatchTool', license = 'MIT', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', ], keywords = ['Specify', 'Collection management'] )
<commit_msg>Fix in tests: Docstring at wrong location <commit_before> import unittest from gosa.common.components.command import * class CommandTestCase(unittest.TestCase): """Docs""" @Command(__help__="TEST") def test_command(self): pass """Docs""" @Command() def test_command2(self): pass # agent and client terms still in use in command.py <commit_after> import unittest from gosa.common.components.command import * class CommandTestCase(unittest.TestCase): @Command(__help__="TEST") def test_command(self): pass @Command() def test_command2(self): """Docs""" pass # agent and client terms still in use in command.py
<commit_msg>Fix compiling flags for darwin. The OpenBLAS formula is keg-only, which means it was not symlinked into /usr/local. Thus, we need to add the build variables manually. Also, the library is named as openblas, which means `-llapack` and `-llapacke` will cause library not found error. <commit_before> from distutils.core import setup, Extension import numpy.distutils setup( name='Libact', version='0.1.0', description='Active learning package', long_description='Active learning package', author='LSC', author_email='this@is.email', url='http://www.csie.ntu.edu.tw/~htlin/', packages=[ 'libact.base', 'libact.models', 'libact.labelers', 'libact.query_strategies', ], package_dir={ 'libact.base': 'libact/base', 'libact.models': 'libact/models', 'libact.labelers': 'libact/labelers', 'libact.query_strategies': 'libact/query_strategies', }, ext_modules=[ Extension( "libact.query_strategies._variance_reduction", ["libact/query_strategies/variance_reduction.c"], extra_link_args=['-llapacke -llapack -lblas'], extra_compile_args=['-std=c11'], include_dirs=numpy.distutils.misc_util.get_numpy_include_dirs(), ), ], ) <commit_after> from distutils.core import setup, Extension import numpy.distutils import sys if sys.platform == 'darwin': print("Platform Detection: Mac OS X. Link to openblas...") extra_link_args = ['-L/usr/local/opt/openblas/lib -lopenblas'] include_dirs = (numpy.distutils.misc_util.get_numpy_include_dirs() + ['/usr/local/opt/openblas/include']) else: # assume linux otherwise, unless we support Windows in the future... print("Platform Detection: Linux. Link to liblapacke...") extra_link_args = ['-llapacke -llapack -lblas'] include_dirs = numpy.distutils.misc_util.get_numpy_include_dirs() setup( name='Libact', version='0.1.0', description='Active learning package', long_description='Active learning package', author='LSC', author_email='this@is.email', url='http://www.csie.ntu.edu.tw/~htlin/', packages=[ 'libact.base', 'libact.models', 'libact.labelers', 'libact.query_strategies', ], package_dir={ 'libact.base': 'libact/base', 'libact.models': 'libact/models', 'libact.labelers': 'libact/labelers', 'libact.query_strategies': 'libact/query_strategies', }, ext_modules=[ Extension( "libact.query_strategies._variance_reduction", ["libact/query_strategies/variance_reduction.c"], extra_link_args=extra_link_args, extra_compile_args=['-std=c11'], include_dirs=include_dirs, ), ], )
<commit_msg>Make feature_file_paths have no duplicates <commit_before>import os class Core(object): """ The core of the Romaine, provides BDD test API. """ # All located features feature_file_paths = [] instance = None def __init__(self): """ Initialise Romaine core. """ self.steps = {} Core.instance = self def locate_features(self, path): """ Locate any features given a path. Keyword arguments: path -- The path to search for features, recursively. Returns: List of features located in the path given. """ walked_paths = os.walk(path) # Features in this path are stored in an intermediate list before # being added to the class variable so that we can return only the # ones we find on this invocation of locate_features feature_candidates = [] for walked_path in walked_paths: base_directory, sub_directories, feature_files = walked_path for feature_file in feature_files: feature_candidates.append( os.path.join( base_directory, feature_file ) ) self.feature_file_paths.extend(feature_candidates) return feature_candidates <commit_after>import os class Core(object): """ The core of the Romaine, provides BDD test API. """ # All located features feature_file_paths = set() instance = None def __init__(self): """ Initialise Romaine core. """ self.steps = {} Core.instance = self def locate_features(self, path): """ Locate any features given a path. Keyword arguments: path -- The path to search for features, recursively. Returns: List of features located in the path given. """ walked_paths = os.walk(path) # Features in this path are stored in an intermediate list before # being added to the class variable so that we can return only the # ones we find on this invocation of locate_features feature_candidates = [] for walked_path in walked_paths: base_directory, sub_directories, feature_files = walked_path for feature_file in feature_files: feature_candidates.append( os.path.join( base_directory, feature_file ) ) self.feature_file_paths.update(feature_candidates) return feature_candidates
<commit_msg>Fix admin-api to show that users are retrieved by id, not username <commit_before>package org.keycloak.admin.client.resource; import org.keycloak.representations.idm.UserRepresentation; import javax.ws.rs.Consumes; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import java.util.List; public interface UsersResource { @GET @Produces(MediaType.APPLICATION_JSON) public List<UserRepresentation> search(@QueryParam("username") String username, @QueryParam("firstName") String firstName, @QueryParam("lastName") String lastName, @QueryParam("email") String email, @QueryParam("first") Integer firstResult, @QueryParam("max") Integer maxResults); @GET @Produces(MediaType.APPLICATION_JSON) public List<UserRepresentation> search(@QueryParam("search") String search, @QueryParam("first") Integer firstResult, @QueryParam("max") Integer maxResults); @POST @Consumes(MediaType.APPLICATION_JSON) Response create(UserRepresentation userRepresentation); @Path("{username}") public UserResource get(@PathParam("username") String username); } <commit_after>package org.keycloak.admin.client.resource; import org.keycloak.representations.idm.UserRepresentation; import javax.ws.rs.Consumes; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import java.util.List; public interface UsersResource { @GET @Produces(MediaType.APPLICATION_JSON) List<UserRepresentation> search(@QueryParam("username") String username, @QueryParam("firstName") String firstName, @QueryParam("lastName") String lastName, @QueryParam("email") String email, @QueryParam("first") Integer firstResult, @QueryParam("max") Integer maxResults); @GET @Produces(MediaType.APPLICATION_JSON) List<UserRepresentation> search(@QueryParam("search") String search, @QueryParam("first") Integer firstResult, @QueryParam("max") Integer maxResults); @POST @Consumes(MediaType.APPLICATION_JSON) Response create(UserRepresentation userRepresentation); @Path("{id}") UserResource get(@PathParam("id") String id); }
<commit_msg>Fix map update and insert <commit_before>package orm import ( "github.com/go-pg/pg/v10/types" ) type mapModel struct { hookStubs ptr *map[string]interface{} m map[string]interface{} } var _ Model = (*mapModel)(nil) func newMapModel(ptr *map[string]interface{}) *mapModel { return &mapModel{ ptr: ptr, } } func (mapModel) Init() error { return nil } func (m mapModel) NextColumnScanner() ColumnScanner { return m } func (m mapModel) AddColumnScanner(ColumnScanner) error { return nil } func (m mapModel) ScanColumn(col types.ColumnInfo, rd types.Reader, n int) error { val, err := types.ReadColumnValue(col, rd, n) if err != nil { return err } if m.m == nil { m.m = make(map[string]interface{}) *m.ptr = m.m } m.m[col.Name] = val return nil } func (mapModel) useQueryOne() bool { return true } <commit_after>package orm import ( "github.com/go-pg/pg/v10/types" ) type mapModel struct { hookStubs ptr *map[string]interface{} m map[string]interface{} } var _ Model = (*mapModel)(nil) func newMapModel(ptr *map[string]interface{}) *mapModel { model := &mapModel{ ptr: ptr, } if ptr != nil { model.m = *ptr } return model } func (mapModel) Init() error { return nil } func (m *mapModel) NextColumnScanner() ColumnScanner { return m } func (m mapModel) AddColumnScanner(ColumnScanner) error { return nil } func (m *mapModel) ScanColumn(col types.ColumnInfo, rd types.Reader, n int) error { val, err := types.ReadColumnValue(col, rd, n) if err != nil { return err } if m.m == nil { m.m = make(map[string]interface{}) *m.ptr = m.m } m.m[col.Name] = val return nil } func (mapModel) useQueryOne() bool { return true }
<commit_msg>Add updated params to interval-congruence ref. reg test <commit_before>// PARAM: --disable ana.int.def_exc --enable ana.int.interval --enable ana.int.congruence #include <assert.h> int main(){ int r = -103; for (int i = 0; i < 40; i++) { r = r + 5; } // At this point r in the congr. dom should be 2 + 5Z int k = r; if (k >= 3) { // After refinement with congruences, the lower bound should be 7 as the numbers 3 - 6 are not in the congr. class assert (k < 7); // FAIL } if (r >= -11 && r <= -4) { assert (r == -8); } return 0; } <commit_after>// PARAM: --disable ana.int.def_exc --enable ana.int.interval --enable ana.int.congruence --enable ana.int.congruence_no_overflow --enable ana.int.refinement #include <assert.h> int main(){ int r = -103; for (int i = 0; i < 40; i++) { r = r + 5; } // At this point r in the congr. dom should be 2 + 5Z int k = r; if (k >= 3) { // After refinement with congruences, the lower bound should be 7 as the numbers 3 - 6 are not in the congr. class assert (k < 7); // FAIL } if (r >= -11 && r <= -4) { assert (r == -8); } return 0; }
<commit_msg>Update code sample for chapter 9. <commit_before>fn main() { let a_vector = ~[1,2,3]; let mut mut_vector = a_vector; mut_vector[0] = 5; println!("The first number is {:d}.", mut_vector[0]) } <commit_after>fn main() { let a_vector = vec![1i, 2i, 3i]; let mut mut_vector = a_vector; *mut_vector.get_mut(0) = 5; println!("The first number is {:d}.", mut_vector[0]) }
<commit_msg>Update url format to support Django 1.10<commit_before>from django.conf.urls import patterns, url from anycluster import views from django.conf import settings urlpatterns = patterns('', url(r'^grid/(\d+)/(\d+)/$', views.getGrid, name='getGrid'), url(r'^kmeans/(\d+)/(\d+)/$', views.getPins, name='getPins'), url(r'^getClusterContent/(\d+)/(\d+)/$', views.getClusterContent, name='getClusterContent'), url(r'^getAreaContent/(\d+)/(\d+)/$', views.getAreaContent, name='getAreaContent'), ) <commit_after>from django.conf.urls import url from anycluster import views from django.conf import settings urlpatterns = [ url(r'^grid/(\d+)/(\d+)/$', views.getGrid, name='getGrid'), url(r'^kmeans/(\d+)/(\d+)/$', views.getPins, name='getPins'), url(r'^getClusterContent/(\d+)/(\d+)/$', views.getClusterContent, name='getClusterContent'), url(r'^getAreaContent/(\d+)/(\d+)/$', views.getAreaContent, name='getAreaContent'), ]
<commit_msg>Add classifier type to the base class <commit_before>class BaseBackend(object): """ A base class for backend plugins. """ def __init__(self, config): pass def reset(self): """ Resets the training data to a blank slate. """ raise NotImplementedError() def get_key(self, classifier, key, default=None): """ Gets the value held by the classifier, key composite key. If it doesn't exist, return default. """ raise NotImplementedError() def get_key_list(self, classifier, keys, default=None): """ Given a list of key, classifier pairs get all values. If key, classifier doesn't exist, return default. Subclasses can override this to make more efficient queries for bulk requests. """ return [self.get_key(classifier, key, default) for key in keys] def set_key_list(self, classifier, key_value_pairs): """ Given a list of pairs of key, value and a classifier set them all. Subclasses can override this to make more efficient queries for bulk requests. """ return [self.set_key(classifier, key, value) for key, value in key_value_pairs] def set_key(self, classifier, key, value): """ Set the value held by the classifier, key composite key. """ raise NotImplementedError() <commit_after>class BaseBackend(object): """ A base class for backend plugins. """ def __init__(self, config): pass def reset(self): """ Resets the training data to a blank slate. """ raise NotImplementedError() def get_key(self, classification_type, classifier, key, default=None): """ Gets the value held by the classifier, key composite key. If it doesn't exist, return default. """ raise NotImplementedError() def get_key_list(self, classification_type, classifier, keys, default=None): """ Given a list of key, classifier pairs get all values. If key, classifier doesn't exist, return default. Subclasses can override this to make more efficient queries for bulk requests. """ return [self.get_key(classification_type, classifier, key, default) for classifier, key in izip(repeat(classifier), keys)] def set_key_list(self, classification_type, classifier, key_value_pairs): """ Given a list of pairs of key, value and a classifier set them all. Subclasses can override this to make more efficient queries for bulk requests. """ return [self.set_key(classification_type, classifier, key, value) for classifier, key, value in izip(repeat(classifier), key_value_pairs)] def set_key(self, classification_type, classifier, key, value): """ Set the value held by the classifier, key composite key. """ raise NotImplementedError()
<commit_msg>Correct argparse dependency - argparse already is a part of base python as of 2.7 and 3.2.<commit_before>import os from setuptools import setup install_requires = [ 'mysql-python>=1.2.3', 'psycopg2>=2.4.2', 'pyyaml>=3.10.0', 'argparse', 'pytz', ] if os.name == 'posix': install_requires.append('termcolor>=1.1.0') setup( name='py-mysql2pgsql', version='0.1.6', description='Tool for migrating/converting from mysql to postgresql.', long_description=open('README.rst').read(), license='MIT License', author='Philip Southam', author_email='philipsoutham@gmail.com', url='https://github.com/philipsoutham/py-mysql2pgsql', zip_safe=False, packages=['mysql2pgsql', 'mysql2pgsql.lib'], scripts=['bin/py-mysql2pgsql'], platforms='any', install_requires=install_requires, classifiers=[ 'License :: OSI Approved :: MIT License', 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: System Administrators', 'Intended Audience :: Developers', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Topic :: Database', 'Topic :: Utilities' ], keywords = 'mysql postgres postgresql pgsql psql migration', ) <commit_after>import os import sys from setuptools import setup install_requires = [ 'mysql-python>=1.2.3', 'psycopg2>=2.4.2', 'pyyaml>=3.10.0', 'pytz', ] if os.name == 'posix': install_requires.append('termcolor>=1.1.0') version = sys.version_info if version < (2,7) or (3,0) <= version <= (3,1): install_requires += ['argparse'] setup( name='py-mysql2pgsql', version='0.1.6', description='Tool for migrating/converting from mysql to postgresql.', long_description=open('README.rst').read(), license='MIT License', author='Philip Southam', author_email='philipsoutham@gmail.com', url='https://github.com/philipsoutham/py-mysql2pgsql', zip_safe=False, packages=['mysql2pgsql', 'mysql2pgsql.lib'], scripts=['bin/py-mysql2pgsql'], platforms='any', install_requires=install_requires, classifiers=[ 'License :: OSI Approved :: MIT License', 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: System Administrators', 'Intended Audience :: Developers', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Topic :: Database', 'Topic :: Utilities' ], keywords = 'mysql postgres postgresql pgsql psql migration', )
<commit_msg>Make a direct call to glance-api using requests This change makes this check no longer use the glanceclient tool so we can craft a request that doesn't hit the glance-registry. The reason for this is that the glance-registry itself is tested in a different check and therefore we just need to ensure the glance-api itself is responding. <commit_before> from maas_common import (get_auth_ref, get_glance_client, status_err, status_ok, metric) import sys IMAGE_ENDPOINT = 'http://127.0.0.1:9292' def check(token): glance = get_glance_client(token, IMAGE_ENDPOINT) if glance is None: status_err('Unable to obtain valid glance client, cannot proceed') status_ok() metric('glance_api_local_status', 'uint32', 1) def main(): auth_ref = get_auth_ref() token = auth_ref['token']['id'] check(token) if __name__ == "__main__": main() <commit_after> from maas_common import (status_ok, status_err, metric, get_keystone_client, get_auth_ref) from requests import Session from requests import exceptions as exc def check(auth_ref): keystone = get_keystone_client(auth_ref) tenant_id = keystone.tenant_id auth_token = keystone.auth_token registry_endpoint = 'http://127.0.0.1:9292/v2' api_status = 1 milliseconds = 0 s = Session() s.headers.update( {'Content-type': 'application/json', 'x-auth-token': auth_token}) try: # Hit something that isn't querying the glance-registry, since we # query glance-registry in separate checks r = s.get('%s/schemas/image' % registry_endpoint, verify=False, timeout=10) except (exc.ConnectionError, exc.HTTPError, exc.Timeout): api_status = 0 milliseconds = -1 except Exception as e: status_err(str(e)) else: milliseconds = r.elapsed.total_seconds() * 1000 if not r.ok: api_status = 0 status_ok() metric('glance_registry_local_status', 'uint32', api_status) metric('glance_registry_local_response_time', 'int32', milliseconds) def main(): auth_ref = get_auth_ref() check(auth_ref) if __name__ == "__main__": main()
<commit_msg>Revert "Fix a bug in caching" This reverts commit 2565df456ecb290f620ce4dadca19c76b0eeb1af. Conflicts: widgets/card.py <commit_before> from flask import render_template from models.person import Person def card(person_or_id, detailed=False, small=False): if isinstance(person_or_id, Person): person = person_or_id else: person = Person.query.filter_by(id=person_or_id).first() return render_template('widgets/card.html', person=person, detailed=detailed, small=small) <commit_after> from flask import render_template from models.person import Person def card(person_or_id, **kwargs): if isinstance(person_or_id, Person): person = person_or_id else: person = Person.query.filter_by(id=person_or_id).first() return render_template('widgets/card.html', person=person, **kwargs)
<commit_msg>Fix image path in manifest <commit_before> { "name": "Hierarchical Inventory adjustments", "summary": "Group several Inventory adjustments in a master inventory", "version": "8.0.2.0.0", "depends": ["stock"], "author": u"Numérigraphe,Odoo Community Association (OCA)", "category": "Warehouse Management", "data": ["views/stock_inventory_view.xml", "wizard/generate_inventory_view.xml"], "images": ["inventory_form.png", "inventory_form_actions.png", "wizard.png"], 'license': 'AGPL-3', 'installable': True } <commit_after> { "name": "Hierarchical Inventory adjustments", "summary": "Group several Inventory adjustments in a master inventory", "version": "8.0.2.0.0", "depends": ["stock"], "author": u"Numérigraphe,Odoo Community Association (OCA)", "category": "Warehouse Management", "data": ["views/stock_inventory_view.xml", "wizard/generate_inventory_view.xml"], "images": ["images/inventory_form.png", "images/inventory_form_actions.png", "images/wizard.png"], 'license': 'AGPL-3', 'installable': True }
<commit_msg>Add converters from DateTime to Epoch double dates <commit_before>package io.spacedog.utils; import org.joda.time.DateTimeZone; public class DateTimeZones { public final static DateTimeZone PARIS = DateTimeZone.forID("Europe/Paris"); }<commit_after>package io.spacedog.utils; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; public class DateTimeZones { public final static DateTimeZone PARIS = DateTimeZone.forID("Europe/Paris"); public static double toEpochSeconds(DateTime date) { return ((double) date.getMillis()) / 1000; } public static DateTime toDateTime(double epochSeconds) { return new DateTime((long) (epochSeconds * 1000)); } }
<commit_msg>Add croniter as an install dependency. <commit_before>import os from setuptools import find_packages from setuptools import setup base_dir = os.path.dirname(__file__) setup( name='elastalert', version='0.0.72', description='Runs custom filters on Elasticsearch and alerts on matches', author='Quentin Long', author_email='qlo@yelp.com', setup_requires='setuptools', license='Copyright 2014 Yelp', entry_points={ 'console_scripts': ['elastalert-create-index=elastalert.create_index:main', 'elastalert-test-rule=elastalert.test_rule:main', 'elastalert-rule-from-kibana=elastalert.rule_from_kibana:main', 'elastalert=elastalert.elastalert:main']}, packages=find_packages(), package_data={'elastalert': ['schema.yaml']}, install_requires=[ 'argparse', 'elasticsearch', 'jira==0.32', # jira.exceptions is missing from later versions 'jsonschema', 'mock', 'python-dateutil', 'PyStaticConfiguration', 'pyyaml', 'simplejson', 'boto', 'blist' ] ) <commit_after>import os from setuptools import find_packages from setuptools import setup base_dir = os.path.dirname(__file__) setup( name='elastalert', version='0.0.72', description='Runs custom filters on Elasticsearch and alerts on matches', author='Quentin Long', author_email='qlo@yelp.com', setup_requires='setuptools', license='Copyright 2014 Yelp', entry_points={ 'console_scripts': ['elastalert-create-index=elastalert.create_index:main', 'elastalert-test-rule=elastalert.test_rule:main', 'elastalert-rule-from-kibana=elastalert.rule_from_kibana:main', 'elastalert=elastalert.elastalert:main']}, packages=find_packages(), package_data={'elastalert': ['schema.yaml']}, install_requires=[ 'argparse', 'elasticsearch', 'jira==0.32', # jira.exceptions is missing from later versions 'jsonschema', 'mock', 'python-dateutil', 'PyStaticConfiguration', 'pyyaml', 'simplejson', 'boto', 'blist', 'croniter' ] )
<commit_msg>Insert user only if not exists <commit_before>package awesomefb; import com.mongodb.DB; import com.mongodb.DBCollection; import com.mongodb.MongoClient; import java.net.UnknownHostException; /** * Created by earl on 5/25/2015. */ public class DatabaseManager { private static DatabaseManager instance = null; private DB database; private DBCollection postsCollection; private DBCollection usersCollection; public static DatabaseManager getInstance() { if (instance == null) { try { MongoClient mongoClient = new MongoClient(); DB db = mongoClient.getDB("awesomefb"); instance = new DatabaseManager(db); } catch (UnknownHostException e) { System.out.println(e.toString()); } } return instance; } protected DatabaseManager(DB database) { this.database = database; postsCollection = this.database.getCollection("posts"); usersCollection = this.database.getCollection("users"); postsCollection.drop(); usersCollection.drop(); } public void insertPost(Post post) { postsCollection.insert(post.toDBObject()); } public void insertUser(User user) { usersCollection.insert(user.toDBObject()); } } <commit_after>package awesomefb; import com.mongodb.*; import java.net.UnknownHostException; /** * Created by earl on 5/25/2015. */ public class DatabaseManager { private static DatabaseManager instance = null; private DB database; private DBCollection postsCollection; private DBCollection usersCollection; public static DatabaseManager getInstance() { if (instance == null) { try { MongoClient mongoClient = new MongoClient(); DB db = mongoClient.getDB("awesomefb"); instance = new DatabaseManager(db); } catch (UnknownHostException e) { System.out.println(e.toString()); } } return instance; } protected DatabaseManager(DB database) { this.database = database; postsCollection = this.database.getCollection("posts"); usersCollection = this.database.getCollection("users"); postsCollection.drop(); usersCollection.drop(); } public void insertPost(Post post) { postsCollection.insert(post.toDBObject()); } public void insertUser(User user) { String facebookId = user.getFacebookId(); usersCollection.update(new BasicDBObject("fb_id", facebookId), user.toDBObject(), true, false); } }
<commit_msg>Add complete field to task and timesheet api <commit_before>from __future__ import unicode_literals from django.contrib.auth.models import User from rest_framework import serializers from core.models import Timesheet, Task, Entry class UserSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = User fields = ('id', 'url', 'username',) class TimesheetSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Timesheet fields = ('id', 'url', 'name',) class TaskSerializer(serializers.HyperlinkedModelSerializer): timesheet_details = TimesheetSerializer(source='timesheet', read_only=True) class Meta: model = Task fields = ('id', 'url', 'timesheet', 'timesheet_details', 'name',) class EntrySerializer(serializers.HyperlinkedModelSerializer): task_details = TaskSerializer(source='task', read_only=True) user_details = UserSerializer(source='user', read_only=True) class Meta: model = Entry fields = ('id', 'url', 'task', 'task_details', 'user', 'user_details', 'date', 'duration', 'note',) <commit_after>from __future__ import unicode_literals from django.contrib.auth.models import User from rest_framework import serializers from core.models import Timesheet, Task, Entry class UserSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = User fields = ('id', 'url', 'username',) class TimesheetSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Timesheet fields = ('id', 'url', 'name', 'complete',) class TaskSerializer(serializers.HyperlinkedModelSerializer): timesheet_details = TimesheetSerializer(source='timesheet', read_only=True) class Meta: model = Task fields = ('id', 'url', 'timesheet', 'timesheet_details', 'name', 'complete',) class EntrySerializer(serializers.HyperlinkedModelSerializer): task_details = TaskSerializer(source='task', read_only=True) user_details = UserSerializer(source='user', read_only=True) class Meta: model = Entry fields = ('id', 'url', 'task', 'task_details', 'user', 'user_details', 'date', 'duration', 'note',)
<commit_msg>Add eventID to Reparent event for grouping. <commit_before>// Package events defines the structures used for events dispatched from the // wrangler package. package events import ( "github.com/youtube/vitess/go/event" "github.com/youtube/vitess/go/vt/topo" ) // Reparent is an event that describes a single step in the reparent process. type Reparent struct { ShardInfo topo.ShardInfo OldMaster, NewMaster topo.Tablet Status string } // UpdateStatus sets a new status and then dispatches the event. func (r *Reparent) UpdateStatus(status string) { r.Status = status // make a copy since we're calling Dispatch asynchronously ev := *r go event.Dispatch(&ev) } <commit_after>// Package events defines the structures used for events dispatched from the // wrangler package. package events import ( "time" "github.com/youtube/vitess/go/event" "github.com/youtube/vitess/go/vt/topo" ) // Reparent is an event that describes a single step in the reparent process. type Reparent struct { ShardInfo topo.ShardInfo OldMaster, NewMaster topo.Tablet Status string // eventID is used to group the steps of a single reparent in progress. // It is set internally the first time UpdateStatus() is called. eventID int64 } // UpdateStatus sets a new status and then dispatches the event. func (r *Reparent) UpdateStatus(status string) { r.Status = status // initialize event ID if r.eventID == 0 { r.eventID = time.Now().UnixNano() } // make a copy since we're calling Dispatch asynchronously ev := *r go event.Dispatch(&ev) }
<commit_msg>Adjust the order to reduce latency <commit_before>import logmodule from eve.client.script.ui.shared.fleet.fleetbroadcast import FleetBroadcastView def PatchFn(fn): def wrapper(self): try: br = sm.GetService('fleet').GetBroadcastHistory()[0] logmodule.general.Log("GetBroadcastListEntry invoked: %s %d %d" % (br.name, br.charID, br.itemID), logmodule.LGNOTICE) if br.name in ("Target", "HealArmor", "HealShield"): sm.GetService('target').TryLockTarget(br.itemID) except: pass return fn(self) return wrapper def RunPatch(): FleetBroadcastView.LoadBroadcastHistory = PatchFn(FleetBroadcastView.LoadBroadcastHistory) logmodule.general.Log("Code Injected", logmodule.LGNOTICE) <commit_after>import logmodule from eve.client.script.ui.shared.fleet.fleetbroadcast import FleetBroadcastView def PatchFn(fn): def wrapper(self): ret = fn(self) try: br = sm.GetService('fleet').GetBroadcastHistory()[0] logmodule.general.Log("GetBroadcastListEntry invoked: %s %d %d" % (br.name, br.charID, br.itemID), logmodule.LGNOTICE) if br.name in ("Target", "HealArmor", "HealShield"): sm.GetService('target').TryLockTarget(br.itemID) except: pass return ret return wrapper def RunPatch(): FleetBroadcastView.LoadBroadcastHistory = PatchFn(FleetBroadcastView.LoadBroadcastHistory) logmodule.general.Log("Code Injected", logmodule.LGNOTICE)
<commit_msg>Add exception for empty db search return <commit_before> class ConfigException(Exception): pass class FillValueException(Exception): """ All of a tile is "fill" values """ pass <commit_after> class ConfigException(Exception): pass class FillValueException(Exception): """ All of a tile is "fill" values """ pass class ProductNotFoundException(Exception): pass
<commit_msg>Remove bad torrents assumes torrent_to_search_string already ran. <commit_before>from providers.popularity.provider import PopularityProvider from utils.torrent_util import torrent_to_search_string, remove_bad_torrent_matches IDENTIFIER = "Torrentz" class Provider(PopularityProvider): PAGES_TO_FETCH = 1 def get_popular(self): results = [] for page in range(Provider.PAGES_TO_FETCH): terms = ["movies", "hd", "-xxx", "-porn"] url = "https://torrentz.eu/search?q=%s&p=%s" % ( "+".join(terms), page ) results += self.parse_html(url, ".results dt a") results = remove_bad_torrent_matches(results) results = [torrent_to_search_string(name) for name in results] return results <commit_after>from providers.popularity.provider import PopularityProvider from utils.torrent_util import torrent_to_search_string, remove_bad_torrent_matches IDENTIFIER = "Torrentz" class Provider(PopularityProvider): PAGES_TO_FETCH = 1 def get_popular(self): results = [] for page in range(Provider.PAGES_TO_FETCH): terms = ["movies", "hd", "-xxx", "-porn"] url = "https://torrentz.eu/search?q=%s&p=%s" % ( "+".join(terms), page ) results += self.parse_html(url, ".results dt a") results = [torrent_to_search_string(name) for name in results] results = remove_bad_torrent_matches(results) return results
<commit_msg>Handle 404 in get requests <commit_before>package main import ( "fmt" "log" "net/http" "github.com/julienschmidt/httprouter" ) var data map[string]string = map[string]string{} func Get(w http.ResponseWriter, r *http.Request, params httprouter.Params) { key := params.ByName("key") fmt.Fprint(w, data[key]) } func Put(w http.ResponseWriter, r *http.Request, params httprouter.Params) { key := params.ByName("key") value := r.FormValue("data") data[key] = value } func main() { router := httprouter.New() router.GET("/:key", Get) router.POST("/:key", Put) log.Println("Running server on port 3000") log.Fatal(http.ListenAndServe(":3000", router)) } <commit_after>package main import ( "fmt" "log" "net/http" "github.com/julienschmidt/httprouter" ) var data map[string]string = map[string]string{} func Get(w http.ResponseWriter, r *http.Request, params httprouter.Params) { key := params.ByName("key") value, ok := data[key] if !ok { w.Header().Set("Status", "404") fmt.Fprint(w, "Not found") return } else { fmt.Fprint(w, value) } } func Put(w http.ResponseWriter, r *http.Request, params httprouter.Params) { key := params.ByName("key") value := r.FormValue("data") data[key] = value } func main() { router := httprouter.New() router.GET("/:key", Get) router.POST("/:key", Put) log.Println("Running server on port 3000") log.Fatal(http.ListenAndServe(":3000", router)) }
<commit_msg>Call base class activation routine. git-svn-id: 9e2532540f1574e817ce42f20b9d0fb64899e451@198 4068ffdb-0463-0410-8185-8cc71e3bd399 <commit_before> WANProxyConfigClassInterface wanproxy_config_class_interface; bool WANProxyConfigClassInterface::activate(ConfigObject *) { /* Eventually would like to do something more useful here. */ return (true); } <commit_after> WANProxyConfigClassInterface wanproxy_config_class_interface; bool WANProxyConfigClassInterface::activate(ConfigObject *co) { if (!ConfigClassAddress::activate(co)) return (false); /* Eventually would like to do something more useful here. */ return (true); }
<commit_msg>Revert to previous way of setting 'authenticate' This reverts commit a055f97a342f670171f30095cabfd4ba1bfdad17. This reverts commit 4cec5250a3f9058fea5af5ef432a5b230ca94963. <commit_before>import os mylang = 'test' family = 'wikipedia' custom_path = os.path.expanduser('~/user-config.py') if os.path.exists(custom_path): with open(custom_path, 'rb') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) del f # Clean up temp variables, since pwb issues a warning otherwise # to help people catch misspelt config del custom_path # Things that should be non-easily-overridable for fam in ( 'wikipedia', 'commons', 'meta', 'wikiboots', 'wikimedia', 'wikiquote', 'wikisource', 'wikisource', 'wiktionary', 'wikiversity', 'wikidata', 'mediawiki' ): usernames[fam]['*'] = os.environ['USER'] if 'ACCESS_KEY' in os.environ: # If OAuth integration is available, take it authenticate.setdefault(fam, {})['*'] = ( os.environ['CLIENT_ID'], os.environ['CLIENT_SECRET'], os.environ['ACCESS_KEY'], os.environ['ACCESS_SECRET'] ) del fam <commit_after>import os mylang = 'test' family = 'wikipedia' custom_path = os.path.expanduser('~/user-config.py') if os.path.exists(custom_path): with open(custom_path, 'rb') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) del f # Clean up temp variables, since pwb issues a warning otherwise # to help people catch misspelt config del custom_path # Things that should be non-easily-overridable for fam in ( 'wikipedia', 'commons', 'meta', 'wikiboots', 'wikimedia', 'wikiquote', 'wikisource', 'wikisource', 'wiktionary', 'wikiversity', 'wikidata', 'mediawiki' ): usernames[fam]['*'] = os.environ['USER'] del fam # If OAuth integration is available, take it if 'CLIENT_ID' in os.environ: authenticate['*'] = ( os.environ['CLIENT_ID'], os.environ['CLIENT_SECRET'], os.environ['ACCESS_KEY'], os.environ['ACCESS_SECRET'] )
<commit_msg>Fix T41937: NPE when render report with large line height and letter spacing styles in item <commit_before>/*********************************************************************** * Copyright (c) 2009 Actuate Corporation. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Actuate Corporation - initial API and implementation ***********************************************************************/ package org.eclipse.birt.report.engine.nLayout.area.impl; import org.eclipse.birt.core.exception.BirtException; import org.eclipse.birt.report.engine.nLayout.area.IContainerArea; public class HtmlRegionArea extends RegionArea implements IContainerArea { public HtmlRegionArea( ) { super( ); } HtmlRegionArea( HtmlRegionArea area ) { super( area ); } public void close( ) throws BirtException { if ( specifiedHeight >= currentBP ) { finished = true; } else { finished = false; } setContentHeight( specifiedHeight ); checkDisplayNone( ); } public void update( AbstractArea area ) throws BirtException { int aHeight = area.getAllocatedHeight( ); currentBP += aHeight; if ( currentIP + area.getAllocatedWidth( ) > maxAvaWidth ) { setNeedClip( true ); } } public boolean isFinished( ) { return finished; } } <commit_after>/*********************************************************************** * Copyright (c) 2009 Actuate Corporation. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Actuate Corporation - initial API and implementation ***********************************************************************/ package org.eclipse.birt.report.engine.nLayout.area.impl; import org.eclipse.birt.core.exception.BirtException; import org.eclipse.birt.report.engine.nLayout.area.IContainerArea; public class HtmlRegionArea extends RegionArea implements IContainerArea { public HtmlRegionArea( ) { super( ); } HtmlRegionArea( HtmlRegionArea area ) { super( area ); } public void close( ) throws BirtException { if ( specifiedHeight >= currentBP ) { finished = true; } else { finished = false; } setContentHeight( specifiedHeight ); } public void update( AbstractArea area ) throws BirtException { int aHeight = area.getAllocatedHeight( ); currentBP += aHeight; if ( currentIP + area.getAllocatedWidth( ) > maxAvaWidth ) { setNeedClip( true ); } } public boolean isFinished( ) { return finished; } }
<commit_msg>Make ilock lock name user specific <commit_before>import pytest import logging import multiprocessing as mp from oggm import cfg, utils import pickle logger = logging.getLogger(__name__) def pytest_configure(config): if config.pluginmanager.hasplugin('xdist'): try: from ilock import ILock utils.lock = ILock("oggm_xdist_download_lock") logger.info("ilock locking setup successfully for xdist tests") except: logger.warning("could not setup ilock locking for distributed tests") <commit_after>import pytest import logging import getpass from oggm import cfg, utils logger = logging.getLogger(__name__) def pytest_configure(config): if config.pluginmanager.hasplugin('xdist'): try: from ilock import ILock utils.lock = ILock("oggm_xdist_download_lock_" + getpass.getuser()) logger.info("ilock locking setup successfully for xdist tests") except: logger.warning("could not setup ilock locking for distributed tests")
<commit_msg>Create utility function for recursive obtaining directory contents.<commit_before>module Test.Haddock.Utils where import Data.Maybe mlast :: [a] -> Maybe a mlast = listToMaybe . reverse <commit_after>module Test.Haddock.Utils where import Control.Monad import Data.Maybe import System.Directory import System.FilePath mlast :: [a] -> Maybe a mlast = listToMaybe . reverse partitionM :: Monad m => (a -> m Bool) -> [a] -> m ([a], [a]) partitionM _ [] = pure ([], []) partitionM p (x:xs) = do (ss, fs) <- partitionM p xs b <- p x pure $ if b then (x:ss, fs) else (ss, x:fs) getDirectoryTree :: FilePath -> IO [FilePath] getDirectoryTree path = do (dirs, files) <- partitionM isDirectory =<< contents subfiles <- fmap concat . forM dirs $ \dir -> map (dir </>) <$> getDirectoryTree (path </> dir) pure $ files ++ subfiles where contents = filter realEntry <$> getDirectoryContents path isDirectory entry = doesDirectoryExist $ path </> entry realEntry entry = not $ entry == "." || entry == ".."
<commit_msg>Allow disabling password and token auth on jupyter notebooks<commit_before>import os from IPython.lib import passwd c.NotebookApp.ip = '*' c.NotebookApp.port = int(os.getenv('PORT', 8888)) c.NotebookApp.open_browser = False c.MultiKernelManager.default_kernel_name = 'python2' # sets a password if PASSWORD is set in the environment if 'PASSWORD' in os.environ: c.NotebookApp.password = passwd(os.environ['PASSWORD']) del os.environ['PASSWORD'] <commit_after>import os from IPython.lib import passwd c.NotebookApp.ip = '*' c.NotebookApp.port = int(os.getenv('PORT', 8888)) c.NotebookApp.open_browser = False c.MultiKernelManager.default_kernel_name = 'python2' # sets a password if PASSWORD is set in the environment if 'PASSWORD' in os.environ: password = os.environ['PASSWORD'] if password: c.NotebookApp.password = passwd(password) else: c.NotebookApp.password = '' c.NotebookApp.token = '' del os.environ['PASSWORD']
<commit_msg>Add in field for making a `related` field on objects. Big thanks to @coleifer for his [django-generic-m2m][] project, we've got a field for adding `related` to other objects with minimal new code. [django-generic-m2m]: http://github.com/coleifer/django-generic-m2m <commit_before>from django.contrib.contenttypes import generic from django.contrib.contenttypes.models import ContentType from django.db import models from . import managers class RelatedType(models.Model): title = models.CharField(max_length=100) def __unicode__(self): return self.title class RelatedContent(models.Model): related_type = models.ForeignKey(RelatedType) order = models.IntegerField(default=0) source_type = models.ForeignKey(ContentType, related_name="from") source_id = models.PositiveIntegerField() source_object = generic.GenericForeignKey('source_type', 'source_id') destination_type = models.ForeignKey(ContentType, related_name="to") destination_id = models.PositiveIntegerField() destination_object = generic.GenericForeignKey('destination_type', 'destination_id') objects = managers.RelatedContentManager() class Meta: ordering = ["order"] def __unicode__(self): return u"%s (%d): %s" % (self.related_type, self.order, self.destination_object) <commit_after>from django.contrib.contenttypes import generic from django.contrib.contenttypes.models import ContentType from django.db import models from genericm2m.models import RelatedObjectsDescriptor from . import managers class RelatedObjectsField(RelatedObjectsDescriptor): def __init__(self, model=None, from_field="source_object", to_field="destination_object"): if not model: model = RelatedContent super(RelatedObjectsField, self).__init__(model, from_field, to_field) class RelatedType(models.Model): title = models.CharField(max_length=100) def __unicode__(self): return self.title class RelatedContent(models.Model): related_type = models.ForeignKey(RelatedType) order = models.IntegerField(default=0) source_type = models.ForeignKey(ContentType, related_name="from") source_id = models.PositiveIntegerField() source_object = generic.GenericForeignKey('source_type', 'source_id') destination_type = models.ForeignKey(ContentType, related_name="to") destination_id = models.PositiveIntegerField() destination_object = generic.GenericForeignKey('destination_type', 'destination_id') objects = managers.RelatedContentManager() class Meta: ordering = ["order"] def __unicode__(self): return u"%s (%d): %s" % (self.related_type, self.order, self.destination_object)
<commit_msg>Store Engine by value on Windows and keep argv in unique_ptr <commit_before>// Copyright 2015-2020 Elviss Strazdins. All rights reserved. #include <cstdlib> #include <shellapi.h> #include "SystemWin.hpp" #include "EngineWin.hpp" #include "../../utils/Log.hpp" int WINAPI WinMain(_In_ HINSTANCE, _In_opt_ HINSTANCE, _In_ LPSTR, _In_ int) { try { int argc; LPWSTR* argv = CommandLineToArgvW(GetCommandLineW(), &argc); auto engine = std::make_unique<ouzel::core::windows::Engine>(argc, argv); if (argv) LocalFree(argv); engine->run(); engine.reset(); // must release engine instance before exit on Windows return EXIT_SUCCESS; } catch (const std::exception& e) { ouzel::logger.log(ouzel::Log::Level::error) << e.what(); return EXIT_FAILURE; } } namespace ouzel::core::windows { } <commit_after>// Copyright 2015-2020 Elviss Strazdins. All rights reserved. #include <cstdlib> #include <shellapi.h> #include "SystemWin.hpp" #include "EngineWin.hpp" #include "../../utils/Log.hpp" int WINAPI WinMain(_In_ HINSTANCE, _In_opt_ HINSTANCE, _In_ LPSTR, _In_ int) { try { int argc; std::unique_ptr<LPWSTR, decltype(&LocalFree)> argv(CommandLineToArgvW(GetCommandLineW(), &argc), LocalFree); ouzel::core::windows::Engine engine(argc, argv.get()); engine.run(); return EXIT_SUCCESS; } catch (const std::exception& e) { ouzel::logger.log(ouzel::Log::Level::error) << e.what(); return EXIT_FAILURE; } } namespace ouzel::core::windows { System::System(int argc, LPWSTR* argv) { } }
<commit_msg>Use new exceptions: AbstractMethodError, AbstractPropertyError <commit_before> class SyncBackendMediaInterface: """ This interface defines required properties and methods of objects passed to deployment back-end class on media synchronization. """ @property def backend_data_value(self): raise NotImplementedError('This property should be implemented in ' 'subclasses') @property def backend_uniqid(self): raise NotImplementedError('This property should be implemented in ' 'subclasses') def delete(self, **kwargs): raise NotImplementedError('This method should be implemented in ' 'subclasses') @property def deleted_at(self): raise NotImplementedError('This property should be implemented in ' 'subclasses') @property def filename(self): raise NotImplementedError('This property should be implemented in ' 'subclasses') @property def hash(self): raise NotImplementedError('This property should be implemented in ' 'subclasses') @property def is_remote_url(self): raise NotImplementedError('This property should be implemented in ' 'subclasses') @property def mimetype(self): raise NotImplementedError('This property should be implemented in ' 'subclasses') <commit_after>from kpi.exceptions import AbstractMethodError, AbstractPropertyError class SyncBackendMediaInterface: """ This interface defines required properties and methods of objects passed to deployment back-end class on media synchronization. """ @property def backend_data_value(self): raise AbstractPropertyError @property def backend_uniqid(self): raise AbstractPropertyError def delete(self, **kwargs): raise AbstractMethodError @property def deleted_at(self): raise AbstractPropertyError @property def filename(self): raise AbstractPropertyError @property def hash(self): raise AbstractPropertyError @property def is_remote_url(self): raise AbstractPropertyError @property def mimetype(self): raise AbstractPropertyError
<commit_msg>Check for async_lru when importing asynchronous subpackage <commit_before> try: import aiohttp import oauthlib except ModuleNotFoundError: from tweepy.errors import TweepyException raise TweepyException( "tweepy.asynchronous requires aiohttp and oauthlib to be installed" ) from tweepy.asynchronous.streaming import AsyncStream from tweepy.asynchronous.client import AsyncClient <commit_after> try: import aiohttp import async_lru import oauthlib except ModuleNotFoundError: from tweepy.errors import TweepyException raise TweepyException( "tweepy.asynchronous requires aiohttp, async_lru, and oauthlib to be " "installed" ) from tweepy.asynchronous.streaming import AsyncStream from tweepy.asynchronous.client import AsyncClient
<commit_msg>Use passed directory in mako engine. <commit_before>"""Provide the mako templating engine.""" from __future__ import print_function from mako.template import Template from mako.lookup import TemplateLookup from . import Engine class MakoEngine(Engine): """Mako templating engine.""" handle = 'mako' def __init__(self, template, tolerant=False, **kwargs): """Initialize mako template.""" super(MakoEngine, self).__init__(**kwargs) default_filters = ['filter_undefined'] if tolerant else None encoding_errors = 'replace' if tolerant else 'strict' imports = ['def filter_undefined(value):\n' ' if value is UNDEFINED:\n' ' return \'<UNDEFINED>\'\n' ' return value\n'] lookup = TemplateLookup(directories=['.']) self.template = Template(template, default_filters=default_filters, encoding_errors=encoding_errors, imports=imports, lookup=lookup, strict_undefined=not tolerant, ) def apply(self, mapping): """Apply a mapping of name-value-pairs to a template.""" return self.template.render(**mapping) <commit_after>"""Provide the mako templating engine.""" from __future__ import print_function from mako.template import Template from mako.lookup import TemplateLookup from . import Engine class MakoEngine(Engine): """Mako templating engine.""" handle = 'mako' def __init__(self, template, dirname=None, tolerant=False, **kwargs): """Initialize mako template.""" super(MakoEngine, self).__init__(**kwargs) directories = [dirname] if dirname is not None else ['.'] lookup = TemplateLookup(directories=directories) default_filters = ['filter_undefined'] if tolerant else None encoding_errors = 'replace' if tolerant else 'strict' imports = ['def filter_undefined(value):\n' ' if value is UNDEFINED:\n' ' return \'<UNDEFINED>\'\n' ' return value\n'] self.template = Template(template, default_filters=default_filters, encoding_errors=encoding_errors, imports=imports, lookup=lookup, strict_undefined=not tolerant, ) def apply(self, mapping): """Apply a mapping of name-value-pairs to a template.""" return self.template.render(**mapping)
<commit_msg>Use importlib instead of exec (exec was pretty ugly) <commit_before> import argparse import sys from mawslib.manager import Manager configfile="cloudconfig.yaml" parser = argparse.ArgumentParser( #add_help=False, description='AWS Manager', usage='''maws [<options>] <command> <subcommand> [<args>] For help: maws help maws <command> help maws <command> <subcommand> help ''') parser.add_argument('command', help='Command to run', choices = ['help', 'ec2', 'sdb', 'route53', 'r53', 'rds', 'cloudformation', 'cfn' ]) parser.add_argument('--config', help='alternate config file to use (default: cloudconfig.yaml)', action="store") # parse_args defaults to [1:] for args, but you need to # exclude the rest of the args too, or validation will fail args, subargs = parser.parse_known_args() if hasattr(args, "config"): configfile = args.config mgr = Manager(configfile) mgr.showname() if args.command == "cfn": args.command = "cloudformation" if args.command == "r53": args.command = "route53" exec("from cli.%s_cli import processCommand" % args.command) processCommand(mgr, subargs) <commit_after> import argparse import sys from mawslib.manager import Manager import importlib configfile="cloudconfig.yaml" parser = argparse.ArgumentParser( #add_help=False, description='AWS Manager', usage='''maws [<options>] <command> <subcommand> [<args>] For help: maws help maws <command> help maws <command> <subcommand> help ''') parser.add_argument('command', help='Command to run', choices = ['help', 'ec2', 'sdb', 'route53', 'r53', 'rds', 'cloudformation', 'cfn' ]) parser.add_argument('--config', help='alternate config file to use (default: cloudconfig.yaml)', action="store") # parse_args defaults to [1:] for args, but you need to # exclude the rest of the args too, or validation will fail args, subargs = parser.parse_known_args() if hasattr(args, "config"): configfile = args.config mgr = Manager(configfile) mgr.showname() if args.command == "cfn": args.command = "cloudformation" if args.command == "r53": args.command = "route53" cli_mod = importlib.import_module("cli.%s_cli" % args.command) cli_mod.processCommand(mgr, subargs)
<commit_msg>local: Add default config and config schema <commit_before>from __future__ import unicode_literals

import mopidy
from mopidy import ext


__doc__ = """A backend for playing music from a local music archive.

This backend handles URIs starting with ``file:``.

See :ref:`music-from-local-storage` for further instructions on using this
backend.

**Issues:**

https://github.com/mopidy/mopidy/issues?labels=Local+backend

**Dependencies:**

- None

**Settings:**

- :attr:`mopidy.settings.LOCAL_MUSIC_PATH`
- :attr:`mopidy.settings.LOCAL_PLAYLIST_PATH`
- :attr:`mopidy.settings.LOCAL_TAG_CACHE_FILE`
"""


class Extension(ext.Extension):

    name = 'Mopidy-Local'
    version = mopidy.__version__

    def get_default_config(self):
        return '[ext.local]'

    def validate_config(self, config):
        pass

    def validate_environment(self):
        pass

    def get_backend_classes(self):
        from .actor import LocalBackend
        return [LocalBackend]
<commit_after>from __future__ import unicode_literals

import mopidy
from mopidy import ext
from mopidy.utils import config, formatting


default_config = """
[ext.local]

# If the local extension should be enabled or not
enabled = true

# Path to folder with local music
music_path = $XDG_MUSIC_DIR

# Path to playlist folder with m3u files for local music
playlist_path = $XDG_DATA_DIR/mopidy/playlists

# Path to tag cache for local music
tag_cache_file = $XDG_DATA_DIR/mopidy/tag_cache
"""

__doc__ = """A backend for playing music from a local music archive.

This backend handles URIs starting with ``file:``.

See :ref:`music-from-local-storage` for further instructions on using this
backend.

**Issues:**

https://github.com/mopidy/mopidy/issues?labels=Local+backend

**Dependencies:**

- None

**Default config:**

.. code-block:: ini

%(config)s
""" % {'config': formatting.indent(default_config)}


class Extension(ext.Extension):

    name = 'Mopidy-Local'
    version = mopidy.__version__

    def get_default_config(self):
        return default_config

    def get_config_schema(self):
        schema = config.ExtensionConfigSchema()
        schema['music_path'] = config.String()
        schema['playlist_path'] = config.String()
        schema['tag_cache_file'] = config.String()
        return schema

    def validate_environment(self):
        pass

    def get_backend_classes(self):
        from .actor import LocalBackend
        return [LocalBackend]
<commit_msg>Change test UserFactory model to point to User <commit_before>from django.test import TestCase import factory class UserFactory(factory.django.DjangoModelFactory): class Meta: model = 'imagerprofile.ImagerProfile' django_get_or_create = ('username',) username = 'John' <commit_after>from django.test import TestCase import factory class UserFactory(factory.django.DjangoModelFactory): class Meta: model = 'imagerprofile.User' django_get_or_create = ('username',) username = factory.Sequence(lambda n: "Agent %03d" % n)
<commit_msg>Fix cert retreival on python 3 <commit_before>import os import urllib TRUSTED_ROOT_FILE = os.path.join( os.path.dirname(os.path.abspath(__file__)), "AppleIncRootCertificate.cer" ) SECRET_KEY = "notsecr3t" IAP_SETTINGS = { "TRUSTED_ROOT_FILE": TRUSTED_ROOT_FILE, "PRODUCTION_BUNDLE_ID": "com.educreations.ios.Educreations", } if not os.path.isfile(TRUSTED_ROOT_FILE): trusted_root_data = urllib.urlretrieve( "https://www.apple.com/appleca/AppleIncRootCertificate.cer", TRUSTED_ROOT_FILE ) <commit_after>import os import urllib TRUSTED_ROOT_FILE = os.path.join( os.path.dirname(os.path.abspath(__file__)), "AppleIncRootCertificate.cer" ) SECRET_KEY = "notsecr3t" IAP_SETTINGS = { "TRUSTED_ROOT_FILE": TRUSTED_ROOT_FILE, "PRODUCTION_BUNDLE_ID": "com.educreations.ios.Educreations", } if not os.path.isfile(TRUSTED_ROOT_FILE): try: trusted_root_data = urllib.urlretrieve( "https://www.apple.com/appleca/AppleIncRootCertificate.cer", TRUSTED_ROOT_FILE, ) except AttributeError: # Python 3 trusted_root_data = urllib.request.urlretrieve( "https://www.apple.com/appleca/AppleIncRootCertificate.cer", TRUSTED_ROOT_FILE, )
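The fallback in the entry above relies on `urllib.request` already being reachable as an attribute of `urllib`; a more conventional, standalone sketch of the same Python 2/3 compatibility idea (not the project's actual code) imports the function itself and falls back on ImportError:

try:
    # Python 3: urlretrieve lives in urllib.request.
    from urllib.request import urlretrieve
except ImportError:
    # Python 2: urlretrieve is a top-level urllib function.
    from urllib import urlretrieve

# Either way the caller uses the same name; URL and path here are made up.
urlretrieve("https://www.example.com/root.cer", "/tmp/root.cer")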
<commit_msg>Implement unquote special values in newMatcher <commit_before>package main import ( "regexp" "strings" ) var ( branches = regexp.MustCompile(`(?:[^,\\]|\\.)*`) ) func newMatcher(pat string) (*regexp.Regexp, error) { sp := branches.FindAllString(pat, -1) for i := 0; i < len(sp); i++ { sp[i] = strings.Replace(sp[i], `\,`, `,`, -1) sp[i] = regexp.QuoteMeta(sp[i]) } pat = "(" + strings.Join(sp, "|") + ")" return regexp.Compile(pat) } <commit_after>package main import ( "regexp" "strconv" "strings" ) var ( branches = regexp.MustCompile(`(?:[^,\\]|\\.)*`) ) func newMatcher(pat string) (m *regexp.Regexp, err error) { pat = strings.Replace(pat, `\,`, `\\,`, -1) pat = `"` + pat + `"` pat, err = strconv.Unquote(pat) if err != nil { return nil, err } sp := branches.FindAllString(pat, -1) for i := 0; i < len(sp); i++ { sp[i] = strings.Replace(sp[i], `\,`, `,`, -1) sp[i] = regexp.QuoteMeta(sp[i]) } pat = "(" + strings.Join(sp, "|") + ")" return regexp.Compile(pat) }
<commit_msg>Use the GWT date/time formatter. <commit_before>package edu.pdx.cs410J.whitlock.client; import edu.pdx.cs410J.AbstractAppointment; import java.util.Date; public class Appointment extends AbstractAppointment { @Override public String getBeginTimeString() { return "START " + getBeginTime(); } @Override public String getEndTimeString() { return "END + " + getEndTime(); } @Override public Date getEndTime() { return new Date(); } @Override public String getDescription() { return "My description"; } @Override public Date getBeginTime() { return new Date(); } } <commit_after>package edu.pdx.cs410J.whitlock.client; import com.google.gwt.i18n.client.DateTimeFormat; import edu.pdx.cs410J.AbstractAppointment; import java.util.Date; public class Appointment extends AbstractAppointment { @Override public String getBeginTimeString() { return "START " + getBeginTime(); } @Override public String getEndTimeString() { return "END + " + formatDate(getEndTime()); } private String formatDate(Date date) { String pattern = "yyyy/MM/dd hh:mm a"; return DateTimeFormat.getFormat(pattern).format(date); } @Override public Date getEndTime() { return new Date(); } @Override public String getDescription() { return "My description"; } @Override public Date getBeginTime() { return new Date(); } }
<commit_msg>Add all mods to compilation <commit_before> extern crate bosun_emitter; #[macro_use] extern crate chan; extern crate chan_signal; #[macro_use] extern crate log; extern crate env_logger; extern crate rustc_serialize; extern crate toml; pub mod config; <commit_after> extern crate bosun_emitter; #[macro_use] extern crate chan; extern crate chan_signal; #[macro_use] extern crate log; extern crate env_logger; extern crate rustc_serialize; extern crate toml; pub mod bosun; pub mod collectors; pub mod config; pub mod scheduler;
<commit_msg>Update factory registers with new classes. <commit_before>from kivy.factory import Factory r = Factory.register r('StageTreeNode', module='moa.render.treerender') r('StageSimpleDisplay', module='moa.render.stage_simple') # --------------------- devices ----------------------------- r('Device', module='moa.device') r('DigitalChannel', module='moa.device.digital') r('DigitalPort', module='moa.device.digital') r('ButtonChannel', module='moa.device.digital') r('ButtonPort', module='moa.device.digital') r('AnalogChannel', module='moa.device.analog') r('AnalogPort', module='moa.device.analog') r('NumericPropertyChannel', module='moa.device.analog') r('NumericPropertyPort', module='moa.device.analog') # ---------------------- stages -------------------------------- r('MoaStage', module='moa.stage') r('Delay', module='moa.stage.delay') r('GateStage', module='moa.stage.gate') <commit_after>from kivy.factory import Factory r = Factory.register r('StageTreeNode', module='moa.render.treerender') r('StageSimpleDisplay', module='moa.render.stage_simple') # --------------------- devices ----------------------------- r('Device', module='moa.device') r('DigitalChannel', module='moa.device.digital') r('DigitalPort', module='moa.device.digital') r('ButtonChannel', module='moa.device.digital') r('ButtonPort', module='moa.device.digital') r('AnalogChannel', module='moa.device.analog') r('AnalogPort', module='moa.device.analog') r('NumericPropertyChannel', module='moa.device.analog') r('NumericPropertyPort', module='moa.device.analog') # ---------------------- stages -------------------------------- r('MoaStage', module='moa.stage') r('Delay', module='moa.stage.delay') r('GateStage', module='moa.stage.gate') r('DigitalGateStage', module='moa.stage.gate') r('AnalogGateStage', module='moa.stage.gate')
<commit_msg>Remove print of terminal output for debugging <commit_before> import pyqrcode # sudo pip install pyqrcode def getQRArray(text, errorCorrection): """ Takes in text and errorCorrection (letter), returns 2D array of the QR code""" # White is True (1) # Black is False (0) # ECC: L7, M15, Q25, H30 # Create the object qr = pyqrcode.create(text, error=errorCorrection) # Get the terminal representation and split by lines (get rid of top and bottom white spaces) plainOut = qr.terminal().split("\n")[5:-5] print(qr.terminal()) # Initialize the output 2D list out = [] for line in plainOut: thisOut = [] for char in line: if char == u'7': # This is white thisOut.append(1) elif char == u'4': # This is black, it's part of the u'49' thisOut.append(0) # Finally add everything to the output, stipping whitespaces at start and end out.append(thisOut[4:-4]) # Everything is done, return the qr code list return out<commit_after> import pyqrcode # sudo pip install pyqrcode def getQRArray(text, errorCorrection): """ Takes in text and errorCorrection (letter), returns 2D array of the QR code""" # White is True (1) # Black is False (0) # ECC: L7, M15, Q25, H30 # Create the object qr = pyqrcode.create(text, error=errorCorrection) # Get the terminal representation and split by lines (get rid of top and bottom white spaces) plainOut = qr.terminal().split("\n")[5:-5] # Initialize the output 2D list out = [] for line in plainOut: thisOut = [] for char in line: if char == u'7': # This is white thisOut.append(1) elif char == u'4': # This is black, it's part of the u'49' thisOut.append(0) # Finally add everything to the output, stipping whitespaces at start and end out.append(thisOut[4:-4]) # Everything is done, return the qr code list return out
<commit_msg>Add arrays to optimization test file <commit_before>class Opt { public static void main(String[] args) { System.out.println(new Derp().foo()); } } class Derp { public int foo() { int x; boolean b; x = 1; b = false && true; if (b) { x = 2; } else { x = 3 + 2 + 3 * 2; } System.out.println(x); return x; } } <commit_after>class Opt { public static void main(String[] args) { System.out.println(new Derp().foo()); } } class Derp { public int foo() { int x; boolean b; int i; int[] array; x = 1; b = false && true; if (b) { x = 2; } else { x = 3 + 2 + 3 * 2; } System.out.println(x); i = 0; array = new int[15 + 5]; while (i < 5) { array[2 + 2] = 20; System.out.println(array[2 * 1 + 2]); i = i + 1; } return x; } }
<commit_msg>cortex-m: Use PSP for all threads <commit_before>/** * @file * @brief * * @author Anton Kozlov * @date 25.10.2012 */ #include <stdint.h> #include <stdbool.h> #include <stddef.h> #include <assert.h> #include <hal/context.h> #include <asm/modes.h> #include <arm/fpu.h> /* In the RVCT v2.0 and above, all generated code and C library code * will maintain eight-byte stack alignment on external interfaces. */ #define ARM_SP_ALIGNMENT 8 void context_init(struct context *ctx, unsigned int flags, void (*routine_fn)(void), void *sp) { ctx->lr = (uint32_t) routine_fn; ctx->sp = (uint32_t) sp; assertf(((uint32_t) sp % ARM_SP_ALIGNMENT) == 0, "Stack pointer is not aligned to 8 bytes.\n" "Firstly please make sure the thread stack size is aligned to 8 bytes" ); ctx->control = 0; if (!(flags & CONTEXT_PRIVELEGED)) { ctx->control |= CONTROL_NPRIV; } arm_fpu_context_init(&ctx->fpu_data); } <commit_after>/** * @file * @brief * * @author Anton Kozlov * @date 25.10.2012 */ #include <stdint.h> #include <stdbool.h> #include <stddef.h> #include <assert.h> #include <hal/context.h> #include <asm/modes.h> #include <arm/fpu.h> /* In the RVCT v2.0 and above, all generated code and C library code * will maintain eight-byte stack alignment on external interfaces. */ #define ARM_SP_ALIGNMENT 8 void context_init(struct context *ctx, unsigned int flags, void (*routine_fn)(void), void *sp) { ctx->lr = (uint32_t) routine_fn; ctx->sp = (uint32_t) sp; assertf(((uint32_t) sp % ARM_SP_ALIGNMENT) == 0, "Stack pointer is not aligned to 8 bytes.\n" "Firstly please make sure the thread stack size is aligned to 8 bytes" ); ctx->control = CONTROL_SPSEL_PSP; if (!(flags & CONTEXT_PRIVELEGED)) { ctx->control |= CONTROL_NPRIV; } arm_fpu_context_init(&ctx->fpu_data); }
<commit_msg>Make long description not point to Travis since can't guarantee tag <commit_before>from __future__ import absolute_import from setuptools import setup long_description="""TravisCI results .. image:: https://travis-ci.org/nanonyme/simplecpreprocessor.svg """ setup( name = "simplecpreprocessor", author = "Seppo Yli-Olli", author_email = "seppo.yli-olli@iki.fi", description = "Simple C preprocessor for usage eg before CFFI", keywords = "python c preprocessor", license = "BSD", url = "https://github.com/nanonyme/simplecpreprocessor", py_modules=["simplecpreprocessor"], long_description=long_description, use_scm_version=True, setup_requires=["setuptools_scm"], classifiers=[ "Development Status :: 4 - Beta", "Topic :: Utilities", "License :: OSI Approved :: BSD License", ], ) <commit_after>from __future__ import absolute_import from setuptools import setup long_description="""http://github.com/nanonyme/simplepreprocessor""" setup( name = "simplecpreprocessor", author = "Seppo Yli-Olli", author_email = "seppo.yli-olli@iki.fi", description = "Simple C preprocessor for usage eg before CFFI", keywords = "python c preprocessor", license = "BSD", url = "https://github.com/nanonyme/simplecpreprocessor", py_modules=["simplecpreprocessor"], long_description=long_description, use_scm_version=True, setup_requires=["setuptools_scm"], classifiers=[ "Development Status :: 4 - Beta", "Topic :: Utilities", "License :: OSI Approved :: BSD License", ], )
<commit_msg>Define frames per second in floating point. git-svn-id: a1a4b28b82a3276cc491891159dd9963a0a72fae@1163 542714f4-19e9-0310-aa3c-eee0fc999fb1 <commit_before>// // $Id: ActionSequence.java,v 1.2 2001/11/27 08:09:34 mdb Exp $ package com.threerings.cast; import java.awt.Point; import java.io.Serializable; /** * The action sequence class describes a particular character animation * sequence. An animation sequence consists of one or more frames of * animation, renders at a particular frame rate, and has an origin point * that specifies the location of the base of the character in relation to * the bounds of the animation images. */ public class ActionSequence implements Serializable { /** The action sequence name. */ public String name; /** The number of frames per second to show when animating. */ public int framesPerSecond; /** The position of the character's base for this sequence. */ public Point origin = new Point(); /** * Returns a string representation of this action sequence. */ public String toString () { return "[name=" + name + ", framesPerSecond=" + framesPerSecond + ", origin=" + origin + "]"; } } <commit_after>// // $Id: ActionSequence.java,v 1.3 2002/03/27 21:51:33 mdb Exp $ package com.threerings.cast; import java.awt.Point; import java.io.Serializable; /** * The action sequence class describes a particular character animation * sequence. An animation sequence consists of one or more frames of * animation, renders at a particular frame rate, and has an origin point * that specifies the location of the base of the character in relation to * the bounds of the animation images. */ public class ActionSequence implements Serializable { /** The action sequence name. */ public String name; /** The number of frames per second to show when animating. */ public float framesPerSecond; /** The position of the character's base for this sequence. */ public Point origin = new Point(); /** * Returns a string representation of this action sequence. */ public String toString () { return "[name=" + name + ", framesPerSecond=" + framesPerSecond + ", origin=" + origin + "]"; } }
<commit_msg>Improve verbose name for assets <commit_before> from django.apps import AppConfig class GlitterBasicAssetsConfig(AppConfig): name = 'glitter.assets' label = 'glitter_assets' def ready(self): super(GlitterBasicAssetsConfig, self).ready() from . import listeners # noqa <commit_after> from django.apps import AppConfig class GlitterBasicAssetsConfig(AppConfig): name = 'glitter.assets' label = 'glitter_assets' verbose_name = 'Assets' def ready(self): super(GlitterBasicAssetsConfig, self).ready() from . import listeners # noqa
<commit_msg>Remove outdated parameter from docstring <commit_before>"""The Adventure game.""" def load_advent_dat(data): import os from .data import parse datapath = os.path.join(os.path.dirname(__file__), 'advent.dat') with open(datapath, 'r', encoding='ascii') as datafile: parse(data, datafile) def play(seed=None): """Turn the Python prompt into an Adventure game. With `seed` the caller can supply an integer to start the random number generator at a known state. When `quiet` is true, no output is printed as the game is played; the caller of a command has to manually check `_game.output` for the result, which makes it possible to write very quiet tests. """ global _game from .game import Game from .prompt import install_words _game = Game(seed) load_advent_dat(_game) install_words(_game) _game.start() print(_game.output[:-1]) def resume(savefile, quiet=False): global _game from .game import Game from .prompt import install_words _game = Game.resume(savefile) install_words(_game) if not quiet: print('GAME RESTORED\n') <commit_after>"""The Adventure game.""" def load_advent_dat(data): import os from .data import parse datapath = os.path.join(os.path.dirname(__file__), 'advent.dat') with open(datapath, 'r', encoding='ascii') as datafile: parse(data, datafile) def play(seed=None): """Turn the Python prompt into an Adventure game. With optional the `seed` argument the caller can supply an integer to start the Python random number generator at a known state. """ global _game from .game import Game from .prompt import install_words _game = Game(seed) load_advent_dat(_game) install_words(_game) _game.start() print(_game.output[:-1]) def resume(savefile, quiet=False): global _game from .game import Game from .prompt import install_words _game = Game.resume(savefile) install_words(_game) if not quiet: print('GAME RESTORED\n')
<commit_msg>Support negative numbers in qtcreator debugging <commit_before>from dumper import * def qdump__FixedPoint(d, value): d.putNumChild(3) raw = [ value["v"]["s"][i].integer() for i in range( value["v"]["numWords"].integer() ) ] ss = value["v"]["storageSize"].integer() exp = [raw[i] * 2**(i * ss) for i in range(len(raw)) ] d.putValue(sum(exp) * 2**-value["fractionalWidth"].integer()) if d.isExpanded(): with Children(d): d.putSubItem("fractionalWidth", value["fractionalWidth"]) d.putSubItem("integerWidth", value["integerWidth"]) d.putSubItem("v", value["v"]) def qdump__MultiwordInteger(d, value): d.putNumChild(3) raw = [ value["s"][i].integer() for i in range( value["numWords"].integer() ) ] exp = [ raw[i] * 2**(i * value["storageSize"].integer()) for i in range(len(raw)) ] d.putValue(sum(exp)) if d.isExpanded(): with Children(d): d.putSubItem("numWords", value["numWords"]) d.putSubItem("storageSize", value["storageSize"]) d.putSubItem("s", value["s"]) <commit_after>from dumper import * def qdump__FixedPoint(d, value): d.putNumChild(3) raw = [ value["v"]["s"][i].integer() for i in range( value["v"]["numWords"].integer() ) ] ss = value["v"]["storageSize"].integer() exp = [raw[i] * 2**(i * ss) for i in range(len(raw)) ] if raw[-1] >= 2**(ss-1): exp += [ -2**(ss * len(raw)) ] d.putValue(sum(exp) * 2**-value["fractionalWidth"].integer()) if d.isExpanded(): with Children(d): d.putSubItem("fractionalWidth", value["fractionalWidth"]) d.putSubItem("integerWidth", value["integerWidth"]) d.putSubItem("v", value["v"]) def qdump__MultiwordInteger(d, value): d.putNumChild(3) raw = [ value["s"][i].integer() for i in range( value["numWords"].integer() ) ] exp = [ raw[i] * 2**(i * value["storageSize"].integer()) for i in range(len(raw)) ] d.putValue(sum(exp)) if d.isExpanded(): with Children(d): d.putSubItem("numWords", value["numWords"]) d.putSubItem("storageSize", value["storageSize"]) d.putSubItem("s", value["s"])
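The sign handling added above (treating a set top bit in the most significant word as a negative two's-complement value) can be checked in plain Python; this standalone sketch uses made-up word values and is not part of the debugger helper itself:

def multiword_to_int(words, storage_size):
    # Interpret the list of unsigned words (least significant first)
    # as one two's-complement integer.
    value = sum(w << (i * storage_size) for i, w in enumerate(words))
    # If the top bit of the most significant word is set, the number
    # is negative: subtract 2**(total number of bits) once.
    if words[-1] >= 1 << (storage_size - 1):
        value -= 1 << (storage_size * len(words))
    return value

# Two 8-bit words 0xFF, 0xFF represent -1.
assert multiword_to_int([0xFF, 0xFF], 8) == -1
# 0x00, 0x80 is the most negative 16-bit value, -32768.
assert multiword_to_int([0x00, 0x80], 8) == -32768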
<commit_msg>Update reset page to use ApiHttp <commit_before>import { Component } from '@angular/core'; import { ActivatedRoute, Router } from '@angular/router'; import { Http } from '@angular/http'; import { MdSnackBar } from '@angular/material'; @Component({ selector: 'reset-password', styleUrls: ['./reset-components.css'], templateUrl: './reset.components.html' }) export class ResetPasswordComponent { public currentToken: any; public reset: any = {}; constructor( public route: ActivatedRoute, public http: Http, public snackBar: MdSnackBar, private router: Router ) { this.currentToken = this.route.snapshot.params['token']; } public resetPassword() { this.http.post('/reset/' + this.currentToken, this.reset) .subscribe((data) => { console.log(data); }, (error) => { let response = JSON.parse(error._body); this.openSnackBar(response.message.msg, 'OK'); }); } public openSnackBar(message: string, action: string) { this.snackBar.open(message, action, { duration: 2000, }); } } <commit_after>import { Component } from '@angular/core'; import { ActivatedRoute, Router } from '@angular/router'; import { ApiHttp } from '../../api-http.service'; import { MdSnackBar } from '@angular/material'; @Component({ selector: 'reset-password', styleUrls: ['./reset-components.css'], templateUrl: './reset.components.html' }) export class ResetPasswordComponent { public currentToken: any; public reset: any = {}; constructor( public route: ActivatedRoute, public http: ApiHttp, public snackBar: MdSnackBar, private router: Router ) { this.currentToken = this.route.snapshot.params['token']; } public resetPassword() { this.http.post('/reset/' + this.currentToken, this.reset) .subscribe((data) => { console.log(data); }, (error) => { let response = JSON.parse(error._body); this.openSnackBar(response.message.msg, 'OK'); }); } public openSnackBar(message: string, action: string) { this.snackBar.open(message, action, { duration: 2000, }); } }
<commit_msg>Reset body limit to default <commit_before>import express, {Application} from 'express' import helmet from 'helmet' import cors from 'cors' import bodyParser from 'body-parser' import routes from '../routes' import connectDb from './connectDb' import {httpLoggerMiddleware as httpLogger} from './logger' export default async (): Promise<Application> => { const app = express() app.use(helmet()) app.use(cors()) app.use(bodyParser.json({limit: '1mb'})) app.use(httpLogger) await connectDb() app.use('/', routes) return app } <commit_after>import express, {Application} from 'express' import helmet from 'helmet' import cors from 'cors' import bodyParser from 'body-parser' import routes from '../routes' import connectDb from './connectDb' import {httpLoggerMiddleware as httpLogger} from './logger' export default async (): Promise<Application> => { const app = express() app.use(helmet()) app.use(cors()) app.use(bodyParser.json()) app.use(httpLogger) await connectDb() app.use('/', routes) return app }
<commit_msg>Fix example build under clang <commit_before> int main() { inform_dist *dist = inform_dist_create((uint32_t[4]){3, 0, 1, 2}, 4); assert(abs(inform_dist_prob(dist, 0) - 0.5) < 1e-6); assert(abs(inform_dist_prob(dist, 1) - 0.0) < 1e-6); assert(abs(inform_dist_prob(dist, 2) - 0.1666) < 1e-6); assert(abs(inform_dist_prob(dist, 3) - 0.3333) < 1e-6); inform_dist_free(dist); } <commit_after> int main() { inform_dist *dist = inform_dist_create((uint32_t[4]){3, 0, 1, 2}, 4); assert(fabs(inform_dist_prob(dist, 0) - 0.5) < 1e-6); assert(fabs(inform_dist_prob(dist, 1) - 0.0) < 1e-6); assert(fabs(inform_dist_prob(dist, 2) - 0.1666) < 1e-6); assert(fabs(inform_dist_prob(dist, 3) - 0.3333) < 1e-6); for (size_t i = 0; i < 4; ++i) { printf("%lf ", inform_dist_prob(dist, i)); } printf("\n"); inform_dist_free(dist); }
<commit_msg>Allow uploads even if a file was not provided. This happens when a url is provided, but not a file <commit_before>package pixur import ( "encoding/json" "mime/multipart" "net/http" ) func (s *Server) uploadHandler(w http.ResponseWriter, r *http.Request) error { if r.Method != "POST" { http.Error(w, "Unsupported Method", http.StatusMethodNotAllowed) return nil } var filename string var filedata multipart.File var fileURL string if uploadedFile, fileHeader, err := r.FormFile("file"); err != nil { if err != http.ErrMissingFile { return err } } else { filename = fileHeader.Filename filedata = uploadedFile } fileURL = r.FormValue("url") var task = &CreatePicTask{ pixPath: s.pixPath, db: s.db, FileData: filedata, Filename: filename, FileURL: fileURL, } defer task.Reset() if err := task.Run(); err != nil { return nil } w.Header().Set("Content-Type", "application/json") enc := json.NewEncoder(w) if err := enc.Encode(task.CreatedPic.ToInterface()); err != nil { return err } return nil } <commit_after>package pixur import ( "encoding/json" "mime/multipart" "net/http" ) func (s *Server) uploadHandler(w http.ResponseWriter, r *http.Request) error { if r.Method != "POST" { http.Error(w, "Unsupported Method", http.StatusMethodNotAllowed) return nil } var filename string var filedata multipart.File var fileURL string if uploadedFile, fileHeader, err := r.FormFile("file"); err != nil { if err != http.ErrMissingFile && err != http.ErrNotMultipart { return err } } else { filename = fileHeader.Filename filedata = uploadedFile } fileURL = r.FormValue("url") var task = &CreatePicTask{ pixPath: s.pixPath, db: s.db, FileData: filedata, Filename: filename, FileURL: fileURL, } defer task.Reset() if err := task.Run(); err != nil { return nil } w.Header().Set("Content-Type", "application/json") enc := json.NewEncoder(w) if err := enc.Encode(task.CreatedPic.ToInterface()); err != nil { return err } return nil }
<commit_msg>Use the new assert() macro <commit_before> uint128_t fnv_1a(const void * restrict const buf, const size_t len, const size_t skip_pos, const size_t skip_len) { assert((skip_pos <= len) && (skip_pos + skip_len <= len)); static const uint128_t prime = (((uint128_t)0x0000000001000000) << 64) | 0x000000000000013B; uint128_t hash = (((uint128_t)0x6C62272E07BB0142) << 64) | 0x62B821756295C58D; // two consecutive loops should be faster than one loop with an "if" const uint8_t * restrict const bytes = buf; for (size_t i = 0; i < skip_pos; i++) { hash ^= bytes[i]; hash *= prime; } for (size_t i = skip_pos + skip_len; i < len; i++) { hash ^= bytes[i]; hash *= prime; } return hash; } <commit_after> uint128_t fnv_1a(const void * restrict const buf, const size_t len, const size_t skip_pos, const size_t skip_len) { assert((skip_pos <= len) && (skip_pos + skip_len <= len), "len %zu, skip_pos %zu, skip_len %zu", len, skip_pos, skip_len); static const uint128_t prime = (((uint128_t)0x0000000001000000) << 64) | 0x000000000000013B; uint128_t hash = (((uint128_t)0x6C62272E07BB0142) << 64) | 0x62B821756295C58D; // two consecutive loops should be faster than one loop with an "if" const uint8_t * restrict const bytes = buf; for (size_t i = 0; i < skip_pos; i++) { hash ^= bytes[i]; hash *= prime; } for (size_t i = skip_pos + skip_len; i < len; i++) { hash ^= bytes[i]; hash *= prime; } return hash; }
<commit_msg>Add check for outstanding fibres in unit tests <commit_before>global._ = require("underscore"); global.$injector = require("../lib/yok").injector; $injector.require("config", "../lib/config"); $injector.require("resources", "../lib/resource-loader");<commit_after>global._ = require("underscore"); global.$injector = require("../lib/yok").injector; $injector.require("config", "../lib/config"); $injector.require("resources", "../lib/resource-loader"); process.on('exit', (code: number) => { require("fibers/future").assertNoFutureLeftBehind(); });
<commit_msg>Move the pragma: nocover to except block <commit_before>import gym import pytest # Import for side-effect of registering environment import imitation.examples.airl_envs # noqa: F401 import imitation.examples.model_envs # noqa: F401 ENV_NAMES = [env_spec.id for env_spec in gym.envs.registration.registry.all() if env_spec.id.startswith('imitation/')] @pytest.mark.parametrize("env_name", ENV_NAMES) def test_envs(env_name): # pragma: no cover """Check that our custom environments don't crash on `step`, and `reset`.""" try: env = gym.make(env_name) except gym.error.DependencyNotInstalled as e: if e.args[0].find('mujoco_py') != -1: pytest.skip("Requires `mujoco_py`, which isn't installed.") else: raise env.reset() obs_space = env.observation_space for _ in range(4): act = env.action_space.sample() obs, rew, done, info = env.step(act) assert obs in obs_space <commit_after>import gym import pytest # Import for side-effect of registering environment import imitation.examples.airl_envs # noqa: F401 import imitation.examples.model_envs # noqa: F401 ENV_NAMES = [env_spec.id for env_spec in gym.envs.registration.registry.all() if env_spec.id.startswith('imitation/')] @pytest.mark.parametrize("env_name", ENV_NAMES) def test_envs(env_name): """Check that our custom environments don't crash on `step`, and `reset`.""" try: env = gym.make(env_name) except gym.error.DependencyNotInstalled as e: # pragma: nocover if e.args[0].find('mujoco_py') != -1: pytest.skip("Requires `mujoco_py`, which isn't installed.") else: raise env.reset() obs_space = env.observation_space for _ in range(4): act = env.action_space.sample() obs, rew, done, info = env.step(act) assert obs in obs_space
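A generic standalone sketch of the skip-on-missing-dependency pattern used in that test, with a made-up module name standing in for the optional dependency:

import pytest

def test_optional_feature():
    try:
        import some_optional_dep  # hypothetical optional dependency
    except ImportError:  # pragma: nocover
        pytest.skip("Requires `some_optional_dep`, which isn't installed.")
    # The body only runs when the import above succeeded.
    assert some_optional_dep.__name__ == "some_optional_dep"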
<commit_msg>Rewrite to apply functions once <commit_before>/** @module react-elementary/lib/mergeProps */ import classNames = require('classnames') import { mergeWithKey } from 'ramda' export interface IReducers { [key: string]: (...args: any[]) => any } function customizeMerges(reducers: IReducers) { return function mergeCustomizer(key: string, ...values: any[]) { const reducer = reducers[key] if (typeof reducer === 'function') { return reducer(...values) } return values[values.length - 1] } } /** * Takes a map of reducer function and returns a merge function. * @param {object.<function>} reducers - a map of keys to functions * @return {function} - merges the props of a number of * objects */ export function createCustomMerge(reducers: IReducers) { const mergeCustomizer = customizeMerges(reducers) return function mergeProps(...objs: object[]) { return objs.reduce(mergeWithKey(mergeCustomizer)) } } /** * Merges a number of objects, applying the classnames library to the className * prop. * @function * @param {...object} objs - the objects to be merged * @return {object} - the result of the merge */ export default createCustomMerge({ className: classNames }) <commit_after>/** @module react-elementary/lib/mergeProps */ import classNames = require('classnames') import { apply, evolve, map, mapObjIndexed, merge, mergeAll, nth, pickBy, pipe, pluck, prop, unapply, } from 'ramda' export interface IReducers { [key: string]: (...args: any[]) => any } function isNotUndefined(x: any) { return typeof x !== 'undefined' } /** * Takes a map of reducer function and returns a merge function. * @param {object.<function>} reducers - a map of keys to functions * @return {function} - merges the props of a number of * objects */ export function createCustomMerge(reducers: IReducers) { return function mergeProps(...objs: object[]) { const merged = mergeAll(objs) const plucked = mapObjIndexed( pipe(unapply(nth(1)), key => pluck(key, objs).filter(isNotUndefined)), reducers, ) const evolved = evolve( map(apply, reducers), pickBy(prop('length'), plucked), ) return merge(merged, evolved) } } /** * Merges a number of objects, applying the classnames library to the className * prop. * @function * @param {...object} objs - the objects to be merged * @return {object} - the result of the merge */ export default createCustomMerge({ className: classNames })
<commit_msg>arm-hyp: Correct previous merge with master <commit_before>/* * Copyright 2016, General Dynamics C4 Systems * * This software may be distributed and modified according to the terms of * the GNU General Public License version 2. Note that NO WARRANTY is provided. * See "LICENSE_GPLv2.txt" for details. * * @TAG(GD_GPL) */ #ifndef __PLAT_MACHINE_HARDWARE_H #define __PLAT_MACHINE_HARDWARE_H #define physBase 0x80000000 #define kernelBase 0xe0000000 #define physMappingOffset (kernelBase - physBase) #define BASE_OFFSET physMappingOffset #define PPTR_TOP 0xfff00000 #define PADDR_TOP (PPTR_TOP - BASE_OFFSET) #endif <commit_after>/* * Copyright 2016, General Dynamics C4 Systems * * This software may be distributed and modified according to the terms of * the GNU General Public License version 2. Note that NO WARRANTY is provided. * See "LICENSE_GPLv2.txt" for details. * * @TAG(GD_GPL) */ #ifndef __PLAT_MACHINE_HARDWARE_H #define __PLAT_MACHINE_HARDWARE_H #include <plat/machine/hardware_gen.h> #define physBase 0x80000000 #define kernelBase 0xe0000000 #define physMappingOffset (kernelBase - physBase) #define BASE_OFFSET physMappingOffset #define PPTR_TOP 0xfff00000 #define PADDR_TOP (PPTR_TOP - BASE_OFFSET) #endif
<commit_msg>Add the RLS client import to init file Signed-off-by: drummersbrother <d12fd520b57756512907f841763cabff8eb36464@icloud.com> <commit_before> __author__ = 'Hugo Berg' __email__ = 'hb11002@icloud.com' __version__ = '0.1.0' from rocket_snake.constants import * <commit_after> __author__ = 'Hugo Berg' __email__ = 'hb11002@icloud.com' __version__ = '0.1.0' from rocket_snake.client import RLS_Client from rocket_snake.constants import *
<commit_msg>Fix name of spell checker. <commit_before>import traceback from routes import Mapper import ppp_core import example_ppp_module as flower import ppp_questionparsing_grammatical as qp_grammatical import ppp_cas #import ppp_nlp_ml_standalone class Application: def __init__(self): self.mapper = Mapper() self.mapper.connect('core', '/core/', app=ppp_core.app) self.mapper.connect('qp_grammatical', '/qp_grammatical/', app=qp_grammatical.app) self.mapper.connect('flower', '/flower/', app=flower.app) self.mapper.connect('cas', '/cas/', app=ppp_cas.app) self.mapper.connect('spellcheck', '/spell_checker/', app=ppp_cas.app) #self.mapper.connect('nlp_ml_standalone', '/nlp_ml_standalone/', app=ppp_nlp_ml_standalone.app) def __call__(self, environ, start_response): match = self.mapper.routematch(environ=environ) app = match[0]['app'] if match else self.not_found try: return app(environ, start_response) except KeyboardInterrupt: raise except Exception as e: traceback.print_exc(e) def not_found(self, environ, start_response): headers = [('Content-Type', 'text/plain')] start_response('404 Not Found', headers) return [b'Not found.'] app = Application() <commit_after>import traceback from routes import Mapper import ppp_core import example_ppp_module as flower import ppp_questionparsing_grammatical as qp_grammatical import ppp_cas import ppp_spell_checker #import ppp_nlp_ml_standalone class Application: def __init__(self): self.mapper = Mapper() self.mapper.connect('core', '/core/', app=ppp_core.app) self.mapper.connect('qp_grammatical', '/qp_grammatical/', app=qp_grammatical.app) self.mapper.connect('flower', '/flower/', app=flower.app) self.mapper.connect('cas', '/cas/', app=ppp_cas.app) self.mapper.connect('spellcheck', '/spell_checker/', app=ppp_spell_checker.app) #self.mapper.connect('nlp_ml_standalone', '/nlp_ml_standalone/', app=ppp_nlp_ml_standalone.app) def __call__(self, environ, start_response): match = self.mapper.routematch(environ=environ) app = match[0]['app'] if match else self.not_found try: return app(environ, start_response) except KeyboardInterrupt: raise except Exception as e: traceback.print_exc(e) def not_found(self, environ, start_response): headers = [('Content-Type', 'text/plain')] start_response('404 Not Found', headers) return [b'Not found.'] app = Application()
<commit_msg>Make exception messages more descriptive <commit_before>"""This module defines error classes for typedjsonrpc.""" class Error(Exception): """Base class for all errors.""" code = 0 message = None data = None def __init__(self, data=None): super(Error, self).__init__() self.data = data def as_error_object(self): """Turns the error into an error object.""" return { "code": self.code, "message": self.message, "data": self.data } class ParseError(Error): """Invalid JSON was received by the server / JSON could not be parsed.""" code = -32700 message = "Parse error" class InvalidRequestError(Error): """The JSON sent is not a valid request object.""" code = -32600 message = "Invalid request" class MethodNotFoundError(Error): """The method does not exist.""" code = -32601 message = "Method not found" class InvalidParamsError(Error): """Invalid method parameter(s).""" code = -32602 message = "Invalid params" class InternalError(Error): """Internal JSON-RPC error.""" code = -32603 message = "Internal error" class ServerError(Error): """Something else went wrong.""" code = -32000 message = "Server error" <commit_after>"""This module defines error classes for typedjsonrpc.""" class Error(Exception): """Base class for all errors.""" code = 0 message = None data = None def __init__(self, data=None): super(Error, self).__init__(self.code, self.message, data) self.data = data def as_error_object(self): """Turns the error into an error object.""" return { "code": self.code, "message": self.message, "data": self.data } class ParseError(Error): """Invalid JSON was received by the server / JSON could not be parsed.""" code = -32700 message = "Parse error" class InvalidRequestError(Error): """The JSON sent is not a valid request object.""" code = -32600 message = "Invalid request" class MethodNotFoundError(Error): """The method does not exist.""" code = -32601 message = "Method not found" class InvalidParamsError(Error): """Invalid method parameter(s).""" code = -32602 message = "Invalid params" class InternalError(Error): """Internal JSON-RPC error.""" code = -32603 message = "Internal error" class ServerError(Error): """Something else went wrong.""" code = -32000 message = "Server error"
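Why forwarding the details to `Exception.__init__` makes messages more descriptive can be seen in a small standalone sketch; the class below is illustrative, not the library's:

class AppError(Exception):
    code = -32000
    message = "Server error"

    def __init__(self, data=None):
        # Passing the details to Exception.__init__ makes them show up
        # in str(err) and in tracebacks, instead of an empty message.
        super(AppError, self).__init__(self.code, self.message, data)
        self.data = data

try:
    raise AppError("disk full")
except AppError as err:
    print(err)  # -> (-32000, 'Server error', 'disk full')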
<commit_msg>Undo member changes in test <commit_before>from django.test import TestCase from django.contrib.auth.models import User from mks.models import Member from .models import Suggestion class SuggestionsTests(TestCase): def setUp(self): self.member = Member.objects.create(name='mk_1') self.regular_user = User.objects.create_user('reg_user') def test_simple_text_suggestion(self): MK_SITE = 'http://mk1.example.com' suggestion = Suggestion.objects.create_suggestion( suggested_by=self.regular_user, content_object=self.member, suggestion_action=Suggestion.UPDATE, suggested_field='website', suggested_text=MK_SITE ) self.assertIsNone(self.member.website) suggestion.auto_apply() mk = Member.objects.get(pk=self.member.pk) self.assertEqual(mk.website, MK_SITE) <commit_after>from django.test import TestCase from django.contrib.auth.models import User from mks.models import Member from .models import Suggestion class SuggestionsTests(TestCase): def setUp(self): self.member = Member.objects.create(name='mk_1') self.regular_user = User.objects.create_user('reg_user') def test_simple_text_suggestion(self): MK_SITE = 'http://mk1.example.com' suggestion = Suggestion.objects.create_suggestion( suggested_by=self.regular_user, content_object=self.member, suggestion_action=Suggestion.UPDATE, suggested_field='website', suggested_text=MK_SITE ) self.assertIsNone(self.member.website) suggestion.auto_apply() mk = Member.objects.get(pk=self.member.pk) self.assertEqual(mk.website, MK_SITE) # cleanup mk.website = None mk.save() self.member = mk
<commit_msg>Add created_at DateTimeField to Twitter example <commit_before>import json from urllib2 import urlopen import micromodels class TwitterUser(micromodels.Model): id = micromodels.IntegerField() screen_name = micromodels.CharField() name = micromodels.CharField() description = micromodels.CharField() def get_profile_url(self): return 'http://twitter.com/%s' % self.screen_name class Tweet(micromodels.Model): id = micromodels.IntegerField() text = micromodels.CharField() user = micromodels.ModelField(TwitterUser) json_data = urlopen('http://api.twitter.com/1/statuses/show/20.json').read() tweet = Tweet(json.loads(json_data)) print "Tweet was posted by %s (%s)" % (tweet.user.name, tweet.user.get_profile_url()) <commit_after>import json from urllib2 import urlopen import micromodels class TwitterUser(micromodels.Model): id = micromodels.IntegerField() screen_name = micromodels.CharField() name = micromodels.CharField() description = micromodels.CharField() def get_profile_url(self): return 'http://twitter.com/%s' % self.screen_name class Tweet(micromodels.Model): id = micromodels.IntegerField() text = micromodels.CharField() created_at = micromodels.DateTimeField(format="%a %b %d %H:%M:%S +0000 %Y") user = micromodels.ModelField(TwitterUser) json_data = urlopen('http://api.twitter.com/1/statuses/show/20.json').read() tweet = Tweet(json.loads(json_data)) print "Tweet was posted by %s (%s) on a %s" % ( tweet.user.name, tweet.user.get_profile_url(), tweet.created_at.strftime("%A") )
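The `created_at` format string in the entry above can be exercised directly with the standard library; the timestamp below is made up for illustration:

from datetime import datetime

# Twitter's classic created_at layout, with the +0000 offset written
# literally into the format string, as in the field definition above.
fmt = "%a %b %d %H:%M:%S +0000 %Y"
created = datetime.strptime("Wed Mar 21 12:30:00 +0000 2007", fmt)
print(created.strftime("%A"))  # -> Wednesday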
<commit_msg>Delete excess code in the latest test scenario. <commit_before>import unittest import os import re os.sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) import parsePoaXml import generatePoaXml # Import test settings last in order to override the regular settings import poa_test_settings as settings def override_settings(): # For now need to override settings to use test data generatePoaXml.settings = settings def create_test_directories(): try: os.mkdir(settings.TEST_TEMP_DIR) except OSError: pass try: os.mkdir(settings.TARGET_OUTPUT_DIR) except OSError: pass class TestParsePoaXml(unittest.TestCase): def setUp(self): override_settings() create_test_directories() self.passes = [] self.passes.append('elife-02935-v2.xml') self.passes.append('elife-04637-v2.xml') self.passes.append('elife-15743-v1.xml') self.passes.append('elife-02043-v2.xml') def test_parse(self): for xml_file_name in self.passes: file_path = settings.XLS_PATH + xml_file_name articles = parsePoaXml.build_articles_from_article_xmls([file_path]) self.assertEqual(len(articles), 1) if __name__ == '__main__': unittest.main() <commit_after>import unittest import os import re os.sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) import parsePoaXml import generatePoaXml # Import test settings last in order to override the regular settings import poa_test_settings as settings class TestParsePoaXml(unittest.TestCase): def setUp(self): self.passes = [] self.passes.append('elife-02935-v2.xml') self.passes.append('elife-04637-v2.xml') self.passes.append('elife-15743-v1.xml') self.passes.append('elife-02043-v2.xml') def test_parse(self): for xml_file_name in self.passes: file_path = settings.XLS_PATH + xml_file_name articles = parsePoaXml.build_articles_from_article_xmls([file_path]) self.assertEqual(len(articles), 1) if __name__ == '__main__': unittest.main()
<commit_msg>Change qm7 example to use MultiFeature <commit_before>import numpy from sklearn.linear_model import Ridge from sklearn.kernel_ridge import KernelRidge from sklearn.metrics import mean_absolute_error as MAE from molml.features import EncodedBond, Connectivity from utils import load_qm7 if __name__ == "__main__": # This is just boiler plate code to load the data Xin_train, Xin_test, y_train, y_test = load_qm7() feats = [ EncodedBond(n_jobs=-1, max_depth=3), Connectivity(depth=1, n_jobs=-1), Connectivity(depth=3, use_coordination=True, n_jobs=-1), ] train_feats = [] test_feats = [] for tf in feats: X_train = tf.fit_transform(Xin_train) X_test = tf.transform(Xin_test) train_feats.append(X_train) test_feats.append(X_test) X_train = numpy.hstack(train_feats) X_test = numpy.hstack(test_feats) clfs = [ Ridge(alpha=0.01), KernelRidge(alpha=1e-9, gamma=1e-5, kernel="rbf"), ] for clf in clfs: print clf clf.fit(X_train, y_train) train_error = MAE(clf.predict(X_train), y_train) test_error = MAE(clf.predict(X_test), y_test) print "Train MAE: %.4f Test MAE: %.4f" % (train_error, test_error) print <commit_after>from sklearn.linear_model import Ridge from sklearn.kernel_ridge import KernelRidge from sklearn.metrics import mean_absolute_error as MAE from molml.features import EncodedBond, Connectivity, MultiFeature from utils import load_qm7 if __name__ == "__main__": # This is just boiler plate code to load the data Xin_train, Xin_test, y_train, y_test = load_qm7() feats = [ EncodedBond(n_jobs=-1, max_depth=3), Connectivity(depth=1, n_jobs=-1), Connectivity(depth=3, use_coordination=True, n_jobs=-1), ] full_feat = MultiFeature(feats) X_train = full_feat.fit_transform(Xin_train) X_test = full_feat.transform(Xin_test) clfs = [ Ridge(alpha=0.01), KernelRidge(alpha=1e-9, gamma=1e-5, kernel="rbf"), ] for clf in clfs: print clf clf.fit(X_train, y_train) train_error = MAE(clf.predict(X_train), y_train) test_error = MAE(clf.predict(X_test), y_test) print "Train MAE: %.4f Test MAE: %.4f" % (train_error, test_error) print
<commit_msg>Add back the /metrics endpoint <commit_before>from notebook.utils import url_path_join from tornado import ioloop from nbresuse.api import ApiHandler from nbresuse.config import ResourceUseDisplay from nbresuse.metrics import PSUtilMetricsLoader from nbresuse.prometheus import PrometheusHandler def _jupyter_server_extension_paths(): """ Set up the server extension for collecting metrics """ return [{"module": "nbresuse"}] def _jupyter_nbextension_paths(): """ Set up the notebook extension for displaying metrics """ return [ { "section": "notebook", "dest": "nbresuse", "src": "static", "require": "nbresuse/main", } ] def load_jupyter_server_extension(nbapp): """ Called during notebook start """ resuseconfig = ResourceUseDisplay(parent=nbapp) nbapp.web_app.settings["nbresuse_display_config"] = resuseconfig route_pattern = url_path_join(nbapp.web_app.settings['base_url'], '/api/nbresuse/v1') nbapp.web_app.add_handlers('.*', [(route_pattern, ApiHandler)]) callback = ioloop.PeriodicCallback( PrometheusHandler(PSUtilMetricsLoader(nbapp)), 1000 ) callback.start()<commit_after>from notebook.utils import url_path_join from tornado import ioloop from nbresuse.api import ApiHandler from nbresuse.config import ResourceUseDisplay from nbresuse.metrics import PSUtilMetricsLoader from nbresuse.prometheus import PrometheusHandler def _jupyter_server_extension_paths(): """ Set up the server extension for collecting metrics """ return [{"module": "nbresuse"}] def _jupyter_nbextension_paths(): """ Set up the notebook extension for displaying metrics """ return [ { "section": "notebook", "dest": "nbresuse", "src": "static", "require": "nbresuse/main", } ] def load_jupyter_server_extension(nbapp): """ Called during notebook start """ resuseconfig = ResourceUseDisplay(parent=nbapp) nbapp.web_app.settings["nbresuse_display_config"] = resuseconfig base_url = nbapp.web_app.settings["base_url"] nbapp.web_app.add_handlers( ".*", [ (url_path_join(base_url, "/api/nbresuse/v1"), ApiHandler), (url_path_join(base_url, "/metrics"), ApiHandler), ], ) callback = ioloop.PeriodicCallback( PrometheusHandler(PSUtilMetricsLoader(nbapp)), 1000 ) callback.start()
<commit_msg>Add missing String() method in interface <commit_before>package column type Column interface { Len() uint Append(row interface{}) error At(index uint) (value interface{}, exists bool) } <commit_after>package column type Column interface { Len() uint Append(row interface{}) error At(index uint) (value interface{}, exists bool) String() string }
<commit_msg>Remove duplicated split call from EmploymentParser. <commit_before>from typing import Tuple from linkedin_scraper.parsers.base import BaseParser class EmploymentParser(BaseParser): def __init__(self): self.professions_list = self.get_lines_from_datafile( 'professions_list.txt') def parse(self, item: str) -> Tuple[str, str]: """ Parse LinkedIn employment string into position and company. :param item: employment string :return: position, company """ if ' at ' in item: # Simplest case, standard LinkedIn format <position> at <company> return tuple(item.split(' at ', maxsplit=1)) words = item.split() for index, word in enumerate(reversed(item.split())): normalized_word = word.strip(',.-').lower() if normalized_word in self.professions_list: founded_profession_index = len(words) - index break else: # We don't know which is which so return whole string as a position return item, '' # We found profession name in employment string, everything # after it is company name return (' '.join(words[:founded_profession_index]).rstrip(',.- '), ' '.join(words[founded_profession_index:]).lstrip(',.- ')) <commit_after>from typing import Tuple from linkedin_scraper.parsers.base import BaseParser class EmploymentParser(BaseParser): def __init__(self): self.professions_list = self.get_lines_from_datafile( 'professions_list.txt') def parse(self, item: str) -> Tuple[str, str]: """ Parse LinkedIn employment string into position and company. :param item: employment string :return: position, company """ if ' at ' in item: # Simplest case, standard LinkedIn format <position> at <company> return tuple(item.split(' at ', maxsplit=1)) words = item.split() for index, word in enumerate(reversed(words)): normalized_word = word.strip(',.-').lower() if normalized_word in self.professions_list: founded_profession_index = len(words) - index break else: # We don't know which is which so return whole string as a position return item, '' # We found profession name in employment string, everything # after it is company name return (' '.join(words[:founded_profession_index]).rstrip(',.- '), ' '.join(words[founded_profession_index:]).lstrip(',.- '))
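The reversed-scan index arithmetic in the parser above (`len(words) - index`) can be checked with a small standalone example; the word list and profession set here are made up:

words = "Senior software engineer Acme Corp".split()

# Scan from the end; stop at the last word that looks like a profession.
professions = {"engineer"}
for index, word in enumerate(reversed(words)):
    if word.lower() in professions:
        # index counts from the end, so len(words) - index is the slice
        # boundary just after the matched word in the original order.
        split_at = len(words) - index
        break

print(" ".join(words[:split_at]))  # -> Senior software engineer
print(" ".join(words[split_at:]))  # -> Acme Corp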
<commit_msg>Clean more elements on pressian.com <commit_before>import * as $ from 'jquery'; import { clearStyles } from '../util'; import { Article } from 'index'; export const cleanup = () => { $('#scrollDiv, body>img').remove(); } export function parse(): Article { return { title: $('.text-info .title').text().trim(), subtitle: $('.hboxsubtitle').text().trim(), content: clearStyles($('#news_body_area')[0].cloneNode(true)).innerHTML, timestamp: { created: new Date($('.byotherspan .date').text().trim().replace(/\./g, '-').replace(/\s+/, 'T') + '+09:00'), lastModified: undefined }, reporters: [{ name: $('.head_writer_fullname .byother').text().trim(), mail: undefined }] }; } <commit_after>import * as $ from 'jquery'; import { clearStyles } from '../util'; import { Article } from 'index'; export const cleanup = () => { $('#scrollDiv, body>img, body>div:not([id]), html>iframe, body>iframe, body>script, #fb-root, #sliderAdScript').remove(); } export function parse(): Article { return { title: $('.text-info .title').text().trim(), subtitle: $('.hboxsubtitle').text().trim(), content: clearStyles($('#news_body_area')[0].cloneNode(true)).innerHTML, timestamp: { created: new Date($('.byotherspan .date').text().trim().replace(/\./g, '-').replace(/\s+/, 'T') + '+09:00'), lastModified: undefined }, reporters: [{ name: $('.head_writer_fullname .byother').text().trim(), mail: undefined }] }; }
<commit_msg>Load the command handler before the commands <commit_before> from twisted.plugin import IPlugin from desertbot.moduleinterface import IModule, BotModule from zope.interface import implementer @implementer(IPlugin, IModule) class CommandHandler(BotModule): def actions(self): return super(CommandHandler, self).actions() + [('message-channel', 1, self.handleCommand), ('message-user', 1, self.handleCommand)] def handleCommand(self, message): if message.command: return self.bot.moduleHandler.runGatheringAction('botmessage', message) commandhandler = CommandHandler() <commit_after> from twisted.plugin import IPlugin from desertbot.moduleinterface import IModule, BotModule from zope.interface import implementer @implementer(IPlugin, IModule) class CommandHandler(BotModule): def __init__(self): BotModule.__init__(self) self.loadingPriority = 10 def actions(self): return super(CommandHandler, self).actions() + [('message-channel', 1, self.handleCommand), ('message-user', 1, self.handleCommand)] def handleCommand(self, message): if message.command: return self.bot.moduleHandler.runGatheringAction('botmessage', message) commandhandler = CommandHandler()
<commit_msg>Enable mutable config in blazar New releases of oslo.config support a 'mutable' parameter in Opts. oslo.service provides an option [1] which allows services to tell it they want mutate_config_files to be called by passing a parameter. This commit is to use the same approach. This allows Blazar to benefit from [2], where the 'debug' option (owned by oslo.log) is made mutable. We should be able to turn debug logging on and off by changing the config and sending a SIGHUP signal to blazar-manager. However, please note that blazar-manager currently doesn't work correctly after receiving a SIGHUP. As a result the mutable config is not yet usable. Operators should continue restarting blazar-manager after changing blazar.conf. TC goal: https://governance.openstack.org/tc/goals/rocky/enable-mutable-configuration.html [1] https://review.openstack.org/263312/ [2] https://review.openstack.org/254821/ Change-Id: Ieea9043b6f3a28dc92717680585614a68227120e <commit_before> import eventlet eventlet.monkey_patch() import gettext import sys from oslo_config import cfg from oslo_service import service gettext.install('blazar') from blazar.db import api as db_api from blazar.manager import service as manager_service from blazar.notification import notifier from blazar.utils import service as service_utils def main(): cfg.CONF(project='blazar', prog='blazar-manager') service_utils.prepare_service(sys.argv) db_api.setup_db() notifier.init() service.launch( cfg.CONF, manager_service.ManagerService() ).wait() if __name__ == '__main__': main() <commit_after> import eventlet eventlet.monkey_patch() import gettext import sys from oslo_config import cfg from oslo_service import service gettext.install('blazar') from blazar.db import api as db_api from blazar.manager import service as manager_service from blazar.notification import notifier from blazar.utils import service as service_utils def main(): cfg.CONF(project='blazar', prog='blazar-manager') service_utils.prepare_service(sys.argv) db_api.setup_db() notifier.init() service.launch( cfg.CONF, manager_service.ManagerService(), restart_method='mutate' ).wait() if __name__ == '__main__': main()
<commit_msg>Add django app and tests to source distribution <commit_before>import versioneer from setuptools import setup setup( name='domain_events', version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), description='Send and receive domain events via RabbitMQ', author='Ableton AG', author_email='webteam@ableton.com', url='https://github.com/AbletonAG/domain-events', license='MIT', packages=['domain_events'], install_requires=["pika >= 0.10.0"], zip_safe=False, ) <commit_after>import versioneer from setuptools import setup, find_packages setup( name='domain_events', version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), description='Send and receive domain events via RabbitMQ', author='Ableton AG', author_email='webteam@ableton.com', url='https://github.com/AbletonAG/domain-events', license='MIT', packages=find_packages(), install_requires=["pika >= 0.10.0"], zip_safe=False, )
<commit_msg>Update Python/Django: Restore admin.autodiscover() for Django 1.6 compatibility <commit_before>from django.conf.urls import include, url from django.contrib import admin urlpatterns = [ url(r'^admin/', include(admin.site.urls)), url(r'^selectable/', include('selectable.urls')), url(r'', include('timepiece.urls')), # authentication views url(r'^accounts/login/$', 'django.contrib.auth.views.login', name='auth_login'), url(r'^accounts/logout/$', 'django.contrib.auth.views.logout_then_login', name='auth_logout'), url(r'^accounts/password-change/$', 'django.contrib.auth.views.password_change', name='change_password'), url(r'^accounts/password-change/done/$', 'django.contrib.auth.views.password_change_done'), url(r'^accounts/password-reset/$', 'django.contrib.auth.views.password_reset', name='reset_password'), url(r'^accounts/password-reset/done/$', 'django.contrib.auth.views.password_reset_done'), url(r'^accounts/reset/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$', 'django.contrib.auth.views.password_reset_confirm'), url(r'^accounts/reset/done/$', 'django.contrib.auth.views.password_reset_complete'), ] <commit_after>from django.conf.urls import include, url from django.contrib import admin admin.autodiscover() # For Django 1.6 urlpatterns = [ url(r'^admin/', include(admin.site.urls)), url(r'^selectable/', include('selectable.urls')), url(r'', include('timepiece.urls')), # authentication views url(r'^accounts/login/$', 'django.contrib.auth.views.login', name='auth_login'), url(r'^accounts/logout/$', 'django.contrib.auth.views.logout_then_login', name='auth_logout'), url(r'^accounts/password-change/$', 'django.contrib.auth.views.password_change', name='change_password'), url(r'^accounts/password-change/done/$', 'django.contrib.auth.views.password_change_done'), url(r'^accounts/password-reset/$', 'django.contrib.auth.views.password_reset', name='reset_password'), url(r'^accounts/password-reset/done/$', 'django.contrib.auth.views.password_reset_done'), url(r'^accounts/reset/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$', 'django.contrib.auth.views.password_reset_confirm'), url(r'^accounts/reset/done/$', 'django.contrib.auth.views.password_reset_complete'), ]
<commit_msg>Make converter props optional where necessary <commit_before>export type CookieAttributes = object & { path?: string domain?: string expires?: number | Date sameSite?: string secure?: boolean [property: string]: any } export type ReadConverter = (value: string, name?: string) => any export type WriteConverter = (value: any, name?: string) => string export type CookieConverter = object & { read?: ReadConverter write?: WriteConverter } type CookiesConfig = object & { readonly converter: CookieConverter readonly attributes: CookieAttributes } type CookiesApi = object & { set: ( name: string, value: any, attributes?: CookieAttributes ) => string | undefined get: ( name?: string | undefined | null ) => string | undefined | (object & { [property: string]: any }) remove: (name: string, attributes?: CookieAttributes) => void withAttributes: (attributes: CookieAttributes) => Cookies withConverter: (converter: CookieConverter) => Cookies } export type Cookies = CookiesConfig & CookiesApi <commit_after>export type CookieAttributes = object & { path?: string domain?: string expires?: number | Date sameSite?: string secure?: boolean [property: string]: any } export type ReadConverter = (value: string, name?: string) => any export type WriteConverter = (value: any, name?: string) => string export type CookieConverter = object & { read: ReadConverter write: WriteConverter } type CookiesConfig = object & { readonly converter: CookieConverter readonly attributes: CookieAttributes } type CookiesApi = object & { set: ( name: string, value: any, attributes?: CookieAttributes ) => string | undefined get: ( name?: string | undefined | null ) => string | undefined | (object & { [property: string]: any }) remove: (name: string, attributes?: CookieAttributes) => void withAttributes: (attributes: CookieAttributes) => Cookies withConverter: (converter: { write?: WriteConverter, read?: ReadConverter }) => Cookies } export type Cookies = CookiesConfig & CookiesApi
<commit_msg>BUG(501): Fix compilation on OpenBSD (from Bernhard Leiner) <commit_before> /* Windows */ #ifdef _MSC_VER #include <Winsock2.h> #include <Ws2tcpip.h> typedef SOCKET xmms_socket_t; typedef int socklen_t; #define XMMS_EINTR WSAEINTR #define XMMS_EAGAIN WSAEWOULDBLOCK /* UNIX */ #else #define SOCKET_ERROR (-1) #define XMMS_EINTR EINTR #define XMMS_EAGAIN EWOULDBLOCK #include <sys/socket.h> #include <sys/select.h> #include <sys/types.h> #include <netinet/in.h> #include <netinet/tcp.h> #include <arpa/inet.h> #include <netdb.h> #include <fcntl.h> #include <unistd.h> #include <errno.h> typedef int xmms_socket_t; #endif int xmms_sockets_initialize(); int xmms_socket_set_nonblock(xmms_socket_t socket); int xmms_socket_valid(xmms_socket_t socket); void xmms_socket_close(xmms_socket_t socket); int xmms_socket_errno(); bool xmms_socket_error_recoverable(); #endif <commit_after> /* Windows */ #ifdef _MSC_VER #include <Winsock2.h> #include <Ws2tcpip.h> typedef SOCKET xmms_socket_t; typedef int socklen_t; #define XMMS_EINTR WSAEINTR #define XMMS_EAGAIN WSAEWOULDBLOCK /* UNIX */ #else #define SOCKET_ERROR (-1) #define XMMS_EINTR EINTR #define XMMS_EAGAIN EWOULDBLOCK #include <sys/types.h> #include <sys/socket.h> #include <sys/select.h> #include <netinet/in.h> #include <netinet/tcp.h> #include <arpa/inet.h> #include <netdb.h> #include <fcntl.h> #include <unistd.h> #include <errno.h> typedef int xmms_socket_t; #endif int xmms_sockets_initialize(); int xmms_socket_set_nonblock(xmms_socket_t socket); int xmms_socket_valid(xmms_socket_t socket); void xmms_socket_close(xmms_socket_t socket); int xmms_socket_errno(); bool xmms_socket_error_recoverable(); #endif
<commit_msg>Fix formatting, remove unused vars <commit_before>from __future__ import absolute_import, division, print_function import os import attr import pytest import environ @environ.config(prefix="APP") class AppConfig(object): host = environ.var("127.0.0.1") port = environ.var(5000, converter=int) def test_default(): cfg = AppConfig.from_environ() assert cfg.host == "127.0.0.1" assert cfg.port == 5000 def test_env(): env = {"APP_HOST": "0.0.0.0"} cfg = AppConfig.from_environ(environ=env) assert cfg.host == "0.0.0.0" assert cfg.port == 5000 <commit_after>from __future__ import absolute_import, division, print_function import environ @environ.config(prefix="APP") class AppConfig(object): host = environ.var("127.0.0.1") port = environ.var(5000, converter=int) def test_default(): cfg = AppConfig.from_environ() assert cfg.host == "127.0.0.1" assert cfg.port == 5000 def test_env(): env = {"APP_HOST": "0.0.0.0"} cfg = AppConfig.from_environ(environ=env) assert cfg.host == "0.0.0.0" assert cfg.port == 5000
<commit_msg>Set content-type for Qute templates in RESTEasy Reactive Fixes: #25932 <commit_before>package io.quarkus.resteasy.reactive.qute.runtime; import static io.quarkus.resteasy.reactive.qute.runtime.Util.*; import static io.quarkus.resteasy.reactive.qute.runtime.Util.toUni; import org.jboss.resteasy.reactive.server.core.ResteasyReactiveRequestContext; import org.jboss.resteasy.reactive.server.spi.ServerRestHandler; import io.quarkus.arc.Arc; import io.quarkus.qute.Engine; import io.quarkus.qute.TemplateInstance; import io.smallrye.mutiny.Uni; public class TemplateResponseUniHandler implements ServerRestHandler { private volatile Engine engine; @Override public void handle(ResteasyReactiveRequestContext requestContext) { Object result = requestContext.getResult(); if (!(result instanceof TemplateInstance)) { return; } if (engine == null) { synchronized (this) { if (engine == null) { engine = Arc.container().instance(Engine.class).get(); } } } requestContext.setResult(createUni(requestContext, (TemplateInstance) result, engine)); } private Uni<String> createUni(ResteasyReactiveRequestContext requestContext, TemplateInstance result, Engine engine) { setSelectedVariant(result, requestContext.getRequest(), requestContext.getHttpHeaders().getAcceptableLanguages()); return toUni(result, engine); } } <commit_after>package io.quarkus.resteasy.reactive.qute.runtime; import static io.quarkus.resteasy.reactive.qute.runtime.Util.*; import static io.quarkus.resteasy.reactive.qute.runtime.Util.toUni; import javax.ws.rs.core.MediaType; import org.jboss.resteasy.reactive.server.core.ResteasyReactiveRequestContext; import org.jboss.resteasy.reactive.server.spi.ServerRestHandler; import io.quarkus.arc.Arc; import io.quarkus.qute.Engine; import io.quarkus.qute.TemplateInstance; import io.smallrye.mutiny.Uni; public class TemplateResponseUniHandler implements ServerRestHandler { private volatile Engine engine; @Override public void handle(ResteasyReactiveRequestContext requestContext) { Object result = requestContext.getResult(); if (!(result instanceof TemplateInstance)) { return; } if (engine == null) { synchronized (this) { if (engine == null) { engine = Arc.container().instance(Engine.class).get(); } } } requestContext.setResult(createUni(requestContext, (TemplateInstance) result, engine)); } private Uni<String> createUni(ResteasyReactiveRequestContext requestContext, TemplateInstance result, Engine engine) { MediaType mediaType = setSelectedVariant(result, requestContext.getRequest(), requestContext.getHttpHeaders().getAcceptableLanguages()); requestContext.setResponseContentType(mediaType); return toUni(result, engine); } }
<commit_msg>Add a description to the library <commit_before>extern crate arguments; extern crate hiredis; extern crate mcpat; extern crate sql; extern crate sqlite; #[macro_use] extern crate log; /// Raise an error. #[macro_export] macro_rules! raise( ($message:expr) => (return Err($crate::Error::new($message))); ); /// Unwrap a result or raise an error. #[macro_export] macro_rules! ok( ($result:expr) => ( match $result { Ok(result) => result, Err(error) => raise!(error), } ); ); mod result; mod system; pub mod database; pub mod server; pub use result::{Error, Result}; pub use system::System; <commit_after>//! Recording of workload patterns. extern crate arguments; extern crate hiredis; extern crate mcpat; extern crate sql; extern crate sqlite; #[macro_use] extern crate log; /// Raise an error. #[macro_export] macro_rules! raise( ($message:expr) => (return Err($crate::Error::new($message))); ); /// Unwrap a result or raise an error. #[macro_export] macro_rules! ok( ($result:expr) => ( match $result { Ok(result) => result, Err(error) => raise!(error), } ); ); mod result; mod system; pub mod database; pub mod server; pub use result::{Error, Result}; pub use system::System;
<commit_msg>Fix an issue with updating builds. <commit_before>package com.campmongoose.serversaturday.common.submission; import java.io.File; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import javax.annotation.Nonnull; import javax.annotation.Nullable; public abstract class AbstractSubmitter<B extends AbstractBuild, I, L> { protected final Map<String, B> builds = new HashMap<>(); @Nonnull protected final String name; @Nonnull protected final UUID uuid; protected AbstractSubmitter(@Nonnull String name, @Nonnull UUID uuid) { this.name = name; this.uuid = uuid; } @Nullable public B getBuild(@Nonnull String name) { return builds.get(name); } @Nonnull public List<B> getBuilds() { return new ArrayList<>(builds.values()); } @Nonnull public abstract I getMenuRepresentation(); @Nonnull public String getName() { return name; } @Nonnull public UUID getUUID() { return uuid; } @Nonnull public abstract B newBuild(@Nonnull String name, @Nonnull L location); public boolean removeBuild(@Nonnull String name) { return builds.remove(name) != null; } public abstract void save(@Nonnull File file); } <commit_after>package com.campmongoose.serversaturday.common.submission; import java.io.File; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import javax.annotation.Nonnull; import javax.annotation.Nullable; public abstract class AbstractSubmitter<B extends AbstractBuild, I, L> { protected final Map<String, B> builds = new HashMap<>(); @Nonnull protected final String name; @Nonnull protected final UUID uuid; protected AbstractSubmitter(@Nonnull String name, @Nonnull UUID uuid) { this.name = name; this.uuid = uuid; } @Nullable public B getBuild(@Nonnull String name) { return builds.get(name); } @Nonnull public List<B> getBuilds() { return new ArrayList<>(builds.values()); } @Nonnull public abstract I getMenuRepresentation(); @Nonnull public String getName() { return name; } @Nonnull public UUID getUUID() { return uuid; } @Nonnull public abstract B newBuild(@Nonnull String name, @Nonnull L location); public boolean removeBuild(@Nonnull String name) { return builds.remove(name) != null; } public void renameBuild(String newName, B build) { builds.remove(build.getName()); build.setName(newName); builds.put(newName, build); } public abstract void save(@Nonnull File file); }
<commit_msg>ext: Remove env check as Mopidy checks deps automatically <commit_before>from __future__ import unicode_literals import os from mopidy import ext, config from mopidy.exceptions import ExtensionError __version__ = '1.0.18' __url__ = 'https://github.com/mopidy/mopidy-soundcloud' class SoundCloudExtension(ext.Extension): dist_name = 'Mopidy-SoundCloud' ext_name = 'soundcloud' version = __version__ def get_default_config(self): conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf') return config.read(conf_file) def get_config_schema(self): schema = super(SoundCloudExtension, self).get_config_schema() schema['explore'] = config.List() schema['explore_pages'] = config.Integer() schema['auth_token'] = config.Secret() return schema def validate_config(self, config): if not config.getboolean('soundcloud', 'enabled'): return if not config.get('soundcloud', 'auth_token'): raise ExtensionError("In order to use SoundCloud extension you\ must provide auth_token, for more information referrer to \ https://github.com/mopidy/mopidy-soundcloud/") def validate_environment(self): try: import requests # noqa except ImportError as e: raise ExtensionError('Library requests not found', e) def get_backend_classes(self): from .actor import SoundCloudBackend return [SoundCloudBackend] <commit_after>from __future__ import unicode_literals import os from mopidy import ext, config from mopidy.exceptions import ExtensionError __version__ = '1.0.18' __url__ = 'https://github.com/mopidy/mopidy-soundcloud' class SoundCloudExtension(ext.Extension): dist_name = 'Mopidy-SoundCloud' ext_name = 'soundcloud' version = __version__ def get_default_config(self): conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf') return config.read(conf_file) def get_config_schema(self): schema = super(SoundCloudExtension, self).get_config_schema() schema['explore'] = config.List() schema['explore_pages'] = config.Integer() schema['auth_token'] = config.Secret() return schema def validate_config(self, config): if not config.getboolean('soundcloud', 'enabled'): return if not config.get('soundcloud', 'auth_token'): raise ExtensionError("In order to use SoundCloud extension you\ must provide auth_token, for more information referrer to \ https://github.com/mopidy/mopidy-soundcloud/") def get_backend_classes(self): from .actor import SoundCloudBackend return [SoundCloudBackend]
<commit_msg>Reword guard test on upper bounds <commit_before>import pytest import itertools from main import Primes, Sieve def test_sieve_limit(): limit = 10000 with Sieve(limit) as s: assert s.upper_bound() >= limit def test_upper_bound_exception(): limit = 10 with Sieve(limit) as s: with pytest.raises(IndexError): s.is_prime(101) def test_zero_is_not_in_prime_list(): with Primes() as p: n = 20 assert 0 not in list(itertools.islice(p, n)) def test_number_primes_asked_is_given(): with Primes() as p: n = 20 assert len(list(itertools.islice(p, n))) == n <commit_after>import pytest import itertools from main import Primes, Sieve def test_sieve_limit(): limit = 10000 with Sieve(limit) as s: assert s.upper_bound() >= limit def test_checking_above_upper_bound_is_an_error(): limit = 10 with Sieve(limit) as s: with pytest.raises(IndexError): s.is_prime(101) def test_zero_is_not_in_prime_list(): with Primes() as p: n = 20 assert 0 not in list(itertools.islice(p, n)) def test_number_primes_asked_is_given(): with Primes() as p: n = 20 assert len(list(itertools.islice(p, n))) == n
<commit_msg>Fix compilation failure in unit tests on Windows. git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@346999 91177308-0d34-0410-b5e6-96231b3b80d8 <commit_before>//===-- GDBRemoteTestUtils.cpp ----------------------------------*- C++ -*-===// // // The LLVM Compiler Infrastructure // // This file is distributed under the University of Illinois Open Source // License. See LICENSE.TXT for details. // //===----------------------------------------------------------------------===// #include "GDBRemoteTestUtils.h" namespace lldb_private { namespace process_gdb_remote { void GDBRemoteTest::SetUpTestCase() { #if defined(_MSC_VER) WSADATA data; ::WSAStartup(MAKEWORD(2, 2), &data); #endif } void GDBRemoteTest::TearDownTestCase() { #if defined(_MSC_VER) ::WSACleanup(); #endif } } // namespace process_gdb_remote } // namespace lldb_private <commit_after>//===-- GDBRemoteTestUtils.cpp ----------------------------------*- C++ -*-===// // // The LLVM Compiler Infrastructure // // This file is distributed under the University of Illinois Open Source // License. See LICENSE.TXT for details. // //===----------------------------------------------------------------------===// #include "GDBRemoteTestUtils.h" #if defined(_MSC_VER) #include "lldb/Host/windows/windows.h" #include <WinSock2.h> #endif namespace lldb_private { namespace process_gdb_remote { void GDBRemoteTest::SetUpTestCase() { #if defined(_MSC_VER) WSADATA data; ::WSAStartup(MAKEWORD(2, 2), &data); #endif } void GDBRemoteTest::TearDownTestCase() { #if defined(_MSC_VER) ::WSACleanup(); #endif } } // namespace process_gdb_remote } // namespace lldb_private
<commit_msg>Add unicode declaration on new regression test <commit_before>import pytest from ... import load as load_spacy def test_issue913(en_tokenizer): '''Test that spaCy doesn't hang on many periods.''' string = '0' for i in range(1, 100): string += '.%d' % i doc = en_tokenizer(string) # Don't want tests to fail if they haven't installed pytest-timeout plugin try: test_issue913 = pytest.mark.timeout(5)(test_issue913) except NameError: pass <commit_after>from __future__ import unicode_literals import pytest from ... import load as load_spacy def test_issue957(en_tokenizer): '''Test that spaCy doesn't hang on many periods.''' string = '0' for i in range(1, 100): string += '.%d' % i doc = en_tokenizer(string) # Don't want tests to fail if they haven't installed pytest-timeout plugin try: test_issue913 = pytest.mark.timeout(5)(test_issue913) except NameError: pass
<commit_msg>Rename field `hasAlreadySet` -> `hasAlreadyStarted` <commit_before>package com.cookpad.android.puree.retryable; import android.os.Handler; public class RetryableTaskRunner { private Handler handler; private boolean hasAlreadySet; private Runnable callback; private BuckoffCounter buckoffCounter; public RetryableTaskRunner(final Runnable task, final int interval) { this.buckoffCounter = new BuckoffCounter(interval); this.handler = new Handler(); this.hasAlreadySet = false; this.callback = new Runnable() { @Override public void run() { task.run(); } }; } public synchronized void tryToStart() { if (hasAlreadySet) { return; } buckoffCounter.resetRetryCount(); startDelayed(); } private synchronized void startDelayed() { handler.removeCallbacks(callback); handler.postDelayed(callback, buckoffCounter.time()); hasAlreadySet = true; } public synchronized void reset() { hasAlreadySet = false; buckoffCounter.resetRetryCount(); } public synchronized void retryLater() { buckoffCounter.incrementRetryCount(); startDelayed(); } } <commit_after>package com.cookpad.android.puree.retryable; import android.os.Handler; public class RetryableTaskRunner { private Handler handler; private boolean hasAlreadyStarted; private Runnable callback; private BuckoffCounter buckoffCounter; public RetryableTaskRunner(final Runnable task, final int interval) { this.buckoffCounter = new BuckoffCounter(interval); this.handler = new Handler(); this.hasAlreadyStarted = false; this.callback = new Runnable() { @Override public void run() { task.run(); } }; } public synchronized void tryToStart() { if (hasAlreadyStarted) { return; } buckoffCounter.resetRetryCount(); startDelayed(); } private synchronized void startDelayed() { handler.removeCallbacks(callback); handler.postDelayed(callback, buckoffCounter.time()); hasAlreadyStarted = true; } public synchronized void reset() { hasAlreadyStarted = false; buckoffCounter.resetRetryCount(); } public synchronized void retryLater() { buckoffCounter.incrementRetryCount(); startDelayed(); } }
<commit_msg>Return error writing on closed channel. <commit_before>package main import ( "fmt" ) func main() { output := make(chan int, 1) // create channel write(output, 1) close(output) // close channel write(output, 2) } // how to write on possibly closed channel func write(out chan int, i int) { defer func() { // recover from panic caused by writing to a closed channel if err := recover(); err != nil { fmt.Printf("write: error writing %d on channel: %v\n", i, err) return } fmt.Printf("write: wrote %d on channel\n", i) }() out <- i // write on possibly closed channel } <commit_after>package main import ( "fmt" ) func main() { output := make(chan int, 1) // create channel write(output, 1) close(output) // close channel write(output, 2) } // how to write on possibly closed channel func write(out chan int, i int) (err error) { defer func() { // recover from panic caused by writing to a closed channel if r := recover(); r != nil { err = fmt.Errorf("%v", r) fmt.Printf("write: error writing %d on channel: %v\n", i, err) return } fmt.Printf("write: wrote %d on channel\n", i) }() out <- i // write on possibly closed channel return err }
<commit_msg>Extend Users API to conform to tests <commit_before>from django.contrib.auth import get_user_model from rest_framework import filters from rest_framework import viewsets from yunity.users.serializers import UserSerializer class UserViewSet(viewsets.ModelViewSet): queryset = get_user_model().objects.all() serializer_class = UserSerializer filter_backends = (filters.SearchFilter,) search_fields = ('display_name', 'first_name', 'last_name') <commit_after>from django.contrib.auth import get_user_model from rest_framework import filters from rest_framework import viewsets from rest_framework.permissions import IsAuthenticated, AllowAny, BasePermission from yunity.users.serializers import UserSerializer class IsRequestUser(BasePermission): message = 'You can modify only your own user data.' def has_object_permission(self, request, view, obj): return request.user == obj class UserViewSet(viewsets.ModelViewSet): queryset = get_user_model().objects.all() serializer_class = UserSerializer filter_backends = (filters.SearchFilter,) search_fields = ('display_name', 'first_name', 'last_name') def get_permissions(self): if self.action == 'create': self.permission_classes = (AllowAny,) elif self.action in ('list', 'retrieve'): self.permission_classes = (IsAuthenticated,) else: self.permission_classes = (IsRequestUser,) return super().get_permissions()
<commit_msg>Clarify that the MathJax comment is Notebook specific. <commit_before>"""Simple magics for display formats""" #----------------------------------------------------------------------------- # Copyright (c) 2012 The IPython Development Team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file COPYING.txt, distributed with this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- # Our own packages from IPython.core.display import display, Javascript, Latex, SVG, HTML from IPython.core.magic import ( Magics, magics_class, cell_magic ) #----------------------------------------------------------------------------- # Magic implementation classes #----------------------------------------------------------------------------- @magics_class class DisplayMagics(Magics): """Magics for displaying various output types with literals Defines javascript/latex/svg/html cell magics for writing blocks in those languages, to be rendered in the frontend. """ @cell_magic def javascript(self, line, cell): """Run the cell block of Javascript code""" display(Javascript(cell)) @cell_magic def latex(self, line, cell): """Render the cell as a block of latex This magic only renders the subset of latex defined by MathJax [here](https://docs.mathjax.org/en/v2.5-latest/tex.html).""" display(Latex(cell)) @cell_magic def svg(self, line, cell): """Render the cell as an SVG literal""" display(SVG(cell)) @cell_magic def html(self, line, cell): """Render the cell as a block of HTML""" display(HTML(cell)) <commit_after>"""Simple magics for display formats""" #----------------------------------------------------------------------------- # Copyright (c) 2012 The IPython Development Team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file COPYING.txt, distributed with this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- # Our own packages from IPython.core.display import display, Javascript, Latex, SVG, HTML from IPython.core.magic import ( Magics, magics_class, cell_magic ) #----------------------------------------------------------------------------- # Magic implementation classes #----------------------------------------------------------------------------- @magics_class class DisplayMagics(Magics): """Magics for displaying various output types with literals Defines javascript/latex/svg/html cell magics for writing blocks in those languages, to be rendered in the frontend. """ @cell_magic def javascript(self, line, cell): """Run the cell block of Javascript code""" display(Javascript(cell)) @cell_magic def latex(self, line, cell): """Render the cell as a block of latex The subset of latex which is support depends on the implementation in the client. In the Jupyter Notebook, this magic only renders the subset of latex defined by MathJax [here](https://docs.mathjax.org/en/v2.5-latest/tex.html).""" display(Latex(cell)) @cell_magic def svg(self, line, cell): """Render the cell as an SVG literal""" display(SVG(cell)) @cell_magic def html(self, line, cell): """Render the cell as a block of HTML""" display(HTML(cell))
<commit_msg>Use raw command method to run all commands in wrapper <commit_before> import subprocess class SessionExists(Exception): description = "Session already exists." pass class ServerConnectionError(Exception): description = "tmux server is not currently running." pass class SessionDoesNotExist(Exception): description = "Session does not exist." pass def command(command): p = subprocess.Popen("tmux " + command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) return p.communicate() def kill(session): p = subprocess.Popen("tmux kill-session -t {}".format(session), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) out, err = p.communicate() if "session not found" in err: raise SessionDoesNotExist(session) if "failed to connect to server" in err: raise ServerConnectionError() def list(): p = subprocess.Popen("tmux ls", stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) out, err = p.communicate() if "failed to connect to server" in err: raise ServerConnectionError() return out def create(session): p = subprocess.Popen("tmux new -s {}".format(session), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) out, err = p.communicate() if "duplicate session" in err: raise SessionExists(session) def attach(session): p = subprocess.Popen("tmux attach-session -t {}".format(session), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) out, err = p.communicate() if "no sessions" in err: raise SessionDoesNotExist(session) def create_or_attach(session): create(session) except SessionExists: attach(session) <commit_after> import subprocess class SessionExists(Exception): description = "Session already exists." pass class ServerConnectionError(Exception): description = "tmux server is not currently running." pass class SessionDoesNotExist(Exception): description = "Session does not exist." pass def command(command): p = subprocess.Popen("tmux " + command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) return p.communicate() def kill(session): out, err = command("kill-session -t {}".format(session)) if "session not found" in err: raise SessionDoesNotExist(session) if "failed to connect to server" in err: raise ServerConnectionError() def list(): out, err = command("ls") if "failed to connect to server" in err: raise ServerConnectionError() return out def create(session): out, err = command("new -s {}".format(session)) if "duplicate session" in err: raise SessionExists(session) def attach(session): out, err = command("attach-session -t {}".format(session)) if "no sessions" in err: raise SessionDoesNotExist(session) def create_or_attach(session): try: create(session) except SessionExists: attach(session)
<commit_msg>Handle that we could use the object version of Boolean and it could be null <commit_before>/* * Copyright (c) 2015. Troels Liebe Bentsen <tlb@nversion.dk> * Licensed under the MIT license (LICENSE.txt) */ package dk.nversion.copybook.converters; import dk.nversion.copybook.exceptions.TypeConverterException; public class IntegerToBoolean extends IntegerToInteger { @Override public void validate(Class<?> type, int size, int decimals) { if(!(Boolean.class.equals(type) || Boolean.TYPE.equals(type))) { throw new TypeConverterException("Only supports converting to and from int or Integer"); } } @Override public Object to(byte[] bytes, int offset, int length, int decimals, boolean removePadding) { return (int)super.to(bytes, offset, length, decimals, removePadding) != 0; } @Override public byte[] from(Object value, int length, int decimals, boolean addPadding) { return super.from((boolean)value ? 1 : 0, length, decimals, addPadding); } } <commit_after>/* * Copyright (c) 2015. Troels Liebe Bentsen <tlb@nversion.dk> * Licensed under the MIT license (LICENSE.txt) */ package dk.nversion.copybook.converters; import dk.nversion.copybook.exceptions.TypeConverterException; public class IntegerToBoolean extends IntegerToInteger { @Override public void validate(Class<?> type, int size, int decimals) { if(!(Boolean.class.equals(type) || Boolean.TYPE.equals(type))) { throw new TypeConverterException("Only supports converting to and from int or Integer"); } } @Override public Object to(byte[] bytes, int offset, int length, int decimals, boolean removePadding) { return (int)super.to(bytes, offset, length, decimals, removePadding) != 0; } @Override public byte[] from(Object value, int length, int decimals, boolean addPadding) { return super.from(value != null ? ((boolean)value ? 1 : 0) : null, length, decimals, addPadding); } }
<commit_msg>Add consolescripts entry to produce 'dotsecrets' cli script <commit_before>import os from setuptools import setup, find_packages here = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(here, 'README.rst')).read() CHANGES = open(os.path.join(here, 'CHANGES.rst')).read() requires = [ 'pyyaml', ] setup(name='dotsecrets', version='0.0', description='Manage dot files with secrets in Git', long_description=README + '\n\n' + CHANGES, license='BSD', classifiers=[ "License :: OSI Approved :: BSD License", "Operating System :: POSIX", "Programming Language :: Python", "Topic :: Software Development :: Version Control", "Topic :: System :: Systems Administration", "Topic :: Utilities" ], author='Olaf Conradi', author_email='olaf@conradi.org', url='https://github.com/oohlaf/dotsecrets', keywords='dotfiles git secret manage private', packages=find_packages(), install_requires=requires, tests_require=requires, test_suite="dotsecrets.tests", ) <commit_after>import os from setuptools import setup, find_packages here = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(here, 'README.rst')).read() CHANGES = open(os.path.join(here, 'CHANGES.rst')).read() requires = [ 'pyyaml', ] setup(name='dotsecrets', version='0.0', description='Manage dot files with secrets in Git', long_description=README + '\n\n' + CHANGES, license='BSD', classifiers=[ "License :: OSI Approved :: BSD License", "Operating System :: POSIX", "Programming Language :: Python", "Topic :: Software Development :: Version Control", "Topic :: System :: Systems Administration", "Topic :: Utilities" ], author='Olaf Conradi', author_email='olaf@conradi.org', url='https://github.com/oohlaf/dotsecrets', keywords='dotfiles git secret manage private', packages=find_packages(), entry_points = { 'console_scripts': ['dotsecrets=dotsecrets.dotsecrets:main'], }, install_requires=requires, tests_require=requires, test_suite="dotsecrets.tests", )
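For context, a console_scripts entry of the form 'dotsecrets=dotsecrets.dotsecrets:main' tells setuptools to generate a dotsecrets executable that imports dotsecrets/dotsecrets.py and calls its main(). The sketch below is a hypothetical stand-in for that module, since its real contents are not part of this record.

import argparse

def main():
    # Hypothetical CLI body; the real dotsecrets module is not shown here.
    parser = argparse.ArgumentParser(
        prog='dotsecrets',
        description='Manage dot files with secrets in Git')
    parser.add_argument('--version', action='version', version='0.0')
    parser.parse_args()

if __name__ == '__main__':
    main()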
<commit_msg>Replace empty list creation with Collections.emptyList() <commit_before>package com.alexrnl.subtitlecorrector.correctionstrategy; import java.util.ArrayList; import java.util.List; import java.util.Objects; import java.util.logging.Logger; import com.alexrnl.subtitlecorrector.service.SessionParameters; /** * Abstract strategy implementation.<br /> * Provide a basic body for the actual strategies. There is no logic in this class. * @author Alex */ public abstract class AbstractStrategy implements Strategy { /** Logger */ private static final Logger LG = Logger.getLogger(AbstractStrategy.class.getName()); @Override public void startSession (final SessionParameters parameters) { // Nothing to do here, override if strategy depends on session state } @Override public void stopSession () { // Nothing to do here, override if strategy depends on session state } @Override public List<Parameter<?>> getParameters () { return new ArrayList<>(0); } @Override public Parameter<?> getParameterByName (final String name) { Objects.requireNonNull(name); for (final Parameter<?> parameter : getParameters()) { if (parameter.getDescription().equals(name)) { return parameter; } } LG.info("No parameter with name " + name + " found"); return null; } } <commit_after>package com.alexrnl.subtitlecorrector.correctionstrategy; import java.util.Collections; import java.util.List; import java.util.Objects; import java.util.logging.Logger; import com.alexrnl.subtitlecorrector.service.SessionParameters; /** * Abstract strategy implementation.<br /> * Provide a basic body for the actual strategies. There is no logic in this class. * @author Alex */ public abstract class AbstractStrategy implements Strategy { /** Logger */ private static final Logger LG = Logger.getLogger(AbstractStrategy.class.getName()); @Override public void startSession (final SessionParameters parameters) { // Nothing to do here, override if strategy depends on session state } @Override public void stopSession () { // Nothing to do here, override if strategy depends on session state } @Override public List<Parameter<?>> getParameters () { return Collections.emptyList(); } @Override public Parameter<?> getParameterByName (final String name) { Objects.requireNonNull(name); for (final Parameter<?> parameter : getParameters()) { if (parameter.getDescription().equals(name)) { return parameter; } } LG.info("No parameter with name " + name + " found"); return null; } }
<commit_msg>Implement properties and methods for the User model class to enable the Flask-Login module <commit_before>from app_factory import db from models.session import Session class User(db.Model): __tablename__ = 'users' id = db.Column(db.Integer, primary_key=True) name = db.Column('name', db.String(50)) username = db.Column('username', db.String(50)) password = db.Column('password', db.String(50)) email = db.Column('email', db.String(128)) session = db.relationship( Session, uselist=False, backref=db.backref('user', order_by=id) ) def __repr__(self): return '' '<User(name={name}, username={username}, ' 'password={password}, email={email})>'.format( name=self.name, username=self.username, password=self.password, email=self.email ) def __init__(self, name, username, password, email): self.name = name self.username = username self.password = password self.email = email <commit_after>from app_factory import db from models.session import Session class User(db.Model): __tablename__ = 'users' id = db.Column(db.Integer, primary_key=True) name = db.Column('name', db.String(50)) username = db.Column('username', db.String(50)) password = db.Column('password', db.String(50)) email = db.Column('email', db.String(128)) session = db.relationship( Session, uselist=False, backref=db.backref('user', order_by=id) ) def __init__(self, name, username, password, email): self.name = name self.username = username self.password = password self.email = email def __repr__(self): return '' '<User(name={name}, username={username}, ' 'password={password}, email={email})>'.format( name=self.name, username=self.username, password=self.password, email=self.email ) def is_authenticated(self): return (hasattr(self.session.session_id) and self.session.session_id is not None) def is_active(self): return True def is_anonymous(self): return False def get_id(self): return self.id
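Flask-Login consumes these additions through its user loader: get_id() supplies the value stored in the session, and the loader turns it back into a user object on each request. A minimal sketch, assuming db is a Flask-SQLAlchemy instance and a LoginManager has been created for the app:

from flask_login import LoginManager

login_manager = LoginManager()

@login_manager.user_loader
def load_user(user_id):
    # User is the model defined above; Flask-Login passes back the string
    # returned by User.get_id(), and the loader must return the matching
    # user or None.
    return User.query.get(int(user_id))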
<commit_msg>Add argparse to install_requires for Python 2.6 <commit_before>from distutils.core import setup import traitscli setup( name='traitscli', version=traitscli.__version__, py_modules=['traitscli'], author=traitscli.__author__, author_email='aka.tkf@gmail.com', url='https://github.com/tkf/traitscli', license=traitscli.__license__, description='traitscli - CLI generator based on class traits', long_description=traitscli.__doc__, keywords='CLI, traits', classifiers=[ "Development Status :: 3 - Alpha", # see: http://pypi.python.org/pypi?%3Aaction=list_classifiers ], install_requires=[ 'traits', ] ) <commit_after>from distutils.core import setup import traitscli setup( name='traitscli', version=traitscli.__version__, py_modules=['traitscli'], author=traitscli.__author__, author_email='aka.tkf@gmail.com', url='https://github.com/tkf/traitscli', license=traitscli.__license__, description='traitscli - CLI generator based on class traits', long_description=traitscli.__doc__, keywords='CLI, traits', classifiers=[ "Development Status :: 3 - Alpha", # see: http://pypi.python.org/pypi?%3Aaction=list_classifiers ], install_requires=[ 'argparse', 'traits', ] )
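Since argparse only joined the standard library in Python 2.7, an equally common pattern at the time was to add the backport conditionally rather than unconditionally; a sketch of that alternative:

import sys

install_requires = ['traits']
if sys.version_info < (2, 7):
    # The argparse backport is only needed where the stdlib lacks it.
    install_requires.append('argparse')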
<commit_msg>Make the problems/generated directory usage relative <commit_before>/* * Author: Filip Dvořák <filip.dvorak@runbox.com> * * Copyright (c) 2013 Filip Dvořák <filip.dvorak@runbox.com>, all rights reserved * * Publishing, providing further or using this program is prohibited * without previous written permission of the author. Publishing or providing * further the contents of this file is prohibited without previous written * permission of the author. */ package fape.scripting; import fape.core.planning.Planner; import java.io.File; import java.io.FileFilter; /** * * @author FD */ public class Exp1 extends ExperimentRunner { public static void main(String[] args) throws InterruptedException { run("C:/ROOT/PROJECTS/fape/FAPE/problems/generated"); } } <commit_after>/* * Author: Filip Dvořák <filip.dvorak@runbox.com> * * Copyright (c) 2013 Filip Dvořák <filip.dvorak@runbox.com>, all rights reserved * * Publishing, providing further or using this program is prohibited * without previous written permission of the author. Publishing or providing * further the contents of this file is prohibited without previous written * permission of the author. */ package fape.scripting; import fape.core.planning.Planner; import java.io.File; import java.io.FileFilter; /** * * @author FD */ public class Exp1 extends ExperimentRunner { public static void main(String[] args) throws InterruptedException { run("problems/generated"); } }
<commit_msg>ENH: Put file reader class into top-level namespace <commit_before> try: from __dev_version import version as __version__ from __dev_version import git_revision as __git_revision__ except ImportError: from __version import version as __version__ from __version import git_revision as __git_revision__ import trmm3b4xrt <commit_after> try: from __dev_version import version as __version__ from __dev_version import git_revision as __git_revision__ except ImportError: from __version import version as __version__ from __version import git_revision as __git_revision__ from trmm3b4xrt import *
<commit_msg>chore(webpage): Add header when open plip pdf <commit_before>import React from 'react'; import EyeIcon from '../icons/EyeIcon'; interface PdfButtonProps { dataPdf: string } const PdfButton = (props: PdfButtonProps) => { const handleClick = (event:any) => { event?.preventDefault() window.open(encodeURI(props.dataPdf)); }; return ( <> <button onClick={handleClick}> <EyeIcon /> Ver ficha de assinatura </button> </> ); }; export default PdfButton; <commit_after>import React from 'react'; import EyeIcon from '../icons/EyeIcon'; interface PdfButtonProps { dataPdf: string } const PdfButton = (props: PdfButtonProps) => { const handleClick = (event:any) => { event?.preventDefault() window.open(encodeURI(`data:application/pdf;filename=generated.pdf;base64,${props.dataPdf}`)); }; return ( <> <button onClick={handleClick}> <EyeIcon /> Ver ficha de assinatura </button> </> ); }; export default PdfButton;
<commit_msg>Allow more than one test class (at least for annotation reading). <commit_before>import java.util.*; import java.lang.*; import java.lang.reflect.*; import java.lang.annotation.*; public class ReadForbidden { public static void main(String args[]) throws Exception { if(args.length != 1) { System.err.println("missing class argument"); System.exit(-1); } String tcln = args[0]; ClassLoader cl = ClassLoader.getSystemClassLoader(); Class newClass = cl.loadClass(tcln); Forbidden forbidden = (Forbidden) newClass.getAnnotation(Forbidden.class); String grep = "egrep '(java/lang/ClassLoader|java\\.lang\\.ClassLoader|java/lang/reflect|java\\.lang\\.reflect"; for (String s : forbidden.value()) { String escape = s.replaceAll("\\.", "\\\\."); grep += "|" + escape; escape = s.replaceAll("\\.", "/"); grep += "|" + escape; } grep += ")'"; System.out.println(grep); } } <commit_after>import java.util.*; import java.lang.*; import java.lang.reflect.*; import java.lang.annotation.*; public class ReadForbidden { public static void main(String args[]) throws Exception { if(args.length != 1) { System.err.println("missing class argument"); System.exit(-1); } String grep = "egrep '(java/lang/ClassLoader|java\\.lang\\.ClassLoader|java/lang/reflect|java\\.lang\\.reflect"; ClassLoader cl = ClassLoader.getSystemClassLoader(); for (String tcln : args) { Class newClass = cl.loadClass(tcln); Forbidden forbidden = (Forbidden) newClass.getAnnotation(Forbidden.class); for (String s : forbidden.value()) { String escape = s.replaceAll("\\.", "\\\\."); grep += "|" + escape; escape = s.replaceAll("\\.", "/"); grep += "|" + escape; } } grep += ")'"; System.out.println(grep); } }