Columns:
hash: string, length 40
diff: string, length 131 to 114k
message: string, length 7 to 980
project: string, length 5 to 67
split: string, 1 class (all rows "train")
a3b02f3f8b78ed36eaf05c78a91920eebef19b4c
diff --git a/moto/iam/models.py b/moto/iam/models.py index <HASH>..<HASH> 100644 --- a/moto/iam/models.py +++ b/moto/iam/models.py @@ -135,6 +135,14 @@ class User(object): def get_all_access_keys(self): return self.access_keys + def delete_access_key(self, access_key_id): + for key in self.access_keys: + if key.access_key_id == access_key_id: + self.access_keys.remove(key) + break + else: + raise BotoServerError(404, 'Not Found') + class IAMBackend(BaseBackend): @@ -207,7 +215,6 @@ class IAMBackend(BaseBackend): return group def get_group(self, group_name, marker=None, max_items=None): - group = None try: group = self.groups[group_name] @@ -224,6 +231,15 @@ class IAMBackend(BaseBackend): self.users[user_name] = user return user + def get_user(self, user_name): + user = None + try: + user = self.users[user_name] + except KeyError: + raise BotoServerError(404, 'Not Found') + + return user + def add_user_to_group(self, group_name, user_name): group = None user = None @@ -292,6 +308,13 @@ class IAMBackend(BaseBackend): return keys + def delete_access_key(self, access_key_id, user_name): + try: + user = self.users[user_name] + user.delete_access_key(access_key_id) + except KeyError: + raise BotoServerError(404, 'Not Found') + def delete_user(self, user_name): try: del self.users[user_name] diff --git a/moto/iam/responses.py b/moto/iam/responses.py index <HASH>..<HASH> 100644 --- a/moto/iam/responses.py +++ b/moto/iam/responses.py @@ -106,6 +106,12 @@ class IamResponse(BaseResponse): template = Template(USER_TEMPLATE) return template.render(action='Create', user=user) + def get_user(self): + user_name = self._get_param('UserName') + user = iam_backend.get_user(user_name) + template = Template(USER_TEMPLATE) + return template.render(action='Get', user=user) + def add_user_to_group(self): group_name = self._get_param('GroupName') user_name = self._get_param('UserName') @@ -165,6 +171,14 @@ class IamResponse(BaseResponse): template = Template(LIST_ACCESS_KEYS_TEMPLATE) return template.render(user_name=user_name, keys=keys) + def delete_access_key(self): + user_name = self._get_param('UserName') + access_key_id = self._get_param('AccessKeyId') + + iam_backend.delete_access_key(access_key_id, user_name) + template = Template(GENERIC_EMPTY_TEMPLATE) + return template.render(name='DeleteAccessKey') + def delete_user(self): user_name = self._get_param('UserName') iam_backend.delete_user(user_name) diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py index <HASH>..<HASH> 100644 --- a/tests/test_iam/test_iam.py +++ b/tests/test_iam/test_iam.py @@ -85,6 +85,14 @@ def test_create_user(): @mock_iam() +def test_get_user(): + conn = boto.connect_iam() + assert_raises(BotoServerError, conn.get_user, 'my-user') + conn.create_user('my-user') + conn.get_user('my-user') + + +@mock_iam() def test_add_user_to_group(): conn = boto.connect_iam() assert_raises(BotoServerError, conn.add_user_to_group, 'my-group', 'my-user') @@ -130,6 +138,13 @@ def test_get_all_access_keys(): ) +@mock_iam() +def test_delete_access_key(): + conn = boto.connect_iam() + conn.create_user('my-user') + access_key_id = conn.create_access_key('my-user')['create_access_key_response']['create_access_key_result']['access_key']['access_key_id'] + conn.delete_access_key(access_key_id, 'my-user') + @mock_iam() def test_delete_user():
Added get_user and delete_access_key endpoints.
spulec_moto
train
1623ae0d7c71f9149c958250b0221ea9d78cc157
diff --git a/API/ReferenceResolverInterface.php b/API/ReferenceResolverInterface.php index <HASH>..<HASH> 100644 --- a/API/ReferenceResolverInterface.php +++ b/API/ReferenceResolverInterface.php @@ -9,7 +9,7 @@ namespace Kaliop\eZMigrationBundle\API; interface ReferenceResolverInterface { /** - * @param string $stringIdentifier + * @param string|mixed $stringIdentifier * @return bool true if the given $stringIdentifier identifies a reference */ public function isReference($stringIdentifier); @@ -26,9 +26,9 @@ interface ReferenceResolverInterface * * In pseudocode: return $this->isReference($stringIdentifier) ? $this->getReferenceValue($stringIdentifier) : $stringIdentifier * - * @param string $stringIdentifier + * @param string|mixed $stringIdentifier * @return mixed $stringIdentifier if not a reference, otherwise the reference vale - * @throws \Exception if the given Identifier is not a reference + * @throws \Exception (when ?) */ public function resolveReference($stringIdentifier); } diff --git a/Core/Executor/RoleManager.php b/Core/Executor/RoleManager.php index <HASH>..<HASH> 100644 --- a/Core/Executor/RoleManager.php +++ b/Core/Executor/RoleManager.php @@ -35,7 +35,8 @@ class RoleManager extends RepositoryExecutor implements MigrationGeneratorInterf $roleService = $this->repository->getRoleService(); $userService = $this->repository->getUserService(); - $roleCreateStruct = $roleService->newRoleCreateStruct($step->dsl['name']); + $roleName = $this->referenceResolver->resolveReference($step->dsl['name']); + $roleCreateStruct = $roleService->newRoleCreateStruct($roleName); // Publish new role $role = $roleService->createRole($roleCreateStruct); @@ -82,7 +83,8 @@ class RoleManager extends RepositoryExecutor implements MigrationGeneratorInterf // Updating role name if (isset($step->dsl['new_name'])) { $update = $roleService->newRoleUpdateStruct(); - $update->identifier = $step->dsl['new_name']; + $newRoleName = $this->referenceResolver->resolveReference($step->dsl['new_name']); + $update->identifier = $this->referenceResolver->resolveReference($newRoleName); $role = $roleService->updateRole($role, $update); } diff --git a/Core/ReferenceResolver/ChainResolver.php b/Core/ReferenceResolver/ChainResolver.php index <HASH>..<HASH> 100644 --- a/Core/ReferenceResolver/ChainResolver.php +++ b/Core/ReferenceResolver/ChainResolver.php @@ -95,7 +95,7 @@ class ChainResolver implements EmbeddedReferenceResolverBagInterface, Enumerable $stringIdentifier = $this->resolveEmbeddedReferences($stringIdentifier); } - /// @todo sould we throw if $stringIdentifier is not a string any more? + /// @todo should we throw if $stringIdentifier is not a string any more? // for speed, we avoid calling $this->isReference(), and call directly getReferenceValue() try { diff --git a/WHATSNEW.md b/WHATSNEW.md index <HASH>..<HASH> 100644 --- a/WHATSNEW.md +++ b/WHATSNEW.md @@ -1,3 +1,9 @@ +Version X.Y.Z +============= + +* New: the `role/create` migration step now resolves references for role names. Same for `role/update`. + + Version 5.8.0 ============= @@ -20,6 +26,7 @@ Version 5.8.0 - the `kaliop:migration:generate` command now uses as default language for the generated migrations the default one of the current siteaccess, instead of 'eng-GB' + Version 5.7.3 ============= @@ -29,6 +36,7 @@ Version 5.7.3 * Fix: creating migrations for Content creation or update with contents which have empty Image/File/Media fields would crash + Version 5.7.2 =============
Allow resolving references when setting role names
kaliop-uk_ezmigrationbundle
train
c5f1e98124f2df4b1f1194727865a1423c381883
diff --git a/structure.go b/structure.go index <HASH>..<HASH> 100644 --- a/structure.go +++ b/structure.go @@ -188,14 +188,6 @@ func flattenInstances(list []elb.Instance) []string { // Takes the result of flatmap.Expand for an array of strings // and returns a []string func expandStringList(configured []interface{}) []string { - // here we special case the * expanded lists. For example: - // - // instances = ["${aws_instance.foo.*.id}"] - // - if len(configured) == 1 && strings.Contains(configured[0].(string), ",") { - return strings.Split(configured[0].(string), ",") - } - vs := make([]string, 0, len(configured)) for _, v := range configured { vs = append(vs, v.(string))
helper/schema: test schema diffs with the interpolation splits
terraform-providers_terraform-provider-aws
train
671c1476b7bcc20da69bed82df1075f7b7ef0674
diff --git a/src/Jobby/BackgroundJob.php b/src/Jobby/BackgroundJob.php index <HASH>..<HASH> 100644 --- a/src/Jobby/BackgroundJob.php +++ b/src/Jobby/BackgroundJob.php @@ -109,8 +109,8 @@ class BackgroundJob */ private function getLockFile() { - $tmp = $this->tmpDir; - $job = $this->job; + $tmp = $this->helper->escape($this->tmpDir); + $job = $this->helper->escape($this->job); if (!empty($this->config['environment'])) { $env = $this->config['environment']; diff --git a/src/Jobby/Helper.php b/src/Jobby/Helper.php index <HASH>..<HASH> 100644 --- a/src/Jobby/Helper.php +++ b/src/Jobby/Helper.php @@ -136,4 +136,18 @@ EOF; return str_replace(array("\r\n", "\n"), '', $code); } + + /** + * @param string $input + * @return string + */ + public function escape($input) + { + $input = strtolower($input); + $input = preg_replace("/[^a-z0-9_.\- ]+/", "", $input); + $input = trim($input); + $input = str_replace(" ", "_", $input); + $input = preg_replace("/_{2,}/", "_", $input); + return $input; + } }
properly escape the name for the lockfile
jobbyphp_jobby
train
dc8a302ddab17fd4b6dcaa552ee039adc24eb9c5
diff --git a/app.py b/app.py index <HASH>..<HASH> 100644 --- a/app.py +++ b/app.py @@ -18,7 +18,7 @@ from config import config # Set up logging logfilepath = os.path.join(os.path.dirname(os.path.abspath(__file__)), - 'server.log') + config.get("Server Parameters", "logfile")) loglevels = [logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR, logging.CRITICAL] loglevel = loglevels[config.getint('Server Parameters', 'loglevel')] @@ -153,8 +153,6 @@ def get_people(people): #---------------------------------------------- # Experiment counterbalancing code. #---------------------------------------------- - - def get_random_condcount(): """ HITs can be in one of three states:
Configured app.py to find the logfile setting in config.txt
NYUCCL_psiTurk
train
3486a27bee3c039d4b0b0c241643271ace3efaa7
diff --git a/sharding-core/src/main/java/io/shardingsphere/core/rewrite/SQLRewriteEngine.java b/sharding-core/src/main/java/io/shardingsphere/core/rewrite/SQLRewriteEngine.java index <HASH>..<HASH> 100644 --- a/sharding-core/src/main/java/io/shardingsphere/core/rewrite/SQLRewriteEngine.java +++ b/sharding-core/src/main/java/io/shardingsphere/core/rewrite/SQLRewriteEngine.java @@ -37,6 +37,7 @@ import io.shardingsphere.core.parsing.parser.token.RowCountToken; import io.shardingsphere.core.parsing.parser.token.SQLToken; import io.shardingsphere.core.parsing.parser.token.SchemaToken; import io.shardingsphere.core.parsing.parser.token.TableToken; +import io.shardingsphere.core.property.DataSourcePropertyManager; import io.shardingsphere.core.rewrite.placeholder.IndexPlaceholder; import io.shardingsphere.core.rewrite.placeholder.InsertValuesPlaceholder; import io.shardingsphere.core.rewrite.placeholder.SchemaPlaceholder; @@ -79,6 +80,8 @@ public final class SQLRewriteEngine { private final List<Object> parameters; + private final DataSourcePropertyManager dataSourcePropertyManager; + /** * Constructs SQL rewrite engine. * @@ -90,7 +93,8 @@ public final class SQLRewriteEngine { * @param parameters parameters */ public SQLRewriteEngine(final ShardingRule shardingRule, final String originalSQL, final DatabaseType databaseType, - final SQLStatement sqlStatement, final ShardingConditions shardingConditions, final List<Object> parameters) { + final SQLStatement sqlStatement, final ShardingConditions shardingConditions, + final List<Object> parameters, final DataSourcePropertyManager dataSourcePropertyManager) { this.shardingRule = shardingRule; this.originalSQL = originalSQL; this.databaseType = databaseType; @@ -98,6 +102,7 @@ public final class SQLRewriteEngine { this.shardingConditions = shardingConditions; this.parameters = parameters; sqlTokens.addAll(sqlStatement.getSqlTokens()); + this.dataSourcePropertyManager = dataSourcePropertyManager; } /** diff --git a/sharding-core/src/main/java/io/shardingsphere/core/routing/router/sharding/ParsingSQLRouter.java b/sharding-core/src/main/java/io/shardingsphere/core/routing/router/sharding/ParsingSQLRouter.java index <HASH>..<HASH> 100644 --- a/sharding-core/src/main/java/io/shardingsphere/core/routing/router/sharding/ParsingSQLRouter.java +++ b/sharding-core/src/main/java/io/shardingsphere/core/routing/router/sharding/ParsingSQLRouter.java @@ -96,7 +96,7 @@ public final class ParsingSQLRouter implements ShardingRouter { setGeneratedKeys(result, generatedKey); } RoutingResult routingResult = route(parameters, sqlStatement, shardingConditions); - SQLRewriteEngine rewriteEngine = new SQLRewriteEngine(shardingRule, logicSQL, databaseType, sqlStatement, shardingConditions, parameters); + SQLRewriteEngine rewriteEngine = new SQLRewriteEngine(shardingRule, logicSQL, databaseType, sqlStatement, shardingConditions, parameters, dataSourcePropertyManager); boolean isSingleRouting = routingResult.isSingleRouting(); if (sqlStatement instanceof SelectStatement && null != ((SelectStatement) sqlStatement).getLimit()) { processLimit(parameters, (SelectStatement) sqlStatement, isSingleRouting);
pass through DataSourcePropertyManager.
apache_incubator-shardingsphere
train
1dd7b386d0e1e943b16dd2f941e163e17b04687c
diff --git a/src/Providers/LighthouseServiceProvider.php b/src/Providers/LighthouseServiceProvider.php index <HASH>..<HASH> 100644 --- a/src/Providers/LighthouseServiceProvider.php +++ b/src/Providers/LighthouseServiceProvider.php @@ -18,7 +18,7 @@ class LighthouseServiceProvider extends ServiceProvider $this->mergeConfigFrom(__DIR__.'/../../config/config.php', 'lighthouse'); if (config('lighthouse.controller')) { - $this->loadRoutesFrom(__DIR__.'/../Support/Http/routes.php'); + require realpath(__DIR__.'/../Support/Http/routes.php'); } $this->registerSchema();
remove method (not available in Lumen)
nuwave_lighthouse
train
bc4590ad4f7c5633569215557c353a80c196217a
diff --git a/h2o-algos/src/test/java/hex/tree/drf/DRFTest.java b/h2o-algos/src/test/java/hex/tree/drf/DRFTest.java index <HASH>..<HASH> 100755 --- a/h2o-algos/src/test/java/hex/tree/drf/DRFTest.java +++ b/h2o-algos/src/test/java/hex/tree/drf/DRFTest.java @@ -1210,7 +1210,7 @@ public class DRFTest extends TestUtil { Scope.enter(); try { - tfr = parse_test_file("./smalldata/junit/cars.head10.csv"); + tfr = parse_test_file("./smalldata/junit/cars.csv"); for (String s : new String[]{ "name", }) { @@ -1233,7 +1233,7 @@ public class DRFTest extends TestUtil { gbm = job.trainModel().get(); ModelMetricsRegression mm = (ModelMetricsRegression)gbm._output._training_metrics; - assertEquals(0.4934239, mm.mse(), 1e-4); + assertEquals(0.12765426703095312, mm.mse(), 1e-4); job.remove(); } finally {
Fix DRF JUnit path issue.
h2oai_h2o-3
train
1c1570fdcac915232660758be02e91f636d2f16c
diff --git a/pyemma/util/_config.py b/pyemma/util/_config.py index <HASH>..<HASH> 100644 --- a/pyemma/util/_config.py +++ b/pyemma/util/_config.py @@ -17,6 +17,7 @@ from __future__ import absolute_import, print_function +import six from six.moves.configparser import ConfigParser import os import shutil @@ -32,6 +33,10 @@ import pkg_resources class ReadConfigException(Exception): pass +if six.PY2: + class NotADirectoryError(Exception): + pass + __all__ = ('Config', )
fix py2 issue (missing exception)
markovmodel_PyEMMA
train
9c2e5e74540b339f49b98fd74b149ea78d32ffb7
diff --git a/decidim-proposals/lib/decidim/proposals/component.rb b/decidim-proposals/lib/decidim/proposals/component.rb index <HASH>..<HASH> 100644 --- a/decidim-proposals/lib/decidim/proposals/component.rb +++ b/decidim-proposals/lib/decidim/proposals/component.rb @@ -108,7 +108,7 @@ Decidim.register_component(:proposals) do |component| component.register_stat :endorsements_count, priority: Decidim::StatsRegistry::MEDIUM_PRIORITY do |components, start_at, end_at| proposals = Decidim::Proposals::FilteredProposals.for(components, start_at, end_at).not_hidden - Decidim::Endorsement.where(resource_id: proposals.pluck(:id), resource_type: Decidim::Proposals::Proposal.name).count + proposals.sum(:endorsements_count) end component.register_stat :comments_count, tag: :comments do |components, start_at, end_at|
Simplify SQL query for endorsement stats (#<I>)
decidim_decidim
train
fde75762f9d11379f646aea06d03d354db989850
diff --git a/test/smart_titles/helper_test.rb b/test/smart_titles/helper_test.rb index <HASH>..<HASH> 100644 --- a/test/smart_titles/helper_test.rb +++ b/test/smart_titles/helper_test.rb @@ -7,9 +7,26 @@ class SmartTitlesHelperTest < ActionView::TestCase super @av = ActionView::Base.new @view_flow = ActionView::OutputFlow.new + end + + def inside_view @virtual_path = 'posts/new' end + def inside_layout + @virtual_path = 'layouts/application' + end + + def title(*args) + inside_view + super + end + + def head_title(*args) + inside_layout + super + end + def teardown I18n.backend.reload! end @@ -47,7 +64,6 @@ class SmartTitlesHelperTest < ActionView::TestCase def test_head_title_with_translated_title store_global_title store_page_title - assert_equal "New post", head_title title assert_equal "New post", head_title end @@ -76,6 +92,7 @@ class SmartTitlesHelperTest < ActionView::TestCase store_global_title store_page_title store_title_template + title assert_equal "d New post b", head_title end
Lame excuse! Caught the previous bug in test
semaperepelitsa_smart_titles
train
8d6041ef85d8d18a48a928d0e358b58da3495d02
diff --git a/structurizr-core/src/com/structurizr/AbstractWorkspace.java b/structurizr-core/src/com/structurizr/AbstractWorkspace.java index <HASH>..<HASH> 100644 --- a/structurizr-core/src/com/structurizr/AbstractWorkspace.java +++ b/structurizr-core/src/com/structurizr/AbstractWorkspace.java @@ -116,41 +116,6 @@ public abstract class AbstractWorkspace { } /** - * Gets the URL where the content of this workspace can be found. - * - * @return a URL (as a String) - */ - public String getSource() { - return source; - } - - /** - * Sets the URL where the content of this workspace can be found. - * - * @param source a URL, as a String - * @throws IllegalArgumentException if the URL is not a valid URL - */ - public void setSource(String source) { - if (source != null && source.trim().length() > 0) { - try { - URL url = new URL(source); - this.source = source; - } catch (MalformedURLException murle) { - throw new IllegalArgumentException(source + " is not a valid URL."); - } - } - } - - /** - * Determines whether this workspace has a source URL set. - * - * @return true if a source URL has been set, false otherwise - */ - public boolean hasSource() { - return this.source != null && this.source.trim().length() > 0; - } - - /** * Gets the URL of the API where the content of this workspace can be found. * * @return the URL, as a String diff --git a/structurizr-core/src/com/structurizr/encryption/EncryptedWorkspace.java b/structurizr-core/src/com/structurizr/encryption/EncryptedWorkspace.java index <HASH>..<HASH> 100644 --- a/structurizr-core/src/com/structurizr/encryption/EncryptedWorkspace.java +++ b/structurizr-core/src/com/structurizr/encryption/EncryptedWorkspace.java @@ -49,7 +49,6 @@ public final class EncryptedWorkspace extends AbstractWorkspace { setDescription(workspace.getDescription()); setVersion(workspace.getVersion()); setThumbnail(workspace.getThumbnail()); - setSource(workspace.getSource()); setApi(workspace.getApi()); this.plaintext = plaintext; diff --git a/structurizr-core/test/unit/com/structurizr/WorkspaceTests.java b/structurizr-core/test/unit/com/structurizr/WorkspaceTests.java index <HASH>..<HASH> 100644 --- a/structurizr-core/test/unit/com/structurizr/WorkspaceTests.java +++ b/structurizr-core/test/unit/com/structurizr/WorkspaceTests.java @@ -15,50 +15,6 @@ public class WorkspaceTests { private Workspace workspace = new Workspace("Name", "Description"); @Test - public void test_setSource_DoesNotThrowAnException_WhenANullUrlIsSpecified() { - workspace.setSource(null); - } - - @Test - public void test_setSource_DoesNotThrowAnException_WhenAnEmptyUrlIsSpecified() { - workspace.setSource(""); - } - - @Test - public void test_setSource_ThrowsAnException_WhenAnInvalidUrlIsSpecified() { - try { - workspace.setSource("www.somedomain.com"); - fail(); - } catch (Exception e) { - assertEquals("www.somedomain.com is not a valid URL.", e.getMessage()); - } - } - - @Test - public void test_setSource_DoesNotThrowAnException_WhenAnValidUrlIsSpecified() { - workspace.setSource("https://www.somedomain.com"); - assertEquals("https://www.somedomain.com", workspace.getSource()); - } - - @Test - public void test_hasSource_ReturnsFalse_WhenANullSourceIsSpecified() { - workspace.setSource(null); - assertFalse(workspace.hasSource()); - } - - @Test - public void test_hasSource_ReturnsFalse_WhenAnEmptySourceIsSpecified() { - workspace.setSource(" "); - assertFalse(workspace.hasSource()); - } - - @Test - public void test_hasSource_ReturnsTrue_WhenAUrlIsSpecified() { - 
workspace.setSource("https://www.somedomain.com"); - assertTrue(workspace.hasSource()); - } - - @Test public void test_setApi_DoesNotThrowAnException_WhenANullUrlIsSpecified() { workspace.setApi(null); }
Removed the workspace source property, as this feature is no longer supported.
structurizr_java
train
78fbca0a1c15f8c38fa482ecb8d316eacd3f5e6f
diff --git a/bmemcached/protocol.py b/bmemcached/protocol.py index <HASH>..<HASH> 100644 --- a/bmemcached/protocol.py +++ b/bmemcached/protocol.py @@ -444,6 +444,32 @@ class Protocol(threading.local): flags, value = struct.unpack('!L%ds' % (bodylen - 4, ), extra_content) return self.deserialize(value, flags), cas + + def noop(self): + """ + Send a NOOP command + + :return: Returns the status. + :rtype: int + """ + logger.debug('Sending NOOP') + data = struct.pack(self.HEADER_STRUCT + + self.COMMANDS['noop']['struct'], + self.MAGIC['request'], + self.COMMANDS['noop']['command'], + 0, 0, 0, 0, 0, 0, 0) + self._send(data) + + (magic, opcode, keylen, extlen, datatype, status, bodylen, opaque, + cas, extra_content) = self._get_response() + + logger.debug('Value Length: %d. Body length: %d. Data type: %d', + extlen, bodylen, datatype) + + if status != self.STATUS['success']: + logger.debug('NOOP failed (status is %d). Message: %s' % (status, extra_content)) + + return int(status) def get_multi(self, keys): """
Added NOOP support (#<I>). We need some basic `is_alive()` function for our memcache client. We currently just call `set()` on a special key and check the result; a NOOP would be better
jaysonsantos_python-binary-memcached
train
e8e9ad614cb35cadb4a49bf4d7e102f09933bc4c
diff --git a/components/dashboards-web-component/src/utils/WidgetClassRegistry.js b/components/dashboards-web-component/src/utils/WidgetClassRegistry.js index <HASH>..<HASH> 100644 --- a/components/dashboards-web-component/src/utils/WidgetClassRegistry.js +++ b/components/dashboards-web-component/src/utils/WidgetClassRegistry.js @@ -16,6 +16,8 @@ * under the License. */ +import Widget from '@wso2-dashboards/widget'; + /** * Registry that maintains widget classes against their names. */ @@ -67,7 +69,7 @@ export default instance; * @private */ function extendsFromDeprecatedWidgetClassVersion(widgetClass) { - return !Object.getPrototypeOf(widgetClass.prototype).version; + return Object.getPrototypeOf(widgetClass.prototype).constructor.version !== Widget.version; } /** @@ -79,17 +81,7 @@ function extendsFromDeprecatedWidgetClassVersion(widgetClass) { function patchWidgetClass(widgetClass) { const superWidgetClassPrototype = Object.getPrototypeOf(widgetClass.prototype); // Patch subscribe method. - superWidgetClassPrototype.subscribe = function (listenerCallback, publisherId, context) { - const self = this; - if (!publisherId) { - const publisherIds = self.props.configs.pubsub.publishers; - if (publisherIds && Array.isArray(publisherIds)) { - publisherIds.forEach(id => self.props.glEventHub.on(id, listenerCallback)); - } - } else { - self.props.glEventHub.on(publisherId, listenerCallback, context); - } - }; + superWidgetClassPrototype.subscribe = Widget.prototype.subscribe; } global.dashboard = {};
Use base Widget class to check version and patch subscribe method.
wso2_carbon-dashboards
train
e79193f852238816a9eeb26fbf1e94b369911718
diff --git a/activesupport/test/time_zone_test.rb b/activesupport/test/time_zone_test.rb index <HASH>..<HASH> 100644 --- a/activesupport/test/time_zone_test.rb +++ b/activesupport/test/time_zone_test.rb @@ -274,6 +274,11 @@ class TimeZoneTest < Test::Unit::TestCase assert_raise(ArgumentError) { ActiveSupport::TimeZone[false] } end + def test_unknown_zone_shouldnt_have_tzinfo + zone = ActiveSupport::TimeZone.create("bogus") + assert_nil zone.tzinfo + end + def test_new assert_equal ActiveSupport::TimeZone["Central Time (US & Canada)"], ActiveSupport::TimeZone.new("Central Time (US & Canada)") end
Test for unknown zone with nil utc_offset
rails_rails
train
9382684a6d2e11dd25e41a31b455bcbdc434d0df
diff --git a/lib/protocol/Connection.js b/lib/protocol/Connection.js index <HASH>..<HASH> 100644 --- a/lib/protocol/Connection.js +++ b/lib/protocol/Connection.js @@ -28,6 +28,7 @@ var part = require('./part'); var MessageType = common.MessageType; var MessageTypeName = common.MessageTypeName; var SegmentKind = common.SegmentKind; +var ErrorLevel = common.ErrorLevel; var PartKind = common.PartKind; var bignum = util.bignum; var debug = util.debuglog('hdb'); @@ -289,8 +290,12 @@ Connection.prototype.receive = function receive(buffer, cb) { reply = this._parseReplySegment(buffer); this.setStatementContext(reply.statementContext); this.setTransactionFlags(reply.transactionFlags); - if (reply.kind === SegmentKind.ERROR) { - error = reply.error; + if (reply.kind === SegmentKind.ERROR && util.isObject(reply.error)) { + if (reply.error.level === ErrorLevel.WARNING) { + this.emit('warning', reply.error); + } else { + error = reply.error; + } } else if (this._transaction.error) { error = this._transaction.error; } diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -5,7 +5,7 @@ }, "name": "hdb", "description": "SAP HANA Database Client for Node", - "version": "0.5.2", + "version": "0.5.3", "repository": { "type": "git", "url": "git://github.com/SAP/node-hdb.git" diff --git a/test/lib.Connection.js b/test/lib.Connection.js index <HASH>..<HASH> 100644 --- a/test/lib.Connection.js +++ b/test/lib.Connection.js @@ -21,6 +21,7 @@ var Connection = lib.Connection; var MessageType = lib.common.MessageType; var FunctionCode = lib.common.FunctionCode; var SegmentKind = lib.common.SegmentKind; +var ErrorLevel = lib.common.ErrorLevel; var PartKind = lib.common.PartKind; function connect(options, connectListener) { @@ -381,5 +382,25 @@ describe('Lib', function () { queue.resume(); }); + it('should receive a warning', function () { + var connection = createConnection(); + var replySegment = { + kind: SegmentKind.ERROR, + error: new Error('WARNING') + }; + replySegment.error.level = ErrorLevel.WARNING; + connection._parseReplySegment = function parseReplySegment() { + return replySegment; + }; + connection.receive(new Buffer(0), function (err, reply) { + (!!err).should.be.not.ok; + reply.should.equal(replySegment); + }); + connection.once('warning', function onwarning(warning) { + warning.should.equal(replySegment.error); + done(); + }) + }); + }); }); \ No newline at end of file
do not treat an error segment with only a warning as an error. This commit closes #<I>.
SAP_node-hdb
train
fcb50cdf195a39887c979cedd70214c829665806
diff --git a/src/javascript/lib/core/erlang_compat/lists.js b/src/javascript/lib/core/erlang_compat/lists.js index <HASH>..<HASH> 100644 --- a/src/javascript/lib/core/erlang_compat/lists.js +++ b/src/javascript/lib/core/erlang_compat/lists.js @@ -41,10 +41,10 @@ function foldr(fun, acc0, list) { return foldl(fun, acc0, reverse(list)); } -function keyfind(key, n, tupleList) { - for (const tuple of tupleList) { - if (tuple.get(n - 1) === key) { - return tuple; +function keyfind(key, n, list) { + for (const ele of list) { + if (ele instanceof ErlangTypes.Tuple && ele.get(n - 1) === key) { + return ele; } } diff --git a/src/javascript/tests/core/erlang_compat/lists_spec.js b/src/javascript/tests/core/erlang_compat/lists_spec.js index <HASH>..<HASH> 100644 --- a/src/javascript/tests/core/erlang_compat/lists_spec.js +++ b/src/javascript/tests/core/erlang_compat/lists_spec.js @@ -23,3 +23,19 @@ test('foldl', (t) => { test('foldr', (t) => { t.deepEqual(Core.lists.foldr((v, acc) => acc + v.toString(), '', [1, 2, 3]), '321'); }); + +test('member/2', (t) => { + let result = Core.lists.member('abc', ['abc']); + t.deepEqual(result, true); + + result = Core.lists.member('abc', ['abcd']); + t.deepEqual(result, false); +}); + +test('keyfind/3', (t) => { + let result = Core.lists.keyfind('abc', 1, ['abc']); + t.deepEqual(result, false); + + result = Core.lists.keyfind('abc', 1, [{'abc'}]); + t.deepEqual(result, true); +});
fix lists:keyfind/3, don't assume member is a tuple
elixirscript_elixirscript
train
f4ac42f681163d293e38e16e0264c20613f7f3ec
diff --git a/openid/server/trustroot.py b/openid/server/trustroot.py index <HASH>..<HASH> 100644 --- a/openid/server/trustroot.py +++ b/openid/server/trustroot.py @@ -233,9 +233,6 @@ class TrustRoot(object): @rtype: C{NoneType} or C{L{TrustRoot}} """ - if not isinstance(trust_root, (str, unicode)): - return None - url_parts = _parseURL(trust_root) if url_parts is None: return None
[project @ server.trustroot.TrustRoot.parse: don't return None if other types are passed in.] If code is passing in funny values for this, I don't want silent failure.
necaris_python3-openid
train
d6dd5f3a23add0b9799ff30ed7da783f9944adeb
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -296,6 +296,7 @@ function ngraphSvg(graph, settings) { function onNodePan(pos, model) { return function onNodePan(e) { var clickPosition = getModelPosition(e.center); + resetStable(); if (e.type === 'panmove') { var status = panSession[model.id];
Reset stable when user moves node
anvaka_ngraph.svg
train
fb79110d1aee5f732d0e2944d6109a5294cceae3
diff --git a/library/CM/Process.php b/library/CM/Process.php index <HASH>..<HASH> 100644 --- a/library/CM/Process.php +++ b/library/CM/Process.php @@ -57,10 +57,16 @@ class CM_Process { return (false !== posix_getsid($processId)); } - public function killChildren() { + /** + * @param float|null $timeoutKill + */ + public function killChildren($timeoutKill = null) { + if (null === $timeoutKill) { + $timeoutKill = 30; + } + $timeoutKill = (float) $timeoutKill; $signal = SIGTERM; $timeStart = microtime(true); - $timeoutKill = 30; $timeoutReached = false; $timeOutput = $timeStart; diff --git a/tests/library/CM/ProcessTest.php b/tests/library/CM/ProcessTest.php index <HASH>..<HASH> 100644 --- a/tests/library/CM/ProcessTest.php +++ b/tests/library/CM/ProcessTest.php @@ -120,6 +120,39 @@ Parent terminated. } /** + * @runInSeparateProcess + * @preserveGlobalState disabled + */ + public function testKillChildrenSigKill() { + $loopEcho = function () { + while (true) { + usleep(50 * 1000); + echo "hello\n"; + } + }; + $loopEchoStayingAlive = function () { + pcntl_signal(SIGTERM, function () { + }, false); + while (true) { + usleep(50 * 1000); + echo "hello\n"; + } + }; + + $process = CM_Process::getInstance(); + $process->fork($loopEcho); + $process->fork($loopEchoStayingAlive); + $pidListBefore = $this->_getChildrenPidList(); + + $timeStart = microtime(true); + $process->killChildren(0.5); + + $this->assertCount(2, $pidListBefore); + $this->assertCount(0, $this->_getChildrenPidList()); + $this->assertSameTime(0.5, microtime(true) - $timeStart, 0.1); + } + + /** * @param string $message */ public static function writeln($message) {
Add test for forceful child process killing
cargomedia_cm
train
0a17a4e7dd9e09198ffecfe33064e2360d0463ce
diff --git a/zipkin-autoconfigure/storage-mysql/src/main/java/zipkin/autoconfigure/storage/mysql/ZipkinMySQLStorageAutoConfiguration.java b/zipkin-autoconfigure/storage-mysql/src/main/java/zipkin/autoconfigure/storage/mysql/ZipkinMySQLStorageAutoConfiguration.java index <HASH>..<HASH> 100644 --- a/zipkin-autoconfigure/storage-mysql/src/main/java/zipkin/autoconfigure/storage/mysql/ZipkinMySQLStorageAutoConfiguration.java +++ b/zipkin-autoconfigure/storage-mysql/src/main/java/zipkin/autoconfigure/storage/mysql/ZipkinMySQLStorageAutoConfiguration.java @@ -1,5 +1,5 @@ /** - * Copyright 2015-2016 The OpenZipkin Authors + * Copyright 2015-2017 The OpenZipkin Authors * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at @@ -13,7 +13,6 @@ */ package zipkin.autoconfigure.storage.mysql; -import com.zaxxer.hikari.HikariDataSource; import java.util.concurrent.Executor; import javax.sql.DataSource; import org.jooq.ExecuteListenerProvider; @@ -49,19 +48,7 @@ public class ZipkinMySQLStorageAutoConfiguration { } @Bean @ConditionalOnMissingBean(DataSource.class) DataSource mysqlDataSource() { - StringBuilder url = new StringBuilder("jdbc:mysql://"); - url.append(mysql.getHost()).append(":").append(mysql.getPort()); - url.append("/").append(mysql.getDb()); - url.append("?autoReconnect=true"); - url.append("&useSSL=").append(mysql.isUseSsl()); - url.append("&useUnicode=yes&characterEncoding=UTF-8"); - HikariDataSource result = new HikariDataSource(); - result.setDriverClassName("org.mariadb.jdbc.Driver"); - result.setJdbcUrl(url.toString()); - result.setMaximumPoolSize(mysql.getMaxActive()); - result.setUsername(mysql.getUsername()); - result.setPassword(mysql.getPassword()); - return result; + return mysql.toDataSource(); } @Bean StorageComponent storage(Executor executor, DataSource dataSource, diff --git a/zipkin-autoconfigure/storage-mysql/src/main/java/zipkin/autoconfigure/storage/mysql/ZipkinMySQLStorageProperties.java b/zipkin-autoconfigure/storage-mysql/src/main/java/zipkin/autoconfigure/storage/mysql/ZipkinMySQLStorageProperties.java index <HASH>..<HASH> 100644 --- a/zipkin-autoconfigure/storage-mysql/src/main/java/zipkin/autoconfigure/storage/mysql/ZipkinMySQLStorageProperties.java +++ b/zipkin-autoconfigure/storage-mysql/src/main/java/zipkin/autoconfigure/storage/mysql/ZipkinMySQLStorageProperties.java @@ -13,7 +13,9 @@ */ package zipkin.autoconfigure.storage.mysql; +import com.zaxxer.hikari.HikariDataSource; import java.io.Serializable; +import javax.sql.DataSource; import org.springframework.boot.context.properties.ConfigurationProperties; @ConfigurationProperties("zipkin.storage.mysql") @@ -83,4 +85,20 @@ public class ZipkinMySQLStorageProperties implements Serializable { // for Spark public void setUseSsl(boolean useSsl) { this.useSsl = useSsl; } + + public DataSource toDataSource() { + StringBuilder url = new StringBuilder("jdbc:mysql://"); + url.append(getHost()).append(":").append(getPort()); + url.append("/").append(getDb()); + url.append("?autoReconnect=true"); + url.append("&useSSL=").append(isUseSsl()); + url.append("&useUnicode=yes&characterEncoding=UTF-8"); + HikariDataSource result = new HikariDataSource(); + result.setDriverClassName("org.mariadb.jdbc.Driver"); + result.setJdbcUrl(url.toString()); + result.setMaximumPoolSize(getMaxActive()); + result.setUsername(getUsername()); + result.setPassword(getPassword()); + return result; + } }
Exposes property-derived MySQL datasource so sparkstreaming can use it (#<I>). The only thing ZipkinMySQLStorageProperties is used for is creating a datasource. This exposes that so that the logic doesn't have to be copy-pasted into zipkin-sparkstreaming.
apache_incubator-zipkin
train
119c2ffc8d1e4da119c5bc10e969d1869401d87a
diff --git a/lib/api.js b/lib/api.js index <HASH>..<HASH> 100755 --- a/lib/api.js +++ b/lib/api.js @@ -1,6 +1,5 @@ var irc = require(__dirname + '/irc'), - stub = require(__dirname + '/stub.js'), - streamer = require(__dirname + '/streamer.js'), + readWriteStream = require(__dirname + '/stub.js').ReadWriteNetStream, Events = irc.Events, Client = irc.Client; @@ -28,12 +27,62 @@ Api.prototype.createClient = function(key, object, dummy) { key: key, options: object, dummy: dummy, - + irc: new Client(key, object, socket), + events: {} }; - this._clients[key] = new Client(key, object, socket); + this._clients[key] = ret; + + return ret; +}; +// ======================================== + +// ======================================== +// the following functions handle hooking onto events +// and unhooking them + +Api.prototype.hookEvent = function(key, e, callback, once) { + var once = once || false; + // check for once at the end, if so only apply event once + + if (!key in this._clients) { + return false; + } + // no client, just exit quietly + + var client = this._clients[key]; + // find the client in our clients object + + if (!once) { + client.events[e] = callback; + } + // push an event to the array so we know how many we've got + + if (once) { + Events.once([client.key, e], callback); + } else { + Events.on([client.key, e], callback); + } + // add the hook +}; + +Api.prototype.unhookEvent = function(key, e) { + if (!key in this._clients) { + return false; + } + // no client, just exit quietly + + var client = this._clients[key]; + // find the client in our clients object + + if (e in client.events) { + delete client.events[e]; + } + // delete the event if it exists, if it doesn't exist it's been pushed on once + // so we'll never need to call unhookEvent() - + Events.off([client.key, e], callback); + // add the hook }; // ======================================== diff --git a/lib/irc.js b/lib/irc.js index <HASH>..<HASH> 100755 --- a/lib/irc.js +++ b/lib/irc.js @@ -10,6 +10,8 @@ var Events = new EventEmitter2({ wildcard: true, maxListeners: 0 }); + +Events.setMaxListeners(0); // we create an event emitter here, but only one, and use it for every irc client we initiate // that way we don't have 2 event emitters for each client and use an insane amount of memory // when we scale up to hundreds of clients, we use namespaces for individual clients @@ -37,12 +39,16 @@ function Client(key, options, GenericSocket) { self.connection.connect(); // connect the socket - self.connection.once('ready', function () { + self.connection.on('ready', function () { Events.emit([self.key, 'ready'], {}); // we just send an empty object here, this is just an identifier to say we're connected and ready // do what you want with me yeah baby. }); + self.connection.on('close', function () { + Events.emit([self.key, 'close'], {}); + }); + self.connection.on('data', function (line) { var message = self._parse(line); @@ -596,6 +602,10 @@ Client.prototype._ircEvents = { // ctcp or normal message? // the + infront of it xchat was sending? Not sure why, but we accomodate for that now. 
}, + + PING: function(message) { + Events.emit([this.key, 'ping'], {timestamp: +new Date()}); + } }; // ======================================== diff --git a/lib/main.js b/lib/main.js index <HASH>..<HASH> 100755 --- a/lib/main.js +++ b/lib/main.js @@ -1,6 +1,6 @@ var api = require(__dirname + '/api').api; -api.createClient('unique-key', { +var cli = api.createClient('unique-key', { server: 'irc.freenode.net', nick: 'simpleircsocket', user: 'node', @@ -12,8 +12,6 @@ api.createClient('unique-key', { password: null }, false); -/*var client = new Client('unique-key', ); - -Events.on('*.ctcp_request', function(m) { +api.hookEvent('unique-key', 'capabilities', function(m) { console.log(m); -});*/ \ No newline at end of file +}); \ No newline at end of file
Added event hooks in the API, also added PING event in case someone wants to set up a timer, for example
ircanywhere_irc-factory
train
f5b3f73051938781f5c9d63630a008b2c0392e7e
diff --git a/lib/src/main/java/com/auth0/android/lock/errors/LoginAuthenticationErrorBuilder.java b/lib/src/main/java/com/auth0/android/lock/errors/LoginAuthenticationErrorBuilder.java index <HASH>..<HASH> 100644 --- a/lib/src/main/java/com/auth0/android/lock/errors/LoginAuthenticationErrorBuilder.java +++ b/lib/src/main/java/com/auth0/android/lock/errors/LoginAuthenticationErrorBuilder.java @@ -66,7 +66,7 @@ public class LoginAuthenticationErrorBuilder implements AuthenticationErrorBuild if (throwable instanceof APIException) { APIException exception = (APIException) throwable; Map errorResponse = exception.getResponseError(); - final String errorCode = (String) errorResponse.get(ERROR_KEY); + final String errorCode = errorResponse.containsKey(ERROR_KEY) ? (String) errorResponse.get(ERROR_KEY) : (String) errorResponse.get(CODE_KEY); final String errorDescription = (String) errorResponse.get(ERROR_DESCRIPTION_KEY); if (INVALID_USER_PASSWORD_ERROR.equalsIgnoreCase(errorCode)) { return new AuthenticationError(invalidCredentialsResource, ErrorType.INVALID_CREDENTIALS, throwable);
consider 'code' as well as 'error' key when parsing login errors
auth0_Lock.Android
train
bf9986d577ddd4925b36ae5ace8812f2ffec80aa
diff --git a/src/com/opencms/core/CmsClassLoader.java b/src/com/opencms/core/CmsClassLoader.java index <HASH>..<HASH> 100755 --- a/src/com/opencms/core/CmsClassLoader.java +++ b/src/com/opencms/core/CmsClassLoader.java @@ -1,8 +1,8 @@ /* * File : $Source: /alkacon/cvs/opencms/src/com/opencms/core/Attic/CmsClassLoader.java,v $ -* Date : $Date: 2001/02/20 15:20:17 $ -* Version: $Revision: 1.21 $ +* Date : $Date: 2001/02/20 15:29:51 $ +* Version: $Revision: 1.22 $ * * Copyright (C) 2000 The OpenCms Group * @@ -107,7 +107,7 @@ import java.lang.reflect.*; * with a parent classloader. Normally this should be the classloader * that loaded this loader. * @author Alexander Lucas - * @version $Revision: 1.21 $ $Date: 2001/02/20 15:20:17 $ + * @version $Revision: 1.22 $ $Date: 2001/02/20 15:29:51 $ * @see java.lang.ClassLoader */ public class CmsClassLoader extends ClassLoader implements I_CmsLogChannels { @@ -144,6 +144,9 @@ public class CmsClassLoader extends ClassLoader implements I_CmsLogChannels { */ private Vector m_filenames = new Vector(); + /** + * Some Objects we need to read classes from the virtual file system. + */ private Object m_cms; private Class m_cmsObjectClass; private Class m_cmsFileClass; @@ -184,6 +187,7 @@ public class CmsClassLoader extends ClassLoader implements I_CmsLogChannels { + " is not a valid \"String\" instance"); } } + // get the method we need to read files from opencms. try{ m_cmsObjectClass = Class.forName("com.opencms.file.CmsObject", true, this); m_cmsFileClass = Class.forName("com.opencms.file.CmsFile", true, this); @@ -489,6 +493,12 @@ public class CmsClassLoader extends ClassLoader implements I_CmsLogChannels { } + /** + * Returns all the files loaded from the virtual file system + * this is for checking if the classes are changed. + * + * @return a Vector with Strings containing absolute path of the classes. + */ public Vector getFilenames(){ return m_filenames; }
working on the reload function of the CmsClassloader
alkacon_opencms-core
train
4d6fc64485ddc2d6259016a78a97d51351180157
diff --git a/src/main/java/org/cactoos/iterator/Endless.java b/src/main/java/org/cactoos/iterator/Endless.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/cactoos/iterator/Endless.java +++ b/src/main/java/org/cactoos/iterator/Endless.java @@ -45,11 +45,6 @@ public final class Endless<T> implements Iterator<T> { private final Unchecked<T> origin; /** - * Identifies if element or scalar is null. - */ - private final boolean nullable; - - /** * Ctor. * @param element Element to repeat */ @@ -71,19 +66,18 @@ public final class Endless<T> implements Iterator<T> { */ public Endless(final Unchecked<T> scalar) { this.origin = scalar; - this.nullable = this.origin == null || this.origin.value() == null; } @Override public boolean hasNext() { - return !this.nullable; + return this.origin.value() != null; } @Override public T next() { if (!this.hasNext()) { throw new NoSuchElementException( - "The iterator doesn't have items any more" + "The iterator doesn't have item" ); } return this.origin.value(); diff --git a/src/test/java/org/cactoos/iterable/EndlessTest.java b/src/test/java/org/cactoos/iterable/EndlessTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/org/cactoos/iterable/EndlessTest.java +++ b/src/test/java/org/cactoos/iterable/EndlessTest.java @@ -38,7 +38,7 @@ import org.llorllale.cactoos.matchers.Throws; public final class EndlessTest { @Test - public void endlessIterableTest() throws Exception { + public void endlessIterableTest() { MatcherAssert.assertThat( "Can't get unique endless iterable item", new Endless<>(1), @@ -47,7 +47,7 @@ public final class EndlessTest { } @Test - public void noElementTest() throws Exception { + public void elementsIsNullTest() { new Assertion<>( "Must get sliced iterable of elements", () -> new Endless<>(null).iterator().next(),
(#<I>) Refactored endless iterator
yegor256_cactoos
train
0e8d844f110f9de453ba15c7623af789674fe6d4
diff --git a/src/main/java/org/asteriskjava/manager/ManagerConnection.java b/src/main/java/org/asteriskjava/manager/ManagerConnection.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/asteriskjava/manager/ManagerConnection.java +++ b/src/main/java/org/asteriskjava/manager/ManagerConnection.java @@ -209,6 +209,10 @@ public interface ManagerConnection * handler to be called when the corresponding {@link ManagerResponse} * is received. * + * Be very careful that your callbackHandler terminates very quickly and + * does not do any fancy processing because it is called from the reader thread + * which is blocked for the time it takes to execute your callbackHandler. + * * @param action the action to send to the Asterisk server * @param callbackHandler the callback handler to call when the response is * received or <code>null</code> if you are not interested in
Added warning about using the async version of sendAction
asterisk-java_asterisk-java
train
c2730cf18227fda2362adf0f66c8892bd89480fc
diff --git a/src/MaglMarkdown/Adapter/GithubMarkdownAdapter.php b/src/MaglMarkdown/Adapter/GithubMarkdownAdapter.php index <HASH>..<HASH> 100644 --- a/src/MaglMarkdown/Adapter/GithubMarkdownAdapter.php +++ b/src/MaglMarkdown/Adapter/GithubMarkdownAdapter.php @@ -44,10 +44,10 @@ class GithubMarkdownAdapter implements MarkdownAdapterInterface public function transformText($text) { - $requestArray = [ + $requestArray = array( 'text' => $text, 'mode' => $this->options->getMarkdownMode(), - ]; + ); $this->request->setUri($this->options->getMarkdownApiUri()); $this->request->setMethod(Request::METHOD_POST); diff --git a/src/MaglMarkdown/Cache/CacheListener.php b/src/MaglMarkdown/Cache/CacheListener.php index <HASH>..<HASH> 100644 --- a/src/MaglMarkdown/Cache/CacheListener.php +++ b/src/MaglMarkdown/Cache/CacheListener.php @@ -39,8 +39,8 @@ class CacheListener implements ListenerAggregateInterface */ public function attach(EventManagerInterface $events, $priority = 1) { - $this->listeners[] = $events->attach('markdown.render.pre', [$this, 'preRender'], $priority); - $this->listeners[] = $events->attach('markdown.render.post', [$this, 'postRender'], $priority); + $this->listeners[] = $events->attach('markdown.render.pre', array($this, 'preRender'), $priority); + $this->listeners[] = $events->attach('markdown.render.post', array($this, 'postRender'), $priority); } /** diff --git a/src/MaglMarkdown/Service/Markdown.php b/src/MaglMarkdown/Service/Markdown.php index <HASH>..<HASH> 100644 --- a/src/MaglMarkdown/Service/Markdown.php +++ b/src/MaglMarkdown/Service/Markdown.php @@ -39,7 +39,7 @@ class Markdown public function render($markdown) { // first check if there's something within the cache - $cachedMarkdown = $this->triggerEvent('markdown.render.pre', ['markdown' => $markdown]); + $cachedMarkdown = $this->triggerEvent('markdown.render.pre', array('markdown' => $markdown)); if (false !== $cachedMarkdown) { return $cachedMarkdown; } @@ -49,10 +49,10 @@ class Markdown $renderedMarkdown = $this->markdownAdapter->transformText($markdown); // save the rendered markdown to the cache - $this->triggerEvent('markdown.render.post', [ + $this->triggerEvent('markdown.render.post', array( 'markdown' => $markdown, 'renderedMarkdown' => $renderedMarkdown, - ]); + )); return $renderedMarkdown; } diff --git a/tests/MaglMarkdownTest/ModuleTest.php b/tests/MaglMarkdownTest/ModuleTest.php index <HASH>..<HASH> 100644 --- a/tests/MaglMarkdownTest/ModuleTest.php +++ b/tests/MaglMarkdownTest/ModuleTest.php @@ -63,7 +63,8 @@ class ModuleTest extends \PHPUnit_Framework_TestCase public function testGetServiceFactories() { - $config = $this->instance->getConfig()['service_manager']; + $config = $this->instance->getConfig(); + $config = $config['service_manager']; $this->assertTrue(array_key_exists('factories', $config)); $this->assertTrue(array_key_exists('MaglMarkdown\Adapter\GithubMarkdownAdapter', $config['factories']));
Removed short array syntax for PHP <I> compatibility
maglnet_MaglMarkdown
train
4d1dbbc476d1235c6644d0d1871427e268af5a29
diff --git a/src/Client.php b/src/Client.php index <HASH>..<HASH> 100644 --- a/src/Client.php +++ b/src/Client.php @@ -856,7 +856,7 @@ class Client */ public function execResize($exec, $w, $h) { - return $this->browser->get( + return $this->browser->post( $this->uri->expand( '/exec/{exec}/resize{?w,h}', array( diff --git a/tests/ClientTest.php b/tests/ClientTest.php index <HASH>..<HASH> 100644 --- a/tests/ClientTest.php +++ b/tests/ClientTest.php @@ -357,7 +357,7 @@ class ClientTest extends TestCase public function testExecResize() { - $this->expectRequestFlow('get', '/exec/123/resize?w=800&h=600', $this->createResponse(), 'expectEmpty'); + $this->expectRequestFlow('POST', '/exec/123/resize?w=800&h=600', $this->createResponse(), 'expectEmpty'); $this->expectPromiseResolveWith('', $this->client->execResize(123, 800, 600)); }
Fix execResize() to issue POST request
clue_php-docker-react
train
12dbc61d6ff2c394237c4fd1a3a73a8813c2e834
diff --git a/src/pyrocore/scripts/rtcontrol.py b/src/pyrocore/scripts/rtcontrol.py index <HASH>..<HASH> 100644 --- a/src/pyrocore/scripts/rtcontrol.py +++ b/src/pyrocore/scripts/rtcontrol.py @@ -468,28 +468,29 @@ class RtorrentControl(ScriptBaseWithConfig): self.parser.error("No filter conditions given!") # Check special action options - action = None + actions = [] if self.options.ignore: - action = Bunch(name="ignore", method="ignore", label="IGNORE" if int(self.options.ignore) else "HEED", - help="commands on torrent", interactive=False, args=(self.options.ignore,)) + actions.append(Bunch(name="ignore", method="ignore", label="IGNORE" if int(self.options.ignore) else "HEED", + help="commands on torrent", interactive=False, args=(self.options.ignore,))) # Check standard action options # TODO: Allow certain combinations of actions (like --tag foo --stop etc.) # How do we get a sensible order of execution? for action_mode in self.ACTION_MODES: if getattr(self.options, action_mode.name): - if action: + if actions: self.parser.error("Options --%s and --%s are mutually exclusive" % ( - action.name.replace('_', '-'), action_mode.name.replace('_', '-'), + ", --".join(i.name.replace('_', '-') for i in actions), + action_mode.name.replace('_', '-'), )) - action = action_mode - if action.argshelp: - action.args = (getattr(self.options, action.name),) - if not action and self.options.flush: - action = Bunch(name="flush", method="flush", label="FLUSH", - help="flush session data", interactive=False, args=()) + if action_mode.argshelp: + action_mode.args = (getattr(self.options, action_mode.name),) + actions.append(action_mode) + if not actions and self.options.flush: + actions.append(Bunch(name="flush", method="flush", label="FLUSH", + help="flush session data", interactive=False, args=())) self.options.flush = False # No need to flush twice - if action and action.interactive: + if any(i.interactive for i in actions): self.options.interactive = True selection = None @@ -509,7 +510,7 @@ class RtorrentControl(ScriptBaseWithConfig): # Preparation steps default_output_format = "default" - if action: + if actions: default_output_format = "action_cron" if self.options.cron else "action" self.validate_output_format(default_output_format) sort_key = self.validate_sort_fields() @@ -568,7 +569,8 @@ class RtorrentControl(ScriptBaseWithConfig): summary.add(field, getattr(item, field)) # Execute action? - if action: + if actions: + action = actions[0] # TODO: loop over it self.LOG.log(logging.DEBUG if self.options.cron else logging.INFO, "%s %s %d out of %d torrents." % ( "Would" if self.options.dry_run else "About to", action.label, len(matches), view.size(), ))
refactoring: changes to allow more than one action (in theory, so far)
pyroscope_pyrocore
train
baacb35ce739ec0db3142370344d941ff0d95a73
diff --git a/examples/analyze_mef.py b/examples/analyze_mef.py index <HASH>..<HASH> 100644 --- a/examples/analyze_mef.py +++ b/examples/analyze_mef.py @@ -156,11 +156,13 @@ if __name__ == "__main__": # histograms below. In this case, we will plot the forward/side scatter # channels in the density plot, and the fluorescence channels FL1 and FL3 # below as two separate histograms. - # Note that we are using data both before (``beads_sample``) and after - # (``beads_sample_gated``) gating. In addition, we are providing - # ``gate_contour`` from the density gating step, which will be displayed in - # the density diagram. This will result in a convenient representation of - # the data both before and after gating. + # Note that we are providing data both before (``beads_sample``) and after + # (``beads_sample_gated``) gating. Each fluorescence histogram will display + # the ungated dataset with transparency, and the gated dataset in front with + # a solid color. In addition, we are providing ``gate_contour`` from the + # density gating step, which will be displayed in the density diagram. This + # will result in a convenient representation of the data both before and + # after gating. FlowCal.plot.density_and_hist( beads_sample, beads_sample_gated,
Improved comments on density_and_hist in example script.
taborlab_FlowCal
train
a1c48a752f8d4749cc047830f16d74f12ce3170e
diff --git a/src/Composer/PackageManager.php b/src/Composer/PackageManager.php index <HASH>..<HASH> 100644 --- a/src/Composer/PackageManager.php +++ b/src/Composer/PackageManager.php @@ -412,7 +412,7 @@ class PackageManager 'type' => $json['type'], 'descrip' => $json['description'], 'authors' => $json['authors'], - 'keywords' => $json['keywords'], + 'keywords' => !empty($json['keywords']) ? $json['keywords'] : '', ); } else { $packages['local'][] = array(
Don't break when keywords aren't set.
bolt_bolt
train
6f29268ea2affd4a3876666b051a7d5af0e6df6b
diff --git a/lib/rspec/active_model/mocks/mocks.rb b/lib/rspec/active_model/mocks/mocks.rb index <HASH>..<HASH> 100644 --- a/lib/rspec/active_model/mocks/mocks.rb +++ b/lib/rspec/active_model/mocks/mocks.rb @@ -52,6 +52,10 @@ module RSpec::ActiveModel::Mocks send(key) end + # Rails>4.2 uses _read_attribute internally, as an optimized + # alternative to record['id'] + alias_method :_read_attribute, :[] + # Returns the opposite of `persisted?` def new_record? !persisted?
Fix mocking belongs_to associations in Rails <I>+
rspec_rspec-activemodel-mocks
train
6f27317b7499c47d67fa11200afd6151a265baa0
diff --git a/mama_cas/tests/test_utils.py b/mama_cas/tests/test_utils.py index <HASH>..<HASH> 100644 --- a/mama_cas/tests/test_utils.py +++ b/mama_cas/tests/test_utils.py @@ -7,6 +7,7 @@ from mama_cas.utils import clean_service_url from mama_cas.utils import is_valid_service_url from mama_cas.utils import redirect from mama_cas.utils import to_bool +from mama_cas.utils import get_callable class UtilsTests(TestCase): @@ -100,3 +101,12 @@ class UtilsTests(TestCase): self.assertFalse(to_bool(None)) self.assertFalse(to_bool('')) self.assertFalse(to_bool(' ')) + + def test_get_callable_invalid_path(self): + """An invalid callable path should raise an ``ImportError``.""" + self.assertRaises(ImportError, get_callable, 'invalid_import_path') + + def test_get_callable_invalid_import(self): + """An invalid callable should raise an ``ImportError``.""" + self.assertRaises(ImportError, get_callable, + 'mama_cas.tests.callback.invalid_callback')
Test import failure paths for get_callable
jbittel_django-mama-cas
train
b38972ceacf9d9b4ecee39b814096e46944a6222
diff --git a/peewee.py b/peewee.py index <HASH>..<HASH> 100644 --- a/peewee.py +++ b/peewee.py @@ -1277,7 +1277,9 @@ class TimestampField(IntegerField): if value == 0: return elif self.resolution > 1: - value, microseconds = divmod(value, self.resolution) + ticks_to_microsecond = 1000000 // self.resolution + value, ticks = divmod(value, self.resolution) + microseconds = ticks * ticks_to_microsecond return self._conv(value).replace(microsecond=microseconds) else: return self._conv(value)
fixed a problem with TimestampField resolution
coleifer_peewee
train
594a3abdec383b55ce32a9e960263f55b41318e2
diff --git a/src/Illuminate/Cookie/Middleware/EncryptCookies.php b/src/Illuminate/Cookie/Middleware/EncryptCookies.php index <HASH>..<HASH> 100644 --- a/src/Illuminate/Cookie/Middleware/EncryptCookies.php +++ b/src/Illuminate/Cookie/Middleware/EncryptCookies.php @@ -80,7 +80,11 @@ class EncryptCookies } try { - $request->cookies->set($key, $this->decryptCookie($key, $cookie)); + $value = $this->decryptCookie($key, $cookie); + + $request->cookies->set( + $key, strpos($value, sha1($key).'|') !== 0 ? null : substr($value, 41) + ); } catch (DecryptException $e) { $request->cookies->set($key, null); } @@ -136,7 +140,11 @@ class EncryptCookies } $response->headers->setCookie($this->duplicate( - $cookie, $this->encrypter->encrypt($cookie->getValue(), static::serialized($cookie->getName())) + $cookie, + $this->encrypter->encrypt( + sha1($cookie->getName()).'|'.$cookie->getValue(), + static::serialized($cookie->getName()) + ) )); }
improve cookie encryption (#<I>)
laravel_framework
train
956d232e5cae0184c9f4f024fba25cc2de0d01e6
diff --git a/test/assert-called-test.js b/test/assert-called-test.js index <HASH>..<HASH> 100644 --- a/test/assert-called-test.js +++ b/test/assert-called-test.js @@ -6,7 +6,8 @@ process.nextTick(cb(function called() {})); process.on('uncaughtException', function (ex) { assert(ex instanceof assert.AssertionError); - assert.equal(ex.message, '1 callback not called:\n notCalled'); + assert.equal(ex.message.indexOf('1 callback not called:\n notCalled'), 0); + assert.notEqual(ex.message.indexOf('assert-called-test.js:4:1'), -1); process.exit(0); });
[test] Add tests for stack trace
mmalecki_assert-called
train
462329197fcccb20f82bf082f0f968f15e189ed5
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -77,6 +77,12 @@ const micromatch = (list, patterns, options) => { }; /** + * Backwards compatibility + */ + +micromatch.match = micromatch; + +/** * Returns a matcher function from the given glob `pattern` and `options`. * The returned function takes a string to match as its only argument and returns * true if the string is a match.
re-add match method. I want to avoid any regressions. I meant to do this before publishing.
micromatch_micromatch
train
d7c86041b31d73282a4726af0fd1fa8e6e491f6f
diff --git a/test/attributes.js b/test/attributes.js index <HASH>..<HASH> 100644 --- a/test/attributes.js +++ b/test/attributes.js @@ -130,8 +130,9 @@ describe("attributes", function () { assert(attributes.value[0].getValue() === "D"); }); - describe("and then with GPF inheritance", function () { - var attributes = gpf.attributes.get(MyNativeClass); + it("retrieves all attributes (on instance)", function () { + var myObject = new MyNativeClass(), + attributes = gpf.attributes.get(myObject); assert(Object.keys(attributes).length === 2); assert(attributes.$attributes.length === 3); assert(attributes.value.length === 1); @@ -175,3 +176,9 @@ describe("attributes", function () { }); }); + +if (config.features.es6class) { + + include("attributes.es6"); + +}
Fix test and include ES6-specific tests (#<I>)
ArnaudBuchholz_gpf-js
train
f750f54430e19960ed6f4dfbf62ff77658d3ae00
diff --git a/src/PeskyCMF/Scaffold/NormalTableScaffoldConfig.php b/src/PeskyCMF/Scaffold/NormalTableScaffoldConfig.php index <HASH>..<HASH> 100644 --- a/src/PeskyCMF/Scaffold/NormalTableScaffoldConfig.php +++ b/src/PeskyCMF/Scaffold/NormalTableScaffoldConfig.php @@ -83,7 +83,7 @@ abstract class NormalTableScaffoldConfig extends ScaffoldConfig { } } $dbColumns = static::getTable()->getTableStructure()->getColumns(); - $columnsToSelect = []; + $columnsToSelect = [static::getTable()->getTableStructure()->getPkColumnName()]; $virtualColumns = []; foreach (array_keys($dataGridConfig->getViewersLinkedToDbColumns(false)) as $originalColName) { list($colName, $keyName) = AbstractValueViewer::splitComplexViewerName($originalColName);
NormalTableScaffoldConfig->getRecordsForDataGrid() - primary key column is now always selected
swayok_PeskyCMF
train
c02498a52aada9091aa7b7cab10dd5a4313a26d7
diff --git a/src/main/java/me/prettyprint/cassandra/service/CassandraClientPoolImpl.java b/src/main/java/me/prettyprint/cassandra/service/CassandraClientPoolImpl.java index <HASH>..<HASH> 100644 --- a/src/main/java/me/prettyprint/cassandra/service/CassandraClientPoolImpl.java +++ b/src/main/java/me/prettyprint/cassandra/service/CassandraClientPoolImpl.java @@ -52,11 +52,11 @@ import org.slf4j.LoggerFactory; PoolExhaustedException, Exception { List<CassandraClientPoolByHost> clients = new ArrayList<CassandraClientPoolByHost>(pools.values()); while(!clients.isEmpty()) { - int rand = (int) (Math.random() * pools.size()); + int rand = (int) (Math.random() * clients.size()); try { return clients.get(rand).borrowClient(); } catch (Exception e) { - if (clients.size() > 1) { + if (clients.size() > 0) { log.warn("Unable to obtain previously existing client " + clients.get(rand) + " will try the next client", e); clients.remove(rand); } else {
fixed an off-by-one issue when only using one client
hector-client_hector
train
851026362af417277ac05927339fcf5b01184be9
diff --git a/server/standby_server.go b/server/standby_server.go index <HASH>..<HASH> 100644 --- a/server/standby_server.go +++ b/server/standby_server.go @@ -27,7 +27,7 @@ type StandbyServer struct { client *Client cluster []*machineMessage - syncInterval time.Duration + syncInterval float64 joinIndex uint64 removeNotify chan bool @@ -42,7 +42,7 @@ func NewStandbyServer(config StandbyServerConfig, client *Client) *StandbyServer return &StandbyServer{ Config: config, client: client, - syncInterval: time.Duration(int64(DefaultSyncInterval * float64(time.Second))), + syncInterval: DefaultSyncInterval, } } @@ -118,7 +118,7 @@ func (s *StandbyServer) SyncCluster(peers []string) error { } func (s *StandbyServer) SetSyncInterval(second float64) { - s.syncInterval = time.Duration(int64(second * float64(time.Second))) + s.syncInterval = second } func (s *StandbyServer) ClusterLeader() *machineMessage { @@ -146,7 +146,7 @@ func (s *StandbyServer) redirectRequests(w http.ResponseWriter, r *http.Request) func (s *StandbyServer) monitorCluster() { for { - timer := time.NewTimer(s.syncInterval) + timer := time.NewTimer(time.Duration(int64(s.syncInterval * float64(time.Second)))) defer timer.Stop() select { case <-s.closeChan:
chore(standby_server): let syncInterval be represented in seconds. This is done to keep consistency with other namings.
etcd-io_etcd
train
8fa368d78edbad162c5003a6214805110a27cc04
diff --git a/spdx/creationinfo.py b/spdx/creationinfo.py index <HASH>..<HASH> 100644 --- a/spdx/creationinfo.py +++ b/spdx/creationinfo.py @@ -13,7 +13,7 @@ # limitations under the License. import config from datetime import datetime -from utils import datetime_iso_format +import utils class Creator(object): @@ -32,6 +32,8 @@ class Creator(object): + + class Organization(Creator): """Organization entity. @@ -57,6 +59,9 @@ class Organization(Creator): else: return 'Organization: {0}'.format(self.name) + def __str__(self): + return self.to_value() + class Person(Creator): @@ -83,6 +88,9 @@ class Person(Creator): else: return 'Person: {0}'.format(self.name) + def __str__(self): + return self.to_value() + class Tool(Creator): """Tool entity. @@ -97,6 +105,10 @@ class Tool(Creator): """Tag/value representation of Tool entity.""" return 'Tool: {0}'.format(self.name) + def __str__(self): + return self.to_value() + + class CreationInfo(object): @@ -129,8 +141,9 @@ class CreationInfo(object): @property def created_iso_format(self): - return datetime_iso_format(self.created) + return utils.datetime_iso_format(self.created) + @property def has_comment(self): return self.comment is not None
Provides __str__ method to creator models. Changes CreationInfo.has_comment to property
spdx_tools-python
train
ec44b67e9f399801877e53c9dd5c1d9a35051bdf
diff --git a/logger/default.go b/logger/default.go index <HASH>..<HASH> 100644 --- a/logger/default.go +++ b/logger/default.go @@ -4,8 +4,10 @@ import ( "context" "fmt" "os" + "path" "runtime" "sort" + "strings" "sync" "time" @@ -53,6 +55,20 @@ func copyFields(src map[string]interface{}) map[string]interface{} { return dst } +var sourceControlSites = []string{"github.com"} + +func logCallerfilePath(filepath string) string { + for _, v := range sourceControlSites { + if strings.Contains(filepath, v) { + parts := strings.Split(filepath, v) + if len(parts) > 0 { + return path.Join(v, parts[1]) + } + } + } + return filepath +} + func (l *defaultLogger) Log(level Level, v ...interface{}) { // TODO decide does we need to write message if log level not used? if !l.opts.Level.Enabled(level) { @@ -66,7 +82,7 @@ func (l *defaultLogger) Log(level Level, v ...interface{}) { fields["level"] = level.String() if _, file, line, ok := runtime.Caller(l.opts.CallerSkipCount); ok { - fields["caller"] = fmt.Sprintf("%s:%d", file, line) + fields["caller"] = fmt.Sprintf("%s:%d", logCallerfilePath(file), line) } rec := dlog.Record{ @@ -107,7 +123,7 @@ func (l *defaultLogger) Logf(level Level, format string, v ...interface{}) { fields["level"] = level.String() if _, file, line, ok := runtime.Caller(l.opts.CallerSkipCount); ok { - fields["caller"] = fmt.Sprintf("%s:%d", file, line) + fields["caller"] = fmt.Sprintf("%s:%d", logCallerfilePath(file), line) } rec := dlog.Record{ @@ -151,7 +167,7 @@ func NewLogger(opts ...Option) Logger { Level: InfoLevel, Fields: make(map[string]interface{}), Out: os.Stderr, - CallerSkipCount: 1, + CallerSkipCount: 2, Context: context.Background(), }
Fixing log file path in logs (#<I>)
micro_go-micro
train
95e01db382bde4e4964bdf037b84d0290646cca8
diff --git a/lib/class/model.js b/lib/class/model.js index <HASH>..<HASH> 100644 --- a/lib/class/model.js +++ b/lib/class/model.js @@ -370,7 +370,7 @@ Model.setMethod(function initBehaviours() { * * @author Jelle De Loecker <jelle@develry.be> * @since 0.2.0 - * @version 0.2.0 + * @version 0.4.0 * * @param {Object} data Optional data * @@ -378,7 +378,7 @@ Model.setMethod(function initBehaviours() { */ Model.setMethod(function createDocument(data) { - var doc = new this.constructor.Document(data, {model: this}); + var doc = new this.constructor.Document(data, {model: this, singular: true}); return doc; });
Documents created using Model#createDocument are singular now
skerit_alchemy
train
60947a2ea35d8b706794c252bd7c0c0530f5e7bb
diff --git a/src/main/java/org/casbin/jcasbin/main/CoreEnforcer.java b/src/main/java/org/casbin/jcasbin/main/CoreEnforcer.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/casbin/jcasbin/main/CoreEnforcer.java +++ b/src/main/java/org/casbin/jcasbin/main/CoreEnforcer.java @@ -54,46 +54,31 @@ public class CoreEnforcer { * CoreEnforcer is the default constructor. */ public CoreEnforcer() { + this("", ""); } /** * CoreEnforcer initializes an enforcer with a model file and a policy file. */ public CoreEnforcer(String modelPath, String policyFile) { - this.modelPath = modelPath; - - adapter = new FileAdapter(policyFile); - - initialize(); - - if (!this.modelPath.equals("")) { - loadModel(); - loadPolicy(); - } + this(modelPath, new FileAdapter(policyFile)); } /** * CoreEnforcer initializes an enforcer with a database adapter. */ public CoreEnforcer(String modelPath, Adapter adapter) { - this.modelPath = modelPath; - - this.adapter = adapter; + this(newModel(modelPath, ""), adapter); - initialize(); - - if (!this.modelPath.equals("")) { - loadModel(); - loadPolicy(); - } + this.modelPath = modelPath; } /** * CoreEnforcer initializes an enforcer with a model and a database adapter. */ public CoreEnforcer(Model m, Adapter adapter) { - modelPath = ""; this.adapter = adapter; + this.watcher = null; model = m; model.printModel(); @@ -119,23 +104,35 @@ public class CoreEnforcer { /** * newModel creates a model. */ - private Model newModel() { - Model model = new Model(); + public static Model newModel() { + Model m = new Model(); - return model; + return m; } /** * newModel creates a model. */ - private Model newModel(String text) { - Model model = new Model(); + public static Model newModel(String text) { + Model m = new Model(); - model.loadModelFromText(text); + m.loadModelFromText(text); - return model; + return m; + } + + /** + * newModel creates a model. + */ + public static Model newModel(String modelPath, String unused) { + Model m = new Model(); + + m.loadModel(modelPath); + + return m; } + /** * loadModel reloads the model from the model CONF file. * Because the policy is attached to a model, so the policy is invalidated and needs to be reloaded by calling LoadPolicy().
Improve Enforcer's constructors.
casbin_jcasbin
train
c72c8022458112fbb517ec7941b8cb5aba6f7128
diff --git a/lib/ruote/receiver/base.rb b/lib/ruote/receiver/base.rb index <HASH>..<HASH> 100644 --- a/lib/ruote/receiver/base.rb +++ b/lib/ruote/receiver/base.rb @@ -137,6 +137,12 @@ module Ruote wfid = fields[:wfid] || @context.wfidgen.generate + fields = Rufus::Json.dup(fields) + variables = Rufus::Json.dup(variables) + root_stash = Rufus::Json.dup(root_stash) + # + # making sure symbols are turned to strings + @context.storage.put_msg( 'launch', 'wfid' => wfid, diff --git a/test/functional/ft_12_launchitem.rb b/test/functional/ft_12_launchitem.rb index <HASH>..<HASH> 100644 --- a/test/functional/ft_12_launchitem.rb +++ b/test/functional/ft_12_launchitem.rb @@ -5,6 +5,7 @@ # Tue Jun 23 10:55:16 JST 2009 # +#require 'rufus-json/automatic' require File.expand_path('../base', __FILE__) @@ -37,5 +38,19 @@ class FtLaunchitemTest < Test::Unit::TestCase {"a"=>0, "b"=>1, "params"=>{"ref"=>"alpha"}}, @dashboard.context.stash[:fields]) end + + # Warning: this test requires rufus-json to have a backend ready. + # + def test_launch_and_variables_with_symbol_keys + + pdef = Ruote.define do + echo '${f} / ${v:v}' + end + + wfid = @dashboard.launch(pdef, { :f => 'x' }, { :v => 'y' }) + wait_for(wfid) + + assert_equal 'x / y', @tracer.to_s + end end
stringify symbols in #launch fields/vars. Closes gh-<I>
jmettraux_ruote
train
639496fb031978c21760119e3961cb0d0ada2568
diff --git a/Eloquent/Model.php b/Eloquent/Model.php index <HASH>..<HASH> 100755 --- a/Eloquent/Model.php +++ b/Eloquent/Model.php @@ -745,7 +745,9 @@ abstract class Model implements ArrayAccess, ArrayableInterface, JsonableInterfa if ($this->softDelete) { - $query->update(array(static::DELETED_AT => $this->freshTimestamp())); + $this->{static::DELETED_AT} = $time = $this->freshTimestamp(); + + $query->update(array(static::DELETED_AT => $time)); } else {
Set deleted_at on model when soft deleting.
illuminate_database
train
03ab9a19f8d8aaa2138edd3d4e6663d5ed0f1c56
diff --git a/lib/markdiff/differ.rb b/lib/markdiff/differ.rb index <HASH>..<HASH> 100644 --- a/lib/markdiff/differ.rb +++ b/lib/markdiff/differ.rb @@ -183,8 +183,11 @@ module Markdiff # @param [Nokogiri::XML::Node] node def mark_li_as_changed(node) until node.parent.nil? || node.parent.fragment? - if node.name == "li" && node["class"].nil? - node["class"] = "changed" + if node.name == "li" + classes = node["class"].to_s.split(/\s/) + unless classes.include?("added") || classes.include?("changed") || classes.include?("removed") + node["class"] = (classes + ["changed"]).join(" ") + end end node = node.parent end diff --git a/lib/markdiff/operations/add_child_operation.rb b/lib/markdiff/operations/add_child_operation.rb index <HASH>..<HASH> 100644 --- a/lib/markdiff/operations/add_child_operation.rb +++ b/lib/markdiff/operations/add_child_operation.rb @@ -6,7 +6,7 @@ module Markdiff # @return [String] def inserted_node if @inserted_node.name == "li" - @inserted_node["class"] = "added" + @inserted_node["class"] = (@inserted_node["class"].to_s.split(/\s/) + ["added"]).join(" ") @inserted_node.inner_html = "<ins>#{@inserted_node.inner_html}</ins>" @inserted_node else diff --git a/lib/markdiff/operations/add_previous_sibling_operation.rb b/lib/markdiff/operations/add_previous_sibling_operation.rb index <HASH>..<HASH> 100644 --- a/lib/markdiff/operations/add_previous_sibling_operation.rb +++ b/lib/markdiff/operations/add_previous_sibling_operation.rb @@ -7,7 +7,7 @@ module Markdiff def inserted_node if @inserted_node.name == "li" node = @inserted_node.clone - node["class"] = "added" + node["class"] = (node["class"].to_s.split(/\s/) + ["added"]).join(" ") node.inner_html = "<ins>#{@inserted_node.inner_html}</ins>" node else diff --git a/spec/markdiff/differ_spec.rb b/spec/markdiff/differ_spec.rb index <HASH>..<HASH> 100644 --- a/spec/markdiff/differ_spec.rb +++ b/spec/markdiff/differ_spec.rb @@ -290,5 +290,19 @@ RSpec.describe Markdiff::Differ do expect(subject.to_html.gsub("\n", "")).to eq "<ins><h2>added</h2></ins><ins></ins><h2>a</h2><p>b</p>" end end + + context "with classed li node" do + let(:after_string) do + '<ul><li class="a">b</li></ul>' + end + + let(:before_string) do + '<ul></ul>' + end + + it "returns expected patched node" do + expect(subject.to_html.gsub("\n", "")).to eq '<div class="changed"><ul><li class="a added"><ins>b</ins></li></ul></div>' + end + end end end
Preserve existing classes when adding a new class (e.g. .added)
r7kamura_markdiff
train
bffa84c620eaaa676ebc3f14f95a0c455604adb8
diff --git a/modules/saml2/lib/Auth/Source/SP.php b/modules/saml2/lib/Auth/Source/SP.php index <HASH>..<HASH> 100644 --- a/modules/saml2/lib/Auth/Source/SP.php +++ b/modules/saml2/lib/Auth/Source/SP.php @@ -301,6 +301,7 @@ class sspmod_saml2_Auth_Source_SP extends SimpleSAML_Auth_Source { public function onLogout($idpEntityId) { assert('is_string($idpEntityId)'); + /* Call the logout callback we registered in onProcessingCompleted(). */ $this->callLogoutCallback($idpEntityId); } @@ -324,7 +325,9 @@ class sspmod_saml2_Auth_Source_SP extends SimpleSAML_Auth_Source { throw new Exception('Could not find authentication source with id ' . $sourceId); } + /* Register a callback that we can call if we receive a logout request from the IdP. */ $source->addLogoutCallback($idp, $state); + $state['Attributes'] = $authProcState['Attributes']; SimpleSAML_Auth_Source::completeAuth($state); }
saml2_Auth_Source_SP: Add two comments to logout code.
simplesamlphp_saml2
train
7f35ba96e6ea85b4ca13c8a6412c3bf52cefbff5
diff --git a/pysat/instruments/methods/sw.py b/pysat/instruments/methods/sw.py index <HASH>..<HASH> 100644 --- a/pysat/instruments/methods/sw.py +++ b/pysat/instruments/methods/sw.py @@ -193,7 +193,7 @@ def combine_kp(standard_inst=None, recent_inst=None, forecast_inst=None, notes += "{:})".format(itime.date()) # Determine if the beginning or end of the time series needs to be padded - + freq = None if len(kp_times) < 2 else pysat.utils.time.calc_freq(kp_times) date_range = pds.date_range(start=start, end=stop-pds.DateOffset(days=1), freq=freq) @@ -382,7 +382,16 @@ def combine_f107(standard_inst, forecast_inst, start=None, stop=None): notes += "{:})".format(itime.date()) # Determine if the beginning or end of the time series needs to be padded - freq = pysat.utils.time.calc_freq(f107_times) + if len(f107_times) < 2: + freq = None + else: + freq = pysat.utils.time.calc_freq(f107_times) + date_range = pds.date_range(start=start, end=stop-pds.DateOffset(days=1), + freq=freq) + + if len(f107_times) == 0: + f107_times = date_range + date_range = pds.date_range(start=start, end=stop-pds.DateOffset(days=1), freq=freq) @@ -536,7 +545,7 @@ def convert_ap_to_kp(ap_data, fill_val=-1, ap_name='ap'): Metadata object containing information about transformed data """ - + # Ap are keys, Kp returned as double (N- = N.6667, N+=N.3333333) one_third = 1.0 / 3.0 two_third = 2.0 / 3.0 @@ -555,7 +564,7 @@ def convert_ap_to_kp(ap_data, fill_val=-1, ap_name='ap'): """ if not np.isfinite(ap_in): return fill_val - + i = 0 while ap_keys[i] <= ap_in: i += 1
BUG: fix for #<I>
rstoneback_pysat
train
d165bc34881944f35278b2961467908bd36fc8a8
diff --git a/app/routines/openstax/accounts/create_group.rb b/app/routines/openstax/accounts/create_group.rb index <HASH>..<HASH> 100644 --- a/app/routines/openstax/accounts/create_group.rb +++ b/app/routines/openstax/accounts/create_group.rb @@ -9,11 +9,11 @@ module OpenStax def exec(owner:, name: nil, is_public: false) group = OpenStax::Accounts::Group.new(name: name, is_public: is_public) group.requestor = owner - member = group.add_member(owner) - owner = group.add_owner(owner) + group.add_member(owner) + group.add_owner(owner) group.openstax_uid = -SecureRandom.hex(4).to_i(16)/2 \ - if OpenStax::Accounts.configuration.enable_stubbing + if OpenStax::Accounts.configuration.enable_stubbing? || !owner.has_authenticated? group.save
Assign a fake openstax_uid to groups where the requestor has not authenticated so that they can actually be saved
openstax_accounts-rails
train
4dbed10befe4bfb386f24577ee925e744540e207
diff --git a/json-path/src/test/java/com/jayway/jsonpath/FilterTest.java b/json-path/src/test/java/com/jayway/jsonpath/FilterTest.java index <HASH>..<HASH> 100644 --- a/json-path/src/test/java/com/jayway/jsonpath/FilterTest.java +++ b/json-path/src/test/java/com/jayway/jsonpath/FilterTest.java @@ -670,4 +670,16 @@ public class FilterTest extends BaseTest { assertThat(filter).isEqualTo(parsed); } + + @Test + public void testFilterWithOrShortCircuit1() throws Exception { + Object json = Configuration.defaultConfiguration().jsonProvider().parse( "{\"firstname\":\"Bob\",\"surname\":\"Smith\",\"age\":30}"); + assertThat(Filter.parse("[?((@.firstname == 'Bob' || @.firstname == 'Jane') && @.surname == 'Doe')]").apply(createPredicateContext(json))).isFalse(); + } + + @Test + public void testFilterWithOrShortCircuit2() throws Exception { + Object json = Configuration.defaultConfiguration().jsonProvider().parse("{\"firstname\":\"Bob\",\"surname\":\"Smith\",\"age\":30}"); + assertThat(Filter.parse("[?((@.firstname == 'Bob' || @.firstname == 'Jane') && @.surname == 'Smith')]").apply(createPredicateContext(json))).isTrue(); + } }
Added tests for issue #<I>
json-path_JsonPath
train
176e1ba943a528a9e8d93c800adcb63c4bd6feae
diff --git a/lib/orbacle/data_flow_graph/builder.rb b/lib/orbacle/data_flow_graph/builder.rb index <HASH>..<HASH> 100644 --- a/lib/orbacle/data_flow_graph/builder.rb +++ b/lib/orbacle/data_flow_graph/builder.rb @@ -532,7 +532,6 @@ module Orbacle return handle_custom_attr_writer_send(context, arg_exprs, ast) if obj_expr.nil? && message_name == "attr_writer" return handle_custom_attr_accessor_send(context, arg_exprs, ast) if obj_expr.nil? && message_name == "attr_accessor" return handle_custom_class_send(context, obj_node) if message_name == "class" - return handle_custom_freeze_send(context, obj_node) if message_name == "freeze" call_obj_node = add_vertex(Node.new(:call_obj, {})) @graph.add_edge(obj_node, call_obj_node) @@ -552,13 +551,6 @@ module Orbacle return Result.new(extract_class_node, context) end - def handle_custom_freeze_send(context, obj_node) - freeze_node = @graph.add_vertex(Node.new(:freeze, {})) - @graph.add_edge(obj_node, freeze_node) - - return Result.new(freeze_node, context) - end - def handle_changing_visibility(context, new_visibility, arg_exprs) if context.analyzed_klass.klass && arg_exprs.empty? final_node = add_vertex(Node.new(:const, { const_ref: ConstRef.from_full_name(context.analyzed_klass.klass.full_name, Nesting.empty) })) diff --git a/lib/orbacle/data_flow_graph/define_builtins.rb b/lib/orbacle/data_flow_graph/define_builtins.rb index <HASH>..<HASH> 100644 --- a/lib/orbacle/data_flow_graph/define_builtins.rb +++ b/lib/orbacle/data_flow_graph/define_builtins.rb @@ -23,6 +23,7 @@ module Orbacle define_object_opeq template_just_str(Scope.new(["Object"], false), "to_s", 0) + template_self(Scope.new(["Object"], false), "freeze", 0) end def add_integer_klass @@ -126,6 +127,12 @@ module Orbacle @graph.add_edge(str_node, @graph.get_metod_nodes(metod.id).result) end + def template_self(scope, name, args) + metod = template_args(scope, name, args) + metod_nodes = @graph.get_metod_nodes(metod.id) + @graph.add_edge(metod_nodes.caller, metod_nodes.result) + end + def template_args(scope, name, args) arg_names = build_arg_names(args) arg_nodes = build_arg_nodes(arg_names) diff --git a/lib/orbacle/typing_service.rb b/lib/orbacle/typing_service.rb index <HASH>..<HASH> 100644 --- a/lib/orbacle/typing_service.rb +++ b/lib/orbacle/typing_service.rb @@ -148,7 +148,6 @@ module Orbacle when :clivar_definition then handle_group(node, sources) when :ivar then handle_pass1(node, sources) when :extract_class then handle_extract_class(node, sources) - when :freeze then handle_group(node, sources) # below not really tested diff --git a/spec/data_flow_graph_spec.rb b/spec/data_flow_graph_spec.rb index <HASH>..<HASH> 100644 --- a/spec/data_flow_graph_spec.rb +++ b/spec/data_flow_graph_spec.rb @@ -2442,21 +2442,6 @@ module Orbacle end end - describe "custom - freeze send" do - specify "simple example" do - file = <<-END - [1,2].freeze - END - - result = generate_cfg(file) - - expect(result.graph).to include_edge( - node(:array), - node(:freeze)) - expect(result.final_node).to eq(node(:freeze)) - end - end - def generate_cfg(snippet) worklist = Worklist.new graph = DataFlowGraph::Graph.new
Freeze is no longer a hack in the data flow graph
swistak35_orbacle
train
c6b795baeaaf9fb44316873eb4129f83975932b7
diff --git a/src/ngdoc.js b/src/ngdoc.js index <HASH>..<HASH> 100644 --- a/src/ngdoc.js +++ b/src/ngdoc.js @@ -76,9 +76,9 @@ Doc.prototype = { Doc.METADATA_IGNORE.forEach(function(ignore){ keywords[ignore] = true; }); function extractWords(text) { - var tokens = text.toLowerCase().split(/[\.\s,`'"#]+/mg); + var tokens = text.toLowerCase().split(/[\.\s:,`'"#]+/mg); tokens.forEach(function(key){ - var match = key.match(/^((ng:|[\$_a-z])[\w\-_]+)/); + var match = key.match(/^((ng:|[\$_\w])[\w\-_]+)/); if (match){ key = match[1]; if (!keywords[key]) {
updateSearch function doesn't search for a controller's name. When a controller is documented in this format: /** * @ngdoc controller * @name app.appname.controller:ControllerName * * * @description * ControllerName controller */ 'search the docs' in the generated UI doesn't work (it doesn't find anything) when the input is 'ControllerName'
nikhilmodak_gulp-ngdocs
train
e43ef364cb99585d3285f51f7ab308f8a77fe09e
diff --git a/api_test.go b/api_test.go index <HASH>..<HASH> 100644 --- a/api_test.go +++ b/api_test.go @@ -5,7 +5,6 @@ import ( "bufio" "bytes" "encoding/json" - "github.com/dotcloud/docker/auth" "github.com/dotcloud/docker/utils" "io" "net" @@ -41,44 +40,6 @@ func TestGetBoolParam(t *testing.T) { } } -func TestPostAuth(t *testing.T) { - runtime, err := newTestRuntime() - if err != nil { - t.Fatal(err) - } - defer nuke(runtime) - - srv := &Server{ - runtime: runtime, - } - - r := httptest.NewRecorder() - - authConfig := &auth.AuthConfig{ - Username: "utest", - Password: "utest", - Email: "utest@yopmail.com", - } - - authConfigJSON, err := json.Marshal(authConfig) - if err != nil { - t.Fatal(err) - } - - req, err := http.NewRequest("POST", "/auth", bytes.NewReader(authConfigJSON)) - if err != nil { - t.Fatal(err) - } - - if err := postAuth(srv, APIVERSION, r, req, nil); err != nil { - t.Fatal(err) - } - - if r.Code != http.StatusOK && r.Code != 0 { - t.Fatalf("%d OK or 0 expected, received %d\n", http.StatusOK, r.Code) - } -} - func TestGetVersion(t *testing.T) { runtime, err := newTestRuntime() if err != nil { @@ -286,37 +247,6 @@ func TestGetImagesViz(t *testing.T) { } } -func TestGetImagesSearch(t *testing.T) { - runtime, err := newTestRuntime() - if err != nil { - t.Fatal(err) - } - defer nuke(runtime) - - srv := &Server{ - runtime: runtime, - } - - r := httptest.NewRecorder() - - req, err := http.NewRequest("GET", "/images/search?term=redis", nil) - if err != nil { - t.Fatal(err) - } - - if err := getImagesSearch(srv, APIVERSION, r, req, nil); err != nil { - t.Fatal(err) - } - - results := []APISearch{} - if err := json.Unmarshal(r.Body.Bytes(), &results); err != nil { - t.Fatal(err) - } - if len(results) < 2 { - t.Errorf("Expected at least 2 lines, %d found", len(results)) - } -} - func TestGetImagesHistory(t *testing.T) { runtime, err := newTestRuntime() if err != nil { diff --git a/buildfile_test.go b/buildfile_test.go index <HASH>..<HASH> 100644 --- a/buildfile_test.go +++ b/buildfile_test.go @@ -84,7 +84,7 @@ run [ "$FOO" = "BAR" ] { ` -from docker-ut +from %s ENTRYPOINT /bin/echo CMD Hello world `, diff --git a/runtime_test.go b/runtime_test.go index <HASH>..<HASH> 100644 --- a/runtime_test.go +++ b/runtime_test.go @@ -17,11 +17,12 @@ import ( ) const ( - unitTestImageName = "docker-unit-tests" - unitTestImageID = "e9aa60c60128cad1" - unitTestStoreBase = "/var/lib/docker/unit-tests" - testDaemonAddr = "127.0.0.1:4270" - testDaemonProto = "tcp" + unitTestImageName = "docker-unit-tests" + unitTestImageID = "e9aa60c60128cad1" + unitTestNetworkBridge = "testdockbr0" + unitTestStoreBase = "/var/lib/docker/unit-tests" + testDaemonAddr = "127.0.0.1:4270" + testDaemonProto = "tcp" ) var globalRuntime *Runtime @@ -76,7 +77,7 @@ func init() { log.Fatal("docker tests need to be run as root") } - NetworkBridgeIface = "testdockbr0" + NetworkBridgeIface = unitTestNetworkBridge // Make it our Store root runtime, err := NewRuntimeFromDirectory(unitTestStoreBase, false) @@ -92,9 +93,12 @@ func init() { pullingPool: make(map[string]struct{}), pushingPool: make(map[string]struct{}), } - // Retrieve the Image - if err := srv.ImagePull(unitTestImageName, "", os.Stdout, utils.NewStreamFormatter(false), nil); err != nil { - panic(err) + // If the unit test is not found, try to download it. 
+ if img, err := runtime.repositories.LookupImage(unitTestImageName); err != nil || img.ID != unitTestImageID { + // Retrieve the Image + if err := srv.ImagePull(unitTestImageName, "", os.Stdout, utils.NewStreamFormatter(false), nil); err != nil { + panic(err) + } } // Spawn a Daemon go func() {
Remove all network dependencies from the test suite
containers_storage
train
5b36422279cabaf8762a5e5ac552290aa06b379a
diff --git a/README.md b/README.md index <HASH>..<HASH> 100644 --- a/README.md +++ b/README.md @@ -1439,6 +1439,7 @@ Client Library that allow third party to access AcademyHQ APIs. ); </pre> + ## Using Crms Repository ### 1> Create Organisation(Client) To Academyhq @@ -1450,18 +1451,29 @@ Client Library that allow third party to access AcademyHQ APIs. ); </pre> -## Using Course Api Repository - -### 1> Fetch All Public Courses +### 2> Fetch All Public Courses <pre> /*@return all public courses details std object */ - $course_details = $course_api_repository->get_courses( + $course_details = $crms_repository->get_courses( new \AcademyHQ\API\ValueObjects\StringVO("search parameter"), new \AcademyHQ\API\ValueObjects\Integer("page_number") ); </pre> -### 2> Fetch All Organisation Licenses +### 3> Create Course +<pre> + /*@return created standard course details std object */ + $client_details = $crms_repository->create_course( + new \AcademyHQ\API\ValueObjects\StringVO("course name"), + new \AcademyHQ\API\ValueObjects\StringVO("course description"), + new \AcademyHQ\API\ValueObjects\StringVO("image_url") + ); +</pre> + + +## Using Course Api Repository + +### 1> Fetch All Organisation Licenses <pre> /*@return all licenses with course details std object */ $license_details = $course_api_repository->get_licenses( @@ -1470,6 +1482,7 @@ Client Library that allow third party to access AcademyHQ APIs. ); </pre> + ## Using Member Api Repository ### 1> Fetch All Organisation Licenses diff --git a/src/Repository/ConnectedRMS/CourseApiRepository.php b/src/Repository/ConnectedRMS/CourseApiRepository.php index <HASH>..<HASH> 100644 --- a/src/Repository/ConnectedRMS/CourseApiRepository.php +++ b/src/Repository/ConnectedRMS/CourseApiRepository.php @@ -16,23 +16,6 @@ class CourseApiRepository { $this->credentials = $credentials; } - public function get_courses( - VO\StringVO $search, - VO\Integer $current_page - ){ - $request = new Request( - new GuzzleClient, - $this->credentials, - VO\HTTP\Url::fromNative($this->base_url.'/courses/get/'.$search->__toString().'/'.$current_page->__toInteger()), - new VO\HTTP\Method('GET') - ); - - $response = $request->send(); - $data = $response->get_data(); - - return $data; - } - public function get_licenses( VO\StringVO $search, VO\Integer $current_page diff --git a/src/Repository/ConnectedRMS/CrmsRepository.php b/src/Repository/ConnectedRMS/CrmsRepository.php index <HASH>..<HASH> 100644 --- a/src/Repository/ConnectedRMS/CrmsRepository.php +++ b/src/Repository/ConnectedRMS/CrmsRepository.php @@ -9,7 +9,7 @@ use AcademyHQ\API\Common\Credentials; class CrmsRepository { - private $base_url = 'https://api.academyhq.com/api/v2'; + private $base_url = 'https://api.academyhq.com/api/v2/crms'; public function __construct(Credentials $credentials) { @@ -23,7 +23,7 @@ class CrmsRepository { $request = new Request( new GuzzleClient, $this->credentials, - VO\HTTP\Url::fromNative($this->base_url.'/crms/create/client'), + VO\HTTP\Url::fromNative($this->base_url.'/create/client'), new VO\HTTP\Method('POST') ); @@ -39,6 +39,43 @@ class CrmsRepository { return $data; } - + public function get_courses( + VO\StringVO $search, + VO\Integer $current_page + ){ + $request = new Request( + new GuzzleClient, + $this->credentials, + VO\HTTP\Url::fromNative($this->base_url.'/courses/get/'.$search->__toString().'/'.$current_page->__toInteger()), + new VO\HTTP\Method('GET') + ); + + $response = $request->send(); + $data = $response->get_data(); + + return $data; + } + + public function create_course( 
+ VO\StringVO $name, + VO\StringVO $description, + VO\StringVO $image_url = null + ){ + $request = new Request( + new GuzzleClient, + $this->credentials, + VO\HTTP\Url::fromNative($this->base_url.'/create/course'), + new VO\HTTP\Method('GET') + ); + $request_parameters = array( + 'name' => $name->__toString(), + 'domain' => $domain->__toString() + ); + + $data = $response->get_data(); + + return $data; + } + } \ No newline at end of file
Add a new method to create a course, move get_courses to the CRMS repository, and update the README
OliveMedia_academyhq-api-client
train
11d33a4f9d64e3a55cbdbab19183b868510cf1ce
diff --git a/dciclient/v1/api/file.py b/dciclient/v1/api/file.py index <HASH>..<HASH> 100644 --- a/dciclient/v1/api/file.py +++ b/dciclient/v1/api/file.py @@ -15,6 +15,7 @@ # under the License. from dciclient.v1.api import base +from dciclient.v1 import utils RESOURCE = 'files' @@ -24,9 +25,14 @@ TABLE_HEADERS = ['id', 'name', 'content', 'mime', 'md5', 'jobstate_id', def create(context, name, content, mime, jobstate_id=None, md5=None, job_id=None): - return base.create(context, RESOURCE, name=name, content=content, - mime=mime, md5=md5, jobstate_id=jobstate_id, - job_id=job_id) + headers = {'DCI-NAME': name, + 'DCI-MIME': mime, + 'DCI-JOBSTATE-ID': jobstate_id, + 'DCI-MD5': md5, + 'DCI-JOB-ID': job_id} + headers = utils.sanitize_kwargs(**headers) + uri = '%s/%s' % (context.dci_cs_api, RESOURCE) + return context.session.post(uri, headers=headers, data=content) def get(context, id, where=None, embed=None):
Update the file upload. This patch uses the new way to upload a file: the metadata is now sent through the HTTP headers. Change-Id: I<I>c6d<I>e0f6e<I>b1d1f9b<I>fbc8d<I>
redhat-cip_python-dciclient
train
1a20fcfce6b802041e93de9274dbf136976c7286
diff --git a/benchmarks/index.js b/benchmarks/index.js index <HASH>..<HASH> 100644 --- a/benchmarks/index.js +++ b/benchmarks/index.js @@ -29,7 +29,7 @@ const fastestTests = [ () => renderDeepTree('styletron', styletron), () => renderWideTree('styletron', styletron), () => renderDeepTree('aphrodite', aphrodite), - () => renderWideTree('aphrodite', aphrodite), + () => renderWideTree('aphrodite', aphrodite) ]; /** @@ -47,7 +47,7 @@ const restTests = [ () => renderDeepTree('reactxp', xp), () => renderWideTree('reactxp', xp), () => renderDeepTree('radium', radium), - () => renderWideTree('radium', radium), + () => renderWideTree('radium', radium) ]; const tests = [...coreTests]; diff --git a/docs/storybook/2-apis/StyleSheet/StyleSheetScreen.js b/docs/storybook/2-apis/StyleSheet/StyleSheetScreen.js index <HASH>..<HASH> 100644 --- a/docs/storybook/2-apis/StyleSheet/StyleSheetScreen.js +++ b/docs/storybook/2-apis/StyleSheet/StyleSheetScreen.js @@ -22,6 +22,22 @@ const StyleSheetScreen = () => ( <Section title="Methods"> <DocItem + description={ + <AppText> + Combines two styles such that <Code>style2</Code> will override any styles in{' '} + <Code>style1</Code>. If either style is falsy, the other one is returned without + allocating an array, saving allocations and maintaining reference equality for{' '} + <Code>PureComponent</Code> checks. + </AppText> + } + example={{ + code: 'StyleSheet.compose(style1, style2);' + }} + name="compose" + typeInfo="(style1, style2) => style" + /> + + <DocItem description="Each key of the object passed to `create` must define a style object. The returned object replaces style objects with IDs" example={{ code: `const styles = StyleSheet.create({ diff --git a/src/apis/StyleSheet/__tests__/index-test.js b/src/apis/StyleSheet/__tests__/index-test.js index <HASH>..<HASH> 100644 --- a/src/apis/StyleSheet/__tests__/index-test.js +++ b/src/apis/StyleSheet/__tests__/index-test.js @@ -22,6 +22,18 @@ describe('apis/StyleSheet', () => { expect(isPlainObject(StyleSheet.absoluteFillObject) === true).toBeTruthy(); }); + describe('compose', () => { + test('returns array when neither style is falsey', () => { + expect(StyleSheet.compose(1, 2)).toEqual([1, 2]); + }); + test('returns style1 when style2 is falsey', () => { + expect(StyleSheet.compose(1, null)).toBe(1); + }); + test('returns style2 when style1 is falsey', () => { + expect(StyleSheet.compose(null, 2)).toBe(2); + }); + }); + describe('create', () => { test('replaces styles with numbers', () => { const style = StyleSheet.create({ root: { position: 'absolute' } }); diff --git a/src/apis/StyleSheet/index.js b/src/apis/StyleSheet/index.js index <HASH>..<HASH> 100644 --- a/src/apis/StyleSheet/index.js +++ b/src/apis/StyleSheet/index.js @@ -36,6 +36,13 @@ const absoluteFill = StyleRegistry.register(absoluteFillObject); const StyleSheet = { absoluteFill, absoluteFillObject, + compose(style1, style2) { + if (style1 && style2) { + return [style1, style2]; + } else { + return style1 || style2; + } + }, create(styles) { const result = {}; Object.keys(styles).forEach(key => {
[add] StyleSheet.compose. As per the recent addition to React Native.
necolas_react-native-web
train
326b410d9328de0b4e12a229efc6625b7e5b603f
diff --git a/sh.py b/sh.py index <HASH>..<HASH> 100644 --- a/sh.py +++ b/sh.py @@ -204,7 +204,14 @@ def glob(arg): class RunningCommand(object): def __init__(self, cmd, call_args, stdin, stdout, stderr): - self.log = logging.getLogger("command %r call_args %r" % (cmd, call_args)) + truncate = 20 + if len(cmd) > truncate: + logger_str = "command %r...(%d more) call_args %r" % \ + (cmd[:truncate], len(cmd) - truncate, call_args) + else: + logger_str = "command %r call_args %r" % (cmd, call_args) + + self.log = logging.getLogger(logger_str) self.call_args = call_args self.cmd = cmd self.ran = " ".join(cmd)
bugfix #<I> where too many command arguments created a huge logger
amoffat_sh
train
df0cd4b1ce5a8f4377285a8e36e1f3f85d6eb801
diff --git a/freemius/includes/entities/class-fs-plugin.php b/freemius/includes/entities/class-fs-plugin.php index <HASH>..<HASH> 100755 --- a/freemius/includes/entities/class-fs-plugin.php +++ b/freemius/includes/entities/class-fs-plugin.php @@ -40,6 +40,10 @@ * @var FS_Plugin_Info */ public $info; + /** + * @var bool + */ + public $is_premium; /** * @param stdClass|bool $plugin @@ -53,6 +57,7 @@ $this->slug = $plugin->slug; $this->title = $plugin->title; + $this->is_premium = false; if (isset($plugin->info) && is_object($plugin->info)) $this->info = new FS_Plugin_Info($plugin->info);
[plugin] Moved is_premium property to be part of the plugin members.
Freemius_wordpress-sdk
train
1c11362fbb6435cd0c12f72a55cc0f5a79dbb2ad
diff --git a/src/instrumentTest/java/com/couchbase/lite/AttachmentsTest.java b/src/instrumentTest/java/com/couchbase/lite/AttachmentsTest.java index <HASH>..<HASH> 100644 --- a/src/instrumentTest/java/com/couchbase/lite/AttachmentsTest.java +++ b/src/instrumentTest/java/com/couchbase/lite/AttachmentsTest.java @@ -19,7 +19,10 @@ package com.couchbase.lite; import com.couchbase.lite.internal.AttachmentInternal; import com.couchbase.lite.internal.RevisionInternal; +import com.couchbase.lite.storage.ContentValues; +import com.couchbase.lite.storage.SQLException; import com.couchbase.lite.support.Base64; +import com.couchbase.lite.util.Log; import com.couchbase.lite.util.TextUtils; import junit.framework.Assert; @@ -64,6 +67,18 @@ public class AttachmentsTest extends LiteTestCase { database.insertAttachmentForSequenceWithNameAndType(new ByteArrayInputStream(attach1), rev1.getSequence(), testAttachmentName, "text/plain", rev1.getGeneration()); Assert.assertEquals(Status.CREATED, status.getCode()); + //We must set the no_attachments column for the rev to false, as we are using an internal + //private API call above (database.insertAttachmentForSequenceWithNameAndType) which does + //not set the no_attachments column on revs table + try { + ContentValues args = new ContentValues(); + args.put("no_attachments=", false); + database.getDatabase().update("revs", args, "sequence=?", new String[] {String.valueOf(rev1.getSequence())}); + } catch (SQLException e) { + Log.e(Database.TAG, "Error setting rev1 no_attachments to false", e); + throw new CouchbaseLiteException(Status.INTERNAL_SERVER_ERROR); + } + Attachment attachment = database.getAttachmentForSequence(rev1.getSequence(), testAttachmentName); Assert.assertEquals("text/plain", attachment.getContentType()); byte[] data = IOUtils.toByteArray(attachment.getContent());
Issue #<I>: added raw SQL to update the no_attachments column on the revs table after calling the private method insertAttachment
couchbase_couchbase-lite-android
train
bdbc79dfd73f3d61a2f83916a2027180637997ad
diff --git a/languagetool-language-modules/de/src/main/java/org/languagetool/rules/de/GermanSpellerRule.java b/languagetool-language-modules/de/src/main/java/org/languagetool/rules/de/GermanSpellerRule.java index <HASH>..<HASH> 100644 --- a/languagetool-language-modules/de/src/main/java/org/languagetool/rules/de/GermanSpellerRule.java +++ b/languagetool-language-modules/de/src/main/java/org/languagetool/rules/de/GermanSpellerRule.java @@ -1294,6 +1294,9 @@ public class GermanSpellerRule extends CompoundAwareHunspellRule { if (word.startsWith("Standart") && !word.equals("Standarte") && !word.equals("Standarten") && !word.startsWith("Standartenträger") && !word.startsWith("Standartenführer")) { return true; } + if (word.endsWith("schafte") && word.matches("[A-ZÖÄÜ][a-zöäß-]+schafte")) { + return true; + } return super.isMisspelled(word); } diff --git a/languagetool-language-modules/de/src/test/java/org/languagetool/rules/de/GermanSpellerRuleTest.java b/languagetool-language-modules/de/src/test/java/org/languagetool/rules/de/GermanSpellerRuleTest.java index <HASH>..<HASH> 100644 --- a/languagetool-language-modules/de/src/test/java/org/languagetool/rules/de/GermanSpellerRuleTest.java +++ b/languagetool-language-modules/de/src/test/java/org/languagetool/rules/de/GermanSpellerRuleTest.java @@ -802,6 +802,13 @@ public class GermanSpellerRuleTest { assertTrue(rule.isMisspelled("Spielzugcomputern")); assertFalse(rule.isMisspelled("Spielzug")); assertFalse(rule.isMisspelled("Spielzugs")); + + assertTrue(rule.isMisspelled("Studentenschafte")); + assertTrue(rule.isMisspelled("Steuereigenschafte")); + assertFalse(rule.isMisspelled("Studentenschaften")); + assertFalse(rule.isMisspelled("Steuereigenschaften")); + assertFalse(rule.isMisspelled("Eigenschaften")); + assertFalse(rule.isMisspelled("wirtschafte")); } @Test
[de] detect misspellings in "...schafte"
languagetool-org_languagetool
train
4954f2c086cb948c878cea9bfa98a0f5c6eaa6ad
diff --git a/cwltool/sandboxjs.py b/cwltool/sandboxjs.py index <HASH>..<HASH> 100644 --- a/cwltool/sandboxjs.py +++ b/cwltool/sandboxjs.py @@ -6,7 +6,6 @@ import os import select import subprocess import threading -from multiprocessing import Queue import sys from io import BytesIO from typing import Any, Dict, List, Mapping, Text, Tuple, Union @@ -15,6 +14,11 @@ from pkg_resources import resource_stream import six +try: + import queue #python3 +except ImportError: + import Queue as queue + class JavascriptException(Exception): pass @@ -155,9 +159,9 @@ def execjs(js, jslib, timeout=None, debug=False): # type: (Union[Mapping, Text] READ_BYTES_SIZE = 512 # creating queue for reading from a thread to queue - input_queue = Queue() - output_queue = Queue() - error_queue = Queue() + input_queue = queue.Queue() + output_queue = queue.Queue() + error_queue = queue.Queue() # To tell threads that output has ended and threads can safely exit no_more_output = threading.Lock() diff --git a/tests/test_fetch.py b/tests/test_fetch.py index <HASH>..<HASH> 100644 --- a/tests/test_fetch.py +++ b/tests/test_fetch.py @@ -1,6 +1,5 @@ from __future__ import absolute_import import unittest -import urlparse from six.moves import urllib
removed unused import urlparse from test_fetch
common-workflow-language_cwltool
train
bd5a493b6d13a008e9915b2a1b48c586b88201f5
diff --git a/src/SupervisorClient/SupervisorClient.php b/src/SupervisorClient/SupervisorClient.php index <HASH>..<HASH> 100644 --- a/src/SupervisorClient/SupervisorClient.php +++ b/src/SupervisorClient/SupervisorClient.php @@ -207,16 +207,12 @@ class SupervisorClient function addProgramToGroup($group, $program, $options=[]) { - return $this->_rpcCall('twiddler', 'addProgramToGroup', - array($group, $program, $options) - ); + return $this->_rpcCall('twiddler', 'addProgramToGroup', array($group, $program, $options)); } - function removeProcessFromGroup($group, $process_name) + function removeProcessFromGroup($group, $processName) { - return $this->_rpcCall('twiddler', 'removeProcessFromGroup', - array($group, $process_name) - ); + return $this->_rpcCall('twiddler', 'removeProcessFromGroup', array($group, $processName)); } function logMessage($msg, $level = "INFO") @@ -296,5 +292,3 @@ class SupervisorClient return $response; } } - -?>
Make better use of available horizontal space. Use lowerCamelCase for method arguments.
mondalaci_supervisord-php-client
train
70e190b5f3864992a1ec09ceca4bfa757ebc0326
diff --git a/lib/octokit/client/organizations.rb b/lib/octokit/client/organizations.rb index <HASH>..<HASH> 100644 --- a/lib/octokit/client/organizations.rb +++ b/lib/octokit/client/organizations.rb @@ -641,14 +641,14 @@ module Octokit # Get an organization membership # - # @param org [String] Organization GitHub login. + # @param org [Integer, String] The GitHub Organization. # @option options [String] :user The login of the user, otherwise authenticated user. # @return [Sawyer::Resource] Hash representing the organization membership. # @see https://developer.github.com/v3/orgs/members/#get-your-organization-membership # @see https://developer.github.com/v3/orgs/members/#get-organization-membership def organization_membership(org, options = {}) if user = options.delete(:user) - get "orgs/#{org}/memberships/#{user}", options + get "#{Organization.path(org)}/memberships/#{user}", options else get "user/memberships/orgs/#{org}", options end
Use Organization.path instead of just org
octokit_octokit.rb
train
21313016b375e807b78889686a80bed6e3187214
diff --git a/zxbparser.py b/zxbparser.py index <HASH>..<HASH> 100755 --- a/zxbparser.py +++ b/zxbparser.py @@ -2875,7 +2875,7 @@ def p_chr(p): is_constant = True constant = '' for i in range(len(p[2])): # Convert every argument to 8bit unsigned - p[2][i] = make_typecast(TYPE.ubyte, p[2][i], p.lineno(1)) + p[2][i].value = make_typecast(TYPE.ubyte, p[2][i].value, p.lineno(1)) is_constant = is_constant and is_number(p[2][i]) if is_constant: constant += chr(int(p[2][i].value) & 0xFF)
Fixed a bug in CHR typecast
boriel_zxbasic
train
3993f027dbec04917c7e3d6cf5ee39d183608f57
diff --git a/comment/comment.js b/comment/comment.js index <HASH>..<HASH> 100644 --- a/comment/comment.js +++ b/comment/comment.js @@ -95,7 +95,7 @@ bodyContent: '<div class="comment-delete-confirm"><a href="#" id="confirmdelete- color: { to: '#06e' }, backgroundColor: { to: '#FFE390' } }; - var anim = new YAHOO.util.ColorAnim(ids[i], attributes); + var anim = new Y.YUI2.util.ColorAnim(ids[i], attributes); anim.animate(); } scope.register_pagination(); @@ -106,7 +106,7 @@ bodyContent: '<div class="comment-delete-confirm"><a href="#" id="confirmdelete- var attributes = { backgroundColor: { from: '#FFE390', to:'#FFFFFF' } }; - var anim = new YAHOO.util.ColorAnim('dlg-content-'+cid, attributes); + var anim = new Y.YUI2.util.ColorAnim('dlg-content-'+cid, attributes); anim.animate(); } }, @@ -255,7 +255,7 @@ bodyContent: '<div class="comment-delete-confirm"><a href="#" id="confirmdelete- }; var cmt = Y.one('#'+htmlid); cmt.setStyle('overflow', 'hidden'); - var anim = new YAHOO.util.Anim(htmlid, attributes, 1, YAHOO.util.Easing.easeOut); + var anim = new Y.YUI2.util.Anim(htmlid, attributes, 1, Y.YUI2.util.Easing.easeOut); anim.onComplete.subscribe(remove_dom, cmt, this); anim.animate(); }
MDL-<I> use 2in3 in comments
moodle_moodle
train
153e16ac770f865853dd8379f4053d7a21b67d74
diff --git a/railties/lib/rails/test_unit/reporter.rb b/railties/lib/rails/test_unit/reporter.rb index <HASH>..<HASH> 100644 --- a/railties/lib/rails/test_unit/reporter.rb +++ b/railties/lib/rails/test_unit/reporter.rb @@ -16,8 +16,7 @@ module Rails COLOR_CODES = { red: 31, green: 32, - yellow: 33, - blue: 34 + yellow: 33 } def record(result)
Remove unused blue color. I got the blues because we aren't coloring any lines blue.
rails_rails
train
7857f1886a569530e8a77f18540b6773ac97c094
diff --git a/drools-model/drools-model-compiler/src/test/java/org/drools/modelcompiler/bigdecimaltest/BigDecimalTest.java b/drools-model/drools-model-compiler/src/test/java/org/drools/modelcompiler/bigdecimaltest/BigDecimalTest.java index <HASH>..<HASH> 100644 --- a/drools-model/drools-model-compiler/src/test/java/org/drools/modelcompiler/bigdecimaltest/BigDecimalTest.java +++ b/drools-model/drools-model-compiler/src/test/java/org/drools/modelcompiler/bigdecimaltest/BigDecimalTest.java @@ -187,4 +187,27 @@ public class BigDecimalTest extends BaseModelTest { ksession.insert(customer); assertEquals(0, ksession.fireAllRules()); } + + @Test + public void testBigDecimalEquals() { + // DROOLS-3527 + String str = + "package org.drools.modelcompiler.bigdecimals\n" + + "import " + Customer.class.getCanonicalName() + ";\n" + + "rule R1\n" + + "when\n" + + "$customer: Customer( rate == 12.111B )\n" + + "then\n" + + "end"; + + KieSession ksession = getKieSession(str); + + Customer customer = new Customer(); + customer.setRate(new BigDecimal("12.111")); + + ksession.insert(customer); + + assertEquals(1, ksession.fireAllRules()); + + } } diff --git a/drools-model/drools-model-compiler/src/test/java/org/drools/modelcompiler/builder/generator/ExpressionTyperTest.java b/drools-model/drools-model-compiler/src/test/java/org/drools/modelcompiler/builder/generator/ExpressionTyperTest.java index <HASH>..<HASH> 100644 --- a/drools-model/drools-model-compiler/src/test/java/org/drools/modelcompiler/builder/generator/ExpressionTyperTest.java +++ b/drools-model/drools-model-compiler/src/test/java/org/drools/modelcompiler/builder/generator/ExpressionTyperTest.java @@ -98,6 +98,13 @@ public class ExpressionTyperTest { } @Test + public void testBigDecimalLiteral() { + final TypedExpression expected = typedResult("13.111B", BigDecimal.class); + final TypedExpression actual = toTypedExpression("13.111B", null); + assertEquals(expected, actual); + } + + @Test public void testBooleanComparison() { final TypedExpression expected = typedResult("_this.getAge() == 18", int.class); final TypedExpression actual = toTypedExpression("age == 18", Person.class);
[DROOLS-<I>] Added some tests for BigDecimal with decimal values (#<I>)
kiegroup_drools
train
176cc8f59731267843bd37c7240d91857cef0e7a
diff --git a/src/main/java/hex/drf/DRF.java b/src/main/java/hex/drf/DRF.java index <HASH>..<HASH> 100644 --- a/src/main/java/hex/drf/DRF.java +++ b/src/main/java/hex/drf/DRF.java @@ -255,10 +255,10 @@ public class DRF extends SharedTreeModelBuilder<DRF.DRFModel> { final float[] varimp = new float[_ncols]; // output variable importance final float[] varimpSD = new float[_ncols]; // output variable importance sd // For each variable launch one FJ-task to compute variable importance. - H2OCountedCompleter[] computers = new H2OCountedCompleter[_ncols]; + Futures fs = new Futures(); for (int var=0; var<_ncols; var++) { final int variable = var; - computers[var] = new H2OCountedCompleter() { + H2OCountedCompleter task4var = new H2OCountedCompleter() { @Override public void compute2() { Frame wf = new Frame(f); // create a copy of frame // Compute prediction error per tree on shuffled OOB sample @@ -269,8 +269,10 @@ public class DRF extends SharedTreeModelBuilder<DRF.DRFModel> { tryComplete(); } }; + H2O.submitTask(task4var); // Fork the computation task + fs.add(task4var); } - ForkJoinTask.invokeAll(computers); + fs.blockForPending(); // Randez-vous // after all varimp contains variable importance of all columns used by a model. return makeModel(model, varimp, varimpSD); } @@ -284,10 +286,10 @@ public class DRF extends SharedTreeModelBuilder<DRF.DRFModel> { // Compute tree votes over shuffled data final CompressedTree[/*nclass*/] theTree = model.ctree(tid); // get the last tree FIXME we should pass only keys final int nclasses = model.nclasses(); - H2OCountedCompleter[] computers = new H2OCountedCompleter[_ncols]; + Futures fs = new Futures(); for (int var=0; var<_ncols; var++) { final int variable = var; - computers[var] = new H2OCountedCompleter() { + H2OCountedCompleter task4var = new H2OCountedCompleter() { @Override public void compute2() { // Compute this tree votes over all data over given variable TreeVotes cd = TreeVotesCollector.collect(theTree, nclasses, fTrain, _ncols, sample_rate, variable); @@ -296,8 +298,10 @@ public class DRF extends SharedTreeModelBuilder<DRF.DRFModel> { tryComplete(); } }; + H2O.submitTask(task4var); // Fork computation + fs.add(task4var); } - ForkJoinTask.invokeAll(computers); // Fork computation and wait for results + fs.blockForPending(); // Wait for results // Compute varimp for individual features (_ncols) final float[] varimp = new float[_ncols]; // output variable importance final float[] varimpSD = new float[_ncols]; // output variable importance sd
Small fix in varimp launcher - using H2O submit task call.
h2oai_h2o-2
train
ae167525d0aec490d92ec3944543941403e1d3b0
diff --git a/activejdbc/pom.xml b/activejdbc/pom.xml index <HASH>..<HASH> 100644 --- a/activejdbc/pom.xml +++ b/activejdbc/pom.xml @@ -151,7 +151,7 @@ <configuration> <outputDirectory>${project.basedir}/target/test-classes</outputDirectory> </configuration> - </execution> + </execution> </executions> <dependencies> <dependency> diff --git a/activejdbc/src/main/java/org/javalite/activejdbc/Model.java b/activejdbc/src/main/java/org/javalite/activejdbc/Model.java index <HASH>..<HASH> 100644 --- a/activejdbc/src/main/java/org/javalite/activejdbc/Model.java +++ b/activejdbc/src/main/java/org/javalite/activejdbc/Model.java @@ -2692,6 +2692,81 @@ public abstract class Model extends CallbackSupport implements Externalizable { public String toInsert() { return toInsert(getMetaModelLocal().getDialect()); } + + /** + * Generates UPDATE SQL based on this model. Uses single quotes for all string values. + * Example: + * <pre> + * + * String insert = u.toUpdate(); + * //yields this output: + * //INSERT INTO users (id, first_name, email, last_name) VALUES (1, 'Marilyn', 'mmonroe@yahoo.com', 'Monroe'); + * </pre> + * + * @return INSERT SQL based on this model. + */ + public String toUpdate(){ + + doUpdatedAt(); + + MetaModel metaModel = getMetaModelLocal(); + StringBuilder query = new StringBuilder().append("UPDATE ").append(metaModel.getTableName()).append(" SET "); + SortedSet<String> attributeNames = metaModel.getAttributeNamesSkipGenerated(manageTime); + + + + List<Object> values = getAttributeValues(attributeNames); + List<String> attributeNamesList = new ArrayList<String>(attributeNames); + for (int i = 0; i < values.size(); i++) { + Object value = values.get(i); + String attributeName = attributeNamesList.get(i); + if (value != null) { + query.append(attributeName + " = " + "'" + value + "'"); + + if (i < values.size() -1) { + query.append(" , "); + } + } + } + +// join(query, attributeNames, " = ?, "); +// query.append(" = ?"); + + + + if (manageTime && metaModel.hasAttribute("updated_at")) { + query.append(", updated_at = ?"); + values.add(get("updated_at")); + } + + if(metaModel.isVersioned()){ + query.append(", ").append(getMetaModelLocal().getVersionColumn()).append(" = ?"); + values.add(getLong(getMetaModelLocal().getVersionColumn()) + 1); + } + query.append(" WHERE ").append(metaModel.getIdName()).append(" = " + getId()); + values.add(getId()); + + + if (metaModel.isVersioned()) { + query.append(" AND ").append(getMetaModelLocal().getVersionColumn()).append(" = ?"); + values.add(get(getMetaModelLocal().getVersionColumn())); + } +// int updated = new DB(metaModel.getDbName()).exec(query.toString(), values.toArray()); +// if(metaModel.isVersioned() && updated == 0){ +// throw new StaleModelException("Failed to update record for model '" + getClass() + +// "', with " + getIdName() + " = " + getId() + " and " + getMetaModelLocal().getVersionColumn() +// + " = " + get(getMetaModelLocal().getVersionColumn()) + +// ". Either this record does not exist anymore, or has been updated to have another " +// + getMetaModelLocal().getVersionColumn() + '.'); +// }else if(metaModel.isVersioned()){ +// set(getMetaModelLocal().getVersionColumn(), getLong(getMetaModelLocal().getVersionColumn()) + 1); +// } + if(metaModel.cached()){ + QueryCache.instance().purgeTableCache(metaModel.getTableName()); + } + return query.toString(); + } + /** * Generates INSERT SQL based on this model with the provided dialect. 
diff --git a/activejdbc/src/test/java/org/javalite/activejdbc/ModelTest.java b/activejdbc/src/test/java/org/javalite/activejdbc/ModelTest.java index <HASH>..<HASH> 100644 --- a/activejdbc/src/test/java/org/javalite/activejdbc/ModelTest.java +++ b/activejdbc/src/test/java/org/javalite/activejdbc/ModelTest.java @@ -482,6 +482,19 @@ public class ModelTest extends ActiveJDBCTest { insertSQL = s.toInsert(new SimpleFormatter(java.sql.Date.class, "to_date('", "')")); the(insertSQL).shouldBeEqual("INSERT INTO students (dob, first_name, id, last_name) VALUES (to_date('1965-12-01'), 'Jim', 1, 'Cary')"); } + + @Test + public void shouldGenerateCorrectUpdateSQL(){ + Student s = new Student(); + s.set("first_name", "Jim"); + s.set("last_name", "Cary"); + s.set("dob", new java.sql.Date(getDate(1965, 12, 1).getTime())); + s.set("id", 1); + String updateSQL = s.toUpdate(); + System.out.println(updateSQL); + + the(updateSQL).shouldBeEqual("UPDATE students SET DOB = '1965-12-01' , FIRST_NAME = 'Jim' , LAST_NAME = 'Cary' WHERE id = 1"); +} @Test public void shouldGenerateValidInsertSQL() {
Adding a .toUpdate() method. Issue #<I>
javalite_activejdbc
train
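The commit above builds an UPDATE statement by iterating over the model's non-null attributes and wrapping every value in single quotes. Below is a minimal Python sketch of that idea; the function and argument names are hypothetical, and like the Java code it quotes values naively without escaping, so it is illustrative only.

# Sketch: build an UPDATE statement from a model's attribute dict.
# Hypothetical names; values are single-quoted without escaping, as in the commit.
def to_update(table, attrs, id_column="id"):
    assignments = ", ".join(
        "{} = '{}'".format(name, value)
        for name, value in attrs.items()
        if name != id_column and value is not None
    )
    return "UPDATE {} SET {} WHERE {} = {}".format(
        table, assignments, id_column, attrs[id_column]
    )

# Prints: UPDATE students SET first_name = 'Jim', last_name = 'Cary' WHERE id = 1
print(to_update("students", {"id": 1, "first_name": "Jim", "last_name": "Cary"}))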
ff13cf060fd0e2fa64e183ed63818ed698efcc96
diff --git a/src/pywws/__init__.py b/src/pywws/__init__.py index <HASH>..<HASH> 100644 --- a/src/pywws/__init__.py +++ b/src/pywws/__init__.py @@ -1,3 +1,3 @@ __version__ = '18.8.0' -_release = '1595' -_commit = '6c0f067' +_release = '1596' +_commit = '83d1505' diff --git a/src/pywws/template.py b/src/pywws/template.py index <HASH>..<HASH> 100644 --- a/src/pywws/template.py +++ b/src/pywws/template.py @@ -504,7 +504,7 @@ class Template(object): data = data_set[idx] elif command[0] == 'live': data_set = self.calib_data - idx = datetime.max + idx = live_data['idx'] valid_data = True data = live_data elif command[0] == 'timezone':
Minor tweak of templating 'live' data. I don't expect this to affect anything in normal operation.
jim-easterbrook_pywws
train
7ad39266da2553bea343b25c34ebb8d58bd97258
diff --git a/satpy/scene.py b/satpy/scene.py index <HASH>..<HASH> 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -614,7 +614,7 @@ class Scene(InfoObject): writer = self.get_writer(writer, **kwargs) writer.save_dataset(self[dataset_id], filename=filename, - overlay=overlay) + overlay=overlay, **kwargs) def save_datasets(self, writer="geotiff", datasets=None, **kwargs): """Save all the datasets present in a scene to disk using *writer*.
Pass kwargs to dataset saving
pytroll_satpy
train
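The one-line fix above forwards the caller's keyword arguments on to the writer instead of dropping them. Here is a small self-contained Python sketch of that forwarding pattern; the Writer class is a stand-in, not satpy's actual writer API.

# Stand-in writer used only to demonstrate **kwargs forwarding.
class Writer:
    def save_dataset(self, dataset, filename=None, overlay=None, **kwargs):
        print("saving", dataset, "to", filename, "overlay:", overlay, "extras:", kwargs)

def save_dataset(writer, dataset, filename=None, overlay=None, **kwargs):
    # Forward any extra options to the writer rather than silently discarding them.
    writer.save_dataset(dataset, filename=filename, overlay=overlay, **kwargs)

save_dataset(Writer(), "chanl_1", filename="out.tif", compress="deflate")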
51fb84bd11726b4a1ee958b58e02d15deb4bdc16
diff --git a/modules/jooby-apt/src/main/java/io/jooby/apt/JoobyProcessor.java b/modules/jooby-apt/src/main/java/io/jooby/apt/JoobyProcessor.java index <HASH>..<HASH> 100644 --- a/modules/jooby-apt/src/main/java/io/jooby/apt/JoobyProcessor.java +++ b/modules/jooby-apt/src/main/java/io/jooby/apt/JoobyProcessor.java @@ -49,7 +49,8 @@ public class JoobyProcessor extends AbstractProcessor { private ProcessingEnvironment processingEnv; /** - * Controller { + * Route Data. + * { * HTTP_METHOD: [method1, ..., methodN] * } */ diff --git a/modules/jooby-apt/src/main/java/io/jooby/internal/apt/asm/RouteAttributesWriter.java b/modules/jooby-apt/src/main/java/io/jooby/internal/apt/asm/RouteAttributesWriter.java index <HASH>..<HASH> 100644 --- a/modules/jooby-apt/src/main/java/io/jooby/internal/apt/asm/RouteAttributesWriter.java +++ b/modules/jooby-apt/src/main/java/io/jooby/internal/apt/asm/RouteAttributesWriter.java @@ -151,9 +151,10 @@ public class RouteAttributesWriter { ? annotation.getAnnotationType().asElement().getSimpleName().toString() : root; // Set all values and then override with present values (fix for JDK 11+) - result.putAll(toMap(elements - .getElementValuesWithDefaults(annotation), prefix)); - result.putAll(toMap(annotation.getElementValues(), prefix)); + toMap(annotation.getElementValues(), prefix) + .forEach(result::put); + toMap(elements.getElementValuesWithDefaults(annotation), prefix) + .forEach(result::putIfAbsent); } return result; }
APT: fix random bug on JDK <I> related to annotations and default values (2nd try)
jooby-project_jooby
train
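The fix above records the annotation values that are explicitly present first and then uses putIfAbsent so the defaults only fill in the gaps. The same precedence can be expressed with Python's setdefault; the dictionaries below are hypothetical illustrations.

# Explicit values win; defaults only fill missing keys (mirrors putIfAbsent).
explicit = {"path": "/custom"}               # values actually set on the annotation
defaults = {"path": "/", "method": "GET"}    # annotation defaults

merged = dict(explicit)
for key, value in defaults.items():
    merged.setdefault(key, value)

assert merged == {"path": "/custom", "method": "GET"}
print(merged)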
76cc61c890b108ec5c822dd3199f0fe54f6e5d74
diff --git a/analytics/sites.py b/analytics/sites.py index <HASH>..<HASH> 100644 --- a/analytics/sites.py +++ b/analytics/sites.py @@ -20,6 +20,9 @@ class Gadgets(object): def get_gadget(self, id): return self._registry[id] + def get_gadgets(self): + return self._registry.values() + def register(self, gadget): """ Registers a gadget object.
added get_gadgets() function to Gadgets class to list all gadgets in registry
praekelt_django-analytics
train
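The commit above adds a method that returns every registered gadget. A minimal registry sketch in Python with the same three operations follows; the class and method names mirror the diff, but the implementation is only an illustration.

# Minimal registry: register by id, fetch one, list all.
class Gadgets:
    def __init__(self):
        self._registry = {}

    def register(self, gadget_id, gadget):
        self._registry[gadget_id] = gadget

    def get_gadget(self, gadget_id):
        return self._registry[gadget_id]

    def get_gadgets(self):
        # The new call: everything currently in the registry.
        return list(self._registry.values())

gadgets = Gadgets()
gadgets.register("visitors", object())
print(len(gadgets.get_gadgets()))  # 1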
acff5e3330808157a76be3743756e2a700f8930e
diff --git a/pulls/submit-queue.go b/pulls/submit-queue.go index <HASH>..<HASH> 100644 --- a/pulls/submit-queue.go +++ b/pulls/submit-queue.go @@ -216,8 +216,24 @@ func (sq *SubmitQueue) MungePullRequest(config *github_util.Config, pr *github_a return } - if !github_util.HasLabels(issue.Labels, []string{"lgtm"}) { - sq.SetPRStatus(pr, noLGTM) + if mergeable, err := config.IsPRMergeable(pr); err != nil { + glog.V(2).Infof("Skipping %d - unable to determine mergeability", *pr.Number) + sq.SetPRStatus(pr, unknown) + return + } else if !mergeable { + glog.V(4).Infof("Skipping %d - not mergable", *pr.Number) + sq.SetPRStatus(pr, unmergeable) + return + } + + // Validate the status information for this PR + contexts := sq.RequiredStatusContexts + if len(sq.DontRequireE2ELabel) == 0 || !github_util.HasLabel(issue.Labels, sq.DontRequireE2ELabel) { + contexts = append(contexts, sq.E2EStatusContext) + } + if ok := config.IsStatusSuccess(pr, contexts); !ok { + glog.Errorf("PR# %d Github CI status is not success", *pr.Number) + sq.SetPRStatus(pr, ciFailure) return } @@ -237,6 +253,11 @@ func (sq *SubmitQueue) MungePullRequest(config *github_util.Config, pr *github_a config.RemoveLabel(*pr.Number, needsOKToMergeLabel) } + if !github_util.HasLabels(issue.Labels, []string{"lgtm"}) { + sq.SetPRStatus(pr, noLGTM) + return + } + lastModifiedTime := github_util.LastModifiedTime(commits) lgtmTime := github_util.LabelTime("lgtm", events) @@ -252,27 +273,6 @@ func (sq *SubmitQueue) MungePullRequest(config *github_util.Config, pr *github_a return } - if mergeable, err := config.IsPRMergeable(pr); err != nil { - glog.V(2).Infof("Skipping %d - unable to determine mergeability", *pr.Number) - sq.SetPRStatus(pr, unknown) - return - } else if !mergeable { - glog.V(4).Infof("Skipping %d - not mergable", *pr.Number) - sq.SetPRStatus(pr, unmergeable) - return - } - - // Validate the status information for this PR - contexts := sq.RequiredStatusContexts - if len(sq.DontRequireE2ELabel) == 0 || !github_util.HasLabel(issue.Labels, sq.DontRequireE2ELabel) { - contexts = append(contexts, sq.E2EStatusContext) - } - if ok := config.IsStatusSuccess(pr, contexts); !ok { - glog.Errorf("PR# %d Github CI status is not success", *pr.Number) - sq.SetPRStatus(pr, ciFailure) - return - } - if !e2e.Stable() { sq.SetPRStatus(pr, e2eFailure) return
Change order of merge queue tests to improve the reason shown in the web page. We checked for the LGTM label very early, but most people would rather know that GitHub Jenkins is failing or the PR isn't mergeable before they worry about the LGTM label. Try to set the order to the order in which people would need to fix the problems...
kubernetes_test-infra
train
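The commit above reorders the merge-queue checks so that mergeability and CI status are evaluated, and therefore reported, before the LGTM label check. Below is a small Python sketch of running ordered checks and surfacing the first failure; the check names and PR fields are illustrative, not the real munger's.

# Run checks in priority order and report the first failing reason.
def first_failure(pr, checks):
    for reason, check in checks:
        if not check(pr):
            return reason
    return None

checks = [
    ("unmergeable", lambda pr: pr["mergeable"]),
    ("ci-failure", lambda pr: pr["ci_green"]),
    ("no-lgtm", lambda pr: "lgtm" in pr["labels"]),
]

pr = {"mergeable": True, "ci_green": False, "labels": []}
print(first_failure(pr, checks))  # "ci-failure" is reported before "no-lgtm"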
5270d8a416e3a8e9d6dbb9b32f7437357f6bd9bf
diff --git a/lib/omnibus.rb b/lib/omnibus.rb index <HASH>..<HASH> 100644 --- a/lib/omnibus.rb +++ b/lib/omnibus.rb @@ -164,7 +164,7 @@ module Omnibus # # @return [void] def process_configuration - Config.validate + Config.validate! process_dsl_files end diff --git a/lib/omnibus/config.rb b/lib/omnibus/config.rb index <HASH>..<HASH> 100644 --- a/lib/omnibus/config.rb +++ b/lib/omnibus/config.rb @@ -290,27 +290,26 @@ module Omnibus # @return [Integer, nil] default :build_retries, 3 - # @!group Validation Methods + class << self + # + # Asserts that the Config object is in a valid state. If invalid for any + # reason, an exception will be thrown. + # + # @return [true] + # + def validate! + validate_s3_config! + end - # Asserts that the Config object is in a valid state. If invalid - # for any reason, an exception will be thrown. - # - # @raise [RuntimeError] - # @return [void] - def self.validate - valid_s3_config? - # add other validation methods as needed - end + private - # @raise [InvalidS3Configuration] - def self.valid_s3_config? - if use_s3_caching - unless s3_bucket - raise InvalidS3Configuration.new(s3_bucket, s3_access_key, s3_secret_key) + def validate_s3_config! + if use_s3_caching + unless s3_bucket + raise InvalidS3Configuration + end end end end - - # @!endgroup - end # Config -end # Omnibus + end +end diff --git a/lib/omnibus/exceptions.rb b/lib/omnibus/exceptions.rb index <HASH>..<HASH> 100644 --- a/lib/omnibus/exceptions.rb +++ b/lib/omnibus/exceptions.rb @@ -56,26 +56,22 @@ module Omnibus end class InvalidS3Configuration < Error - def initialize(s3_bucket, s3_access_key, s3_secret_key) - @s3_bucket, @s3_access_key, @s3_secret_key = s3_bucket, s3_access_key, s3_secret_key - end - def to_s <<-EOH One or more required S3 configuration values is missing. Your effective configuration was the following: - s3_bucket #{@s3_bucket.inspect} - s3_access_key #{@s3_access_key.inspect} - s3_secret_key #{@s3_secret_key.inspect} + s3_bucket => #{Config.s3_bucket.inspect} + s3_access_key => #{Config.s3_access_key.inspect} + s3_secret_key => #{Config.s3_secret_key.inspect} If you truly do want S3 caching, you should add values similar to the following in your Omnibus config file: - s3_bucket ENV['S3_BUCKET_NAME'] - s3_access_key ENV['S3_ACCESS_KEY'] - s3_secret_key ENV['S3_SECRET_KEY'] + s3_bucket ENV['S3_BUCKET_NAME'] + s3_access_key ENV['S3_ACCESS_KEY'] + s3_secret_key ENV['S3_SECRET_KEY'] Note that you are not required to use environment variables as illustrated (and the ones listed have no special significance in Omnibus), but it is encouraged
Update validations to behave like real validations on config
chef_omnibus
train
5d508e2f5d72dab636e62b8f8d1b0e1286324959
diff --git a/lib/hqmf-parser/2.0/data_criteria.rb b/lib/hqmf-parser/2.0/data_criteria.rb index <HASH>..<HASH> 100644 --- a/lib/hqmf-parser/2.0/data_criteria.rb +++ b/lib/hqmf-parser/2.0/data_criteria.rb @@ -17,8 +17,8 @@ module HQMF2 "2.16.840.1.113883.10.20.28.3.18" => {valueset_path:"./*/cda:value", result_path: nil }, "2.16.840.1.113883.10.20.28.3.19" => {valueset_path:"./*/cda:value", result_path: nil }, "2.16.840.1.113883.10.20.28.3.20" => {valueset_path:"./*/cda:outboundRelationship[@typeCode='CAUS']/cda:observationCriteria/cda:code", result_path: nil }, - "2.16.840.1.113883.10.20.28.3.21" => {valueset_path:"./*/cda:outboundRelationship[@typeCode='CAUS']/cda:observationCriteria/cda:code", result_path: nil }, - "2.16.840.1.113883.10.20.28.3.22" => {valueset_path:"./*/cda:code", result_path: nil }, + "2.16.840.1.113883.10.20.28.3.21" => {valueset_path:"./*/cda:outboundRelationship[@typeCode='CAUS']/cda:observationCriteria/cda:code", result_path: nil }, + "2.16.840.1.113883.10.20.28.3.22" => {valueset_path:"./*/cda:code", result_path: nil }, "2.16.840.1.113883.10.20.28.3.23" => {valueset_path:"./*/cda:code", result_path: "./*/cda:value"}, "2.16.840.1.113883.10.20.28.3.24" => {valueset_path:"./*/cda:code", result_path: nil }, "2.16.840.1.113883.10.20.28.3.26" => {valueset_path:"./*/cda:code", result_path: nil }, @@ -154,7 +154,7 @@ module HQMF2 if mapping && mapping[:valueset_path] && @entry.at_xpath(mapping[:valueset_path]) @code_list_xpath = mapping[:valueset_path] @value = DataCriteria.parse_value(@entry,mapping[:result_path]) if mapping[:result_path] - end + end end end @@ -274,11 +274,7 @@ module HQMF2 # @return [String] the title of this data criteria def title dispValue = attr_val("#{@code_list_xpath}/cda:displayName/@value") - desc = nil - if @description && (@description.include? ":") - desc = @description.match(/.*:\s+(.+)/)[1] - end - dispValue || desc || id + dispValue || @description || id end # Get the code list OID of the criteria, used as an index to the code list database @@ -325,10 +321,9 @@ module HQMF2 field_values = nil if field_values.empty? - if @specific_occurrence - @description = @description.split('_').drop(1).join('_') - else - @description = "#{@description}#{' ' + @local_variable_name.split('_')[0] if @local_variable_name}" unless @variable + unless @variable || @derivation_operator + exact_desc = title.split(' ')[0...-3].join(' ') + @description = "#{@description}: #{exact_desc}" end HQMF::DataCriteria.new(id, title, nil, description, code_list_id, children_criteria, @@ -444,9 +439,9 @@ module HQMF2 end def extract_value() - # need to look in both places for result criteria because + # need to look in both places for result criteria because #procedureCriteria does not have a value element while observationCriteria does - DataCriteria.parse_value(@entry, "./*/cda:value") || + DataCriteria.parse_value(@entry, "./*/cda:value") || DataCriteria.parse_value(@entry, "./*/cda:outboundRelationship/cda:code[@code='394617004']/../cda:value") end
Update descriptions for data criteria to utilize a trimmed title, fixing human-readable output issues
projecttacoma_cqm-parsers
train
1464849e5474327cb22d04b6e6f1ea85d7b694cb
diff --git a/src/crdt/network.js b/src/crdt/network.js index <HASH>..<HASH> 100644 --- a/src/crdt/network.js +++ b/src/crdt/network.js @@ -36,14 +36,15 @@ function createNetworkWrapper (id, log, createNetwork) { const onRemoteHead = (remoteHead) => limit(() => _onRemoteHead(remoteHead)) const network = createNetwork(id, log, onRemoteHead) + return { async start () { await network.start() + log.on('new head', (head) => network.setHead(head)) const head = await log.getHead() if (head) { network.setHead(head) } - log.on('new head', (head) => network.setHead(head)) }, stop () {
Prevent some race conditions on head setting
ipfs-shipyard_peer-crdt
train
b0696ec42c66fbf7d975aa551879271c66ff0d59
diff --git a/pkg/util/ciutil/travis.go b/pkg/util/ciutil/travis.go index <HASH>..<HASH> 100644 --- a/pkg/util/ciutil/travis.go +++ b/pkg/util/ciutil/travis.go @@ -34,7 +34,12 @@ func (t travisCI) DetectVars() Vars { v.SHA = os.Getenv("TRAVIS_PULL_REQUEST_SHA") v.BranchName = os.Getenv("TRAVIS_BRANCH") v.CommitMessage = os.Getenv("TRAVIS_COMMIT_MESSAGE") - v.PRNumber = os.Getenv("TRAVIS_PULL_REQUEST") + // Travis sets the value of TRAVIS_PULL_REQUEST to false if the build + // is not a PR build. + // See: https://docs.travis-ci.com/user/environment-variables/#convenience-variables + if prNumber := os.Getenv("TRAVIS_PULL_REQUEST"); prNumber != "false" { + v.PRNumber = prNumber + } return v }
Check if TRAVIS_PULL_REQUEST is false before setting the ci vars' PRNumber property (#<I>)
pulumi_pulumi
train
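The fix above avoids treating Travis's literal "false" sentinel as a PR number. The equivalent check in Python; only os.getenv is assumed, and the variable handling is illustrative.

import os

# Travis sets TRAVIS_PULL_REQUEST to the string "false" for non-PR builds,
# so only keep the value when it is an actual PR number.
pr_number = os.getenv("TRAVIS_PULL_REQUEST", "")
if pr_number == "false":
    pr_number = ""
print(repr(pr_number))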
b5d7cc30063b5b0e0bc401e6f5b0c2ae89866e80
diff --git a/aws/resource_aws_config_remediation_configuration_test.go b/aws/resource_aws_config_remediation_configuration_test.go index <HASH>..<HASH> 100644 --- a/aws/resource_aws_config_remediation_configuration_test.go +++ b/aws/resource_aws_config_remediation_configuration_test.go @@ -21,6 +21,7 @@ func testAccConfigRemediationConfiguration_basic(t *testing.T) { resource.Test(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, + ErrorCheck: testAccErrorCheck(t, configservice.EndpointsID), Providers: testAccProviders, CheckDestroy: testAccCheckConfigRemediationConfigurationDestroy, Steps: []resource.TestStep{ @@ -52,6 +53,7 @@ func testAccConfigRemediationConfiguration_disappears(t *testing.T) { resource.Test(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, + ErrorCheck: testAccErrorCheck(t, configservice.EndpointsID), Providers: testAccProviders, CheckDestroy: testAccCheckConfigRemediationConfigurationDestroy, Steps: []resource.TestStep{ @@ -79,6 +81,7 @@ func testAccConfigRemediationConfiguration_recreates(t *testing.T) { resource.Test(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, + ErrorCheck: testAccErrorCheck(t, configservice.EndpointsID), Providers: testAccProviders, CheckDestroy: testAccCheckConfigRemediationConfigurationDestroy, Steps: []resource.TestStep{ @@ -113,6 +116,7 @@ func testAccConfigRemediationConfiguration_updates(t *testing.T) { resource.Test(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, + ErrorCheck: testAccErrorCheck(t, configservice.EndpointsID), Providers: testAccProviders, CheckDestroy: testAccCheckConfigRemediationConfigurationDestroy, Steps: []resource.TestStep{
tests/r/config_remediation_configuration: Add ErrorCheck
terraform-providers_terraform-provider-aws
train
e76e4fd4278c18dccb19ec07d7550bbb76bdd8f7
diff --git a/src/styles/style.js b/src/styles/style.js index <HASH>..<HASH> 100644 --- a/src/styles/style.js +++ b/src/styles/style.js @@ -435,6 +435,7 @@ export var Style = { catch (e) { log('error', `Style: error compiling program for style '${this.name}' (program key '${key}')`, this, e.stack, e.type, e.shader_errors); + throw e; // re-throw so users can be notified via event subscriptions } } return program;
Re-throw style compilation errors so users can be notified via event subscriptions. Fixes Tangram Play inline shader compile errors (!)
tangrams_tangram
train
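The fix above keeps the diagnostic log but re-throws the compilation error so event subscribers still hear about it. The same log-then-re-raise pattern in Python; the failing compile function is a stand-in for the real shader compilation.

import logging

log = logging.getLogger("style")

def compile_program(source):
    # Stand-in for the real compilation step that can fail.
    raise ValueError("shader compile error")

def get_program(source):
    try:
        return compile_program(source)
    except Exception as exc:
        # Log for diagnostics, then re-raise so callers/subscribers are notified too.
        log.error("error compiling program: %s", exc)
        raise

try:
    get_program("broken source")
except ValueError:
    print("caller was notified")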
cc3a517b8cc5c36358c30ef6d5fd04ba7b445dfe
diff --git a/web/application.rb b/web/application.rb index <HASH>..<HASH> 100644 --- a/web/application.rb +++ b/web/application.rb @@ -12,6 +12,7 @@ class WorkerholicWeb < Sinatra::Base end get '/overview' do + # require 'pry'; binding.pry @processes = Workerholic::StatsAPI.process_stats erb :overview @@ -60,6 +61,13 @@ class WorkerholicWeb < Sinatra::Base erb :history end + get '/overview-data-on-load' do + JSON.generate({ + completed_jobs: Workerholic::StatsAPI.job_statistics_history('completed_jobs'), + failed_jobs: Workerholic::StatsAPI.job_statistics_history('failed_jobs') + }) + end + get '/overview-data' do JSON.generate({ completed_jobs: Workerholic::StatsAPI.job_statistics( {category: 'completed_jobs', count_only: true} ), diff --git a/web/public/javascripts/application.js b/web/public/javascripts/application.js index <HASH>..<HASH> 100644 --- a/web/public/javascripts/application.js +++ b/web/public/javascripts/application.js @@ -5,9 +5,9 @@ var App = { jobsCompletedPerSecondHistory: [], totalMemoryHistory: [], maxTime: 240, - pollingInterval: 5000, + pollingInterval: 10000, freshDataCount: function() { - return (this.maxTime / 5) + 1; + return (this.maxTime / (this.pollingInterval / 1000) ) + 1; }, tab: null, removeStaleData: function() { @@ -155,6 +155,25 @@ var App = { }.bind(this) }) }, + getHistoricalOverviewData: function() { + $.ajax({ + url: '/overview-data-on-load', + dataType: 'json', + success: function(data) { + // var dataPointsCount = Math.min(data['completed_jobs'].length, this.freshDataCount()); + + for (var i = 0; i < this.freshDataCount(); i++) { + this.jobsCompletedPerSecondHistory.push(parseInt(data['completed_jobs'][i]) / 10 || 0); + } + + for (var i = 0; i < this.freshDataCount(); i++) { + this.failedJobsCountHistory.push(parseInt(data['failed_jobs'][i]) || 0); + } + + this.drawChart(); + }.bind(this) + }) + }, drawChart: function() { var processedJobsChart = new CanvasJS.Chart('jobs_processed_container', { title: { @@ -389,7 +408,7 @@ var App = { var data = []; for (var i = 0; i <= count; i++) { - var point = { x: (i * 5).toString(), y: array[i] }; + var point = { x: (i * this.pollingInterval / 1000).toString(), y: array[i] }; data.push(point); } @@ -412,6 +431,7 @@ var App = { }, pollData: function(tab) { if (tab === 'overview') { + this.getHistoricalOverviewData(); this.getOverviewData(); setInterval(function() {
Now shows historical data on overview page load for jobs/s & failed jobs
workerholic_workerholic
train
025c253732a269d617c8a4f7113389fbea81b9c2
diff --git a/lib/performance/SizeLimitsPlugin.js b/lib/performance/SizeLimitsPlugin.js index <HASH>..<HASH> 100644 --- a/lib/performance/SizeLimitsPlugin.js +++ b/lib/performance/SizeLimitsPlugin.js @@ -40,6 +40,7 @@ module.exports = class SizeLimitsPlugin { entrypoint.getFiles().reduce((currentSize, file) => { const asset = compilation.getAsset(file); if ( + asset && assetFilter(asset.name, asset.source, asset.info) && asset.source ) {
fix: check the asset's existence before running the filter. Fixes <URL>
webpack_webpack
train
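The fix above checks that the asset lookup succeeded before passing it to the filter. The same guard in Python; the asset dictionaries and filter are hypothetical stand-ins for webpack's objects.

# Skip entries whose asset cannot be resolved instead of failing on attribute access.
def entrypoint_size(files, get_asset, asset_filter):
    total = 0
    for name in files:
        asset = get_asset(name)
        if asset and asset_filter(asset["name"], asset["source"]) and asset["source"]:
            total += len(asset["source"])
    return total

assets = {"main.js": {"name": "main.js", "source": "console.log(1)"}}
print(entrypoint_size(["main.js", "missing.js"], assets.get, lambda n, s: True))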
87cf908b0c4ecaa665b223eaa6915d386a386f8e
diff --git a/natural-language-classifier/src/main/java/com/ibm/watson/natural_language_classifier/v1/NaturalLanguageClassifier.java b/natural-language-classifier/src/main/java/com/ibm/watson/natural_language_classifier/v1/NaturalLanguageClassifier.java index <HASH>..<HASH> 100644 --- a/natural-language-classifier/src/main/java/com/ibm/watson/natural_language_classifier/v1/NaturalLanguageClassifier.java +++ b/natural-language-classifier/src/main/java/com/ibm/watson/natural_language_classifier/v1/NaturalLanguageClassifier.java @@ -48,7 +48,8 @@ public class NaturalLanguageClassifier extends BaseService { private static final String DEFAULT_SERVICE_NAME = "natural_language_classifier"; - private static final String DEFAULT_SERVICE_URL = "https://gateway.watsonplatform.net/natural-language-classifier/api"; + private static final String DEFAULT_SERVICE_URL + = "https://gateway.watsonplatform.net/natural-language-classifier/api"; /** * Constructs a new `NaturalLanguageClassifier` client using the DEFAULT_SERVICE_NAME. diff --git a/natural-language-understanding/src/main/java/com/ibm/watson/natural_language_understanding/v1/NaturalLanguageUnderstanding.java b/natural-language-understanding/src/main/java/com/ibm/watson/natural_language_understanding/v1/NaturalLanguageUnderstanding.java index <HASH>..<HASH> 100644 --- a/natural-language-understanding/src/main/java/com/ibm/watson/natural_language_understanding/v1/NaturalLanguageUnderstanding.java +++ b/natural-language-understanding/src/main/java/com/ibm/watson/natural_language_understanding/v1/NaturalLanguageUnderstanding.java @@ -47,7 +47,8 @@ public class NaturalLanguageUnderstanding extends BaseService { private static final String DEFAULT_SERVICE_NAME = "natural-language-understanding"; - private static final String DEFAULT_SERVICE_URL = "https://gateway.watsonplatform.net/natural-language-understanding/api"; + private static final String DEFAULT_SERVICE_URL + = "https://gateway.watsonplatform.net/natural-language-understanding/api"; private String versionDate; diff --git a/speech-to-text/src/main/java/com/ibm/watson/speech_to_text/v1/SpeechToText.java b/speech-to-text/src/main/java/com/ibm/watson/speech_to_text/v1/SpeechToText.java index <HASH>..<HASH> 100644 --- a/speech-to-text/src/main/java/com/ibm/watson/speech_to_text/v1/SpeechToText.java +++ b/speech-to-text/src/main/java/com/ibm/watson/speech_to_text/v1/SpeechToText.java @@ -82,17 +82,16 @@ import com.ibm.watson.speech_to_text.v1.model.UpgradeAcousticModelOptions; import com.ibm.watson.speech_to_text.v1.model.UpgradeLanguageModelOptions; import com.ibm.watson.speech_to_text.v1.model.Word; import com.ibm.watson.speech_to_text.v1.model.Words; -import java.util.Map; -import java.util.Map.Entry; - import com.ibm.watson.speech_to_text.v1.websocket.RecognizeCallback; import com.ibm.watson.speech_to_text.v1.websocket.SpeechToTextWebSocketListener; import okhttp3.HttpUrl; -import okhttp3.MultipartBody; import okhttp3.OkHttpClient; import okhttp3.Request; import okhttp3.WebSocket; +import java.util.Map; +import java.util.Map.Entry; + /** * The IBM&reg; Speech to Text service provides APIs that use IBM's speech-recognition capabilities to produce * transcripts of spoken audio. The service can transcribe speech from various languages and audio formats. In addition
chore: Clean up remaining checkstyle issues
watson-developer-cloud_java-sdk
train
4243710fc1189ff05a81e4e6016b688ac0f5fc33
diff --git a/jsf-undertow-bootsfaces-spring-boot-starter/src/test/java/com/github/persapiens/jsfboot/bootsfaces/UndertowEmptyTest.java b/jsf-undertow-bootsfaces-spring-boot-starter/src/test/java/com/github/persapiens/jsfboot/bootsfaces/UndertowEmptyTest.java index <HASH>..<HASH> 100644 --- a/jsf-undertow-bootsfaces-spring-boot-starter/src/test/java/com/github/persapiens/jsfboot/bootsfaces/UndertowEmptyTest.java +++ b/jsf-undertow-bootsfaces-spring-boot-starter/src/test/java/com/github/persapiens/jsfboot/bootsfaces/UndertowEmptyTest.java @@ -1,12 +1,29 @@ +/* + * Copyright 2016-2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + package com.github.persapiens.jsfboot.bootsfaces; -import static org.assertj.core.api.Assertions.assertThat; import org.testng.annotations.Test; +import static org.assertj.core.api.Assertions.assertThat; + @Test public class UndertowEmptyTest { - - public void nothing() { - assertThat(new UndertowEmpty()).isNotNull(); - } + + public void nothing() { + assertThat(new UndertowEmpty()).isNotNull(); + } } diff --git a/jsf-undertow-myfaces-bootsfaces-spring-boot-starter/src/test/java/com/github/persapiens/jsfboot/bootsfaces/UndertowMyfacesEmptyTest.java b/jsf-undertow-myfaces-bootsfaces-spring-boot-starter/src/test/java/com/github/persapiens/jsfboot/bootsfaces/UndertowMyfacesEmptyTest.java index <HASH>..<HASH> 100644 --- a/jsf-undertow-myfaces-bootsfaces-spring-boot-starter/src/test/java/com/github/persapiens/jsfboot/bootsfaces/UndertowMyfacesEmptyTest.java +++ b/jsf-undertow-myfaces-bootsfaces-spring-boot-starter/src/test/java/com/github/persapiens/jsfboot/bootsfaces/UndertowMyfacesEmptyTest.java @@ -1,12 +1,29 @@ +/* + * Copyright 2016-2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + package com.github.persapiens.jsfboot.bootsfaces; -import static org.assertj.core.api.Assertions.assertThat; import org.testng.annotations.Test; +import static org.assertj.core.api.Assertions.assertThat; + @Test public class UndertowMyfacesEmptyTest { - - public void nothing() { - assertThat(new UndertowMyfacesEmpty()).isNotNull(); - } + + public void nothing() { + assertThat(new UndertowMyfacesEmpty()).isNotNull(); + } }
#<I> all empty starter classes refactored to follow code standard
joinfaces_joinfaces
train
7718642a42b1d2b6a57a82b85e542b5b3a5ba483
diff --git a/openquake/calculators/event_based_risk.py b/openquake/calculators/event_based_risk.py index <HASH>..<HASH> 100644 --- a/openquake/calculators/event_based_risk.py +++ b/openquake/calculators/event_based_risk.py @@ -167,8 +167,9 @@ class EbrCalculator(base.RiskCalculator): if not self.oqparam.ground_motion_fields: return # this happens in the reportwriter - # save memory in the fork if not parent: + # hazard + risk were done in the same calculation + # save memory by resetting the processpool (if any) Starmap.shutdown() Starmap.init()
Added comment [skip CI]
gem_oq-engine
train
13ce15882c5925d189d7c867c3c4726c213c1ef8
diff --git a/cfgrib/eccodes.py b/cfgrib/eccodes.py index <HASH>..<HASH> 100644 --- a/cfgrib/eccodes.py +++ b/cfgrib/eccodes.py @@ -609,11 +609,11 @@ def codes_set(handle, key, value): elif isinstance(value, bytes): codes_set_string(handle, key, value) else: - raise TypeError('Unsupported type %r' % type(value)) + raise TypeError("Unsupported type %r" % type(value)) def codes_set_double_array(handle, key, values): - # type: (cffi.FFI.CData, bytes, typing.List[float]) -> None + # type: (cffi.FFI.CData, bytes, T.List[float]) -> None size = len(values) c_values = ffi.new("double []", values) codes_set_double_array = check_return(lib.codes_set_double_array) @@ -621,18 +621,18 @@ def codes_set_double_array(handle, key, values): def codes_set_array(handle, key, values): - # type: (cffi.FFI.CData, bytes, typing.List[typing.Any]) -> None + # type: (cffi.FFI.CData, bytes, T.List[T.Any]) -> None if len(values) > 0: if isinstance(values[0], float): codes_set_double_array(handle, key, values) else: raise NotImplementedError("Unsupported value type: %r" % type(values[0])) else: - raise ValueError("Cannot provide an empty list.") + raise ValueError("Cannot set an empty list.") def codes_write(handle, outfile): - # type: (cffi.FFI.CData, typing.BinaryIO) -> None + # type: (cffi.FFI.CData, T.BinaryIO) -> None """ Write a coded message to a file. If the file does not exist, it is created.
Fix reference to typing module in type comments and clean up user visible errors.
ecmwf_cfgrib
train
6bda889fc4a5340aea0854452367376d1bd36d3e
diff --git a/flask_resty/testing.py b/flask_resty/testing.py index <HASH>..<HASH> 100644 --- a/flask_resty/testing.py +++ b/flask_resty/testing.py @@ -121,3 +121,4 @@ def assert_response(response, expected_status_code, expected_data=UNDEFINED): expected_data = Shape(expected_data) assert response_data == expected_data + return response_data diff --git a/tests/test_testing.py b/tests/test_testing.py index <HASH>..<HASH> 100644 --- a/tests/test_testing.py +++ b/tests/test_testing.py @@ -160,4 +160,5 @@ def test_assert_response_with_shape(app): with app.test_request_context(): response = flask.jsonify(data=data) - assert_response(response, 200, Shape(data)) + response_data = assert_response(response, 200, Shape(data)) + assert response_data == data
feat: Return response_data from assert_response (#<I>)
4Catalyzer_flask-resty
train
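The change above makes the assertion helper hand back the parsed response body so tests can chain further checks on it. A minimal Python version of such a helper follows; it is a sketch, not flask-resty's actual implementation.

import json

def assert_response(response, expected_status_code, expected_data=None):
    status_code, body = response
    assert status_code == expected_status_code
    response_data = json.loads(body)
    if expected_data is not None:
        assert response_data == expected_data
    # Returning the parsed body lets callers make additional assertions.
    return response_data

data = assert_response((200, '{"data": {"id": "1"}}'), 200)
assert data["data"]["id"] == "1"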
b6965b3f7d4e631ec14331b91fd74a4ea6080617
diff --git a/lib/parallel.rb b/lib/parallel.rb index <HASH>..<HASH> 100644 --- a/lib/parallel.rb +++ b/lib/parallel.rb @@ -246,6 +246,14 @@ module Parallel map(array, options.merge(:with_index => true), &block) end + def worker_number + Thread.current[:parallel_worker_number] + end + + def worker_number=(worker_num) + Thread.current[:parallel_worker_number] = worker_num + end + private def add_progress_bar!(job_factory, options) @@ -274,7 +282,7 @@ module Parallel end def work_direct(job_factory, options, &block) - Thread.current[:parallel_worker_number] = 0 + self.worker_number = 0 results = [] while set = job_factory.next item, index = set @@ -284,7 +292,7 @@ module Parallel end results ensure - Thread.current[:parallel_worker_number] = nil + self.worker_number = nil end def work_in_threads(job_factory, options, &block) @@ -293,8 +301,8 @@ module Parallel results_mutex = Mutex.new # arrays are not thread-safe on jRuby exception = nil - in_threads(options) do |worker_number| - Thread.current[:parallel_worker_number] = worker_number + in_threads(options) do |worker_num| + self.worker_number = worker_num # as long as there are more jobs, work on one of them while !exception && set = job_factory.next begin @@ -385,7 +393,7 @@ module Parallel parent_read, child_write = IO.pipe pid = Process.fork do - Thread.current[:parallel_worker_number] = options[:worker_number] + self.worker_number = options[:worker_number] begin options.delete(:started_workers).each(&:close_pipes) diff --git a/spec/cases/map_worker_number_isolation.rb b/spec/cases/map_worker_number_isolation.rb index <HASH>..<HASH> 100644 --- a/spec/cases/map_worker_number_isolation.rb +++ b/spec/cases/map_worker_number_isolation.rb @@ -2,7 +2,7 @@ require './spec/cases/helper' process_diff do result = Parallel.map([1,2,3,4], in_processes: 2, isolation: true) do |i| - Thread.current[:parallel_worker_number] + Parallel.worker_number end puts result.uniq.sort.join(',') end diff --git a/spec/cases/with_worker_number.rb b/spec/cases/with_worker_number.rb index <HASH>..<HASH> 100644 --- a/spec/cases/with_worker_number.rb +++ b/spec/cases/with_worker_number.rb @@ -5,6 +5,6 @@ in_worker_type = "in_#{ENV.fetch('WORKER_TYPE')}".to_sym Parallel.public_send(method, 1..100, in_worker_type => 4) do sleep 0.1 # so all workers get started - print Thread.current[:parallel_worker_number] + print Parallel.worker_number end diff --git a/spec/parallel_spec.rb b/spec/parallel_spec.rb index <HASH>..<HASH> 100644 --- a/spec/parallel_spec.rb +++ b/spec/parallel_spec.rb @@ -261,16 +261,16 @@ describe Parallel do `METHOD=map WORKER_TYPE=#{type} ruby spec/cases/with_exception_in_start_before_finish.rb 2>&1`.should == '3 called' end - it "has access to thread local parallel_worker_number with 4 #{type}" do + it "sets Parallel.worker_number with 4 #{type}" do out = `METHOD=map WORKER_TYPE=#{type} ruby spec/cases/with_worker_number.rb` out.should =~ /\A[0123]+\z/ %w(0 1 2 3).each { |number| out.should include number } end - it "has access to thread local parallel_worker_number with 0 #{type}" do + it "sets Parallel.worker_number with 0 #{type}" do type_key = "in_#{type}".to_sym - Parallel.map([1,2,3,4,5,6,7,8,9], type_key => 0) { |x| Thread.current[:parallel_worker_number] }.uniq.should == [0] - Thread.current[:parallel_worker_number].should be_nil + Parallel.map([1,2,3,4,5,6,7,8,9], type_key => 0) { |x| Parallel.worker_number }.uniq.should == [0] + Parallel.worker_number.should be_nil end end @@ -388,7 +388,7 @@ describe Parallel do out.should == "1\n2\n3\n4\nOK" 
end - it 'has access to thread local parallel_worker_number values in isolation' do + it 'sets Parallel.worker_number when run with isolation' do out = `ruby spec/cases/map_worker_number_isolation.rb` out.should == "0,1\nOK" end @@ -481,7 +481,7 @@ describe Parallel do `METHOD=each WORKER_TYPE=#{type} ruby spec/cases/with_exception_in_start_before_finish.rb 2>&1`.should == '3 called' end - it "has access to thread local parallel_worker_number with #{type}" do + it "sets Parallel.worker_number with #{type}" do out = `METHOD=each WORKER_TYPE=#{type} ruby spec/cases/with_worker_number.rb` out.should =~ /\A[0123]+\z/ %w(0 1 2 3).each { |number| out.should include number }
use .worker_number instead of direct thread var access
grosser_parallel
train
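The refactor above hides the thread variable behind Parallel.worker_number accessors. A comparable Python sketch using threading.local is shown below; the function names are hypothetical, not the parallel gem's API.

import threading

_state = threading.local()

def get_worker_number():
    # Per-thread worker number, or None outside a worker.
    return getattr(_state, "worker_number", None)

def set_worker_number(value):
    _state.worker_number = value

def worker(num):
    set_worker_number(num)
    print("worker", get_worker_number())

threads = [threading.Thread(target=worker, args=(i,)) for i in range(2)]
for t in threads:
    t.start()
for t in threads:
    t.join()
print(get_worker_number())  # None in the main thread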
489f1ad94481dd91abd29b56bba3cd6f0ac980c7
diff --git a/openquake/hazardlib/contexts.py b/openquake/hazardlib/contexts.py index <HASH>..<HASH> 100644 --- a/openquake/hazardlib/contexts.py +++ b/openquake/hazardlib/contexts.py @@ -703,7 +703,7 @@ class ContextMaker(object): src, sitecol, planardict): if not planarlist: continue - elif len(planarlist) > 1: + elif len(planarlist) > 1: # when using ps_grid_spacing pla = numpy.concatenate(planarlist).view(numpy.recarray) else: pla = planarlist[0]
Added a comment [ci skip]
gem_oq-engine
train
762377803f7761b0f6fbbb015fffc00a1ee3c0a4
diff --git a/test/iterator-test.js b/test/iterator-test.js index <HASH>..<HASH> 100644 --- a/test/iterator-test.js +++ b/test/iterator-test.js @@ -10,7 +10,6 @@ var assert = require('assert'); var MinIterator = require('min-iterator'); -var liveTree = require('live-tree'); var Node = require('../lib/node').Node; var iterator = require('../lib/iterator'); @@ -230,7 +229,7 @@ describe('iterator.next', function () { var i = iterator.create(n, 'a.**'); - assert.strictEqual(i.constructor, liveTree.Iterator); + assert(i instanceof MinIterator); }); it('return a live-tree iterator if no match is given', function () { @@ -239,7 +238,7 @@ describe('iterator.next', function () { var i = iterator.create(n); - assert.strictEqual(i.constructor, liveTree.Iterator); + assert(i instanceof MinIterator); }); it('return a live-tree iterator for **', function () { @@ -248,7 +247,7 @@ describe('iterator.next', function () { var i = iterator.create(n, '**'); - assert.strictEqual(i.constructor, liveTree.Iterator); + assert(i instanceof MinIterator); }); it('does not fetch parent iterator for a.*', function () {
Test for instanceof MinIterator instead of exact type
mantoni_glob-tree.js
train
11c7e29d1d054f903ba00f6e7c5f1ae4792871f5
diff --git a/resource_aws_vpc_peering_connection_accepter.go b/resource_aws_vpc_peering_connection_accepter.go index <HASH>..<HASH> 100644 --- a/resource_aws_vpc_peering_connection_accepter.go +++ b/resource_aws_vpc_peering_connection_accepter.go @@ -4,6 +4,8 @@ import ( "errors" "log" + "fmt" + "github.com/hashicorp/terraform/helper/schema" ) @@ -49,17 +51,22 @@ func resourceAwsVpcPeeringConnectionAccepter() *schema.Resource { } func resourceAwsVPCPeeringAccepterCreate(d *schema.ResourceData, meta interface{}) error { - d.SetId(d.Get("vpc_peering_connection_id").(string)) + id := d.Get("vpc_peering_connection_id").(string) + d.SetId(id) - if err := resourceAwsVPCPeeringUpdate(d, meta); err != nil { + if err := resourceAwsVPCPeeringRead(d, meta); err != nil { return err } + if d.Id() == "" { + return fmt.Errorf("VPC Peering Connection %q not found", id) + } // Ensure that this IS as cross-account VPC peering connection. if d.Get("peer_owner_id").(string) == meta.(*AWSClient).accountid { return errors.New("aws_vpc_peering_connection_accepter can only adopt into management cross-account VPC peering connections") } - return nil + + return resourceAwsVPCPeeringUpdate(d, meta) } func resourceAwsVPCPeeringAccepterDelete(d *schema.ResourceData, meta interface{}) error { diff --git a/resource_aws_vpc_peering_connection_accepter_test.go b/resource_aws_vpc_peering_connection_accepter_test.go index <HASH>..<HASH> 100644 --- a/resource_aws_vpc_peering_connection_accepter_test.go +++ b/resource_aws_vpc_peering_connection_accepter_test.go @@ -2,32 +2,77 @@ package aws import ( + "regexp" "testing" "github.com/hashicorp/terraform/helper/resource" "github.com/hashicorp/terraform/terraform" ) -func TestAccAwsVPCPeeringConnectionAccepter_basic(t *testing.T) { +func TestAccAwsVPCPeeringConnectionAccepter_sameAccount(t *testing.T) { resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - Providers: testAccProviders, + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccAwsVPCPeeringConnectionAccepterDestroy, Steps: []resource.TestStep{ - { - Config: testAccAwsVPCPeeringConnectionAccepterConfig, - Check: resource.ComposeTestCheckFunc( - testAccAwsVPCPeeringConnectionAccepterCheckSomething(""), - ), + resource.TestStep{ + Config: testAccAwsVPCPeeringConnectionAccepterSameAccountConfig, + ExpectError: regexp.MustCompile(`aws_vpc_peering_connection_accepter can only adopt into management cross-account VPC peering connections`), }, }, }) } -func testAccAwsVPCPeeringConnectionAccepterCheckSomething(name string) resource.TestCheckFunc { - return func(s *terraform.State) error { - return nil - } +func testAccAwsVPCPeeringConnectionAccepterDestroy(s *terraform.State) error { + // We don't destroy the underlying VPC Peering Connection. + return nil } -const testAccAwsVPCPeeringConnectionAccepterConfig = ` +const testAccAwsVPCPeeringConnectionAccepterSameAccountConfig = ` +provider "aws" { + region = "us-west-2" + // Requester's credentials. +} + +provider "aws" { + alias = "peer" + region = "us-west-2" + // Accepter's credentials. +} + +resource "aws_vpc" "main" { + cidr_block = "10.0.0.0/16" +} + +resource "aws_vpc" "peer" { + provider = "aws.peer" + cidr_block = "10.1.0.0/16" +} + +data "aws_caller_identity" "peer" { + provider = "aws.peer" +} + +// Requester's side of the connection. 
+resource "aws_vpc_peering_connection" "peer" { + vpc_id = "${aws_vpc.main.id}" + peer_vpc_id = "${aws_vpc.peer.id}" + peer_owner_id = "${data.aws_caller_identity.peer.account_id}" + auto_accept = false + + tags { + Side = "Requester" + } +} + +// Accepter's side of the connection. +resource "aws_vpc_peering_connection_accepter" "peer" { + provider = "aws.peer" + vpc_peering_connection_id = "${aws_vpc_peering_connection.peer.id}" + auto_accept = true + + tags { + Side = "Accepter" + } +} `
Tighten up documentation, same-account acceptance test, better error handling.
terraform-providers_terraform-provider-aws
train
21c09177635e9edf3760e75d92704ff0667c807d
diff --git a/test/test_examples.py b/test/test_examples.py index <HASH>..<HASH> 100644 --- a/test/test_examples.py +++ b/test/test_examples.py @@ -2,6 +2,7 @@ import os import watson_developer_cloud +import pytest from os import getcwd from subprocess import Popen, PIPE from os.path import join, dirname @@ -9,7 +10,8 @@ from dotenv import load_dotenv from glob import glob # tests to exclude -excludes = ['authorization_v1.py', 'language_translation_v2.py'] +excludes = ['authorization_v1.py', + 'language_translation_v2.py', 'concept_expansion_v1.py'] # examples path. /examples examples_path = join(dirname(__file__), '../', 'examples', '*.py') @@ -17,13 +19,8 @@ examples_path = join(dirname(__file__), '../', 'examples', '*.py') dotenv_path = join(dirname(__file__), '../', '.env') load_dotenv(dotenv_path) -vcap_services = os.getenv("VCAP_SERVICES") - -# Return everything under the current directory that contains a folder -# called wlp. -def test_success(): - if vcap_services is None: - return; +@pytest.mark.skipif(os.getenv('VCAP_SERVICES') is None, reason='requires VCAP_SERVICES') +def test_examples(): examples = glob(examples_path) for example in examples: name = example.split('/')[-1] @@ -34,4 +31,5 @@ def test_success(): p = Popen(['python', example], stdout=PIPE, stderr=PIPE, stdin=PIPE) out, err = p.communicate() - assert p.returncode == 0, 'example %s fail with error: %s' % (name, err) + assert p.returncode == 0, 'example %s fail with error: %s' % ( + name, err)
skip integration tests when VCAP_SERVICES is not available
watson-developer-cloud_python-sdk
train
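The test above is skipped whenever VCAP_SERVICES is absent, which is standard pytest usage. A self-contained sketch of the same marker:

import os
import pytest

# Skip integration tests when the required credentials are not in the environment.
@pytest.mark.skipif(os.getenv("VCAP_SERVICES") is None,
                    reason="requires VCAP_SERVICES")
def test_examples():
    assert os.getenv("VCAP_SERVICES") is not None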
e83435b2b6d3f93bd8cc7fe3678b02718a45c458
diff --git a/tests/php/SiteTreeSubsitesTest.php b/tests/php/SiteTreeSubsitesTest.php index <HASH>..<HASH> 100644 --- a/tests/php/SiteTreeSubsitesTest.php +++ b/tests/php/SiteTreeSubsitesTest.php @@ -392,11 +392,20 @@ class SiteTreeSubsitesTest extends BaseSubsiteTest $controller = ModelAsController::controller_for($pageWithTheme); SiteTree::singleton()->extend('contentcontrollerInit', $controller); $subsiteTheme = $pageWithTheme->Subsite()->Theme; + + $allThemes = SSViewer::get_themes(); + $this->assertContains( $subsiteTheme, - SSViewer::get_themes(), + $allThemes, 'Themes should be modified when Subsite has theme defined' ); + + $this->assertEquals( + $subsiteTheme, + array_shift($allThemes), + 'Subsite theme should be prepeded to theme list' + ); } public function provideAlternateAbsoluteLink()
Update test for handling subsite-specific themes. Fixes #<I>
silverstripe_silverstripe-subsites
train
8b43ef5480ff56b670f938f1c3dac0ac37ba1d13
diff --git a/sklearn_porter/estimator/DecisionTreeClassifier/__init__.py b/sklearn_porter/estimator/DecisionTreeClassifier/__init__.py index <HASH>..<HASH> 100644 --- a/sklearn_porter/estimator/DecisionTreeClassifier/__init__.py +++ b/sklearn_porter/estimator/DecisionTreeClassifier/__init__.py @@ -116,7 +116,7 @@ class DecisionTreeClassifier(EstimatorBase, EstimatorApiABC): output = str(tpls.get('exported.class').format(**placeholders)) converter = kwargs.get('converter') encoder.FLOAT_REPR = lambda o: converter(o) - model_data = dumps(self.model_data, sort_keys=True) + model_data = dumps(self.model_data, separators=(',', ':')) return output, model_data # Pick templates:
feature/oop-api-refactoring: Get the most compact JSON representation with `(',', ':')` for the model data dump
nok_sklearn-porter
train
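The change above asks json for its most compact encoding by passing separators without spaces. This is standard-library behaviour, shown directly on a small hypothetical model dictionary.

import json

model_data = {"lefts": [1, -1], "rights": [2, -1]}

# Default separators insert spaces; (",", ":") gives the most compact representation.
print(json.dumps(model_data, sort_keys=True))          # {"lefts": [1, -1], "rights": [2, -1]}
print(json.dumps(model_data, separators=(",", ":")))   # {"lefts":[1,-1],"rights":[2,-1]}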
30c09be6f634745560fbd6c1977d7bae33806a72
diff --git a/src/client/firefox/commands.js b/src/client/firefox/commands.js index <HASH>..<HASH> 100644 --- a/src/client/firefox/commands.js +++ b/src/client/firefox/commands.js @@ -281,9 +281,7 @@ function evaluate( return Promise.resolve({ result: null }); } - return new Promise(resolve => { - console.evaluateJSAsync(script, result => resolve(result), params); - }); + return console.evaluateJSAsync(script, params); } function autocomplete( diff --git a/src/client/firefox/types.js b/src/client/firefox/types.js index <HASH>..<HASH> 100644 --- a/src/client/firefox/types.js +++ b/src/client/firefox/types.js @@ -227,7 +227,7 @@ export type TabTarget = { script: Script, func: Function, params?: { frameActor: ?FrameId } - ) => void, + ) => Promise<{ result: ?Object }>, autocomplete: ( input: string, cursor: number,
[sync] Bug <I> - make evaluateJS calls use promise pattern rather than callbacks; r=ochameau
firefox-devtools_debugger
train
b28c1c3860d98a4efc112976805f333bbf8b51e7
diff --git a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/paginated/wal/cas/CASDiskWriteAheadLog.java b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/paginated/wal/cas/CASDiskWriteAheadLog.java index <HASH>..<HASH> 100755 --- a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/paginated/wal/cas/CASDiskWriteAheadLog.java +++ b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/paginated/wal/cas/CASDiskWriteAheadLog.java @@ -176,7 +176,6 @@ public final class CASDiskWriteAheadLog implements OWriteAheadLog { private final ScheduledFuture<?> recordsWriterFuture; private final ReentrantLock recordsWriterLock = new ReentrantLock(); - private volatile boolean cancelRecordsWrite = false; private final ConcurrentNavigableMap<OLogSequenceNumber, EventWrapper> events = new ConcurrentSkipListMap<>(); @@ -656,7 +655,7 @@ public final class CASDiskWriteAheadLog implements OWriteAheadLog { int bytesRead = 0; - long lsnPos = -1; + int lsnPos = -1; segment = segmentsIterator.next(); @@ -705,7 +704,7 @@ public final class CASDiskWriteAheadLog implements OWriteAheadLog { while (buffer.remaining() > 0) { if (recordLen == -1) { if (recordLenBytes == null) { - lsnPos = pageIndex * pageSize + buffer.position(); + lsnPos = (int) (pageIndex * pageSize + buffer.position()); if (buffer.remaining() >= OIntegerSerializer.INT_SIZE) { recordLen = buffer.getInt(); @@ -1375,7 +1374,6 @@ public final class CASDiskWriteAheadLog implements OWriteAheadLog { throw new OStorageException("Can not cancel background write thread in WAL"); } - cancelRecordsWrite = true; try { recordsWriterFuture.get(); } catch (CancellationException e) { @@ -1386,20 +1384,18 @@ public final class CASDiskWriteAheadLog implements OWriteAheadLog { e); } - recordsWriterLock.lock(); - try { - final Future<?> writer = writeFuture; - if (writer != null) { - try { - writer.get(); - } catch (InterruptedException | ExecutionException e) { - throw OException.wrapException( - new OStorageException( - "Error during writing of WAL records in storage " + storageName), - e); - } + if (writeFuture != null) { + try { + writeFuture.get(); + } catch (InterruptedException | ExecutionException e) { + throw OException.wrapException( + new OStorageException("Error during writing of WAL records in storage " + storageName), + e); } + } + recordsWriterLock.lock(); + try { OWALRecord record = records.poll(); while (record != null) { if (record instanceof WriteableWALRecord) { @@ -1409,6 +1405,19 @@ public final class CASDiskWriteAheadLog implements OWriteAheadLog { record = records.poll(); } + try { + if (writeFuture != null) { + writeFuture.get(); + } + + } catch (final InterruptedException e) { + OLogManager.instance().errorNoDb(this, "WAL write was interrupted", e); + } catch (final ExecutionException e) { + OLogManager.instance().errorNoDb(this, "Error during writint of WAL data", e); + throw OException.wrapException( + new OStorageException("Error during writint of WAL data"), e); + } + for (final OPair<Long, OWALFile> pair : fileCloseQueue) { final OWALFile file = pair.value; @@ -1788,10 +1797,6 @@ public final class CASDiskWriteAheadLog implements OWriteAheadLog { public void run() { recordsWriterLock.lock(); try { - if (cancelRecordsWrite) { - return; - } - if (printPerformanceStatistic) { printReport(); }
Revert - Maximum allowed size of WAL segment has been increased.
orientechnologies_orientdb
train
71cfa1d52b4b1ea6a0b57fa2f22573e53250e4c7
diff --git a/src/clients/ApplicationClient.js b/src/clients/ApplicationClient.js index <HASH>..<HASH> 100644 --- a/src/clients/ApplicationClient.js +++ b/src/clients/ApplicationClient.js @@ -1090,6 +1090,12 @@ export default class ApplicationClient extends BaseClient { }); } + invalidOperation(message) { + return new Promise((resolve, reject) => { + resolve(message) + }) + } + createEventType(name, description, schemaId) { var body = { 'name': name, @@ -1272,7 +1278,7 @@ export default class ApplicationClient extends BaseClient { return this.callApi('PATCH', 200, true, ["draft", "logicalinterfaces", logicalInterfaceId], body); } } else { - return (new Error("PATCH operation not allowed on logical interface")); + return this.invalidOperation("PATCH operation not allowed on logical interface"); } } @@ -1287,7 +1293,7 @@ export default class ApplicationClient extends BaseClient { return this.callApi('PATCH', 202, true, ["logicalinterfaces", logicalInterfaceId], body) } else { - return (new Error("This operation not allowed on logical interface")); + return this.invalidOperation("PATCH operation 'deactivate-configuration' not allowed on logical interface"); } } @@ -1324,7 +1330,7 @@ export default class ApplicationClient extends BaseClient { if(this.draftMode) { return this.callApi('GET', 200, true, ['draft', 'device', 'types', typeId, 'physicalinterface']); } else { - return (new Error("This operation not allowed on device type")); + return this.invalidOperation("GET Device type's physical interface is not allowed"); } } @@ -1337,7 +1343,7 @@ export default class ApplicationClient extends BaseClient { if(this.draftMode) { return this.callApi('DELETE', 204, false, ['draft', 'device', 'types', typeId, 'physicalinterface']); } else { - return (new Error("This operation not allowed on device type")); + return this.invalidOperation("DELETE Device type's physical interface is not allowed"); } } @@ -1439,11 +1445,16 @@ export default class ApplicationClient extends BaseClient { // Device Type patch operation on draft version // Acceptable operation id - validate-configuration, activate-configuration, list-differences patchOperationDeviceType(typeId, operationId) { + if(!operationId) { + return invalidOperation("PATCH operation is not allowed. Operation id is expected") + } + var body = { "operation": operationId } var base = this.draftMode ? ['draft', 'device', 'types', typeId]: ['device', 'types', typeId] + if(this.draftMode) { switch(operationId) { case 'validate-configuration': @@ -1451,13 +1462,16 @@ export default class ApplicationClient extends BaseClient { break case 'activate-configuration': return this.callApi('PATCH', 202, true, base, body); + break case 'deactivate-configuration': return this.callApi('PATCH', 202, true, base, body); - // Patch operation list-differences is expected to return 501 + break + // Patch operation list-differences not implemented case 'list-differences': - return this.callApi('PATCH', 501, false, base, body); + return this.invalidOperation("PATCH operation 'list-differences' is not allowed") + break default: - return this.callApi('PATCH', 200, true, base, body); + return this.invalidOperation("PATCH operation is not allowed. 
Invalid operation id") } } else { switch(operationId) { @@ -1470,6 +1484,11 @@ export default class ApplicationClient extends BaseClient { case 'remove-deployed-configuration': return this.callApi('PATCH', 202, true, base, body); break + case 'list-differences': + return this.invalidOperation("PATCH operation 'list-differences' is not allowed") + break + default: + return this.invalidOperation("PATCH operation is not allowed. Invalid operation id") } } } @@ -1482,16 +1501,17 @@ export default class ApplicationClient extends BaseClient { "operation": operationId } - if(this.draftMode) + if(this.draftMode) { return this.callApi('PATCH', 202, true, ['device', 'types', typeId], body); + } else { - return (new Error("This operation not allowed on device type")); + return this.invalidOperation("PATCH operation 'deactivate-configuration' is not allowed"); } } getDeviceTypeDeployedConfiguration(typeId) { if(this.draftMode) { - return (new Error("This operation is not allowed on device type")); + return this.invalidOperation("GET deployed configuration is not allowed"); } else { return this.callApi('GET', 200, true, ['device', 'types', typeId, 'deployedconfiguration']); }
Return a resolved promise for invalid operations between versions
ibm-watson-iot_iot-nodejs
train
c792063e8c26d2859e8b0e6a74959cd40eb859a0
diff --git a/src/Hydrator.php b/src/Hydrator.php index <HASH>..<HASH> 100644 --- a/src/Hydrator.php +++ b/src/Hydrator.php @@ -8,6 +8,8 @@ use Doctrine\Common\Annotations\Reader; use Doctrine\Common\Cache\Cache; use GeneratedHydrator\Configuration; use ReflectionClass; +use RecursiveDirectoryIterator; +use RecursiveIteratorIterator; use ApiClients\Foundation\Resource\ResourceInterface; use ApiClients\Foundation\Resource\AbstractResource; use Zend\Hydrator\HydratorInterface; @@ -77,9 +79,33 @@ class Hydrator $this->options[Options::EXTRA_PROPERTIES]['hydrator'] = $this; } - public function preheat() + public function preheat(string $scanTarget, string $namespace) { - // TODO + $directory = new RecursiveDirectoryIterator($scanTarget); + $directory = new RecursiveIteratorIterator($directory); + + foreach ($directory as $node) { + if (!is_file($node->getPathname())) { + continue; + } + + $file = substr($node->getPathname(), strlen($scanTarget)); + $file = ltrim($file, DIRECTORY_SEPARATOR); + $file = rtrim($file, '.php'); + + $class = $namespace . '\\' . str_replace(DIRECTORY_SEPARATOR, '\\', $file); + + if (!class_exists($class)) { + continue; + } + + if (!is_subclass_of($class, ResourceInterface::class)) { + continue; + } + + $this->getHydrator($class); + $this->annotationReader->getClassAnnotations(new ReflectionClass($class)); + } } /** diff --git a/tests/HydratorTest.php b/tests/HydratorTest.php index <HASH>..<HASH> 100644 --- a/tests/HydratorTest.php +++ b/tests/HydratorTest.php @@ -107,4 +107,22 @@ class HydratorTest extends TestCase $files = $this->getFilesInDirectory($annotationCache); $this->assertSame(4, count($files)); } + + public function testPreheat() + { + $tmpDir = $this->getTmpDir(); + $hydrator = Factory::create([ + Options::NAMESPACE => 'ApiClients\Tests\Foundation\Hydrator\Resources', + Options::NAMESPACE_SUFFIX => 'Async', + Options::RESOURCE_CACHE_DIR => $tmpDir, + Options::RESOURCE_NAMESPACE => $this->getRandomNameSpace(), + ]); + + $classCount = count(get_declared_classes()); + $hydrator->preheat( + __DIR__ . DIRECTORY_SEPARATOR . 'Resources' . DIRECTORY_SEPARATOR, + 'ApiClients\Tests\Foundation\Hydrator\Resources' + ); + $this->assertFalse($classCount === count(get_declared_classes())); + } }
Preheating of hydrator to avoid blocking code
php-api-clients_hydrator
train
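The preheat method above walks a directory tree, maps each PHP file path to a namespaced class name, and warms the hydrator and annotation caches for every resource class it finds. Below is a rough Python analogue of the path-to-class-name walk; the warm_up callback is a hypothetical placeholder for the cache-priming step.

import os

def preheat(scan_target, namespace, warm_up):
    # Map each .php file under scan_target to a namespaced class name
    # and hand it to the warm-up callback.
    for root, _dirs, files in os.walk(scan_target):
        for filename in files:
            if not filename.endswith(".php"):
                continue
            relative = os.path.relpath(os.path.join(root, filename), scan_target)
            class_name = namespace + "\\" + relative[:-len(".php")].replace(os.sep, "\\")
            warm_up(class_name)

preheat(".", "ApiClients\\Foundation\\Resource", print)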