hash | diff | message | project | split
---|---|---|---|---|
bd75da7a370468ab9977d6a9ceff88c526cec9a8 | diff --git a/dalesbred/src/main/java/fi/evident/dalesbred/Database.java b/dalesbred/src/main/java/fi/evident/dalesbred/Database.java
index <HASH>..<HASH> 100644
--- a/dalesbred/src/main/java/fi/evident/dalesbred/Database.java
+++ b/dalesbred/src/main/java/fi/evident/dalesbred/Database.java
@@ -39,6 +39,7 @@ import org.jetbrains.annotations.Nullable;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.sql.DataSource;
+import java.io.InputStream;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
@@ -535,7 +536,15 @@ public final class Database {
int i = 1;
for (Object arg : args)
- ps.setObject(i++, instantiatorRegistry.valueToDatabase(unwrapConfidential(arg)));
+ bindArgument(ps, i++, instantiatorRegistry.valueToDatabase(unwrapConfidential(arg)));
+ }
+
+ private static void bindArgument(@NotNull PreparedStatement ps, int index, @Nullable Object value) throws SQLException {
+ if (value instanceof InputStream) {
+ ps.setBinaryStream(index, (InputStream) value);
+ } else {
+ ps.setObject(index, value);
+ }
}
@NotNull
diff --git a/dalesbred/src/test/java/fi/evident/dalesbred/DatabaseLargeObjectsTest.java b/dalesbred/src/test/java/fi/evident/dalesbred/DatabaseLargeObjectsTest.java
index <HASH>..<HASH> 100644
--- a/dalesbred/src/test/java/fi/evident/dalesbred/DatabaseLargeObjectsTest.java
+++ b/dalesbred/src/test/java/fi/evident/dalesbred/DatabaseLargeObjectsTest.java
@@ -3,6 +3,8 @@ package fi.evident.dalesbred;
import org.junit.Rule;
import org.junit.Test;
+import java.io.ByteArrayInputStream;
+
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
@@ -24,4 +26,16 @@ public class DatabaseLargeObjectsTest {
byte[] data = { 1, 2, 3 };
assertThat(db.findUnique(byte[].class, "values (cast (? as blob))", data), is(data));
}
+
+ @Test
+ public void streamBlobToDatabase() throws Exception {
+ db.update("drop table if exists blob_test");
+ db.update("create temporary table blob_test (id int, blob_data blob)");
+
+ byte[] originalData = { 25, 35, 3 };
+ db.update("insert into blob_test values (1, ?)", new ByteArrayInputStream(originalData));
+
+ byte[] data = db.findUnique(byte[].class, "select blob_data from blob_test where id=1");
+ assertThat(data, is(originalData));
+ }
} | Pass InputStreams to setBinaryStream when binding arguments. | EvidentSolutions_dalesbred | train |
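The change above dispatches on the argument's runtime type so streams go through the driver's streaming API instead of setObject. A minimal Python sketch of the same dispatch idea, assuming a plain dict of bound parameters (bind_argument and the stream check are illustrative, not any real driver API):

import io

def bind_argument(params, index, value):
    # Streams get special treatment; everything else binds as-is.
    if isinstance(value, io.IOBase):
        params[index] = value.read()
    else:
        params[index] = value

params = {}
bind_argument(params, 1, io.BytesIO(bytes([25, 35, 3])))
bind_argument(params, 2, 42)
print(params)  # {1: b'\x19#\x03', 2: 42}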
bacff3613b3a8e7d7ca95b63fba0929938a80c52 | diff --git a/benchexec/container.py b/benchexec/container.py
index <HASH>..<HASH> 100644
--- a/benchexec/container.py
+++ b/benchexec/container.py
@@ -49,7 +49,7 @@ __all__ = [
'get_my_pid_from_proc',
'drop_capabilities',
'forward_all_signals',
- 'setup_container_config',
+ 'setup_container_system_config',
'CONTAINER_UID',
'CONTAINER_GID',
'CONTAINER_HOME',
@@ -372,18 +372,27 @@ def close_open_fds(keep_files=[]):
# (the fd that was used by os.listdir() of course always fails)
pass
-def setup_container_system_config(basedir):
+def setup_container_system_config(basedir, mountdir=None):
"""Create a minimal system configuration for use in a container.
- @param basedir: The root directory of the container as bytes.
+ @param basedir: The directory where the configuration files should be placed as bytes.
+ @param mountdir: If present, bind mounts to the configuration files will be added below
+ this path (given as bytes).
"""
etc = os.path.join(basedir, b"etc")
if not os.path.exists(etc):
os.mkdir(etc)
for file, content in CONTAINER_ETC_FILE_OVERRIDE.items():
+ # Create "basedir/etc/file"
util.write_file(content, etc, file)
+ if mountdir:
+ # Create bind mount to "mountdir/etc/file"
+ make_bind_mount(
+ os.path.join(etc, file), os.path.join(mountdir, b"etc", file), private=True)
os.symlink(b"/proc/self/mounts", os.path.join(etc, b"mtab"))
+ # Bind bounds for symlinks are not possible, so we do nothing for "mountdir/etc/mtab".
+ # This is not a problem usually because most systems have the correct symlink anyway.
def is_container_system_config_file(file):
"""Determine whether a given file is one of the files created by setup_container_system_config().
diff --git a/benchexec/containerexecutor.py b/benchexec/containerexecutor.py
index <HASH>..<HASH> 100644
--- a/benchexec/containerexecutor.py
+++ b/benchexec/containerexecutor.py
@@ -111,12 +111,7 @@ def handle_basic_container_args(options, parser=None):
dir_modes["/run"] = DIR_HIDDEN
if options.container_system_config:
- if dir_modes.get("/etc", dir_modes["/"]) != DIR_OVERLAY:
- logging.warning("Specified directory mode for /etc implies --keep-system-config, "
- "i.e., the container cannot be configured to force only local user and host lookups. "
- "Use --overlay-dir /etc to allow overwriting system configuration in the container.")
- options.container_system_config = False
- elif options.network_access:
+ if options.network_access:
logging.warning("The container configuration disables DNS, "
"host lookups will fail despite --network-access. "
"Consider using --keep-system-config.")
@@ -260,9 +255,6 @@ class ContainerExecutor(baseexecutor.BaseExecutor):
self._env_override = {}
if container_system_config:
- if dir_modes.get("/etc", dir_modes.get("/")) != DIR_OVERLAY:
- raise ValueError("Cannot setup minimal system configuration for the container "
- "without overlay filesystem for /etc.")
self._env_override["HOME"] = container.CONTAINER_HOME
if not container.CONTAINER_HOME in dir_modes:
dir_modes[container.CONTAINER_HOME] = DIR_HIDDEN
@@ -643,7 +635,7 @@ class ContainerExecutor(baseexecutor.BaseExecutor):
target_path = os.path.join(target_path, b"")
return path.startswith(target_path)
- def find_mode_for_dir(path, fstype):
+ def find_mode_for_dir(path, fstype=None):
if (path == b"/proc"):
# /proc is necessary for the grandchild to read PID, will be replaced later.
return DIR_READ_ONLY
@@ -688,9 +680,6 @@ class ContainerExecutor(baseexecutor.BaseExecutor):
work_base = os.path.join(temp_dir, b"overlayfs")
os.mkdir(work_base)
- if self._container_system_config:
- container.setup_container_system_config(temp_base)
-
# Create a copy of host's mountpoints.
# Setting MS_PRIVATE flag discouples our mount namespace from the hosts's,
# i.e., mounts we do are not seen by the host, and any (un)mounts the host does afterward
@@ -815,6 +804,12 @@ class ContainerExecutor(baseexecutor.BaseExecutor):
else:
assert False
+ if self._container_system_config:
+ # If overlayfs is not used for /etc, we need additional bind mounts
+ # for files in /etc that we want to override, like /etc/passwd
+ config_mount_base = mount_base if find_mode_for_dir(b"/etc") != DIR_OVERLAY else None
+ container.setup_container_system_config(temp_base, config_mount_base )
+
# If necessary, (i.e., if /tmp is not already hidden),
# hide the directory where we store our files from processes in the container
# by mounting an empty directory over it.
diff --git a/doc/container.md b/doc/container.md
index <HASH>..<HASH> 100644
--- a/doc/container.md
+++ b/doc/container.md
@@ -138,10 +138,6 @@ For the same reason, DNS lookups for host names are disabled.
All of these can be re-enabled with `--keep-system-config`,
which also lets the container use the same user list as the host.
-Note that this feature is only available with an overlay mount for `/etc`,
-and thus a container that uses a different access mode for this directory
-will have `--keep-system-config` set by default.
-
## Retrieving Result Files
Files written by the executed tool to directories in the hidden or overlay modes | Improve container config if overlayfs is not used.
Inside the container we want to disable DNS lookups
and modify /etc/passwd etc. to contain specific information for our container.
Previously this was only possible if overlayfs was used for /etc,
and in all other modes --keep-system-config was implied
(which disables these modifications). | sosy-lab_benchexec | train |
ef2bbfee5acada10be2e943e421a7e080cedf067 | diff --git a/server/build/webpack.js b/server/build/webpack.js
index <HASH>..<HASH> 100644
--- a/server/build/webpack.js
+++ b/server/build/webpack.js
@@ -28,15 +28,22 @@ const relativeResolve = rootModuleRelativePath(require)
export default async function createCompiler (dir, { dev = false, quiet = false, buildDir } = {}) {
dir = resolve(dir)
const config = getConfig(dir)
- const defaultEntries = dev
- ? [join(__dirname, '..', '..', 'client/webpack-hot-middleware-client')] : []
+ const defaultEntries = dev ? [
+ join(__dirname, '..', '..', 'client', 'webpack-hot-middleware-client'),
+ join(__dirname, '..', '..', 'client', 'on-demand-entries-client')
+ ] : []
const mainJS = dev
? require.resolve('../../client/next-dev') : require.resolve('../../client/next')
let minChunks
const entry = async () => {
- const entries = { 'main.js': mainJS }
+ const entries = {
+ 'main.js': [
+ ...defaultEntries,
+ mainJS
+ ]
+ }
const pages = await glob('pages/**/*.js', { cwd: dir })
const devPages = pages.filter((p) => p === 'pages/_document.js' || p === 'pages/_error.js')
@@ -45,11 +52,11 @@ export default async function createCompiler (dir, { dev = false, quiet = false,
// managing pages.
if (dev) {
for (const p of devPages) {
- entries[join('bundles', p)] = [...defaultEntries, `./${p}?entry`]
+ entries[join('bundles', p)] = `./${p}?entry`
}
} else {
for (const p of pages) {
- entries[join('bundles', p)] = [...defaultEntries, `./${p}?entry`]
+ entries[join('bundles', p)] = `./${p}?entry`
}
}
diff --git a/server/on-demand-entry-handler.js b/server/on-demand-entry-handler.js
index <HASH>..<HASH> 100644
--- a/server/on-demand-entry-handler.js
+++ b/server/on-demand-entry-handler.js
@@ -71,11 +71,7 @@ export default function onDemandEntryHandler (devMiddleware, compiler, {
const pathname = await resolvePath(pagePath)
const name = join('bundles', pathname.substring(dir.length))
- const entry = [
- join(__dirname, '..', 'client/webpack-hot-middleware-client'),
- join(__dirname, '..', 'client', 'on-demand-entries-client'),
- `${pathname}?entry`
- ]
+ const entry = `${pathname}?entry`
await new Promise((resolve, reject) => {
const entryInfo = entries[page] | Add default entries to main.js (#<I>)
So, we don't need to add them to individual pages.
This also fixes the issue where error pages don't ping the server. | zeit_next.js | train |
320811d806f2ff1fe110b3d81e3d63b018d36dd7 | diff --git a/src/main/java/com/technophobia/substeps/execution/ExecutionNodeResult.java b/src/main/java/com/technophobia/substeps/execution/ExecutionNodeResult.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/technophobia/substeps/execution/ExecutionNodeResult.java
+++ b/src/main/java/com/technophobia/substeps/execution/ExecutionNodeResult.java
@@ -23,6 +23,8 @@ import java.io.PrintWriter;
import java.io.Serializable;
import java.io.StringWriter;
+import com.technophobia.substeps.runner.SubstepExecutionFailure;
+
public class ExecutionNodeResult implements Serializable {
private static final long serialVersionUID = -1444083371334604179L;
@@ -38,6 +40,8 @@ public class ExecutionNodeResult implements Serializable {
private byte[] screenshot;
+ private SubstepExecutionFailure substepExecutionFailure;
+
public ExecutionNodeResult(final long id) {
this.executionNodeId = id;
}
@@ -153,4 +157,14 @@ public class ExecutionNodeResult implements Serializable {
this.screenshot = screenshot;
}
+
+ public void setFailure(SubstepExecutionFailure substepExecutionFailure) {
+
+ this.substepExecutionFailure = substepExecutionFailure;
+ }
+
+ public SubstepExecutionFailure getFailure() {
+ return substepExecutionFailure;
+ }
+
}
diff --git a/src/main/java/com/technophobia/substeps/runner/SubstepExecutionFailure.java b/src/main/java/com/technophobia/substeps/runner/SubstepExecutionFailure.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/technophobia/substeps/runner/SubstepExecutionFailure.java
+++ b/src/main/java/com/technophobia/substeps/runner/SubstepExecutionFailure.java
@@ -21,6 +21,7 @@ package com.technophobia.substeps.runner;
import java.io.Serializable;
+import com.google.common.base.Function;
import com.technophobia.substeps.execution.node.IExecutionNode;
/**
@@ -32,10 +33,18 @@ import com.technophobia.substeps.execution.node.IExecutionNode;
*/
public class SubstepExecutionFailure implements Serializable {
+ public static final Function<SubstepExecutionFailure, Long> GET_NODE_ID = new Function<SubstepExecutionFailure, Long>() {
+
+ public Long apply(SubstepExecutionFailure failure) {
+ return failure.getExeccutionNode() == null ? null : failure.getExeccutionNode().getId();
+ }
+
+ };
+
private static final long serialVersionUID = 4981517213059529046L;
private final Throwable cause;
- private IExecutionNode execcutionNode;
+ private IExecutionNode executionNode;
private boolean setupOrTearDown = false;
private boolean nonCritical = false;
@@ -52,7 +61,8 @@ public class SubstepExecutionFailure implements Serializable {
*/
public SubstepExecutionFailure(final Throwable targetException, final IExecutionNode node) {
this.cause = targetException;
- this.execcutionNode = node;
+ this.executionNode = node;
+ this.executionNode.getResult().setFailure(this);
}
/**
@@ -75,7 +85,7 @@ public class SubstepExecutionFailure implements Serializable {
* @return the execcutionNode
*/
public IExecutionNode getExeccutionNode() {
- return this.execcutionNode;
+ return this.executionNode;
}
/**
@@ -83,7 +93,7 @@ public class SubstepExecutionFailure implements Serializable {
* the execcutionNode to set
*/
public void setExeccutionNode(final IExecutionNode execcutionNode) {
- this.execcutionNode = execcutionNode;
+ this.executionNode = execcutionNode;
}
/**
diff --git a/src/main/java/com/technophobia/substeps/runner/SubstepsRunner.java b/src/main/java/com/technophobia/substeps/runner/SubstepsRunner.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/technophobia/substeps/runner/SubstepsRunner.java
+++ b/src/main/java/com/technophobia/substeps/runner/SubstepsRunner.java
@@ -28,11 +28,11 @@ import com.technophobia.substeps.execution.node.RootNode;
*/
public interface SubstepsRunner {
- void prepareExecutionConfig(final SubstepsExecutionConfig theConfig);
+ RootNode prepareExecutionConfig(final SubstepsExecutionConfig theConfig);
- List<SubstepExecutionFailure> run();
+ RootNode run();
- RootNode getRootNode();
+ List<SubstepExecutionFailure> getFailures();
void addNotifier(INotifier notifier);
Changed interface of SubstepsRunner to better suit usage | G2G3Digital_substeps-framework | train |
6fe6a379b2c206477b386d0479c6e2d3032c4f25 | diff --git a/cufflinks/plotlytools.py b/cufflinks/plotlytools.py
index <HASH>..<HASH> 100644
--- a/cufflinks/plotlytools.py
+++ b/cufflinks/plotlytools.py
@@ -802,8 +802,9 @@ def _iplot(self,data=None,layout=None,filename='',world_readable=None,
z=[int(100*(float(_)-rg.min())/(rg.max()-rg.min()))+12 for _ in rg]
text=kwargs['labels'] if 'labels' in kwargs else text
labels=self[text].values.tolist() if text else ''
- clrs=get_colors(colors,colorscale,x).values()
- gen=colorgen()
+ clrs=colors if colors else get_scales(colorscale)
+ clrs=[clrs] if not isinstance(clrs,list) else clrs
+ clrs=[clrs[0]]*len(x)
marker=Marker(color=clrs,size=z,symbol=symbol,
line=Line(width=width),textfont=getLayout(theme=theme)['xaxis1']['titlefont'])
trace=Scatter(x=x,y=y,marker=marker,mode='markers',text=labels) | Bubble chart colors: Fixes #6 | santosjorge_cufflinks | train |
000ffde3afcee5e172521e3d1babbc6264e1cbec | diff --git a/lib/fit4ruby/Activity.rb b/lib/fit4ruby/Activity.rb
index <HASH>..<HASH> 100644
--- a/lib/fit4ruby/Activity.rb
+++ b/lib/fit4ruby/Activity.rb
@@ -261,9 +261,9 @@ module Fit4Ruby
@events.each do |e|
return e.vo2max if e.event == 'vo2max'
end
- # Then check the user_profile entries for a metmax entry. METmax * 3.5
+ # Then check the user_data entries for a metmax entry. METmax * 3.5
# is same value as VO2max.
- @user_profiles.each do |u|
+ @user_data.each do |u|
return ((u.metmax * 1000.0) * 3.5) / 1024.0 if u.metmax
end | metmax is stored in user_data not user_profile. | scrapper_fit4ruby | train |
21ff8d766a51dcb7b1851776067924cac6f476b9 | diff --git a/lib/notification.js b/lib/notification.js
index <HASH>..<HASH> 100644
--- a/lib/notification.js
+++ b/lib/notification.js
@@ -4,6 +4,7 @@ var txLib = require('./tx');
var statusLib = require('./status');
var _ = require('lodash');
var async = require('async');
+var rpparser = require('./rpparser');
/*
* opts
@@ -25,6 +26,16 @@ module.exports.getNextNotification = function(opts, callback) {
return;
}
+ if (!rpparser.isRippleAddress(address)){
+ callback(new Error('Invalid parameter: address. Must provide a valid Ripple address'));
+ return;
+ }
+
+ if (prev_tx_hash && !/[0-9A-Fa-f]/.test(prev_tx_hash)) {
+ callback(new Error('Invalid parameter: prev_tx_hash. Must provide a valid transaction hash'));
+ return;
+ }
+
getNextTx(remote, {
address: address,
prev_tx_hash: prev_tx_hash,
@@ -439,7 +450,7 @@ function txToNotification(opts){
tx_ledger: (tx && tx.ledger_index ? tx.ledger_index : ''),
tx_hash: (tx && tx.hash ? tx.hash : ''),
tx_timestamp: (tx && tx.date ? ripple.utils.toTimestamp(tx.date) : ''),
- tx_timestamp_human: (tx && tx.date ? new Date(ripple.utils.toTimestamp(tx.date)).toISOString() : '')
+ tx_timestamp_human: (tx && tx.date ? new Date(ripple.utils.toTimestamp(tx.date)).toISOString() : ''),
tx_url: '',
next_notification_url: '',
confirmation_token: '' | [CHORE] Better error message for malformed address in next_notification | ripple_ripple-rest | train |
0d99e4ddf22e1c4df61e70f5d593f65b3a495e2d | diff --git a/lib/cequel/model/dictionary.rb b/lib/cequel/model/dictionary.rb
index <HASH>..<HASH> 100644
--- a/lib/cequel/model/dictionary.rb
+++ b/lib/cequel/model/dictionary.rb
@@ -81,7 +81,7 @@ module Cequel
@row[column]
elsif !@deleted_columns.include?(column)
value = scope.select(column).first[column]
- deserialize_value(value) if value
+ deserialize_value(column, value) if value
end
end
@@ -99,7 +99,7 @@ module Cequel
else
{}.tap do |slice|
row = scope.select(*columns).first.except(self.class.key_alias)
- row.each { |col, value| slice[col] = deserialize_value(value) }
+ row.each { |col, value| slice[col] = deserialize_value(col, value) }
slice.merge!(@row.slice(*columns))
@deleted_columns.each { |column| slice.delete(column) }
end
@@ -141,7 +141,7 @@ module Cequel
new_columns.delete(key)
yield key, @row[key]
elsif !@deleted_columns.include?(key)
- yield key, deserialize_value(value)
+ yield key, deserialize_value(key, value)
end
end
last_key = batch_results.keys.last
@@ -187,7 +187,7 @@ module Cequel
# Subclasses may override this method to implement custom deserialization
# strategies
#
- def deserialize_value(value)
+ def deserialize_value(column, value)
value
end
diff --git a/spec/models/post_comments.rb b/spec/models/post_comments.rb
index <HASH>..<HASH> 100644
--- a/spec/models/post_comments.rb
+++ b/spec/models/post_comments.rb
@@ -8,7 +8,7 @@ class PostComments < Cequel::Model::Dictionary
data.to_json
end
- def deserialize_value(json)
+ def deserialize_value(post_id, json)
JSON.parse(json)
end
end | Dictionary#deserialize_value takes column as well as value | cequel_cequel | train |
067fc39b9fdd33fc37dd6acf483c9a46f4556236 | diff --git a/pypump/models/feed.py b/pypump/models/feed.py
index <HASH>..<HASH> 100644
--- a/pypump/models/feed.py
+++ b/pypump/models/feed.py
@@ -192,7 +192,12 @@ class ItemList(object):
self._done = True
# check what to do next time
- if self._since is not None:
+ if hasattr(self.feed, 'issue65'):
+ # work around API bug for favorites feed, see https://github.com/xray7224/PyPump/issues/65
+ if self._offset is None:
+ self._offset = 0
+ self._offset += 20
+ elif self._since is not None:
if self.feed.links.get('prev'):
self.url = self.feed.links['prev']
del self.feed.links['prev'] # avoid using it again
@@ -317,10 +322,11 @@ class Followers(Feed):
class Following(Feed):
""" People followed by Person """
-
class Favorites(Feed):
""" Person's favorites """
# API bug, can only get 20 items, see https://github.com/xray7224/PyPump/issues/65
+ # mark feed so we can enable bug work around in ItemList._build_cache()
+ issue65 = True
class Inbox(Feed): | work around issue #<I>
for Feed subclasses with the issue<I> attribute set, use offset instead of prev/next
links when getting the next page of items | xray7224_PyPump | train |
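A sketch of the work-around pattern in isolation, in Python: flagged feed classes page by a fixed offset while everything else follows the API's prev links (Feed/ItemList here are stand-ins for the PyPump classes; follow_prev_link is a hypothetical helper):

PAGE_SIZE = 20  # the API bug caps responses at 20 items

class Favorites:
    issue65 = True  # marker checked by the item list

class ItemList:
    def __init__(self, feed):
        self.feed = feed
        self._offset = None

    def advance(self):
        if hasattr(self.feed, 'issue65'):
            # prev/next links are broken for this feed: page by offset
            self._offset = (self._offset or 0) + PAGE_SIZE
        else:
            self.feed.follow_prev_link()  # hypothetical helper

items = ItemList(Favorites())
items.advance()
print(items._offset)  # 20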
ea712364e202816ce99f53824f01d0aa6f178247 | diff --git a/activesupport/lib/active_support/deprecation/proxy_wrappers.rb b/activesupport/lib/active_support/deprecation/proxy_wrappers.rb
index <HASH>..<HASH> 100644
--- a/activesupport/lib/active_support/deprecation/proxy_wrappers.rb
+++ b/activesupport/lib/active_support/deprecation/proxy_wrappers.rb
@@ -3,7 +3,7 @@ require 'active_support/inflector'
module ActiveSupport
module Deprecation
class DeprecationProxy #:nodoc:
- instance_methods.each { |m| undef_method m unless m =~ /^__/ }
+ instance_methods.each { |m| undef_method m unless m =~ /^__|^object_id$/ }
# Don't give a deprecation warning on inspect since test/unit and error
# logs rely on it for diagnostics. | Don't undefine object_id | rails_rails | train |
6aa5bdf8fae6fa61add2b4571890a673f76b1af4 | diff --git a/lib/torii/endpoints/-oauth2-code.js b/lib/torii/endpoints/-oauth2-code.js
index <HASH>..<HASH> 100644
--- a/lib/torii/endpoints/-oauth2-code.js
+++ b/lib/torii/endpoints/-oauth2-code.js
@@ -55,13 +55,15 @@ var Oauth2 = Endpoint.extend({
},
open: function(){
- var name = this.get('name'),
- url = this.buildUrl();
+ var name = this.get('name'),
+ url = this.buildUrl(),
+ redirectUri = this.get('redirectUri');
return this.get('popup').open(url).then(function(authData){
return Oauth2Authentication.create({
authorizationCode: authData.code,
- endpoint: name
+ endpoint: name,
+ redirectUri: redirectUri
});
});
}
diff --git a/test/tests/integration/endpoints/facebook-oauth2-test.js b/test/tests/integration/endpoints/facebook-oauth2-test.js
index <HASH>..<HASH> 100644
--- a/test/tests/integration/endpoints/facebook-oauth2-test.js
+++ b/test/tests/integration/endpoints/facebook-oauth2-test.js
@@ -2,6 +2,7 @@ var torii, container;
import toriiContainer from 'test/helpers/torii-container';
import configuration from 'torii/configuration';
+import Oauth2Authentication from 'torii/authentications/oauth2';
var originalConfiguration = configuration.endpoints['facebook-oauth2'];
@@ -35,3 +36,16 @@ test("Opens a popup to Facebook", function(){
});
});
});
+
+test("Resolves with an authentication object containing 'redirectUri'", function(){
+ Ember.run(function(){
+ torii.open('facebook-oauth2').then(function(data){
+ ok(data instanceof Oauth2Authentication,
+ 'data is a type of Oauth2Authentication');
+ ok(data.get('redirectUri'),
+ 'Object has redirectUri');
+ }, function(err){
+ ok(false, 'Failed with err '+err);
+ });
+ });
+}); | include redirectUri in torii.open resolved value | Vestorly_torii | train |
a107cfbf153032d374816b782903c6b4d54db6fc | diff --git a/alertaclient/config.py b/alertaclient/config.py
index <HASH>..<HASH> 100644
--- a/alertaclient/config.py
+++ b/alertaclient/config.py
@@ -9,6 +9,7 @@ default_config = {
'profile': None,
'endpoint': 'http://localhost:8080',
'key': '',
+ 'client_id': None,
'username': None,
'password': None,
'timezone': 'Europe/London', | Allow OAuth2 client id to be configured locally (#<I>) | alerta_python-alerta-client | train |
ec1749beca1faa6dd1487a0b6e4aaad23caf0685 | diff --git a/ryu/lib/ip.py b/ryu/lib/ip.py
index <HASH>..<HASH> 100644
--- a/ryu/lib/ip.py
+++ b/ryu/lib/ip.py
@@ -64,3 +64,34 @@ def ipv6_to_str(ip):
:return: IPv6 address string
"""
return addrconv.ipv6.bin_to_text(ip)
+
+
+def text_to_bin(ip):
+ """
+ Converts human readable IPv4 or IPv6 string to binary representation.
+ :param str ip: IPv4 or IPv6 address string
+ :return: binary representation of IPv4 or IPv6 address
+ """
+
+ if ':' not in ip:
+ data = addrconv.ipv4.text_to_bin(ip)
+ else:
+ data = addrconv.ipv6.text_to_bin(ip)
+
+ return data
+
+
+def bin_to_text(ip):
+ """
+ Converts binary representation to human readable IPv4 or IPv6 string.
+ :param ip: binary representation of IPv4 or IPv6 address
+ :return: IPv4 or IPv6 address string
+ """
+ if len(ip) == 4:
+ data = addrconv.ipv4.bin_to_text(ip)
+ elif len(ip) == 16:
+ data = addrconv.ipv6.bin_to_text(ip)
+ else:
+ raise struct.error('Invalid ip address length: %s' % len(ip))
+
+ return data
diff --git a/ryu/tests/unit/lib/test_ip.py b/ryu/tests/unit/lib/test_ip.py
index <HASH>..<HASH> 100644
--- a/ryu/tests/unit/lib/test_ip.py
+++ b/ryu/tests/unit/lib/test_ip.py
@@ -86,3 +86,29 @@ class Test_ip(unittest.TestCase):
res = ip.ipv6_to_str(ipv6_bin)
print('%s %s' % (val, res))
eq_(val, res)
+
+ def test_text_to_bin_from_ipv4_text(self):
+ ipv4_str = '10.28.197.1'
+ val = struct.pack('!4B', 10, 28, 197, 1)
+ res = ip.text_to_bin(ipv4_str)
+ eq_(val, res)
+
+ def test_text_to_bin_from_ipv6_text(self):
+ ipv6_str = '2013:da8:215:8f2:aa20:66ff:fe4c:9c3c'
+ val = struct.pack('!8H', 0x2013, 0xda8, 0x215, 0x8f2, 0xaa20,
+ 0x66ff, 0xfe4c, 0x9c3c)
+ res = ip.text_to_bin(ipv6_str)
+ eq_(val, res)
+
+ def test_bin_to_text_from_ipv4_text(self):
+ ipv4_bin = struct.pack('!4B', 10, 28, 197, 1)
+ val = '10.28.197.1'
+ res = ip.bin_to_text(ipv4_bin)
+ eq_(val, res)
+
+ def test_bin_to_text_from_ipv6_text(self):
+ ipv6_bin = struct.pack('!8H', 0x2013, 0xda8, 0x215, 0x8f2, 0xaa20,
+ 0x66ff, 0xfe4c, 0x9c3c)
+ val = '2013:da8:215:8f2:aa20:66ff:fe4c:9c3c'
+ res = ip.bin_to_text(ipv6_bin)
+ eq_(val, res) | lib/ip: Add method to convert the format of Ipv4 or Ipv6 | osrg_ryu | train |
19ef37d36517774a6d9dd69f3ff383cc6fb3b1ee | diff --git a/server/src/main/java/org/axway/grapes/server/reports/ReportsRegistry.java b/server/src/main/java/org/axway/grapes/server/reports/ReportsRegistry.java
index <HASH>..<HASH> 100644
--- a/server/src/main/java/org/axway/grapes/server/reports/ReportsRegistry.java
+++ b/server/src/main/java/org/axway/grapes/server/reports/ReportsRegistry.java
@@ -17,7 +17,7 @@ public class ReportsRegistry {
//
private static final String REPORTS_PACKAGE = "org.axway.grapes.server.reports";
- private static final Set<Report> reports = new HashSet<>();
+ private static final Set<Report> reports = Collections.synchronizedSet(new HashSet<>());
private static final Logger LOG = LoggerFactory.getLogger(ReportsRegistry.class);
private ReportsRegistry() {};
@@ -26,6 +26,7 @@ public class ReportsRegistry {
* Initializes the set of report implementation.
*/
public static void init() {
+ reports.clear();
Reflections reflections = new Reflections(REPORTS_PACKAGE);
final Set<Class<? extends Report>> reportClasses = reflections.getSubTypesOf(Report.class); | Modified the report set to be threadsafe | Axway_Grapes | train |
05e6add23a655aa785404288ea0c4ff3efad28c3 | diff --git a/lib/travis/cli/monitor.rb b/lib/travis/cli/monitor.rb
index <HASH>..<HASH> 100644
--- a/lib/travis/cli/monitor.rb
+++ b/lib/travis/cli/monitor.rb
@@ -31,7 +31,8 @@ module Travis
super
repos.map! { |r| repo(r) }
repos.concat(user.repositories) if my_repos?
- setup_notification(repos.any? || :dummy) unless notification
+ setup_notification(!firehose? || :dummy) unless notification
+ debug "Using notifications: #{notification.class.name[/[^:]+$/]}"
end
def setup_notification(type = nil)
@@ -63,6 +64,10 @@ module Travis
events
end
+ def firehose?
+ org? and repos.empty?
+ end
+
def all?
!pull? and !push?
end | improve default logic for desktop notifications sent by montior command | travis-ci_travis.rb | train |
f15e2070a5cfc61e2d2bbf4b41cd04d0771544db | diff --git a/lib/ORM.js b/lib/ORM.js
index <HASH>..<HASH> 100644
--- a/lib/ORM.js
+++ b/lib/ORM.js
@@ -1,6 +1,5 @@
var util = require("util");
var events = require("events");
-var path = require("path");
var url = require("url");
var hat = require("hat");
var Query = require("sql-query");
@@ -244,7 +243,7 @@ ORM.prototype.defineType = function (name, opts) {
this.customTypes[name] = opts;
this.driver.customTypes[name] = opts;
return this;
-}
+};
ORM.prototype.ping = function (cb) {
this.driver.ping(cb); | Removes unnecessary path requirement in ORM.js | dresende_node-orm2 | train |
f418c26a7af047a7f8a49a93b2e5aa3d46d4ee91 | diff --git a/lib/mongoid/railtie.rb b/lib/mongoid/railtie.rb
index <HASH>..<HASH> 100644
--- a/lib/mongoid/railtie.rb
+++ b/lib/mongoid/railtie.rb
@@ -55,18 +55,13 @@ module Rails #:nodoc:
end
end
- # After initialization we will attempt to connect to the database, if
- # we get an exception and can't find a mongoid.yml we will alert the user
- # to generate one.
- initializer "verify that mongoid is configured" do
+ # After initialization we will warn the user if we can't find a mongoid.yml and
+ # alert to create one.
+ initializer "warn when configuration is missing" do
config.after_initialize do
- begin
- ::Mongoid.master
- rescue ::Mongoid::Errors::InvalidDatabase => e
- unless Rails.root.join("config", "mongoid.yml").file?
- puts "\nMongoid config not found. Create a config file at: config/mongoid.yml"
- puts "to generate one run: rails generate mongoid:config\n\n"
- end
+ unless Rails.root.join("config", "mongoid.yml").file?
+ puts "\nMongoid config not found. Create a config file at: config/mongoid.yml"
+ puts "to generate one run: rails generate mongoid:config\n\n"
end
end
end | Fixes #<I> by not connecting to DB during the loading. Instead, just checking if the config is present and warning if it's not. | mongodb_mongoid | train |
2783c8edbeb07a4387d0e2ae5f91602d3d812565 | diff --git a/frontend/plugins/contests/contests.py b/frontend/plugins/contests/contests.py
index <HASH>..<HASH> 100644
--- a/frontend/plugins/contests/contests.py
+++ b/frontend/plugins/contests/contests.py
@@ -111,36 +111,37 @@ class ContestScoreboard(object):
# Compute stats for each submission
task_succeeded = {taskid: False for taskid in tasks}
for submission in db_results:
- if submission['taskid'] not in tasks:
- continue
- if submission['username'] not in users:
- continue
- status = results[submission['username']]["tasks"][submission['taskid']]
- if status["status"] == "AC" or status["status"] == "ACF":
- continue
- else:
- if submission['result'] == "success":
- if not task_succeeded[submission['taskid']]:
- status["status"] = "ACF"
- task_succeeded[submission['taskid']] = True
- else:
- status["status"] = "AC"
- status["tries"] += 1
- status["time"] = submission['submitted_on']
- status["score"] = ((submission['submitted_on'] + (
- timedelta(minutes=contest_data["penalty"]) * (status["tries"] - 1))) - start).total_seconds() / 60
- elif submission['result'] == "failed":
- status["status"] = "WA"
- status["tries"] += 1
- elif submission['result'] == "timeout":
- status["status"] = "TLE"
- status["tries"] += 1
- else: # other internal error
+ for username in submission["username"]:
+ if submission['taskid'] not in tasks:
+ continue
+ if username not in users:
+ continue
+ status = results[username]["tasks"][submission['taskid']]
+ if status["status"] == "AC" or status["status"] == "ACF":
continue
- activity.append({"user": results[submission['username']]["name"],
- "when": submission['submitted_on'],
- "result": (status["status"] == 'AC' or status["status"] == 'ACF'),
- "taskid": submission['taskid']})
+ else:
+ if submission['result'] == "success":
+ if not task_succeeded[submission['taskid']]:
+ status["status"] = "ACF"
+ task_succeeded[submission['taskid']] = True
+ else:
+ status["status"] = "AC"
+ status["tries"] += 1
+ status["time"] = submission['submitted_on']
+ status["score"] = ((submission['submitted_on'] + (
+ timedelta(minutes=contest_data["penalty"]) * (status["tries"] - 1))) - start).total_seconds() / 60
+ elif submission['result'] == "failed":
+ status["status"] = "WA"
+ status["tries"] += 1
+ elif submission['result'] == "timeout":
+ status["status"] = "TLE"
+ status["tries"] += 1
+ else: # other internal error
+ continue
+ activity.append({"user": results[username]["name"],
+ "when": submission['submitted_on'],
+ "result": (status["status"] == 'AC' or status["status"] == 'ACF'),
+ "taskid": submission['taskid']})
activity.reverse()
# Compute current score
for user in results: | Adapt contests plugin to new submission model | UCL-INGI_INGInious | train |
54a4379191939f91cc1c10eb4920e438cefbf2f6 | diff --git a/lib/sendMessage.js b/lib/sendMessage.js
index <HASH>..<HASH> 100644
--- a/lib/sendMessage.js
+++ b/lib/sendMessage.js
@@ -131,6 +131,14 @@ function createMessageSender(socketEmitter, mqttEmitter, parentConnection){
doMessageForward(check.meshblu.messageForward, emitMsg, check.uuid, function(error, messages) {
_.each(messages, function(msg){
getDevice(msg.forwardTo, function(error, forwardDevice) {
+ if(error) {
+ console.error(error.stack);
+ return;
+ }
+ if(!forwardDevice) {
+ console.error('sendMessage.js:135: forwardDevice not found');
+ return;
+ }
sendMessage(forwardDevice.uuid, msg.message, topic, check.uuid, check, [forwardDevice.uuid]);
});
}); | Add checks in the forwarder to only forward if the device exists | octoblu_meshblu | train |
73c36d2216aa6c878e948bb7df859c6b83762531 | diff --git a/examples/plot_fingerprint.py b/examples/plot_fingerprint.py
index <HASH>..<HASH> 100644
--- a/examples/plot_fingerprint.py
+++ b/examples/plot_fingerprint.py
@@ -22,9 +22,8 @@ import quail
#load data
egg = quail.load_example_data()
-# analysis (use parallel processing because this takes a while)
-analyzed_data = quail.analyze(egg, analysis='fingerprint', listgroup=['average']*16,
- parallel=True, permute=True, n_perms=1000)
+# analysis
+analyzed_data = quail.analyze(egg, analysis='fingerprint', listgroup=['average']*16)
# plot
quail.plot(analyzed_data, title='Memory Fingerprint') | taking parallel out of fingerprint example because it breaks when trying to regenerate readthedocs | ContextLab_quail | train |
36df1142849431a4d1545518ce7ced08f57560d8 | diff --git a/tests/cases/assetstore_test.py b/tests/cases/assetstore_test.py
index <HASH>..<HASH> 100644
--- a/tests/cases/assetstore_test.py
+++ b/tests/cases/assetstore_test.py
@@ -118,6 +118,21 @@ class AssetstoreTestCase(base.TestCase):
oldAssetstore = self.model('assetstore').load(oldAssetstore['_id'])
self.assertFalse(oldAssetstore['current'])
+ # List the assetstores
+ assetstoresBefore = self.model('assetstore').list()
+ # Now break the root of the new assetstore and make sure we can still
+ # list it
+ oldroot = assetstore['root']
+ assetstore['root'] = '///invalidpath'
+ self.model('assetstore').save(assetstore, validate=False)
+ assetstoresAfter = self.model('assetstore').list()
+ self.assertEqual(len(assetstoresBefore), len(assetstoresAfter))
+ self.assertIsNone([store for store in assetstoresAfter if store['_id']
+ == assetstore['_id']][0]['capacity']['free'])
+ # restore the original root
+ assetstore['root'] = oldroot
+ self.model('assetstore').save(assetstore, validate=False)
+
def testDeleteAssetstore(self):
resp = self.request(path='/assetstore', method='GET', user=self.admin)
self.assertStatusOk(resp) | Added a test for a filesystem assetstore with a bad root path. | girder_girder | train |
74f659ad5b2670dfdcf131044d9e7f6173a6effb | diff --git a/annis-service/src/main/java/annis/ql/parser/JoinListener.java b/annis-service/src/main/java/annis/ql/parser/JoinListener.java
index <HASH>..<HASH> 100644
--- a/annis-service/src/main/java/annis/ql/parser/JoinListener.java
+++ b/annis-service/src/main/java/annis/ql/parser/JoinListener.java
@@ -35,6 +35,7 @@ import annis.sqlgen.model.Precedence;
import annis.sqlgen.model.RightAlignment;
import annis.sqlgen.model.RightDominance;
import annis.sqlgen.model.RightOverlap;
+import annis.sqlgen.model.SameSpan;
import annis.sqlgen.model.Sibling;
import com.google.common.base.Preconditions;
import com.google.common.collect.Maps;
@@ -239,7 +240,7 @@ public class JoinListener extends AqlParserBaseListener
@Override
public void enterIdenticalCoverage(AqlParser.IdenticalCoverageContext ctx)
{
- join(ctx, Identical.class);
+ join(ctx, SameSpan.class);
}
@Override
diff --git a/annis-service/src/test/java/annis/CountTest.java b/annis-service/src/test/java/annis/CountTest.java
index <HASH>..<HASH> 100644
--- a/annis-service/src/test/java/annis/CountTest.java
+++ b/annis-service/src/test/java/annis/CountTest.java
@@ -126,6 +126,9 @@ public class CountTest
// that cover more than one token
assertEquals(2, countPcc2("NP & NP & NP & #1 . #2 & #2 . #3"));
+ // regression tests:
+ assertEquals(89, countPcc2("ambiguity & complex_np & #1 _=_ #2"));
+
}
@Test | bug from new parser: identical coverage was mapped to identical join, not samespan join | korpling_ANNIS | train |
10dba1ac15c4ee5957fbc03e9363319dac83e49e | diff --git a/utils/rollup-plugins/lib/devserver.js b/utils/rollup-plugins/lib/devserver.js
index <HASH>..<HASH> 100644
--- a/utils/rollup-plugins/lib/devserver.js
+++ b/utils/rollup-plugins/lib/devserver.js
@@ -15,6 +15,17 @@ const dev = !!process.env.ROLLUP_WATCH;
let server;
+/** Byte size units. Let's hope our requests never get above `kB` ;) */
+const units = ['B', 'kB', 'MB', 'GB', 'TB'];
+
+/**
+ * Convert bytes into a human readable form.
+ */
+function humanizeSize(bytes) {
+ const index = Math.floor(Math.log(bytes) / Math.log(1024));
+ return `${+(bytes / 1024 ** index).toFixed(2)} ${units[index]}`;
+}
+
/**
* Run a local development web server.
* @see https://github.com/lukeed/sirv/tree/master/packages/sirv#api
@@ -54,17 +65,34 @@ function devserver({
server = createServer(sirv(resolve(dir), sirvOpts));
+ // request logging middleware
server.on('request', (req, res) => {
- const { method, url } = req;
-
- if (
- url &&
- (url.endsWith('/') || url.endsWith('.html')) &&
- method === 'GET'
- ) {
- // console.log('@@REQ', req);
- console.log('@@HIT HIT');
- }
+ const start = process.hrtime();
+ const write = res.write.bind(res);
+ let byteLength = 0;
+
+ // monkey patch to calculate response byte size
+ res.write = function writeFn(data) {
+ if (data) byteLength += data.length;
+ // @ts-ignore
+ write(...arguments); // eslint-disable-line prefer-rest-params
+ };
+
+ req.once('end', () => {
+ const duration = process.hrtime(start);
+ const { method, originalUrl, url } = req;
+ const { statusCode } = res;
+ const timing = `${+(duration[1] / 1e6).toFixed(2)}ms`;
+ const color =
+ statusCode >= 400 ? 'red' : statusCode >= 300 ? 'yellow' : 'green'; // eslint-disable-line no-nested-ternary
+ const size = humanizeSize(byteLength);
+ const uri = originalUrl || url;
+ console.log(
+ `» ${timing} ${colors[color](
+ statusCode,
+ )} ${method} ${uri} ${colors.cyan(size)}`,
+ );
+ });
});
server.listen(port, err => { | Add request logging to devserver rollup plugin | WeAreGenki_minna-ui | train |
ce0d7ce512f983d66accd253aa16fb0543fa5c04 | diff --git a/kawala-common/src/main/java/com/kaching/platform/common/Errors.java b/kawala-common/src/main/java/com/kaching/platform/common/Errors.java
index <HASH>..<HASH> 100644
--- a/kawala-common/src/main/java/com/kaching/platform/common/Errors.java
+++ b/kawala-common/src/main/java/com/kaching/platform/common/Errors.java
@@ -13,9 +13,12 @@ package com.kaching.platform.common;
import static com.google.common.collect.Lists.newArrayList;
import static java.lang.String.format;
import static java.util.Collections.EMPTY_LIST;
+import static java.util.Collections.emptyList;
import java.util.List;
+import com.google.common.collect.ImmutableList;
+
/**
* Object helping with capturing and propagating errors.
*/
@@ -45,6 +48,13 @@ public class Errors {
return this;
}
+ public List<String> getMessages() {
+ if (messages == null) {
+ return emptyList();
+ }
+ return ImmutableList.copyOf(messages);
+ }
+
public void throwIfHasErrors() {
if (messages != null) {
throw new RuntimeException(toString()); | Add a way to get to the error messages. | wealthfront_kawala | train |
c5fecb14b3b39b9ba70922b914fd20bbb82523d5 | diff --git a/plaso/serializer/json_serializer.py b/plaso/serializer/json_serializer.py
index <HASH>..<HASH> 100644
--- a/plaso/serializer/json_serializer.py
+++ b/plaso/serializer/json_serializer.py
@@ -4,6 +4,7 @@
from __future__ import unicode_literals
import binascii
+import codecs
import collections
import json
@@ -81,9 +82,11 @@ class JSONAttributeContainerSerializer(interface.AttributeContainerSerializer):
* a list.
"""
if isinstance(attribute_value, py2to3.BYTES_TYPE):
+ encoded_value = binascii.b2a_qp(attribute_value)
+ encoded_value = codecs.decode(encoded_value, 'ascii')
attribute_value = {
'__type__': 'bytes',
- 'stream': '{0:s}'.format(binascii.b2a_qp(attribute_value))
+ 'stream': '{0:s}'.format(encoded_value)
}
elif isinstance(attribute_value, (list, tuple)): | Improved compatibility of serializer tests with Python 3 #<I> | log2timeline_plaso | train |
d15fa176cd28444e69a9e24dd7c49b061ab4b820 | diff --git a/core/commands/mount_windows.go b/core/commands/mount_windows.go
index <HASH>..<HASH> 100644
--- a/core/commands/mount_windows.go
+++ b/core/commands/mount_windows.go
@@ -3,9 +3,9 @@ package commands
import (
"errors"
- "gx/ipfs/QmadYQbq2fJpaRE3XhpMLH68NNxmWMwfMQy1ntr1cKf7eo/go-ipfs-cmdkit"
-
cmds "github.com/ipfs/go-ipfs/commands"
+
+ cmdkit "gx/ipfs/QmSNbH2A1evCCbJSDC6u3RV3GGDhgu6pRGbXHvrN89tMKf/go-ipfs-cmdkit"
)
var MountCmd = &cmds.Command{ | Fix orphaned gx dependency
License: MIT | ipfs_go-ipfs | train |
fbb10e00dab33051fafa2bbddd320d4fcb78ae0a | diff --git a/views/js/ui/usermgr.js b/views/js/ui/usermgr.js
index <HASH>..<HASH> 100644
--- a/views/js/ui/usermgr.js
+++ b/views/js/ui/usermgr.js
@@ -67,12 +67,12 @@ define([
for (var i = 0; i < response.records; i++) {
$edits.eq(i).click(function() {
- $editElt = $(this);
+ var $editElt = $(this);
options.edit.apply($editElt, [$editElt.parent().data('user-identifier')]);
});
$removes.eq(i).click(function() {
- $removeElt = $(this);
+ var $removeElt = $(this);
options.remove.apply($removeElt, [$removeElt.parent().data('user-identifier')]);
});
Again, misuse of 'use strict' | oat-sa_tao-core | train |
c7b2afb8c7fa56bd7627c2b10e43f39908fa0423 | diff --git a/src/modules/user/Module.php b/src/modules/user/Module.php
index <HASH>..<HASH> 100755
--- a/src/modules/user/Module.php
+++ b/src/modules/user/Module.php
@@ -2,6 +2,7 @@
namespace vps\tools\modules\user;
+ use vps\tools\helpers\ConfigurationHelper;
use vps\tools\modules\user\models\User;
use Yii;
use yii\base\BootstrapInterface;
@@ -111,18 +112,7 @@
],
], true);
- // Add module I18N category.
- if (!isset($app->i18n->translations[ 'user.*' ]))
- {
- Yii::$app->i18n->translations[ 'user*' ] = [
- 'class' => 'yii\i18n\PhpMessageSource',
- 'basePath' => __DIR__ . '/messages',
- 'forceTranslation' => true,
- 'fileMap' => [
- 'user' => 'user.php',
- ]
- ];
- }
+ ConfigurationHelper::addTranslation('user', [ 'user' => 'user.php' ], __DIR__ . '/messages');
$this->title = Yii::tr('User manage', [], 'user');
}
diff --git a/src/modules/user/controllers/UserController.php b/src/modules/user/controllers/UserController.php
index <HASH>..<HASH> 100755
--- a/src/modules/user/controllers/UserController.php
+++ b/src/modules/user/controllers/UserController.php
@@ -79,6 +79,7 @@
public function actionManage ()
{
+ $this->title = Yii::tr('User manage', [], 'user');
$this->_tpl = '@userViews/manage';
}
@@ -90,6 +91,7 @@
public function actionLogin ()
{
+ $this->title = Yii::tr('Login', [], 'user');
$this->_tpl = '@userViews/login';
$defaultClient = Yii::$app->settings->get('auth_client_default', $this->module->defaultClient);
$this->data('defaultClient', $defaultClient); | #<I> User manage i<I>n and some other translations. | pulsarvp_vps-tools | train |
8a1e6498378e79ad5b29a82e31af6b55b48d8698 | diff --git a/tests/EventTest.php b/tests/EventTest.php
index <HASH>..<HASH> 100644
--- a/tests/EventTest.php
+++ b/tests/EventTest.php
@@ -22,7 +22,7 @@ class EventTest extends PHPUnit_Framework_TestCase {
public function setUp()
{
$this->id = uniqid();
-
+
$this->defaultTimezone = date_default_timezone_get();
date_default_timezone_set('UTC');
}
@@ -127,11 +127,23 @@ class EventTest extends PHPUnit_Framework_TestCase {
*/
public function testWeekdayMethods()
{
+ $e = new Event($this->id, 'php qux');
+ $this->assertEquals('* * * * 2 *', $e->tuesdays()->getExpression());
+
+ $e = new Event($this->id, 'php flob');
+ $this->assertEquals('* * * * 3 *', $e->wednesdays()->getExpression());
+
$e = new Event($this->id, 'php foo');
$this->assertEquals('* * * * 4 *', $e->thursdays()->getExpression());
$e = new Event($this->id, 'php bar');
$this->assertEquals('* * * * 5 *', $e->fridays()->getExpression());
+
+ $e = new Event($this->id, 'php baz');
+ $this->assertEquals('* * * * 1-5 *', $e->weekdays()->getExpression());
+
+ $e = new Event($this->id, 'php bla');
+ $this->assertEquals('30 1 * * 2 *', $e->weeklyOn('2','01:30')->getExpression());
}
public function testCronLifeTime() | This implements tests for weekday methods that were lacking tests. This includes tests for Event->tuesdays(), Event->wednesdays(), Event->weekdays() and Event->weeklyOn() (#<I>) | lavary_crunz | train |
67a5327dc0c74da36711df64a42f1c0652bb1d7a | diff --git a/extensions/assistedinject/src/com/google/inject/assistedinject/FactoryProvider2.java b/extensions/assistedinject/src/com/google/inject/assistedinject/FactoryProvider2.java
index <HASH>..<HASH> 100644
--- a/extensions/assistedinject/src/com/google/inject/assistedinject/FactoryProvider2.java
+++ b/extensions/assistedinject/src/com/google/inject/assistedinject/FactoryProvider2.java
@@ -545,11 +545,8 @@ final class FactoryProvider2<F>
}
if (!anyAssistedInjectConstructors) {
- // If none existed, use @Inject.
+ // If none existed, use @Inject or a no-arg constructor.
try {
- // TODO(b/151482394): Change this to enforce that there is a @Inject annotated cosntructor
- // since it doesn't make sense to use assisted inject with a no-arg constructor, regardless
- // if the injector is configured to require @Inject annotation or not.
return InjectionPoint.forConstructorOf(implementation);
} catch (ConfigurationException e) {
errors.merge(e.getErrorMessages()); | Delete TODO to make `@Inject` required to use assisted inject.
Since it's actually possible to use assisted inject with field or method injection, an `@Inject` annotated constructor is not necessary.
This does mean that assisted injection does not respect the `binder().requireAtInjectOnConstructors()` requirement and it is non-trivial to propagate the injector option down to enforce this because the injection point is looked up at module configuration time.
-------------
Created by MOE: <URL> | google_guice | train |
95a1ff156fa1029ac7f6d8ae19937f5143a8839a | diff --git a/cruddy/__init__.py b/cruddy/__init__.py
index <HASH>..<HASH> 100644
--- a/cruddy/__init__.py
+++ b/cruddy/__init__.py
@@ -53,14 +53,20 @@ class CRUD(object):
table_name = kwargs['table_name']
profile_name = kwargs.get('profile_name')
region_name = kwargs.get('region_name')
+ placebo = kwargs.get('placebo')
+ placebo_dir = kwargs.get('placebo_dir')
self.required_attributes = kwargs.get('required_attributes', list())
self.supported_ops = kwargs.get('supported_ops', self.SupportedOps)
self.encrypted_attributes = kwargs.get('encrypted_attributes', list())
session = boto3.Session(profile_name=profile_name,
region_name=region_name)
+ if placebo and placebo_dir:
+ self.pill = placebo.attach(session, placebo_dir, debug=True)
+ else:
+ self.pill = None
ddb_resource = session.resource('dynamodb')
self.table = ddb_resource.Table(table_name)
- self.debug = kwargs.get('debug', False)
+ self._debug = kwargs.get('debug', False)
if self.encrypted_attributes:
self._kms_client = session.client('kms')
else:
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -10,14 +10,14 @@ requires = [
setup(
name='cruddy',
version='0.1.5',
- description='A CRUD wrapper class for AWS Lambda',
+ description='A CRUD wrapper class for Amazon DynamoDB',
long_description=open('README.md').read(),
author='Mitch Garnaat',
author_email='mitch@cloudnative.io',
url='https://github.com/cloudnative/cruddy',
packages=find_packages(exclude=['tests*']),
install_requires=requires,
- license=open("LICENSE").read(),
+ license="Apache License 2.0",
classifiers=(
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers', | Fixing setup.py and a bug with debug flag. Add support for placebo recording. | Min-ops_cruddy | train |
2836809a500aac6d6b049fa8bddc6133e71a477d | diff --git a/lib/cli/version.rb b/lib/cli/version.rb
index <HASH>..<HASH> 100644
--- a/lib/cli/version.rb
+++ b/lib/cli/version.rb
@@ -14,6 +14,6 @@
# Cloud Foundry namespace
module CF
module UAA
- CLI_VERSION = "1.3.9"
+ CLI_VERSION = "2.0.0"
end
end | Bump to <I> (because uaa-lib did) | cloudfoundry_cf-uaac | train |
e7628b82e61897cc0fe0528574abb55b01475cd4 | diff --git a/src/toil/provisioners/clusterScaler.py b/src/toil/provisioners/clusterScaler.py
index <HASH>..<HASH> 100644
--- a/src/toil/provisioners/clusterScaler.py
+++ b/src/toil/provisioners/clusterScaler.py
@@ -19,7 +19,6 @@ from future import standard_library
standard_library.install_aliases()
from builtins import str
from builtins import map
-from past.utils import old_div
from builtins import object
import json
import logging
@@ -98,9 +97,6 @@ class BinPackedFit(object):
def __init__(self, nodeShapes, targetTime=3600):
self.nodeShapes = nodeShapes
self.targetTime = targetTime
- # Prioritize preemptable node shapes with the lowest memory
- self.nodeShapes.sort(key=lambda nS: not nS.preemptable)
- self.nodeShapes.sort(key=lambda nS: nS.memory)
self.nodeReservations = {nodeShape:[] for nodeShape in nodeShapes} # The list of node reservations
def binPack(self, jobShapes): | Don't attempt to prioritize low memory node types
Just rely on the user to input the correct ordering. This allows us
to use i2's properly for Cactus. | DataBiosphere_toil | train |
ba15997ca7afd6e1192c759f4712115770283324 | diff --git a/lib/eventemitter2.js b/lib/eventemitter2.js
index <HASH>..<HASH> 100644
--- a/lib/eventemitter2.js
+++ b/lib/eventemitter2.js
@@ -486,6 +486,26 @@
}
}
+ function recursivelyGarbageCollect(root) {
+ if (root === undefined) {
+ return;
+ }
+ var keys = Object.keys(root);
+ for (var i in keys) {
+ var key = keys[i];
+ var obj = root[key];
+ if (obj instanceof Function)
+ continue;
+ if (Object.keys(obj).length > 0) {
+ recursivelyGarbageCollect(root[key]);
+ }
+ if (Object.keys(obj).length === 0) {
+ delete root[key];
+ }
+ }
+ }
+ recursivelyGarbageCollect(this.listenerTree);
+
return this;
}; | Fixed memory leak bug
Keys were left with empty objects instead of being entirely cleaned up
when wildcards are enabled. The examples in #<I> result in a garbage
collected tree with this patch.
Fixes #<I> | EventEmitter2_EventEmitter2 | train |
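The same pruning pass in Python terms: walk the nested listener map and delete keys whose subtree has emptied out, skipping callables just as the JS skips Function values:

def prune_empty(root):
    for key in list(root):
        obj = root[key]
        if callable(obj):
            continue
        if isinstance(obj, dict):
            prune_empty(obj)
            if not obj:  # subtree emptied out, drop the key itself
                del root[key]

tree = {'foo': {'bar': {}}, 'baz': {'_listeners': print}}
prune_empty(tree)
print(tree)  # {'baz': {'_listeners': <built-in function print>}}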
3cb26934dc98ce786bd835c7e0c1f1a53d92d830 | diff --git a/components/MapPolygon.js b/components/MapPolygon.js
index <HASH>..<HASH> 100644
--- a/components/MapPolygon.js
+++ b/components/MapPolygon.js
@@ -89,6 +89,16 @@ var MapPolygon = React.createClass({
* @platform ios
*/
miterLimit: PropTypes.number,
+
+ /**
+ * Boolean to indicate whether to draw each segment of the line as a geodesic as opposed to
+ * straight lines on the Mercator projection. A geodesic is the shortest path between two
+ * points on the Earth's surface. The geodesic curve is constructed assuming the Earth is
+ * a sphere.
+ *
+ * @platform android
+ */
+ geodesic: PropTypes.bool,
},
getDefaultProps: function() {
diff --git a/components/MapPolyline.js b/components/MapPolyline.js
index <HASH>..<HASH> 100644
--- a/components/MapPolyline.js
+++ b/components/MapPolyline.js
@@ -84,6 +84,16 @@ var MapPolyline = React.createClass({
* @platform ios
*/
miterLimit: PropTypes.number,
+
+ /**
+ * Boolean to indicate whether to draw each segment of the line as a geodesic as opposed to
+ * straight lines on the Mercator projection. A geodesic is the shortest path between two
+ * points on the Earth's surface. The geodesic curve is constructed assuming the Earth is
+ * a sphere.
+ *
+ * @platform android
+ */
+ geodesic: PropTypes.bool,
},
getDefaultProps: function() { | Missing geodesic proptype declaration | react-native-community_react-native-maps | train |
b42fa6b9a98790605560eb4e95757b2729347e15 | diff --git a/angr/surveyors/slicecutor.py b/angr/surveyors/slicecutor.py
index <HASH>..<HASH> 100644
--- a/angr/surveyors/slicecutor.py
+++ b/angr/surveyors/slicecutor.py
@@ -186,7 +186,7 @@ class Slicecutor(Surveyor):
return (len(self.active) + len(self._merge_countdowns)) == 0
def _step_path(self, p): #pylint:disable=no-self-use
- p.step(stmt_whitelist=p.stmt_whitelist, last_stmt=p.last_stmt)
+ p.step(whitelist=p.stmt_whitelist, last_stmt=p.last_stmt)
def path_comparator(self, a, b):
if a.weighted_length != b.weighted_length: | Slicecutor: use "whitelist" instead of "stmt_whitelist" according to changes in SimuVEX. | angr_angr | train |
7e95dbbb4ea344b187e8441fe49464aa0705ae57 | diff --git a/code/libraries/koowa/libraries/object/stack.php b/code/libraries/koowa/libraries/object/stack.php
index <HASH>..<HASH> 100644
--- a/code/libraries/koowa/libraries/object/stack.php
+++ b/code/libraries/koowa/libraries/object/stack.php
@@ -9,13 +9,17 @@
/**
* Object Stack
- *
- * Implements a simple stack collection (LIFO)
+ *
+ * A stack is a data type or collection in which the principal (or only) operations on the collection are the addition
+ * of an object to the collection, known as push and removal of an entity, known as pop. The relation between the push
+ * and pop operations is such that the stack is a Last-In-First-Out (LIFO) data structure.
+ *
+ * @link http://en.wikipedia.org/wiki/Stack_(abstract_data_type)
*
* @author Johan Janssens <https://github.com/johanjanssens>
* @package Koowa\Library\Object
*/
-class KObjectStack extends KObject implements Countable
+class KObjectStack extends KObject implements Iterator, Countable, Serializable
{
/**
* The object container
@@ -42,7 +46,7 @@ class KObjectStack extends KObject implements Countable
*
* @return mixed The value of the top element
*/
- public function top()
+ public function peek()
{
return end($this->_object_stack);
}
@@ -50,16 +54,11 @@ class KObjectStack extends KObject implements Countable
/**
* Pushes an element at the end of the stack
*
- * @param KObject $object
- * @throws \InvalidArgumentException if the object doesn't extend from KObject
+ * @param mixed $object
* @return KObjectStack
*/
public function push($object)
{
- if(!$object instanceof KObject) {
- throw new InvalidArgumentException('Object needs to extend from KObject');
- }
-
$this->_object_stack[] = $object;
return $this;
}
@@ -72,10 +71,12 @@ class KObjectStack extends KObject implements Countable
public function pop()
{
return array_pop($this->_object_stack);
- }
-
+ }
+
/**
* Counts the number of elements
+ *
+ * Required by the Countable interface
*
* @return integer The number of elements
*/
@@ -85,6 +86,106 @@ class KObjectStack extends KObject implements Countable
}
/**
+ * Rewind the Iterator to the top
+ *
+ * Required by the Iterator interface
+ *
+ * @return object KObjectQueue
+ */
+ public function rewind()
+ {
+ reset($this->_object_stack);
+ return $this;
+ }
+
+ /**
+ * Check whether the stack contains more objects
+ *
+ * Required by the Iterator interface
+ *
+ * @return boolean
+ */
+ public function valid()
+ {
+ return !is_null(key($this->_object_stack));
+ }
+
+ /**
+ * Return current object index
+ *
+ * Required by the Iterator interface
+ *
+ * @return mixed
+ */
+ public function key()
+ {
+ return key($this->_object_stack);
+ }
+
+ /**
+ * Return current object pointed by the iterator
+ *
+ * Required by the Iterator interface
+ *
+ * @return mixed
+ */
+ public function current()
+ {
+ return $this->_object_stack[$this->key()];
+ }
+
+ /**
+ * Move to the next object
+ *
+ * Required by the Iterator interface
+ *
+ * @return mixed
+ */
+ public function next()
+ {
+ return next($this->_object_stack);
+ }
+
+ /**
+ * Serialize
+ *
+ * Required by the Serializable interface
+ *
+ * @return string
+ */
+ public function serialize()
+ {
+ return serialize($this->toArray());
+ }
+
+ /**
+ * Unserialize
+ *
+ * Required by the Serializable interface
+ *
+ * @param string $data
+ * @return void
+ */
+ public function unserialize($data)
+ {
+ $data = array_reverse(unserialize($data));
+
+ foreach ($data as $item) {
+ $this->push($item);
+ }
+ }
+
+ /**
+ * Serialize to an array representing the stack
+ *
+ * @return array
+ */
+ public function toArray()
+ {
+ return $this->_object_stack;
+ }
+
+ /**
* Check to see if the registry is empty
*
* @return boolean Return TRUE if the registry is empty, otherwise FALSE | re #<I> : Updated docblocks. KObjectStack now also implements the Iterator and Serializable interfaces. | timble_kodekit | train |
94987079a6f4ea45bc694a93b8e34299f23dea2d | diff --git a/pkg/webserver/webserver.go b/pkg/webserver/webserver.go
index <HASH>..<HASH> 100644
--- a/pkg/webserver/webserver.go
+++ b/pkg/webserver/webserver.go
@@ -262,10 +262,12 @@ func (s *Server) Serve() {
}
// Signals the test harness that we've started listening.
-// TODO: write back the port number that we randomly selected?
-// For now just writes back a single byte.
+// Writes back the address that we randomly selected.
func runTestHarnessIntegration(listener net.Listener) {
addr := os.Getenv("CAMLI_SET_BASE_URL_AND_SEND_ADDR_TO")
+ if addr == "" {
+ return
+ }
c, err := net.Dial("tcp", addr)
if err == nil {
fmt.Fprintf(c, "%s\n", listener.Addr()) | runTestHarnessIntegration: Remove TODO, already implemented (#<I>) | perkeep_perkeep | train |
18ef9106a9916b3eb8b77c91df8363ca9d572bac | diff --git a/lib/term.js b/lib/term.js
index <HASH>..<HASH> 100644
--- a/lib/term.js
+++ b/lib/term.js
@@ -1336,15 +1336,14 @@ Term.prototype.without = function() {
};
Term.prototype.merge = function(arg) {
- if (this._fastArity(arguments.length, 1) === false) {
- var _len = arguments.length; var _args = new Array(_len);
- for (var _i = 0; _i < _len; _i++) {_args[_i] = arguments[_i];}
- this._arity(_args, 1, 'merge', this);
- }
+ var _len = arguments.length;var _args = new Array(_len); for(var _i = 0; _i < _len; _i++) {_args[_i] = arguments[_i];}
+ this._arityRange(_args, 1, Infinity, 'merge', this);
var term = new Term(this._r);
term._query.push(termTypes.MERGE);
- var args = [this, new Term(this._r).expr(arg)._wrap()];
+ var args = [this];
+ for (var i = 0; i < _len; ++i)
+ args.push(new Term(this._r).expr(_args[i])._wrap());
term._fillArgs(args);
return term;
};
diff --git a/tests/document_manipulation.test.js b/tests/document_manipulation.test.js
index <HASH>..<HASH> 100644
--- a/tests/document_manipulation.test.js
+++ b/tests/document_manipulation.test.js
@@ -86,14 +86,16 @@ describe('Document Manipulation', function() {
r.expr({a: 0}).merge({b: 1}),
r.expr([{a: 0}, {a: 1}, {a: 2}]).merge({b: 1}),
r.expr({a: 0, c: {l: 'tt'}}).merge({b: {c: {d: {e: 'fff'}}, k: 'pp'}}),
- r.expr({a: 1}).merge({date: r.now()})
+ r.expr({a: 1}).merge({date: r.now()}),
+ r.expr({a: 1}).merge({nested: r.row}, {b: 2})
])
- .spread(function(r1, r2, r3, r4) {
+ .spread(function(r1, r2, r3, r4, r5) {
expect(r1).to.eql({a: 0, b: 1});
expect(r2).to.eql([{a: 0, b: 1}, {a: 1, b: 1}, {a: 2, b: 1}]);
expect(r3).to.eql({a: 0, b: {c: {d: {e: 'fff'}}, k: 'pp'}, c: {l:'tt'}});
expect(r4.a).to.eql(1);
expect(r4.date).to.be.instanceOf(Date);
+ expect(r5).to.eql({ a: 1, nested: { a: 1 }, b: 2 });
});
});
@@ -110,7 +112,7 @@ describe('Document Manipulation', function() {
it('should throw if no arguments are passed', function() {
var invalid = function() { return test.table.merge(); };
- expect(invalid).to.throw(/`merge` takes 1 argument, 0 provided/);
+ expect(invalid).to.throw(/`merge` takes at least 1 argument, 0 provided/);
});
}); | refactor(merge): support merge with multiple terms | mbroadst_rethunk | train |
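The refactor swaps a fixed one-argument check for an at-least-one arity range and wraps each argument in turn; a hedged Python sketch of that variadic pattern (illustrative names, not rethunk's API):

def merge(*terms):
    # like _arityRange(args, 1, Infinity, 'merge'): require one or more arguments
    if not terms:
        raise TypeError('`merge` takes at least 1 argument, 0 provided')
    result = {}
    for term in terms:   # each argument is wrapped/applied in order
        result.update(term)
    return result

assert merge({'a': 1}, {'nested': {'a': 1}}, {'b': 2}) == \
    {'a': 1, 'nested': {'a': 1}, 'b': 2}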
8671461bf134285d47d7cf126b2b893c7bbdc94d | diff --git a/src/js/base/editing/History.js b/src/js/base/editing/History.js
index <HASH>..<HASH> 100644
--- a/src/js/base/editing/History.js
+++ b/src/js/base/editing/History.js
@@ -46,6 +46,21 @@ export default class History {
}
/**
+ * @method commit
+ * Resets history stack, but keeps current editor's content.
+ */
+ commit() {
+ // Clear the stack.
+ this.stack = [];
+
+ // Restore stackOffset to its original value.
+ this.stackOffset = -1;
+
+ // Record our first snapshot (of nothing).
+ this.recordUndo();
+ }
+
+ /**
* @method reset
* Resets the history stack completely; reverting to an empty editor.
*/
diff --git a/src/js/base/module/Editor.js b/src/js/base/module/Editor.js
index <HASH>..<HASH> 100644
--- a/src/js/base/module/Editor.js
+++ b/src/js/base/module/Editor.js
@@ -509,6 +509,15 @@ export default class Editor {
this.context.triggerEvent('change', this.$editable.html());
}
+ /*
+ * commit
+ */
+ commit() {
+ this.context.triggerEvent('before.command', this.$editable.html());
+ this.history.commit();
+ this.context.triggerEvent('change', this.$editable.html());
+ }
+
/**
* redo
*/ | Implemented commit command (resets history but keeps editor content). | summernote_summernote | train |
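commit() differs from reset() only in that it records a snapshot of the current content into the freshly cleared stack; the distinction in a standalone Python sketch (hypothetical editor object, not summernote's code):

class History:
    def __init__(self, editor):
        self.editor = editor               # anything exposing .content
        self.stack, self.offset = [], -1
        self.record_undo()

    def record_undo(self):
        self.offset += 1
        self.stack = self.stack[:self.offset]   # drop any redo entries
        self.stack.append(self.editor.content)

    def commit(self):
        # reset the history stack, but keep the editor's current content
        self.stack, self.offset = [], -1
        self.record_undo()

class Editor:
    content = 'hello'

h = History(Editor())
h.commit()
assert h.stack == ['hello'] and h.offset == 0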
fdcff823e5e84b009744c0dfe66b7953df13d9bf | diff --git a/salt/pillar/django_orm.py b/salt/pillar/django_orm.py
index <HASH>..<HASH> 100644
--- a/salt/pillar/django_orm.py
+++ b/salt/pillar/django_orm.py
@@ -221,7 +221,7 @@ def ext_pillar(minion_id,
# Check that the human-friendly name given is valid (will
# be able to pick up a value from the query) and unique
# (since we're using it as the key in a dictionary)
- if not name_field in model:
+ if name_field not in model:
raise salt.exceptions.SaltException(
"Name '{0}' not found in returned fields.".format(
name_field)) | Fix PEP8 E<I> - test for membership should be "not in" | saltstack_salt | train |
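PEP 8's membership-test rule prefers "x not in y" over "not x in y"; the two forms are equivalent at runtime, so the change above is purely stylistic:

model = {'name_field': 'title'}
assert not ('missing' in model)   # discouraged spelling flagged by the linter
assert 'missing' not in model     # preferred: reads as a single operator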
5073b526a3f107873f305657b5214cbdf263523c | diff --git a/lib/base/warden.rb b/lib/base/warden.rb
index <HASH>..<HASH> 100644
--- a/lib/base/warden.rb
+++ b/lib/base/warden.rb
@@ -57,7 +57,7 @@ module VCAP::Services::Base::Warden
end
def container_running?(handle)
- handle != "" && container_info(handle)
+ handle != "" && container_info(handle) != nil
end
  def container_run_command(handle, cmd, is_privileged=false)	"container_running" needs to return true rather than the result of "container_info"
Change-Id: I4cf<I>cd9a5d<I>ecf<I>e<I>c2bad<I> | cloudfoundry-attic_vcap-services-base | train |
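The one-line fix makes the predicate return a real boolean instead of leaking container_info's return value; the same concern sketched in Python (container_info here is a stand-in):

def container_info(handle):
    # stand-in: returns details for a known container, otherwise None
    return {'ip': '10.0.0.1'} if handle == 'h1' else None

def container_running(handle):
    # explicit comparison so callers get a real boolean, not dict-or-None
    return handle != '' and container_info(handle) is not None

assert container_running('h1') is True
assert container_running('unknown') is False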
b573971e1186c937db21b6ad294b6f2717dc5568 | diff --git a/cnxarchive/sql/get-module-metadata.sql b/cnxarchive/sql/get-module-metadata.sql
index <HASH>..<HASH> 100644
--- a/cnxarchive/sql/get-module-metadata.sql
+++ b/cnxarchive/sql/get-module-metadata.sql
@@ -12,7 +12,7 @@ FROM (SELECT
m.created as created, m.revised as revised,
m.stateid, m.doctype,
l.url AS license,
- m.submitter, m.submitlog, m.portal_type as type,
+ m.submitter, m.submitlog, m.portal_type as "mediaType",
a.abstract,
p.uuid AS "parentId", p.version AS "parentVersion",
m.authors as authors, m.licensors as licensors, m.maintainers as maintainers,
diff --git a/cnxarchive/tests.py b/cnxarchive/tests.py
index <HASH>..<HASH> 100644
--- a/cnxarchive/tests.py
+++ b/cnxarchive/tests.py
@@ -43,7 +43,7 @@ COLLECTION_METADATA = {
u'stateid': None,
u'submitlog': u'',
u'submitter': u'',
- u'type': u'Collection',
+ u'mediaType': u'application/vnd.org.cnx.collection',
u'version': u'1.7',
}
MODULE_METADATA = {
@@ -66,7 +66,7 @@ MODULE_METADATA = {
u'stateid': None,
u'submitlog': u'',
u'submitter': u'',
- u'type': u'Module',
+ u'mediaType': u'application/vnd.org.cnx.module',
u'version': u'1.8',
}
diff --git a/cnxarchive/utils.py b/cnxarchive/utils.py
index <HASH>..<HASH> 100644
--- a/cnxarchive/utils.py
+++ b/cnxarchive/utils.py
@@ -77,3 +77,17 @@ def template_to_regex(template):
regex += re.escape(template[last_pos:])
regex = '^%s$' % regex
return regex
+
+
+MODULE_MIMETYPE = 'application/vnd.org.cnx.module'
+COLLECTION_MIMETYPE = 'application/vnd.org.cnx.collection'
+FOLDER_MIMETYPE = 'application/vnd.org.cnx.folder'
+MIMETYPES = (MODULE_MIMETYPE, COLLECTION_MIMETYPE, FOLDER_MIMETYPE,)
+PORTALTYPE_TO_MIMETYPE_MAPPING = {
+ 'Module': MODULE_MIMETYPE,
+ 'Collection': COLLECTION_MIMETYPE,
+ }
+
+def portaltype_to_mimetype(portal_type):
+ """Map the given ``portal_type`` to a mimetype"""
+ return PORTALTYPE_TO_MIMETYPE_MAPPING[portal_type]
diff --git a/cnxarchive/views.py b/cnxarchive/views.py
index <HASH>..<HASH> 100644
--- a/cnxarchive/views.py
+++ b/cnxarchive/views.py
@@ -11,7 +11,7 @@ import psycopg2
from . import get_settings
from . import httpexceptions
-from .utils import split_ident_hash
+from .utils import split_ident_hash, portaltype_to_mimetype
from .database import CONNECTION_SETTINGS_KEY, SQL
@@ -32,7 +32,9 @@ def get_content(environ, start_response):
result = cursor.fetchone()[0]
except (TypeError, IndexError,): # None returned
raise httpexceptions.HTTPNotFound()
- if result['type'] == 'Collection':
+ # FIXME The 'mediaType' value will be changing to mimetypes
+ # in the near future.
+ if result['mediaType'] == 'Collection':
# Grab the collection tree.
result['tree'] = None # TODO
else:
@@ -45,6 +47,12 @@ def get_content(environ, start_response):
raise httpexceptions.HTTPNotFound()
result['content'] = content[:]
+ # FIXME We currently have legacy 'portal_type' names in the database.
+ # Future upgrades should replace the portal type with a mimetype
+ # of 'application/vnd.org.cnx.(module|collection|folder|<etc>)'.
+ # Until then we will do the replacement here.
+ result['mediaType'] = portaltype_to_mimetype(result['mediaType'])
+
result = json.dumps(result)
status = "200 OK"
headers = [('Content-type', 'application/json',)] | change the portal_type value to a mimetype and rename the resulting field to mediaType. | openstax_cnx-archive | train |
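The new utils code is a plain lookup table; restated with a usage example (constants copied from the diff above; note that an unknown portal type raises a KeyError, which is observed behaviour, not documented API):

MODULE_MIMETYPE = 'application/vnd.org.cnx.module'
COLLECTION_MIMETYPE = 'application/vnd.org.cnx.collection'
PORTALTYPE_TO_MIMETYPE_MAPPING = {
    'Module': MODULE_MIMETYPE,
    'Collection': COLLECTION_MIMETYPE,
}

def portaltype_to_mimetype(portal_type):
    """Map the given legacy portal_type to a mimetype."""
    return PORTALTYPE_TO_MIMETYPE_MAPPING[portal_type]   # KeyError if unknown

assert portaltype_to_mimetype('Collection') == COLLECTION_MIMETYPE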
ba983e608bcc096219e973f91cb520cc51a3f756 | diff --git a/library/src/main/java/com/karumi/headerrecyclerview/HeaderRecyclerViewAdapter.java b/library/src/main/java/com/karumi/headerrecyclerview/HeaderRecyclerViewAdapter.java
index <HASH>..<HASH> 100644
--- a/library/src/main/java/com/karumi/headerrecyclerview/HeaderRecyclerViewAdapter.java
+++ b/library/src/main/java/com/karumi/headerrecyclerview/HeaderRecyclerViewAdapter.java
@@ -138,10 +138,12 @@ public abstract class HeaderRecyclerViewAdapter<VH extends RecyclerView.ViewHold
public void showFooter() {
this.showFooter = true;
+ notifyDataSetChanged();
}
public void hideFooter() {
this.showFooter = false;
+ notifyDataSetChanged();
}
protected abstract VH onCreateHeaderViewHolder(ViewGroup parent, int viewType); | Add notifyDataSetChanged call to show/hide footer | Karumi_HeaderRecyclerView | train |
95e7641faa78784d5f71eb2c9150b4bb9510e91e | diff --git a/agent/proxycfg/state.go b/agent/proxycfg/state.go
index <HASH>..<HASH> 100644
--- a/agent/proxycfg/state.go
+++ b/agent/proxycfg/state.go
@@ -583,8 +583,6 @@ func (s *state) initialConfigSnapshot() ConfigSnapshot {
}
func (s *state) run() {
- logger := s.logger.Named(logging.ProxyConfig)
-
// Close the channel we return from Watch when we stop so consumers can stop
// watching and clean up their goroutines. It's important we do this here and
// not in Close since this routine sends on this chan and so might panic if it
@@ -605,12 +603,10 @@ func (s *state) run() {
case <-s.ctx.Done():
return
case u := <-s.ch:
- logger.Trace("A blocking query returned; handling snapshot update",
- "proxy-id", s.proxyID.String(),
- )
+ s.logger.Trace("A blocking query returned; handling snapshot update")
if err := s.handleUpdate(u, &snap); err != nil {
- logger.Error("Failed to handle update from watch",
+ s.logger.Error("Failed to handle update from watch",
"id", u.CorrelationID, "error", err,
)
continue
@@ -621,8 +617,8 @@ func (s *state) run() {
// etc on future updates.
snapCopy, err := snap.Clone()
if err != nil {
- logger.Error("Failed to copy config snapshot for proxy",
- "proxy-id", s.proxyID.String(), "error", err,
+ s.logger.Error("Failed to copy config snapshot for proxy",
+ "error", err,
)
continue
}
@@ -630,15 +626,11 @@ func (s *state) run() {
select {
// try to send
case s.snapCh <- *snapCopy:
- logger.Trace("Delivered new snapshot to proxy config watchers",
- "proxy-id", s.proxyID.String(),
- )
+ s.logger.Trace("Delivered new snapshot to proxy config watchers")
// avoid blocking if a snapshot is already buffered
default:
- logger.Trace("Failed to deliver new snapshot to proxy config watchers",
- "proxy-id", s.proxyID.String(),
- )
+ s.logger.Trace("Failed to deliver new snapshot to proxy config watchers")
}
// Allow the next change to trigger a send
@@ -649,25 +641,21 @@ func (s *state) run() {
continue
case replyCh := <-s.reqCh:
- logger.Trace("A proxy config snapshot was requested",
- "proxy-id", s.proxyID.String(),
- )
+ s.logger.Trace("A proxy config snapshot was requested")
if !snap.Valid() {
// Not valid yet just respond with nil and move on to next task.
replyCh <- nil
- logger.Trace("The proxy's config snapshot is not valid yet",
- "proxy-id", s.proxyID.String(),
- )
+ s.logger.Trace("The proxy's config snapshot is not valid yet")
continue
}
// Make a deep copy of snap so we don't mutate any of the embedded structs
// etc on future updates.
snapCopy, err := snap.Clone()
if err != nil {
- logger.Error("Failed to copy config snapshot for proxy",
- "proxy-id", s.proxyID.String(), "error", err,
+ s.logger.Error("Failed to copy config snapshot for proxy",
+ "error", err,
)
continue
} | Update proxycfg logging, labels were already attached | hashicorp_consul | train |
bca0848179b19ee9aaa91f4c3af13bd79aa65d3d | diff --git a/pgi/signals.py b/pgi/signals.py
index <HASH>..<HASH> 100644
--- a/pgi/signals.py
+++ b/pgi/signals.py
@@ -15,19 +15,30 @@ from .gtype import PGType
class GSignal(object):
- def __init__(self, signal_id):
+ def __init__(self, info, signal_id):
self._id = signal_id
- self._func = None
+ self._info = info
@property
- def __doc__(self):
- if self._func:
- return self._func.__doc__
+ def _func(self):
+ try:
+ sig_info = self._info.find_signal(self.name)
+ except AttributeError:
+ # older libgirepository
+ sig_info = None
+
+ if sig_info:
+ return generate_callback(sig_info)
else:
- # We only expose signals for types in the typelib atm
- # but when we expose others like in pygobject we might want
- # to create a docstring here from the signal query info
- return ""
+ # FIXME: either too old libgirepository or signal
+ # that is not in the typelib.
+ f = lambda: None
+ f.__doc__ = "%s()" % self.name.replace("-", "_")
+ return f
+
+ @property
+ def __doc__(self):
+ return self._func.__doc__
def __call__(self, *args, **kwargs):
assert self._func
@@ -89,18 +100,8 @@ class _GSignalQuery(object):
gtype.class_unref(klass)
for id_ in sig_ids:
- sig = GSignal(id_)
- name = sig.name
-
- try:
- sig_info = info.find_signal(name)
- except AttributeError:
- # older libgirepository
- sig_info = None
-
- if sig_info:
- sig._func = generate_callback(sig_info)
- setattr(self, escape_parameter(name), sig)
+ sig = GSignal(info, id_)
+ setattr(self, escape_parameter(sig.name), sig)
_GSignalQuery.__name__ = "GSignalQuery" | signal docs: provide a dummy function in case the signal isn't in the typelib for now | pygobject_pgi | train |
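Turning _func from an attribute assigned at construction time into a property means the typelib lookup reruns on each access, so a signal that becomes available later is picked up; a generic Python demonstration of that lazy-with-fallback pattern (hypothetical names, not pgi's code):

LOOKUP = {}

class Signal:
    def __init__(self, name):
        self.name = name

    @property
    def func(self):
        info = LOOKUP.get(self.name)
        if info is not None:
            return info                # found: use the real callback
        def fallback():                # not found: documented dummy
            pass
        fallback.__doc__ = '%s()' % self.name.replace('-', '_')
        return fallback

s = Signal('size-allocate')
assert s.func.__doc__ == 'size_allocate()'   # dummy while unavailable
LOOKUP['size-allocate'] = print               # signal shows up later
assert s.func is print                        # property now resolves it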
579fa50da52ce922d061cc6f3ae9250d115ffb4f | diff --git a/tests/test_send_self_env.py b/tests/test_send_self_env.py
index <HASH>..<HASH> 100644
--- a/tests/test_send_self_env.py
+++ b/tests/test_send_self_env.py
@@ -167,6 +167,8 @@ class TestSendSelfEnvironment(object):
(ValueError, test_yield_parameter, [], {}),
(ValueError, lambda x: x ** 2, [], {}),
(ValueError, type, [], {}),
+ # "both" args
+ (TypeError, None, [type, 1], {}),
# send_self args
(TypeError, None, [1], {}),
(TypeError, None, ["str"], {}),
@@ -174,6 +176,10 @@ class TestSendSelfEnvironment(object):
(TypeError, None, [], {'finalize_callback': 1}),
(TypeError, None, [], {'finalize_callback': False}),
(TypeError, None, [], {'debug': 1}),
+ # "delayed" func
+ (TypeError, type, [], {'catch_stopiteration': 1}),
+ (ValueError, type, [], {'catch_stopiteration': True}),
+ (RuntimeError, 1, [], {'catch_stopiteration': True}),
]
)
def test_bad_arguments(self, error, func, args, kwargs): | Add tests for new exception code from class rework
See dd5d<I>a7e7d4c<I>ab<I>a<I>f9b<I>.
Now <I>% test coverage. | FichteFoll_resumeback | train |
70fc9535d2fdbd00508b357323a161f8b41eb026 | diff --git a/src/Collection.php b/src/Collection.php
index <HASH>..<HASH> 100644
--- a/src/Collection.php
+++ b/src/Collection.php
@@ -422,6 +422,7 @@ class Collection
* The document to return may be null.
*
* @see FindOneAndDelete::__construct() for supported options
+ * @see http://docs.mongodb.org/manual/reference/command/findAndModify/
* @param array|object $filter Query by which to filter documents
* @param array $options Command options
* @return object|null
@@ -443,6 +444,7 @@ class Collection
* "returnDocument" option to return the updated document.
*
* @see FindOneAndReplace::__construct() for supported options
+ * @see http://docs.mongodb.org/manual/reference/command/findAndModify/
* @param array|object $filter Query by which to filter documents
* @param array|object $replacement Replacement document
* @param array $options Command options
@@ -465,6 +467,7 @@ class Collection
* "returnDocument" option to return the updated document.
*
* @see FindOneAndReplace::__construct() for supported options
+ * @see http://docs.mongodb.org/manual/reference/command/findAndModify/
* @param array|object $filter Query by which to filter documents
* @param array|object $update Update to apply to the matched document
* @param array $options Command options | Refer to findAndModify docs in related Collection methods | mongodb_mongo-php-library | train |
f916406adc9003756a1b370f5d7df0967e747b9e | diff --git a/aeron-cluster/src/main/java/io/aeron/cluster/ConsensusModuleAgent.java b/aeron-cluster/src/main/java/io/aeron/cluster/ConsensusModuleAgent.java
index <HASH>..<HASH> 100644
--- a/aeron-cluster/src/main/java/io/aeron/cluster/ConsensusModuleAgent.java
+++ b/aeron-cluster/src/main/java/io/aeron/cluster/ConsensusModuleAgent.java
@@ -941,6 +941,11 @@ class ConsensusModuleAgent implements Agent, MemberStatusListener
return RecordingPos.getRecordingId(aeron.countersReader(), appendedPosition.counterId());
}
+ long logStopPosition(final long leadershipTermId)
+ {
+ return recordingLog.getTermEntry(leadershipTermId).logPosition;
+ }
+
void truncateLogEntryAndAbort(final long leadershipTermId, final long logPosition)
{
// TODO: this is brutal. Need to handle the service.
diff --git a/aeron-cluster/src/main/java/io/aeron/cluster/Election.java b/aeron-cluster/src/main/java/io/aeron/cluster/Election.java
index <HASH>..<HASH> 100644
--- a/aeron-cluster/src/main/java/io/aeron/cluster/Election.java
+++ b/aeron-cluster/src/main/java/io/aeron/cluster/Election.java
@@ -236,7 +236,20 @@ class Election implements AutoCloseable
if (State.LEADER_READY == state && logLeadershipTermId < leadershipTermId)
{
- publishNewLeadershipTerm(clusterMembers[followerMemberId].publication());
+ if (this.logLeadershipTermId == logLeadershipTermId)
+ {
+ publishNewLeadershipTerm(clusterMembers[followerMemberId].publication());
+ }
+ else
+ {
+ memberStatusPublisher.newLeadershipTerm(
+ clusterMembers[followerMemberId].publication(),
+ logLeadershipTermId,
+ consensusModuleAgent.logStopPosition(logLeadershipTermId),
+ logLeadershipTermId + 1,
+ thisMember.id(),
+ logSessionId);
+ }
}
else if (State.CANVASS != state && logLeadershipTermId > leadershipTermId)
{
@@ -321,9 +334,13 @@ class Election implements AutoCloseable
}
else if (0 != compareLog(this.logLeadershipTermId, this.logPosition, logLeadershipTermId, logPosition))
{
- if (this.logLeadershipTermId < logLeadershipTermId)
+ if (this.logPosition > logPosition && this.logLeadershipTermId == logLeadershipTermId)
+ {
+ consensusModuleAgent.truncateLogEntryAndAbort(logLeadershipTermId, logPosition);
+ }
+ else if (this.logLeadershipTermId < logLeadershipTermId)
{
- this.leadershipTermId = this.logLeadershipTermId;
+ this.leadershipTermId = leadershipTermId;
this.candidateTermId = NULL_VALUE;
leaderMember = clusterMembers[leaderMemberId];
this.logSessionId = logSessionId;
@@ -332,8 +349,6 @@ class Election implements AutoCloseable
state(State.FOLLOWER_CATCHUP_TRANSITION, ctx.epochClock().time());
}
-
- // TODO: state may be out of step which requires log truncation and recovery.
}
}
@@ -350,9 +365,18 @@ class Election implements AutoCloseable
void onCommitPosition(final long leadershipTermId, final long logPosition, final int leaderMemberId)
{
- if (leadershipTermId > this.leadershipTermId)
+ if (State.FOLLOWER_BALLOT == state && leadershipTermId > this.leadershipTermId)
{
- // TODO: query leader recording log and catch up
+ if (this.logPosition > logPosition)
+ {
+ consensusModuleAgent.truncateLogEntryAndAbort(logLeadershipTermId, logPosition);
+ }
+ else
+ {
+ catchupLogPosition = logPosition;
+
+ state(State.FOLLOWER_CATCHUP_TRANSITION, ctx.epochClock().time());
+ }
}
}
@@ -725,18 +749,6 @@ class Election implements AutoCloseable
leaderMember.publication(), leadershipTermId, logPosition, thisMember.id());
}
- private void ensureSubscriptionsCreated()
- {
- if (null == logSubscription)
- {
- final ChannelUri logChannelUri = followerLogChannel(ctx.logChannel(), logSessionId);
-
- logSubscription = consensusModuleAgent.createAndRecordLogSubscriptionAsFollower(
- logChannelUri.toString(), logPosition);
- consensusModuleAgent.awaitServicesReady(logChannelUri, logSessionId);
- }
- }
-
private void addLiveLogDestination()
{
consensusModuleAgent.updateMemberDetails(); | [Java]: fill out some handling of out of sync members. | real-logic_aeron | train |
51f07b3c4b83085ebb20fd5758292e58b6286514 | diff --git a/dcard/forums.py b/dcard/forums.py
index <HASH>..<HASH> 100644
--- a/dcard/forums.py
+++ b/dcard/forums.py
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
import logging
try:
from dcard import api | Magic comments for utf8 characters in file | leVirve_dcard-spider | train |
d056679d6be96ac152f57e3df3c6ff1a93d1c487 | diff --git a/lib/steam/servers/GameServer.php b/lib/steam/servers/GameServer.php
index <HASH>..<HASH> 100644
--- a/lib/steam/servers/GameServer.php
+++ b/lib/steam/servers/GameServer.php
@@ -279,7 +279,7 @@ abstract class GameServer extends Server {
switch($requestType) {
case self::REQUEST_CHALLENGE:
$expectedResponse = 'S2C_CHALLENGE_Packet';
- $requestPacket = new A2S_SERVERQUERY_GETCHALLENGE_Packet();
+ $requestPacket = new A2S_PLAYER_Packet();
break;
case self::REQUEST_INFO:
$expectedResponse = 'S2A_INFO_BasePacket'; | Use A2S_PLAYER packets to get a challenge number
This fixes the bug mentioned in koraktor/steam-condenser#<I>. | koraktor_steam-condenser-php | train |
1e20ed7de934a3f83a37ba8fe53633a4304867d6 | diff --git a/crane/crane.go b/crane/crane.go
index <HASH>..<HASH> 100644
--- a/crane/crane.go
+++ b/crane/crane.go
@@ -6,6 +6,7 @@ import (
"github.com/michaelsauter/crane/print"
"os"
"os/exec"
+ "strconv"
"strings"
"syscall"
)
@@ -15,6 +16,11 @@ type StatusError struct {
status int
}
+var (
+ minimalDockerVersion = []int{1, 0}
+ recommendedDockerVersion = []int{1, 2}
+)
+
func RealMain() {
// On panic, recover the error, display it and return the given status code if any
defer func() {
@@ -36,10 +42,57 @@ func RealMain() {
}
os.Exit(statusError.status)
}()
-
+ checkDockerClient()
handleCmd()
}
+// Ensure there is a docker binary in the path, panicking if its version
+// is below the minimal requirement, and printing a warning if its version
+// is below the recommended requirement.
+func checkDockerClient() {
+ dockerCmd := []string{"docker", "--version"}
+ sedCmd := []string{"sed", "-e", "s/[^0-9]*\\([0-9.]\\+\\).*/\\1/"}
+ output, err := pipedCommandOutput(dockerCmd, sedCmd)
+ if err != nil {
+ panic(StatusError{errors.New("Error when probing Docker's client version. Is docker installed and within the $PATH?"), 69})
+ }
+ rawVersions := strings.Split(string(output), ".")[:2]
+ var versions []int
+ for _, rawVersion := range rawVersions {
+ version, err := strconv.Atoi(rawVersion)
+ if err != nil {
+ panic(StatusError{fmt.Errorf("Error when parsing Docker's version %v: %v", rawVersion, err), 69})
+ }
+ versions = append(versions, version)
+ }
+ for i, expectedVersion := range minimalDockerVersion {
+ if versions[i] > expectedVersion {
+ break
+ }
+ if versions[i] < expectedVersion {
+ panic(StatusError{fmt.Errorf("Unsupported client version. Please upgrade to Docker %v or later.", intJoin(recommendedDockerVersion, ".")), 69})
+ }
+ }
+ for i, expectedVersion := range recommendedDockerVersion {
+ if versions[i] > expectedVersion {
+ break
+ }
+ if versions[i] < expectedVersion {
+ print.Noticef("WARNING: outdated Docker client, behavior might not be optimal. Please upgrade to Docker %v or later.\n", intJoin(recommendedDockerVersion, "."))
+ break
+ }
+ }
+}
+
+// Similar to strings.Join() for int slices.
+func intJoin(intSlice []int, sep string) string {
+ var stringSlice []string
+ for _, v := range intSlice {
+ stringSlice = append(stringSlice, fmt.Sprint(v))
+ }
+ return strings.Join(stringSlice, ".")
+}
+
func executeCommand(name string, args []string) {
if isVerbose() {
fmt.Printf("\n--> %s %s\n", name, strings.Join(args, " ")) | introduce minimal/recommended docker client version check | michaelsauter_crane | train |
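Component-wise version gating like the Go code above is easy to get subtly wrong around the early-exit cases; for comparison, a hedged Python sketch where tuple comparison does the ordering (illustrative only, not crane's code):

MINIMAL = (1, 0)
RECOMMENDED = (1, 2)

def check_docker_version(raw):
    # '1.1.2' -> (1, 1); tuples compare major first, then minor
    version = tuple(int(part) for part in raw.split('.')[:2])
    if version < MINIMAL:
        raise SystemExit('Unsupported client version. Please upgrade to '
                         'Docker %d.%d or later.' % RECOMMENDED)
    if version < RECOMMENDED:
        print('WARNING: outdated Docker client, behavior might not be optimal.')

check_docker_version('1.2.3')   # ok, silent
check_docker_version('1.1.0')   # prints the upgrade warning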
d5935d9496c595431335d7f593d52f00012e73ff | diff --git a/pydivert/models.py b/pydivert/models.py
index <HASH>..<HASH> 100644
--- a/pydivert/models.py
+++ b/pydivert/models.py
@@ -339,7 +339,7 @@ class CapturedPacket(object):
if len(headers) > 2:
raise ValueError("No more than 2 headers (tcp/udp/icmp over ip) are supported")
- self.payload = payload
+ self._payload = payload
self._raw_packet = raw_packet
self.meta = meta
@@ -365,6 +365,21 @@ class CapturedPacket(object):
break
@property
+ def payload(self):
+ return self._payload
+
+ @payload.setter
+ def payload(self, payload):
+ header = self.headers[0]
+ if self._payload:
+ #recalculate length (current length - (current_payload_length - new_payload_length)))
+ header.Length = socket.ntohs(socket.htons(header.Length) - (len(self._payload) - len(payload)))
+ else:
+ #payload was empty: current_length + new_payload_length
+ header.Length = socket.ntohs(socket.htons(header.Length) + len(payload))
+ self._payload = payload
+
+ @property
def address_family(self):
for v6hdr in ("ipv6_hdr", "icmpv6_hdr"):
if getattr(self, v6hdr):
diff --git a/pydivert/tests/__init__.py b/pydivert/tests/__init__.py
index <HASH>..<HASH> 100644
--- a/pydivert/tests/__init__.py
+++ b/pydivert/tests/__init__.py
@@ -146,6 +146,7 @@ def run_test_suites():
continue
with open(os.devnull, 'wb') as devnull:
+ print("Preparing test environment for WinDivert version %s" % version)
subprocess.call(['sc', 'stop', 'WinDivert%s' % version], stdout=devnull, stderr=devnull)
subprocess.call(['sc', 'delete', 'WinDivert%s' % version], stdout=devnull, stderr=devnull)
diff --git a/pydivert/tests/test_windivert.py b/pydivert/tests/test_windivert.py
index <HASH>..<HASH> 100644
--- a/pydivert/tests/test_windivert.py
+++ b/pydivert/tests/test_windivert.py
@@ -393,7 +393,7 @@ class WinDivertTCPDataCaptureTestCase(BaseTestCase):
def tearDown(self):
try:
self.handle.close()
- except:
+ except Exception as e:
pass
self.server.shutdown()
self.server.server_close()
@@ -428,6 +428,7 @@ class WinDivertTCPIPv4TestCase(BaseTestCase):
packet = handle.receive()
self.assertEqual(packet.tcp_hdr.Syn, 1)
self.assertEqual(hexlify(packet.tcp_hdr.Options), b"0204ffd70103030801010402")
+ client_thread.join(timeout=10)
def test_modify_tcp_payload(self):
""" | Addressed issue #4 . Changing payload would not change ipv4/ipv6 header length field. Maybe is a check enforced by the driver in version <I>. | ffalcinelli_pydivert | train |
2e546bb05b1b5986a1147b5c194d694c6dd5cb60 | diff --git a/src/Embera/Provider/ProviderAdapter.php b/src/Embera/Provider/ProviderAdapter.php
index <HASH>..<HASH> 100755
--- a/src/Embera/Provider/ProviderAdapter.php
+++ b/src/Embera/Provider/ProviderAdapter.php
@@ -115,7 +115,7 @@ abstract class ProviderAdapter
/** inline {@inheritdoc} */
public static function getHosts()
{
- return self::$hosts;
+ return static::$hosts;
}
}
diff --git a/src/Embera/ProviderCollection/DefaultProviderCollection.php b/src/Embera/ProviderCollection/DefaultProviderCollection.php
index <HASH>..<HASH> 100755
--- a/src/Embera/ProviderCollection/DefaultProviderCollection.php
+++ b/src/Embera/ProviderCollection/DefaultProviderCollection.php
@@ -13,6 +13,18 @@
namespace Embera\ProviderCollection;
/**
- * Basically its a wrapper for the ProvidercollectionAdapter.
+ * Basically it's a wrapper for the ProviderCollectionAdapter but defines the
+ * default providers supported by the library
*/
-class defaultProviderCollection extends ProviderCollectionAdapter { }
+class defaultProviderCollection extends ProviderCollectionAdapter
+{
+ /** inline {@inheritdoc} */
+ public function __construct(array $config = [])
+ {
+ parent::__construct($config);
+ $this->registerProvider([
+ 'TwentyThreeHq',
+ 'Youtube',
+ ]);
+ }
+}
diff --git a/src/Embera/ProviderCollection/ProviderCollectionAdapter.php b/src/Embera/ProviderCollection/ProviderCollectionAdapter.php
index <HASH>..<HASH> 100755
--- a/src/Embera/ProviderCollection/ProviderCollectionAdapter.php
+++ b/src/Embera/ProviderCollection/ProviderCollectionAdapter.php
@@ -31,11 +31,7 @@ abstract class ProviderCollectionAdapter implements ProviderCollectionInterface
protected $wildCardHosts = [];
/** @var array Massive array with the mapping of host -> provider relation. */
- protected $providers = [
- 'm.youtube.com' => 'Youtube',
- 'youtube.com' => 'Youtube',
- 'youtu.be' => 'Youtube',
- ];
+ protected $providers = [];
/** Alias for the setConfig method */
public function __construct(array $config = [])
@@ -97,6 +93,20 @@ abstract class ProviderCollectionAdapter implements ProviderCollectionInterface
$this->providers = $list;
}
+ /** inline {@inheritdoc} */
+ public function registerProvider($names, $prefix = true)
+ {
+ foreach ((array) $names as $name) {
+ if ($prefix) {
+ $name = 'Embera\Provider\\' . $name;
+ }
+ $hosts = $name::getHosts();
+ foreach ($hosts as $h) {
+ $this->providers[$h] = $name;
+ }
+ }
+ }
+
/**
* Extract the urls from a given text or array
*
@@ -190,4 +200,5 @@ abstract class ProviderCollectionAdapter implements ProviderCollectionInterface
return $provider;
}
+
}
diff --git a/src/Embera/ProviderCollection/ProviderCollectionInterface.php b/src/Embera/ProviderCollection/ProviderCollectionInterface.php
index <HASH>..<HASH> 100755
--- a/src/Embera/ProviderCollection/ProviderCollectionInterface.php
+++ b/src/Embera/ProviderCollection/ProviderCollectionInterface.php
@@ -58,4 +58,14 @@ interface ProviderCollectionInterface
* @return void
*/
public function setProviderList(array $list);
+
+
+ /**
+     * Registers a provider into the collection
+ *
+ * @param mixed $names
+ * @param bool $prefix
+ * @return void
+ */
+ public function registerProvider($names, $prefix = true);
} | Providers now register their own hosts | mpratt_Embera | train |
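The self:: to static:: change in the adapter is PHP late static binding, so getHosts() resolves each subclass's own $hosts, and registerProvider() fans those hosts into the host-to-provider map. Both ideas sketched in Python (illustrative; the hosts are taken from the mapping removed in this diff):

class ProviderAdapter:
    hosts = []

    @classmethod
    def get_hosts(cls):
        return cls.hosts        # resolves on the subclass, like static::$hosts

class Youtube(ProviderAdapter):
    hosts = ['m.youtube.com', 'youtube.com', 'youtu.be']

providers = {}

def register_provider(provider):
    for host in provider.get_hosts():   # each provider registers its own hosts
        providers[host] = provider

register_provider(Youtube)
assert providers['youtu.be'] is Youtube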
af6df3ad37c0ab3c160bf8cdea61b33b2380d736 | diff --git a/src/app/n2n/impl/web/dispatch/map/val/ValUrl.php b/src/app/n2n/impl/web/dispatch/map/val/ValUrl.php
index <HASH>..<HASH> 100644
--- a/src/app/n2n/impl/web/dispatch/map/val/ValUrl.php
+++ b/src/app/n2n/impl/web/dispatch/map/val/ValUrl.php
@@ -72,7 +72,8 @@ class ValUrl extends SimplePropertyValidator {
}
if ($schemeRequired) {
- if (false !== filter_var($url, FILTER_VALIDATE_URL, FILTER_FLAG_SCHEME_REQUIRED)) {
+			// quick fix for the deprecated scheme-required flag; makes schemeRequired redundant at the moment
+ if (false !== filter_var($url, FILTER_VALIDATE_URL/*, FILTER_FLAG_SCHEME_REQUIRED*/)) {
return true;
}
		} else {	quick fix for the deprecated scheme-required flag; makes schemeRequired redundant for now	n2n_n2n-impl-web-dispatch	train
107b56f2acdb704c0126261f661cec1a615353ca | diff --git a/programs/demag_gui.py b/programs/demag_gui.py
index <HASH>..<HASH> 100755
--- a/programs/demag_gui.py
+++ b/programs/demag_gui.py
@@ -5222,6 +5222,7 @@ class Demag_GUI(wx.Frame):
elif calculation_type=="DE-BFL-O": PCA_type="line-with-origin"
elif calculation_type=="DE-FM": PCA_type="Fisher"
elif calculation_type=="DE-BFP": PCA_type="plane"
+ else: print("no PCA type found setting to line"); PCA_type="line"
self.PCA_type_box.SetStringSelection(PCA_type)
def update_fit_boxes(self, new_fit = False):
diff --git a/programs/thellier_gui.py b/programs/thellier_gui.py
index <HASH>..<HASH> 100755
--- a/programs/thellier_gui.py
+++ b/programs/thellier_gui.py
@@ -1,5 +1,24 @@
#!/usr/bin/env pythonw
-
+"""
+Runs Thellier GUI, PmagPy's main analysis GUI for Thellier-type
+paleointensity data. This can be used to obtain intensities for Thermal and
+Microwave data. It allows export of figures and analysis results for upload
+to the MagIC database and/or publication. For more information on how to
+interpret or use the GUI's many functions see the Help menu in the open GUI.
+More documentation can be found on all of PmagPy's functionality at the
+PmagPy cookbook which can be found here: earthref.org/PmagPy/cookbook/
+
+SYNTAX
+ thellier_gui.py [command line options]
+
+OPTIONS
+ -h : opens this help message
+ -WD : specify working directory
+ -DM : specify MagIC data model (options : 3 or 2.x)
+
+AUTHORS
+ Ron Shaar and Lisa Tauxe
+"""
#============================================================================================
# LOG HEADER:
#============================================================================================	fixed small problem in Demag GUI in which a variable was not set due to the lack of an else statement at the end of a control set; added basic command line documentation to thellier GUI in __main__ doc	PmagPy_PmagPy	train
3388fe5034f86cbd642c19b9584727bddf49eaed | diff --git a/concrete/authentication/community/controller.php b/concrete/authentication/community/controller.php
index <HASH>..<HASH> 100644
--- a/concrete/authentication/community/controller.php
+++ b/concrete/authentication/community/controller.php
@@ -67,7 +67,6 @@ class Controller extends GenericOauth2TypeController
$this->set('apisecret', \Config::get('auth.community.secret', ''));
$list = new \GroupList();
- $list->includeAllGroups();
$this->set('groups', $list->getResults());
}
diff --git a/concrete/authentication/external_concrete5/controller.php b/concrete/authentication/external_concrete5/controller.php
index <HASH>..<HASH> 100644
--- a/concrete/authentication/external_concrete5/controller.php
+++ b/concrete/authentication/external_concrete5/controller.php
@@ -141,7 +141,6 @@ class Controller extends GenericOauth2TypeController
$this->set('redirectUri', $this->urlResolver->resolve(['/ccm/system/authentication/oauth2/external_concrete5/callback']));
$list = $this->app->make(GroupList::class);
- $list->includeAllGroups();
$this->set('groups', $list->getResults());
}
diff --git a/concrete/authentication/facebook/controller.php b/concrete/authentication/facebook/controller.php
index <HASH>..<HASH> 100644
--- a/concrete/authentication/facebook/controller.php
+++ b/concrete/authentication/facebook/controller.php
@@ -65,7 +65,6 @@ class Controller extends GenericOauth2TypeController
$this->set('apisecret', $config->get('auth.facebook.secret', ''));
$list = new \GroupList();
- $list->includeAllGroups();
$this->set('groups', $list->getResults());
}
diff --git a/concrete/authentication/google/controller.php b/concrete/authentication/google/controller.php
index <HASH>..<HASH> 100644
--- a/concrete/authentication/google/controller.php
+++ b/concrete/authentication/google/controller.php
@@ -76,7 +76,6 @@ class Controller extends GenericOauth2TypeController
$this->set('apisecret', $config->get('auth.google.secret', ''));
$list = new \GroupList();
- $list->includeAllGroups();
$this->set('groups', $list->getResults());
$this->set('whitelist', $config->get('auth.google.email_filters.whitelist', []));
diff --git a/concrete/authentication/twitter/controller.php b/concrete/authentication/twitter/controller.php
index <HASH>..<HASH> 100644
--- a/concrete/authentication/twitter/controller.php
+++ b/concrete/authentication/twitter/controller.php
@@ -91,7 +91,6 @@ class Controller extends GenericOauth1aTypeController
$this->set('apisecret', $config->get('auth.twitter.secret', ''));
$list = new \GroupList();
- $list->includeAllGroups();
$this->set('groups', $list->getResults());
} | Remove Guest from "Group to enter on registration" options | concrete5_concrete5 | train |
26e8a17ea959b0453c534a3b470babc804edae24 | diff --git a/tests/test_query.py b/tests/test_query.py
index <HASH>..<HASH> 100644
--- a/tests/test_query.py
+++ b/tests/test_query.py
@@ -83,15 +83,22 @@ class QueryTestCase(unittest.TestCase):
assert not item.negated
assert item.values == ['true']
- # def test_fusion(self):
- # """Test something from everything combined"""
- # q = ('the.rolling.stones.iregex.not:asc=sympathy,for,the,devil&'
- # '!guns.n.roses=paradise,city&queen:asc')
-
- # # Don't care about the other ones, as they're testing the &
- # item = self.parse(q)
-
- # assert item.path == ['guns', 'n', 'roses']
- # assert item.operator == constants.OPERATOR_IEQUAL[0]
- # assert item.negated
- # assert item.values == ['paradise', 'city']
+ def test_fusion(self):
+ """Test something from everything combined"""
+ q = ('the.rolling.stones.iregex.not:asc=sympathy,for,the,devil;'
+ '!(guns.n.roses=paradise,city&queen:asc)')
+
+ item = self.parse(q)
+
+ # I'm lazy and don't want to walk the tree, so lets just test the repr.
+ assert (repr(item) ==
+ "(the.rolling.stones.iregex.not:asc :iexact 'sympathy' "
+ "| 'for' | 'the' | 'devil') OR NOT (guns.n.roses :iexact "
+ "'paradise' | 'city') AND (queen :iexact '')")
+
+ def test_grouping(self):
+ item = self.parse('foo=bar&(a=b;b=c)')
+
+ assert item.left.path == ['foo']
+ assert item.right.left.path == ['a']
+ assert item.right.right.path == ['b'] | Add some final tests for things that were missing. | armet_python-armet | train |
600f3cad171c55388e984385d5859166e5f07fee | diff --git a/lib/Agent.js b/lib/Agent.js
index <HASH>..<HASH> 100644
--- a/lib/Agent.js
+++ b/lib/Agent.js
@@ -122,7 +122,7 @@ Agent.prototype.send = function(to, message) {
var colon = to.indexOf(':');
if (colon !== -1) {
// to is an url like "protocol://networkId/agentId"
- var url = util.splitUrl(to);
+ var url = util.parseUrl(to);
if (url.protocol == 'http' || url.protocol == 'https') {
return this._sendAsHTTP(to, message);
}
diff --git a/lib/util.js b/lib/util.js
index <HASH>..<HASH> 100644
--- a/lib/util.js
+++ b/lib/util.js
@@ -19,7 +19,7 @@ exports.isPromise = function (value) {
* when there is a match. Returns null if no valid url.
*
*/
-exports.splitUrl = function (url) {
+exports.parseUrl = function (url) {
// match an url like "protocol://domain/path"
var match = /^([A-z]+):\/\/([^\/]+)(\/(.*)$|$)/.exec(url);
if (match) {
diff --git a/test/util.test.js b/test/util.test.js
index <HASH>..<HASH> 100644
--- a/test/util.test.js
+++ b/test/util.test.js
@@ -24,10 +24,10 @@ describe('util', function() {
assert.equal(util.isPromise(myPromise), true);
});
- it('should split an url', function () {
- assert.deepEqual(util.splitUrl('http://example.com/path'), {protocol: 'http', domain: 'example.com', path: 'path'});
- assert.deepEqual(util.splitUrl('http://example.com/some/path'), {protocol: 'http', domain: 'example.com', path: 'some/path'});
- assert.deepEqual(util.splitUrl('https://example.com/'), {protocol: 'https', domain: 'example.com', path: ''});
- assert.deepEqual(util.splitUrl('a://b/c'), {protocol: 'a', domain: 'b', path: 'c'});
+ it('should parse an url', function () {
+ assert.deepEqual(util.parseUrl('http://example.com/path'), {protocol: 'http', domain: 'example.com', path: 'path'});
+ assert.deepEqual(util.parseUrl('http://example.com/some/path'), {protocol: 'http', domain: 'example.com', path: 'some/path'});
+ assert.deepEqual(util.parseUrl('https://example.com/'), {protocol: 'https', domain: 'example.com', path: ''});
+ assert.deepEqual(util.parseUrl('a://b/c'), {protocol: 'a', domain: 'b', path: 'c'});
});
}); | Renamed splitUrl to parseUrl | enmasseio_evejs | train |
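parseUrl is one anchored regex over protocol://domain/path; the same match in Python, reproducing the expectations from the updated test (a sketch, not the library's code):

import re

def parse_url(url):
    # "protocol://domain/path", where the path part may be absent or empty
    match = re.match(r'^([A-Za-z]+)://([^/]+)(?:/(.*))?$', url)
    if match is None:
        return None
    protocol, domain, path = match.groups()
    return {'protocol': protocol, 'domain': domain, 'path': path or ''}

assert parse_url('http://example.com/some/path') == \
    {'protocol': 'http', 'domain': 'example.com', 'path': 'some/path'}
assert parse_url('https://example.com/') == \
    {'protocol': 'https', 'domain': 'example.com', 'path': ''}
assert parse_url('a://b/c') == {'protocol': 'a', 'domain': 'b', 'path': 'c'}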
72cdb8f84263eef98e1ced7dba42cf921ac0609c | diff --git a/climlab/model/ebm.py b/climlab/model/ebm.py
index <HASH>..<HASH> 100644
--- a/climlab/model/ebm.py
+++ b/climlab/model/ebm.py
@@ -143,6 +143,7 @@ class EBM(TimeDependentProcess):
def __init__(self,
num_lat=90,
S0=const.S0,
+ s2=-0.48,
A=210.,
B=2.,
D=0.555, # in W / m^2 / degC, same as B
@@ -165,6 +166,7 @@ class EBM(TimeDependentProcess):
super(EBM, self).__init__(timestep=timestep, **kwargs)
sfc = self.Ts.domain
self.param['S0'] = S0
+ self.param['s2'] = s2
self.param['A'] = A
self.param['B'] = B
self.param['D'] = D
diff --git a/climlab/tests/test_ebm.py b/climlab/tests/test_ebm.py
index <HASH>..<HASH> 100644
--- a/climlab/tests/test_ebm.py
+++ b/climlab/tests/test_ebm.py
@@ -3,7 +3,7 @@ import numpy as np
import climlab
import pytest
from climlab.tests.xarray_test import to_xarray
-
+from climlab.utils.legendre import P2
@pytest.fixture()
def EBM_seasonal():
@@ -99,3 +99,25 @@ def test_albedo():
#m.add_subprocess('albedo', albedo.ConstantAlbedo(state=m.state, **m.param))
#m.integrate_years(1)
#assert m.icelat == None
+
+@pytest.mark.fast
+def test_analytical():
+    '''Check to see if the numerical solution converges to the analytical
+ steady-state solution of the simple EBM with constant albedo'''
+ param = {'a0': 0.3,
+ 'a2': 0.,
+ 'ai': 0.3,
+ 's2': -0.48,
+ 'S0': 1360.,
+ 'A': 210.,
+ 'B': 2.,
+ 'D': 0.55,
+ 'Tf': -1000., # effectively makes albedo constant
+ }
+ m = climlab.EBM(**param)
+ m.integrate_years(5)
+ Tnumerical = np.squeeze(m.Ts)
+ delta = param['D']/param['B']
+ x = np.sin(np.deg2rad(m.lat))
+ Tanalytical = ((1-param['a0'])*param['S0']/4*(1+param['s2']*P2(x)/(1+6*delta))-param['A'])/param['B']
+ assert Tnumerical == pytest.approx(Tanalytical, abs=2E-2) | Add test for EBM against simple analytical solution | brian-rose_climlab | train |
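The analytical profile being tested is the one-mode steady state T(x) = ((1 - a0) * (S0/4) * (1 + s2*P2(x)/(1 + 6*delta)) - A) / B with delta = D/B and P2 the second Legendre polynomial; a standalone numeric version using the test's parameter values (the latitude grid here is chosen arbitrarily):

import numpy as np

def P2(x):
    return 0.5 * (3.0 * x ** 2 - 1.0)   # second Legendre polynomial

a0, s2, S0, A, B, D = 0.3, -0.48, 1360.0, 210.0, 2.0, 0.55
delta = D / B
x = np.sin(np.deg2rad(np.linspace(-89.0, 89.0, 90)))
T = ((1 - a0) * S0 / 4 * (1 + s2 * P2(x) / (1 + 6 * delta)) - A) / B
print(round(float(T.max()), 2))   # warmest at the equator since s2 < 0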
688edc86757286d6474c3f8030e75d2ba778a400 | diff --git a/src/scs_core/aws/manager/configuration_check_finder.py b/src/scs_core/aws/manager/configuration_check_finder.py
index <HASH>..<HASH> 100644
--- a/src/scs_core/aws/manager/configuration_check_finder.py
+++ b/src/scs_core/aws/manager/configuration_check_finder.py
@@ -41,8 +41,6 @@ class ConfigurationCheckFinder(object):
response = self.__http_client.get(self.__URL, headers=headers, params=request.params())
- print("find - response: %s" % response)
-
return ConfigurationCheckResponse.construct_from_jdict(response.json())
@@ -158,8 +156,6 @@ class ConfigurationCheckResponse(HTTPResponse):
if not jdict:
return None
- print("ConfigurationCheckResponse - jdict: %s" % jdict)
-
status = HTTPStatus(jdict.get('statusCode'))
if status != HTTPStatus.OK:
diff --git a/src/scs_core/aws/manager/configuration_check_requester.py b/src/scs_core/aws/manager/configuration_check_requester.py
index <HASH>..<HASH> 100644
--- a/src/scs_core/aws/manager/configuration_check_requester.py
+++ b/src/scs_core/aws/manager/configuration_check_requester.py
@@ -38,8 +38,6 @@ class ConfigurationCheckRequester(object):
response = self.__http_client.get(self.__URL, headers=headers, params=params)
- print("request - response: %s" % response)
-
return ConfigurationCheckRequesterResponse.construct_from_jdict(response.json())
@@ -63,8 +61,6 @@ class ConfigurationCheckRequesterResponse(HTTPResponse):
if not jdict:
return None
- print("ConfigurationCheckRequesterResponse - jdict: %s" % jdict)
-
status = HTTPStatus(jdict.get('statusCode'))
if status != HTTPStatus.OK: | Added ConfigurationCheckRequesterResponse | south-coast-science_scs_core | train |
12d3dec1617b0fa78ecde8f6def0229498d670f4 | diff --git a/concrete/controllers/backend/board/instance/rules.php b/concrete/controllers/backend/board/instance/rules.php
index <HASH>..<HASH> 100644
--- a/concrete/controllers/backend/board/instance/rules.php
+++ b/concrete/controllers/backend/board/instance/rules.php
@@ -50,6 +50,7 @@ class Rules extends AbstractController
$command = new ScheduleBoardInstanceRuleCommand();
$command->setBoardInstanceSlotRuleID($boardInstanceSlotRuleID);
$command->setStartDate($data['startDate']);
+ $command->setName($data['name']);
$command->setEndDate($data['endDate']);
$command->setStartTime($data['startTime']);
$command->setEndTime($data['endTime']);
diff --git a/concrete/src/Board/Command/ScheduleBoardInstanceRuleCommand.php b/concrete/src/Board/Command/ScheduleBoardInstanceRuleCommand.php
index <HASH>..<HASH> 100644
--- a/concrete/src/Board/Command/ScheduleBoardInstanceRuleCommand.php
+++ b/concrete/src/Board/Command/ScheduleBoardInstanceRuleCommand.php
@@ -21,6 +21,11 @@ class ScheduleBoardInstanceRuleCommand extends Command
/**
* @var string
*/
+ protected $name;
+
+ /**
+ * @var string
+ */
protected $startDate = '';
/**
@@ -161,10 +166,21 @@ class ScheduleBoardInstanceRuleCommand extends Command
$this->timezone = $timezone;
}
+ /**
+ * @return string
+ */
+ public function getName(): string
+ {
+ return $this->name;
+ }
-
-
-
+ /**
+ * @param string $name
+ */
+ public function setName($name): void
+ {
+ $this->name = $name;
+ }
}
diff --git a/concrete/src/Board/Command/ScheduleBoardInstanceRuleCommandHandler.php b/concrete/src/Board/Command/ScheduleBoardInstanceRuleCommandHandler.php
index <HASH>..<HASH> 100644
--- a/concrete/src/Board/Command/ScheduleBoardInstanceRuleCommandHandler.php
+++ b/concrete/src/Board/Command/ScheduleBoardInstanceRuleCommandHandler.php
@@ -40,6 +40,9 @@ class ScheduleBoardInstanceRuleCommandHandler
$rule->setStartDate($startDateTime->getTimestamp());
$rule->setEndDate($endDateTime->getTimestamp());
$rule->setSlot($command->getSlot());
+ if ($command->getName()) {
+ $rule->setNotes($command->getName());
+ }
$this->entityManager->persist($rule);
$this->entityManager->flush();
}
diff --git a/concrete/views/dialogs/boards/custom_slot.php b/concrete/views/dialogs/boards/custom_slot.php
index <HASH>..<HASH> 100644
--- a/concrete/views/dialogs/boards/custom_slot.php
+++ b/concrete/views/dialogs/boards/custom_slot.php
@@ -78,8 +78,12 @@ $date = Core::make('date')
<div v-show="currentStep == 'schedule'">
<form autocomplete="off">
<div class="mb-3">
+ <label class="form-label"><?=t('Name')?></label>
+ <input type="text" class="form-control" v-model="customSlotName">
+ </div>
+ <div class="mb-3">
<label class="form-label"><?=t('From')?></label>
- <div class="row mb-3">
+ <div class="row">
<div class="col-6">
<v-date-picker
:masks="{'input': 'YYYY-MM-DD'}"
@@ -178,6 +182,7 @@ $date = Core::make('date')
slot: this.slot,
startDate: this.startDateFormatted,
endDate: this.endDateFormatted,
+ name: this.customSlotName,
startTime: this.startTime,
endTime: this.endTime,
timezone: this.timezone,
@@ -284,6 +289,7 @@ $date = Core::make('date')
invalidSelectedElement: false,
startDate: '',
endDate: '',
+ customSlotName: '',
startTime: '00:00',
endTime: '23:59',
slot: <?=(int)$slot?>, | Adding name to frontend scheduler | concrete5_concrete5 | train |
80aef02eccc0f5c0d4805bacb72d018faea07157 | diff --git a/tests/test_compiler.py b/tests/test_compiler.py
index <HASH>..<HASH> 100644
--- a/tests/test_compiler.py
+++ b/tests/test_compiler.py
@@ -1,7 +1,7 @@
from enum import Enum
from importlib.machinery import SourceFileLoader
from os import environ, path
-from subprocess import Popen, PIPE
+from subprocess import Popen
from tempfile import NamedTemporaryFile, TemporaryDirectory
from unittest import TestCase
@@ -27,7 +27,7 @@ class TestCompiler(TestCase):
'--proto_path=' + path.dirname(self.proto_file.name),
self.proto_file.name
]
- proc = Popen(args, stdout=PIPE, stderr=PIPE, env=new_env)
+ proc = Popen(args, env=new_env)
proc.wait()
assert proc.returncode == 0 | Don't suppress protoc errors in tests to simplify debugging | Pr0Ger_protobuf3 | train |
acd13ced5b4e8789995eeb9dd150baed98ec47f1 | diff --git a/authemail/models.py b/authemail/models.py
index <HASH>..<HASH> 100644
--- a/authemail/models.py
+++ b/authemail/models.py
@@ -11,6 +11,8 @@ from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from django.core.mail import send_mail
+# Make part of the model eventually, so it can be edited
+EXPIRY_PERIOD = 3 # days
def _generate_code():
return binascii.hexlify(os.urandom(20)).decode('utf-8')
@@ -123,6 +125,9 @@ class PasswordResetCodeManager(models.Manager):
return password_reset_code
+ def get_expiry_period(self):
+ return EXPIRY_PERIOD
+
def send_multi_format_email(template_prefix, template_ctxt, target_email):
subject_file = 'authemail/%s_subject.txt' % template_prefix
diff --git a/authemail/tests.py b/authemail/tests.py
index <HASH>..<HASH> 100644
--- a/authemail/tests.py
+++ b/authemail/tests.py
@@ -1,4 +1,6 @@
import re
+from datetime import timedelta
+
from django.core import mail
from django.contrib.auth import get_user_model
from django.test.utils import override_settings
@@ -481,6 +483,27 @@ class PasswordTests(APITestCase):
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data['detail'], 'Unable to verify user.')
+ # Get a third code to lapse
+ url = reverse('authemail-password-reset')
+ payload = {
+ 'email': self.em_user,
+ }
+ self.client.post(url, payload)
+ password_reset_code = PasswordResetCode.objects.latest('code')
+ password_reset_code.created_at += timedelta(days=-(PasswordResetCode.objects.get_expiry_period()+1))
+ password_reset_code.save()
+ code_lapsed = password_reset_code.code
+
+ # Confirm password reset code_lapsed can't be used
+ url = reverse('authemail-password-reset-verify')
+ params = {
+ 'code': code_lapsed,
+ }
+ response = self.client.get(url, params)
+
+ self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+ self.assertEqual(response.data['detail'], 'Unable to verify user.')
+
# Confirm unable to log in with old password
url = reverse('authemail-login')
payload = {
diff --git a/authemail/views.py b/authemail/views.py
index <HASH>..<HASH> 100644
--- a/authemail/views.py
+++ b/authemail/views.py
@@ -1,3 +1,5 @@
+from datetime import date, timedelta
+
from django.conf import settings
from django.contrib.auth import authenticate, get_user_model
from django.utils.translation import gettext as _
@@ -155,7 +157,10 @@ class PasswordReset(APIView):
try:
user = get_user_model().objects.get(email=email)
+
+ # Delete all unused password reset codes
PasswordResetCode.objects.filter(user=user).delete()
+
if user.is_verified and user.is_active:
password_reset_code = \
PasswordResetCode.objects.create_reset_code(user)
@@ -182,7 +187,14 @@ class PasswordResetVerify(APIView):
code = request.GET.get('code', '')
try:
- PasswordResetCode.objects.get(code=code)
+ password_reset_code = PasswordResetCode.objects.get(code=code)
+
+ # Delete password reset code if older than expiry period
+ delta = date.today() - password_reset_code.created_at.date()
+ if delta.days > PasswordResetCode.objects.get_expiry_period():
+ password_reset_code.delete()
+ raise PasswordResetCode.DoesNotExist()
+
content = {'success': _('User verified.')}
return Response(content, status=status.HTTP_200_OK)
except PasswordResetCode.DoesNotExist:
@@ -205,7 +217,10 @@ class PasswordResetVerified(APIView):
password_reset_code = PasswordResetCode.objects.get(code=code)
password_reset_code.user.set_password(password)
password_reset_code.user.save()
- PasswordResetCode.objects.filter(code=code).delete()
+
+ # Delete password code just used
+ password_reset_code.delete()
+
content = {'success': _('Password reset.')}
return Response(content, status=status.HTTP_200_OK)
    except PasswordResetCode.DoesNotExist:	Delete expired password reset codes on attempted use	celiao_django-rest-authemail	train
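The verify view now ages reset codes against a fixed window before honouring them; the date arithmetic in isolation (EXPIRY_PERIOD as defined in the models diff, fixed dates used for reproducibility):

from datetime import date

EXPIRY_PERIOD = 3  # days

def is_expired(created_at, today):
    # strictly older than the expiry period -> delete and reject
    return (today - created_at).days > EXPIRY_PERIOD

assert is_expired(date(2024, 1, 1), today=date(2024, 1, 5))        # 4 days old
assert not is_expired(date(2024, 1, 2), today=date(2024, 1, 5))    # exactly 3 days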
765805159440148d553501e748afd57a25102e4b | diff --git a/moto/cloudwatch/responses.py b/moto/cloudwatch/responses.py
index <HASH>..<HASH> 100644
--- a/moto/cloudwatch/responses.py
+++ b/moto/cloudwatch/responses.py
@@ -76,7 +76,7 @@ class CloudWatchResponse(BaseResponse):
dimensions = self._get_list_prefix("Dimensions.member")
alarm_actions = self._get_multi_param("AlarmActions.member")
ok_actions = self._get_multi_param("OKActions.member")
- actions_enabled = self._get_param("ActionsEnabled")
+ actions_enabled = self._get_bool_param("ActionsEnabled")
insufficient_data_actions = self._get_multi_param(
"InsufficientDataActions.member"
)
@@ -354,7 +354,7 @@ DESCRIBE_ALARMS_TEMPLATE = """<DescribeAlarmsResponse xmlns="http://monitoring.a
<{{tag_name}}>
{% for alarm in alarms %}
<member>
- <ActionsEnabled>{{ alarm.actions_enabled }}</ActionsEnabled>
+ <ActionsEnabled>{{ "true" if alarm.actions_enabled else "false" }}</ActionsEnabled>
<AlarmActions>
{% for action in alarm.alarm_actions %}
<member>{{ action }}</member>
@@ -480,7 +480,7 @@ DESCRIBE_METRIC_ALARMS_TEMPLATE = """<DescribeAlarmsForMetricResponse xmlns="htt
<MetricAlarms>
{% for alarm in alarms %}
<member>
- <ActionsEnabled>{{ alarm.actions_enabled }}</ActionsEnabled>
+ <ActionsEnabled>{{ "true" if alarm.actions_enabled else "false" }}</ActionsEnabled>
<AlarmActions>
{% for action in alarm.alarm_actions %}
<member>{{ action }}</member>
diff --git a/tests/test_cloudwatch/test_cloudwatch_alarms.py b/tests/test_cloudwatch/test_cloudwatch_alarms.py
index <HASH>..<HASH> 100644
--- a/tests/test_cloudwatch/test_cloudwatch_alarms.py
+++ b/tests/test_cloudwatch/test_cloudwatch_alarms.py
@@ -50,6 +50,8 @@ def test_create_alarm():
alarm.should.have.key("AlarmArn").equal(
"arn:aws:cloudwatch:{}:{}:alarm:{}".format(region, ACCOUNT_ID, name)
)
+ # default value should be True
+ alarm.should.have.key("ActionsEnabled").equal(True)
@mock_cloudwatch
@@ -111,8 +113,9 @@ def test_describe_alarms_for_metric():
)
alarms = conn.describe_alarms_for_metric(MetricName="cpu", Namespace="blah")
alarms.get("MetricAlarms").should.have.length_of(1)
-
- assert "testalarm1" in alarms.get("MetricAlarms")[0].get("AlarmArn")
+ alarm = alarms.get("MetricAlarms")[0]
+ assert "testalarm1" in alarm.get("AlarmArn")
+ alarm.should.have.key("ActionsEnabled").equal(True)
@mock_cloudwatch
@@ -127,7 +130,7 @@ def test_describe_alarms():
Statistic="Average",
Threshold=2,
ComparisonOperator="GreaterThanThreshold",
- ActionsEnabled=True,
+ ActionsEnabled=False,
)
metric_data_queries = [
{
@@ -190,6 +193,7 @@ def test_describe_alarms():
single_metric_alarm["Statistic"].should.equal("Average")
single_metric_alarm["ComparisonOperator"].should.equal("GreaterThanThreshold")
single_metric_alarm["Threshold"].should.equal(2)
+ single_metric_alarm["ActionsEnabled"].should.equal(False)
multiple_metric_alarm.shouldnt.have.property("MetricName")
multiple_metric_alarm["EvaluationPeriods"].should.equal(1)
@@ -197,6 +201,7 @@ def test_describe_alarms():
multiple_metric_alarm["Metrics"].should.equal(metric_data_queries)
multiple_metric_alarm["ComparisonOperator"].should.equal("GreaterThanThreshold")
multiple_metric_alarm["Threshold"].should.equal(1.0)
+ multiple_metric_alarm["ActionsEnabled"].should.equal(True)
@mock_cloudwatch
@@ -243,7 +248,7 @@ def test_alarm_state():
len(resp["MetricAlarms"]).should.equal(1)
resp["MetricAlarms"][0]["AlarmName"].should.equal("testalarm2")
resp["MetricAlarms"][0]["StateValue"].should.equal("OK")
- resp["MetricAlarms"][0]["ActionsEnabled"].should.equal(False)
+ resp["MetricAlarms"][0]["ActionsEnabled"].should.equal(True)
# Just for sanity
resp = client.describe_alarms() | parse 'ActionsEnabled' as boolean for describe-alarms (#<I>) | spulec_moto | train |
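The fix reads ActionsEnabled with a boolean-aware getter and renders it back as lowercase text; a small sketch of that string-to-bool round-trip (hypothetical helpers, not moto's implementation):

def get_bool_param(params, key, default=None):
    value = params.get(key)
    if value is None:
        return default
    return value.lower() == 'true'    # querystring booleans arrive as text

def render_bool(value):
    return 'true' if value else 'false'   # what the XML template should emit

params = {'ActionsEnabled': 'false'}
assert get_bool_param(params, 'ActionsEnabled') is False
assert render_bool(get_bool_param(params, 'ActionsEnabled')) == 'false'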
99e2ded4a98f22721c4d3bff5fc70cc125b13b5d | diff --git a/Gemfile.lock b/Gemfile.lock
index <HASH>..<HASH> 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -1,7 +1,7 @@
PATH
remote: .
specs:
- attachs (0.3.4)
+ attachs (0.3.5)
rails (>= 3.0.0)
GEM
diff --git a/lib/attachs/types/file.rb b/lib/attachs/types/file.rb
index <HASH>..<HASH> 100644
--- a/lib/attachs/types/file.rb
+++ b/lib/attachs/types/file.rb
@@ -68,17 +68,17 @@ module Attachs
def path(*args)
return nil if deleted?
- stored? ? destination_path(*args) : upload.path.to_s
+ (stored? ? destination_path(*args) : upload.path).to_s
end
def realpath(*args)
return nil if deleted? or storage_type == :s3
- stored? ? storage.realpath(path(*args)) : upload.path.to_s
+ (stored? ? storage.realpath(path(*args)) : upload.path).to_s
end
def url(*args)
return nil if deleted? or !stored?
- storage.url path(*args)
+ storage.url(path(*args)).to_s
end
def store
diff --git a/lib/attachs/version.rb b/lib/attachs/version.rb
index <HASH>..<HASH> 100644
--- a/lib/attachs/version.rb
+++ b/lib/attachs/version.rb
@@ -1,5 +1,5 @@
module Attachs
- VERSION = '0.3.4'
+ VERSION = '0.3.5'
end	Force paths and URLs to always be strings	museways_attachs	train
624cff239e1b6ed30ecfec7adef945b2aefb489f | diff --git a/salt/grains/zfs.py b/salt/grains/zfs.py
index <HASH>..<HASH> 100644
--- a/salt/grains/zfs.py
+++ b/salt/grains/zfs.py
@@ -70,10 +70,8 @@ def _zfs_support():
on_supported_platform = _check_retcode('ls /sys/module/zfs')
# NOTE: fallback to zfs-fuse if needed
- if not on_supported_platform:
- _zfs_fuse = lambda f: __salt__['service.' + f]('zfs-fuse')
- if _zfs_fuse('available') and (_zfs_fuse('status') or _zfs_fuse('start')):
- on_supported_platform = True
+ if not on_supported_platform and salt.utils.path.which('zfs-fuse'):
+ on_supported_platform = True
# Additional check for the zpool command
if on_supported_platform and salt.utils.path.which('zpool'): | Simpler fallback check for zfs-fuse.
This should fix #<I> | saltstack_salt | train |
181ea9d56224029baee451781e144b6a7bfc94ff | diff --git a/packages/lib/examples/complex/index.js b/packages/lib/examples/complex/index.js
index <HASH>..<HASH> 100644
--- a/packages/lib/examples/complex/index.js
+++ b/packages/lib/examples/complex/index.js
@@ -63,7 +63,7 @@ async function deployVersion2(appManager, donations, txParams) {
async function getStdLib(txParams) {
// Use deployed standard library, or simulate one in local networks.
- if(!network || network === 'development') {
+ if(!network || network === 'local') {
const stdlib = await ContractDirectory.new(txParams);
const tokenImplementation = await MintableERC721Token.new();
await stdlib.setImplementation(tokenClass, tokenImplementation.address, txParams);
diff --git a/packages/lib/examples/complex/package.json b/packages/lib/examples/complex/package.json
index <HASH>..<HASH> 100644
--- a/packages/lib/examples/complex/package.json
+++ b/packages/lib/examples/complex/package.json
@@ -6,7 +6,7 @@
"scripts": {
"compile": "rm -rf build && npx truffle compile",
"start": "npm run deploy",
- "deploy": "npm run compile && npx truffle exec index.js --network development",
+ "deploy": "npm run compile && npx truffle exec index.js --network local",
"deploy_ropsten": "npm run compile && npx truffle exec index.js --network ropsten",
"test": "npm run compile && npx truffle test"
},
diff --git a/packages/lib/examples/complex/truffle.js b/packages/lib/examples/complex/truffle.js
index <HASH>..<HASH> 100644
--- a/packages/lib/examples/complex/truffle.js
+++ b/packages/lib/examples/complex/truffle.js
@@ -8,7 +8,7 @@ module.exports = {
network_id: '3', // eslint-disable-line camelcase
port: 8565
},
- development: {
+ local: {
host: 'localhost',
network_id: '*', // eslint-disable-line camelcase
    port: 8545 | Tests can be run without having to start a local testrpc | zeppelinos_zos | train
dc7ef8099cdf421feaa3f07289134b5ee1a70c7c | diff --git a/src/resources/assets/react/admin-list/list/HCAdminListCore.js b/src/resources/assets/react/admin-list/list/HCAdminListCore.js
index <HASH>..<HASH> 100644
--- a/src/resources/assets/react/admin-list/list/HCAdminListCore.js
+++ b/src/resources/assets/react/admin-list/list/HCAdminListCore.js
@@ -1,5 +1,4 @@
import React, {Component} from 'react';
-
const uuid = require('uuid/v4');
export default class HCAdminListCore extends Component {
@@ -21,6 +20,8 @@ export default class HCAdminListCore extends Component {
listHeight: {}
};
+ this.bottomMargin = 345;
+
this.handleResize = this.handleResize.bind(this);
}
@@ -37,7 +38,7 @@ export default class HCAdminListCore extends Component {
calculateListHeight (height)
{
- height -= 345;
+ height -= this.bottomMargin;
this.setState({listHeight:height});
}
}
\ No newline at end of file | Bottom margin can be set for each list independently | honey-comb_core | train
0fb8d60f2ddc0a1d144714c0a1f8c20587f0d59f | diff --git a/CHANGELOG b/CHANGELOG
index <HASH>..<HASH> 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -1,3 +1,5 @@
+0.1.1
+* SSL certificates are now verified by default (use :openssl_verify_mode => 'none' to disable) per @codahale http://bit.ly/kZr9Jc
0.1.0
* no longer tries to open file with sudo if it gets a permission error
* remove Eat::Config.timeout global option
diff --git a/README.rdoc b/README.rdoc
index <HASH>..<HASH> 100644
--- a/README.rdoc
+++ b/README.rdoc
@@ -15,19 +15,22 @@ Try <tt>#eat</tt>, which ALWAYS returns a <tt>String</tt>:
==Options
- eat('http://yahoo.com', :timeout => 10) # timeout after 10 seconds
- eat('http://yahoo.com', :limit => 1024) # only read the first 1024 chars
+ eat('http://yahoo.com', :timeout => 10) # timeout after 10 seconds
+ eat('http://yahoo.com', :limit => 1024) # only read the first 1024 chars
+  eat('https://yahoo.com', :openssl_verify_mode => 'none')  # don't bother verifying SSL certificate
-==Warning: doesn't verify SSL certs
+==Warning: DOES verify SSL certs
-If you need to check SSL certificates, please don't use this gem. It always sets
+If you want to disable verification of SSL certificates, use
- http.verify_mode = ::OpenSSL::SSL::VERIFY_NONE
+ :openssl_verify_mode => 'none'
+
+Thanks @codahale and @peterc for their suggestions.
==Supported schemas
* local filesystem
* http
-* https (it won't check the SSL certificate... if you want security, don't use this!)
+* https
Copyright 2011 Seamus Abshere
diff --git a/lib/eat.rb b/lib/eat.rb
index <HASH>..<HASH> 100644
--- a/lib/eat.rb
+++ b/lib/eat.rb
@@ -13,6 +13,7 @@ module Eat
# Options:
# * <tt>:timeout</tt> in seconds
# * <tt>:limit</tt> is characters (bytes in Ruby 1.8)
+ # * <tt>:openssl_verify_mode</tt> set to 'none' if you don't want to verify SSL certificates
#
# Example:
# eat('http://brighterplanet.com') #=> '...'
@@ -21,6 +22,8 @@ module Eat
def eat(url, options = {})
timeout = options[:timeout] || options['timeout'] || 2
limit = options[:limit] || options['limit'] || ::Infinity
+ openssl_verify_mode = options[:openssl_verify_mode] || options['openssl_verify_mode']
+
uri = ::URI.parse url.to_s
body = []
@@ -45,8 +48,7 @@ module Eat
http = ::Net::HTTP.new uri.host, uri.port
if uri.scheme == 'https'
http.use_ssl = true
- # if you were trying to be real safe, you wouldn't use this library
- http.verify_mode = ::OpenSSL::SSL::VERIFY_NONE
+ http.verify_mode = ::OpenSSL::SSL::VERIFY_NONE if openssl_verify_mode.to_s == 'none'
end
http.start do |session|
catch :stop do
diff --git a/lib/eat/version.rb b/lib/eat/version.rb
index <HASH>..<HASH> 100644
--- a/lib/eat/version.rb
+++ b/lib/eat/version.rb
@@ -1,3 +1,3 @@
module Eat
- VERSION = "0.1.0"
+ VERSION = "0.1.1"
end
diff --git a/test/test_eat.rb b/test/test_eat.rb
index <HASH>..<HASH> 100644
--- a/test/test_eat.rb
+++ b/test/test_eat.rb
@@ -52,4 +52,22 @@ class TestEat < Test::Unit::TestCase
assert_equal 'Use', eat(::URI.parse('http://brighterplanet.com/robots.txt'), :timeout => 10, :limit => 3)
assert_equal 'User-', eat(::URI.parse('http://brighterplanet.com/robots.txt'), :timeout => 10, :limit => 5)
end
+
+ def test_ssl
+ assert_nothing_raised do
+ eat 'https://brighterplanet.com'
+ end
+ end
+
+ def test_openssl_verify_on_by_default
+ assert_raises(OpenSSL::SSL::SSLError) do
+ eat 'https://foo.bar.brighterplanet.com'
+ end
+ end
+
+ def test_disable_openssl_verify
+ assert_nothing_raised do
+ eat 'https://foo.bar.brighterplanet.com', :openssl_verify_mode => 'none'
+ end
+ end
end | change of heart: verify SSL by default (thanks @codahale and @peterc) | seamusabshere_eat | train |
e940724006f4135732b3e362f2fa54939e3f1ad4 | diff --git a/pypd/models/incident.py b/pypd/models/incident.py
index <HASH>..<HASH> 100644
--- a/pypd/models/incident.py
+++ b/pypd/models/incident.py
@@ -8,7 +8,7 @@ from .entity import Entity
from .log_entry import LogEntry
from .note import Note
from .alert import Alert
-from ..errors import MissingFromEmail
+from ..errors import InvalidArguments, MissingFromEmail
class Incident(Entity):
@@ -64,6 +64,42 @@ class Incident(Entity):
data=data,)
return result
+ def reassign(self, from_email, user_ids):
+ """Reassign an incident to other users using a valid email address."""
+ endpoint = '/'.join((self.endpoint, self.id,))
+
+ if from_email is None or not isinstance(from_email, six.string_types):
+ raise MissingFromEmail(from_email)
+
+ if user_ids is None or not isinstance(user_ids, list):
+ raise InvalidArguments(user_ids)
+ if not all([isinstance(i, six.string_types) for i in user_ids]):
+ raise InvalidArguments(user_ids)
+
+ assignees = [
+ {
+ 'assignee': {
+ 'id': user_id,
+ 'type': 'user_reference',
+ }
+ }
+ for user_id in user_ids
+ ]
+
+ add_headers = {'from': from_email, }
+ data = {
+ 'incident': {
+ 'type': 'incident',
+ 'assignments': assignees,
+ }
+ }
+
+ result = self.request('PUT',
+ endpoint=endpoint,
+ add_headers=add_headers,
+ data=data,)
+ return result
+
def log_entries(self, time_zone='UTC', is_overview=False,
include=None, fetch_all=True):
"""Query for log entries on an incident instance."""
diff --git a/test/unit/models/incident.py b/test/unit/models/incident.py
index <HASH>..<HASH> 100644
--- a/test/unit/models/incident.py
+++ b/test/unit/models/incident.py
@@ -13,7 +13,7 @@ from operator import itemgetter
import requests_mock
from pypd import Incident
-from pypd.errors import MissingFromEmail
+from pypd.errors import InvalidArguments, MissingFromEmail
def chunks(l, n):
@@ -55,7 +55,7 @@ class IncidentTestCase(unittest.TestCase):
@requests_mock.Mocker()
def test_resolve_invalid_from_email(self, m):
- """Coverge for using an invalid (cheaply validated) from email."""
+ """Coverage for using an invalid (cheaply validated) from email."""
query = {
'limit': 1,
'offset': 0,
@@ -98,6 +98,46 @@ class IncidentTestCase(unittest.TestCase):
self.assertEqual(incident['id'], response['incidents'][0]['id'])
@requests_mock.Mocker()
+ def test_reassign_invalid_from_email(self, m):
+ """Coverage for using an invalid (cheaply validated) from email."""
+ query = {
+ 'limit': 1,
+ 'offset': 0,
+ }
+ url = self.url + '?{}'.format(urlencode(query))
+ m.register_uri('GET', url, json=self.query_datas[0], complete_qs=True)
+ incident = Incident.find_one(api_key=self.api_key)
+
+ with self.assertRaises(MissingFromEmail):
+ incident.reassign(None, ['foo'])
+ with self.assertRaises(MissingFromEmail):
+ incident.reassign(1, ['foo'])
+ with self.assertRaises(MissingFromEmail):
+ incident.reassign(incident, ['foo'])
+
+ @requests_mock.Mocker()
+ def test_reassign_invalid_user_id(self, m):
+ """Coverage for using an invalid (cheaply validated) assignee."""
+ query = {
+ 'limit': 1,
+ 'offset': 0,
+ }
+ url = self.url + '?{}'.format(urlencode(query))
+ m.register_uri('GET', url, json=self.query_datas[0], complete_qs=True)
+ incident = Incident.find_one(api_key=self.api_key)
+
+ with self.assertRaises(InvalidArguments):
+ incident.reassign('jdc@pagerduty.com', None)
+ with self.assertRaises(InvalidArguments):
+ incident.reassign('jdc@pagerduty.com', 1)
+ with self.assertRaises(InvalidArguments):
+ incident.reassign('jdc@pagerduty.com', 'foo')
+ with self.assertRaises(InvalidArguments):
+ incident.reassign('jdc@pagerduty.com', [None])
+ with self.assertRaises(InvalidArguments):
+ incident.reassign('jdc@pagerduty.com', [1])
+
+ @requests_mock.Mocker()
def test_snooze_invalid_from_email(self, m):
"""Coverage for invalid from email for snooze."""
query = { | Add ability to reassign incidents to other people | PagerDuty_pagerduty-api-python-client | train |
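Hypothetical caller-side usage of the new `reassign` method; the API key, email address, and user ids are placeholders:

```python
from pypd import Incident
from pypd.errors import InvalidArguments, MissingFromEmail

incident = Incident.find_one(api_key="API_KEY")
try:
    # assignees must be a list of user-id strings; the From email is required
    incident.reassign("ops@example.com", ["PUSR001", "PUSR002"])
except (MissingFromEmail, InvalidArguments) as err:
    print("bad input:", err)
```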
d2fe66d8e5132df5b6344895cd3ebccbd88f7a32 | diff --git a/jobs/github-event/integration_installation/created.js b/jobs/github-event/integration_installation/created.js
index <HASH>..<HASH> 100644
--- a/jobs/github-event/integration_installation/created.js
+++ b/jobs/github-event/integration_installation/created.js
@@ -30,7 +30,7 @@ module.exports = async function ({ installation }) {
github.authenticate({ type: 'token', token })
// getting installation repos from github
- let res = await github.integrations.getInstallationRepositories({
+ let res = await github.apps.getInstallationRepositories({
per_page: 100
})
let { repositories } = res.data
diff --git a/lib/get-token.js b/lib/get-token.js
index <HASH>..<HASH> 100644
--- a/lib/get-token.js
+++ b/lib/get-token.js
@@ -41,7 +41,7 @@ async function getToken (iss) {
type: 'integration',
token
})
- const result = (await github.integrations.createInstallationToken({
+ const result = (await github.apps.createInstallationToken({
installation_id: iss
})).data | fix(github): update deprecated method calls | greenkeeperio_greenkeeper | train |
72855d4d7a932daa0434dd6322f7cc7c714c8409 | diff --git a/src/main/java/org/owasp/dependencycheck/Engine.java b/src/main/java/org/owasp/dependencycheck/Engine.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/owasp/dependencycheck/Engine.java
+++ b/src/main/java/org/owasp/dependencycheck/Engine.java
@@ -209,17 +209,15 @@ public class Engine {
final List<Analyzer> analyzerList = analyzers.get(phase);
for (Analyzer a : analyzerList) {
- final Iterator<Dependency> itrDependencies = dependencies.iterator();
- while (itrDependencies.hasNext()) {
- final Dependency d = itrDependencies.next();
+ //need to create a copy of the collection because some of the
+ // analyzers may modify it. This prevents ConcurrentModificationExceptions.
+ final Set<Dependency> dependencySet = new HashSet<Dependency>();
+ dependencySet.addAll(dependencies);
+ for (Dependency d : dependencySet) {
if (a.supportsExtension(d.getFileExtension())) {
try {
a.analyze(d, this);
- //the following is mainly to deal with the DependencyBundlingAnalyzer
- if (a.getPostAnalysisAction() == Analyzer.PostAnalysisAction.REMOVE_DEPENDENCY) {
- itrDependencies.remove();
- }
- } catch (AnalysisException ex) {
+ } catch (AnalysisException ex) {
d.addAnalysisException(ex);
}
}
diff --git a/src/main/java/org/owasp/dependencycheck/analyzer/AbstractAnalyzer.java b/src/main/java/org/owasp/dependencycheck/analyzer/AbstractAnalyzer.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/owasp/dependencycheck/analyzer/AbstractAnalyzer.java
+++ b/src/main/java/org/owasp/dependencycheck/analyzer/AbstractAnalyzer.java
@@ -58,13 +58,4 @@ public abstract class AbstractAnalyzer implements Analyzer {
public void close() {
//do nothing
}
-
- /**
- * Used to indicate if any steps should be taken after the analysis. The
- * abstract implementation returns NOTHING.
- * @return NOTHING
- */
- public PostAnalysisAction getPostAnalysisAction() {
- return PostAnalysisAction.NOTHING;
- }
}
diff --git a/src/main/java/org/owasp/dependencycheck/analyzer/Analyzer.java b/src/main/java/org/owasp/dependencycheck/analyzer/Analyzer.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/owasp/dependencycheck/analyzer/Analyzer.java
+++ b/src/main/java/org/owasp/dependencycheck/analyzer/Analyzer.java
@@ -99,23 +99,4 @@ public interface Analyzer {
* @throws Exception is thrown if an exception occurs closing the analyzer.
*/
void close() throws Exception;
-
- /**
- * An enumeration of Post Analysis Actions.
- */
- public enum PostAnalysisAction {
- /**
- * No action should be taken.
- */
- NOTHING,
- /**
- * The dependency should be removed from the list of dependencies scanned.
- */
- REMOVE_DEPENDENCY
- }
- /**
- * Returns the post analysis action.
- * @return the post analysis action
- */
- PostAnalysisAction getPostAnalysisAction();
}
diff --git a/src/main/java/org/owasp/dependencycheck/data/cpe/CPEAnalyzer.java b/src/main/java/org/owasp/dependencycheck/data/cpe/CPEAnalyzer.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/owasp/dependencycheck/data/cpe/CPEAnalyzer.java
+++ b/src/main/java/org/owasp/dependencycheck/data/cpe/CPEAnalyzer.java
@@ -512,12 +512,4 @@ public class CPEAnalyzer implements Analyzer {
public void initialize() throws Exception {
this.open();
}
- /**
- * Used to indicate if any steps should be taken after the analysis. The
- * abstract implementation returns NOTHING.
- * @return NOTHING
- */
- public PostAnalysisAction getPostAnalysisAction() {
- return PostAnalysisAction.NOTHING;
- }
}
diff --git a/src/main/java/org/owasp/dependencycheck/data/nvdcve/NvdCveAnalyzer.java b/src/main/java/org/owasp/dependencycheck/data/nvdcve/NvdCveAnalyzer.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/owasp/dependencycheck/data/nvdcve/NvdCveAnalyzer.java
+++ b/src/main/java/org/owasp/dependencycheck/data/nvdcve/NvdCveAnalyzer.java
@@ -159,13 +159,4 @@ public class NvdCveAnalyzer implements Analyzer {
public void initialize() throws Exception {
this.open();
}
-
- /**
- * Used to indicate if any steps should be taken after the analysis. The
- * abstract implementation returns NOTHING.
- * @return NOTHING
- */
- public PostAnalysisAction getPostAnalysisAction() {
- return PostAnalysisAction.NOTHING;
- }
} | removed unnecessary functionality from all analyzers and the base engine
Former-commit-id: e6b<I>ff<I>ced2a<I>d<I>b<I>d<I>b<I>db | jeremylong_DependencyCheck | train |
9f38ef5600d9cf34ae54ae1b456e69426c7df9a4 | diff --git a/gem/lib/frank-cucumber/frankifier.rb b/gem/lib/frank-cucumber/frankifier.rb
index <HASH>..<HASH> 100644
--- a/gem/lib/frank-cucumber/frankifier.rb
+++ b/gem/lib/frank-cucumber/frankifier.rb
@@ -19,10 +19,15 @@ class Frankifier
decide_on_project
decide_on_target
report_project_and_target
+
+ check_target_build_configuration_is_valid!
+
say ''
add_linker_flag
+
say ''
add_library_search_path
+
save_changes
end
@@ -78,11 +83,11 @@ class Frankifier
setting_array = Array( build_settings_to_edit[build_setting] )
if setting_array.find{ |flag| flag.start_with? "$(FRANK_" }
- say "It appears that your Debug configuration's #{build_setting} build setting already include some FRANK setup. Namely: #{setting_array.inspect}. I won't change anything here."
+ say "It appears that your '#{@target_build_configuration}' configuration's #{build_setting} build setting already include some FRANK setup. Namely: #{setting_array.inspect}. I won't change anything here."
return
end
- say "Adding $(inherited) and $(#{entry_to_add}) to your Debug configuration's #{build_setting} build setting ..."
+ say "Adding $(inherited) and $(#{entry_to_add}) to your '#{@target_build_configuration}' configuration's #{build_setting} build setting ..."
setting_array.unshift "$(inherited)"
setting_array << "$(#{entry_to_add})"
setting_array.uniq! # mainly to avoid duplicate $(inherited) entries
@@ -91,6 +96,19 @@ class Frankifier
build_settings_to_edit[build_setting] = setting_array
end
+ def check_target_build_configuration_is_valid!
+ unless @target.build_configuration_list.build_configurations.object_named @target_build_configuration
+      say %Q|I'm trying to Frankify the '#{@target_build_configuration}' build configuration, but I don't see that build configuration in your XCode target. Here's a list of the build configurations I see:|
+ @target.build_configuration_list.build_configurations.each do |bc|
+ say " '#{bc.name}'"
+ end
+ say ''
+ say %Q|Please specify one of those build configurations using the --build_configuration flag|
+ exit
+ end
+
+ end
+
def build_settings_to_edit
@_build_settings_to_edit ||= @target.build_configuration_list.build_settings(@target_build_configuration)
end
diff --git a/gem/lib/frank-cucumber/version.rb b/gem/lib/frank-cucumber/version.rb
index <HASH>..<HASH> 100644
--- a/gem/lib/frank-cucumber/version.rb
+++ b/gem/lib/frank-cucumber/version.rb
@@ -1,5 +1,5 @@
module Frank
module Cucumber
- VERSION = "0.9.6"
+ VERSION = "0.9.7"
end
end | Fail gracefully when we cannot find the expected XCode build configuration during frank setup | moredip_Frank | train
caa32bbe536c298643ab19cfda7477241fe152c2 | diff --git a/Helper/Xpath.php b/Helper/Xpath.php
index <HASH>..<HASH> 100644
--- a/Helper/Xpath.php
+++ b/Helper/Xpath.php
@@ -65,7 +65,7 @@ class Xpath
$literal = $selectorsHandler->xpathLiteral( $search );
return $selectorsHandler
- ->getSelector( 'named' )
+ ->getSelector( 'named_partial' )
->translateToXPath( array( $element, $literal ) );
} | Fix Mink <I> compatability issue | ezsystems_BehatBundle | train |
82270500358bcae4dffbbcd0bee2ad595f091687 | diff --git a/allauth/socialaccount/providers/facebook/static/facebook/js/fbconnect.js b/allauth/socialaccount/providers/facebook/static/facebook/js/fbconnect.js
index <HASH>..<HASH> 100644
--- a/allauth/socialaccount/providers/facebook/static/facebook/js/fbconnect.js
+++ b/allauth/socialaccount/providers/facebook/static/facebook/js/fbconnect.js
@@ -33,9 +33,9 @@
xfbml : true
});
allauth.facebook.login = function(nextUrl, action, process) {
- if (action == 'reauthenticate') {
- opts.loginOptions.auth_type = action;
- }
+ if (action == 'reauthenticate') {
+ opts.loginOptions.auth_type = action;
+ }
FB.login(function(response) {
if (response.authResponse) {
postForm(opts.loginByTokenUrl,
@@ -45,12 +45,17 @@
['expires_in', response.authResponse.expiresIn]]);
} else {
var next;
- if (response && response.status && response.status == "notConnected") {
+ if (response && response.status && ["not_authorized", "unknown"].indexOf(response.status) > -1) {
next = opts.cancelUrl;
} else {
next = opts.errorUrl;
}
- window.location.href = next;
+
+ if (typeof(next) == "function") {
+ next();
+ } else {
+ window.location.href = next;
+ }
}
}, opts.loginOptions);
}; | fixed facebook response.status values and added callback support for cancelUrl and errorUrl | pennersr_django-allauth | train |
1b37dd019973c932e21610da1098cb5221a1e760 | diff --git a/cppimport/import_hook.py b/cppimport/import_hook.py
index <HASH>..<HASH> 100644
--- a/cppimport/import_hook.py
+++ b/cppimport/import_hook.py
@@ -155,7 +155,10 @@ def build_plugin(full_module_name, filepath):
full_module_name,
sources = [temp_filepath],
language = 'c++',
- include_dirs = [pybind11.get_include()],
+ include_dirs = [
+ pybind11.get_include(),
+ pybind11.get_include(True)
+ ],
extra_compile_args = [
'-std=c++11', '-Wall', '-Werror'
]
@@ -212,6 +215,9 @@ def find_module_cpppath(modulename):
if not os.path.exists(d):
continue
+ if os.path.isfile(d):
+ continue
+
for f in os.listdir(d):
if f == modulefilename:
return os.path.join(d, f)
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -6,7 +6,7 @@ setup(
zip_safe = False,
name = 'cppimport',
- version = '0.0.5',
+ version = '0.0.6',
description = 'Import C++ files directly from Python!',
long_description = """cppimport is a small import hook that determines whether there is a C++ source file that matches the requested module. If there is, the file is compiled as a Python extension using pybind11 and placed in the same folder as the C++ source file. Python is then able to find the module and load it.
""", | Fixed pybind<I> include path, and os.path.isfile for eggs | tbenthompson_cppimport | train |
b7413510162c9a41bc9db53fd06cdb3e03f1189b | diff --git a/Resources/public/js/utils/Array.js b/Resources/public/js/utils/Array.js
index <HASH>..<HASH> 100644
--- a/Resources/public/js/utils/Array.js
+++ b/Resources/public/js/utils/Array.js
@@ -16,4 +16,8 @@ Array.prototype.inArray = function(value) {
return true;
}
return false;
-}
\ No newline at end of file
+}
+
+Array.prototype.diff = function(part) {
+ return this.filter(function(element) {return part.indexOf(element) < 0;});
+};
\ No newline at end of file | [PortfolioBundle] Add new Array method to get diff with another array | claroline_Distribution | train
94e764ce9d6fdb3184a182fec196be7188a76dd9 | diff --git a/src/util/Profiler.php b/src/util/Profiler.php
index <HASH>..<HASH> 100755
--- a/src/util/Profiler.php
+++ b/src/util/Profiler.php
@@ -44,7 +44,7 @@ class Profiler
static $counter = 0;
// Create a unique token based on the counter
- $token = 'kp/' . \base_convert($counter++, 10, 32);
+ $token = 'kp/' . \base_convert((string)$counter++, 10, 32);
Profiler::$_marks[$token] = [
self::GROUP => \strtolower($group),
diff --git a/src/web/Block.php b/src/web/Block.php
index <HASH>..<HASH> 100755
--- a/src/web/Block.php
+++ b/src/web/Block.php
@@ -295,7 +295,7 @@ class Block
// Capture the view output
\ob_start();
- \ob_implicit_flush(0);
+ //\ob_implicit_flush(0);
try {
diff --git a/src/web/Controller.php b/src/web/Controller.php
index <HASH>..<HASH> 100755
--- a/src/web/Controller.php
+++ b/src/web/Controller.php
@@ -54,13 +54,14 @@ class Controller
if (\array_key_exists($name, $params)) {
$is_valid = true;
- if ($param->isArray()) {
+ $type = $param->getType();
+
+ if ($type && $type->getName() === 'array') {
$params[$name] = (array) $params[$name];
} elseif (\is_array($params[$name])) {
$is_valid = false;
} elseif (
- ($type = $param->getType()) !== null &&
- $type->isBuiltin() &&
+ $type && $type->isBuiltin() &&
($params[$name] !== null || !$type->allowsNull())
) {
$type_name = $type->getName();
diff --git a/src/web/ErrorHandler.php b/src/web/ErrorHandler.php
index <HASH>..<HASH> 100644
--- a/src/web/ErrorHandler.php
+++ b/src/web/ErrorHandler.php
@@ -72,7 +72,7 @@ class ErrorHandler extends \mii\core\ErrorHandler
{
$__params['handler'] = $this;
\ob_start();
- \ob_implicit_flush(0);
+ //\ob_implicit_flush(false);
\extract($__params, \EXTR_OVERWRITE);
require $__file;
        return \ob_get_clean(); | Why do we still need 'ob_implicit_flush(0)'? Commented it out, and fixed some small PHP 8 compatibility issues | levmorozov_mii | train
04380c33d4e223e1c171191d8cd601e91fbd31a2 | diff --git a/src/Sulu/Bundle/ContactBundle/Controller/ActivityController.php b/src/Sulu/Bundle/ContactBundle/Controller/ActivityController.php
index <HASH>..<HASH> 100644
--- a/src/Sulu/Bundle/ContactBundle/Controller/ActivityController.php
+++ b/src/Sulu/Bundle/ContactBundle/Controller/ActivityController.php
@@ -29,6 +29,7 @@ use Hateoas\Representation\CollectionRepresentation;
use Sulu\Component\Rest\ListBuilder\DoctrineListBuilderFactory;
use Sulu\Component\Rest\ListBuilder\ListRepresentation;
use Sulu\Component\Rest\ListBuilder\FieldDescriptor\DoctrineFieldDescriptor;
+use Sulu\Component\Rest\ListBuilder\FieldDescriptor\DoctrineJoinDescriptor;
/**
* Makes activities available through a REST API
@@ -79,21 +80,30 @@ class ActivityController extends RestController implements ClassResourceInterfac
$this->fieldDescriptors['activityStatus'] = new DoctrineFieldDescriptor(
'name', 'activityStatus', self::$activityStatusEntityName,
array(
- self::$activityStatusEntityName => self::$entityName . '.activityStatus'
+ self::$activityStatusEntityName => new DoctrineJoinDescriptor(
+ self::$activityStatusEntityName,
+ self::$entityName . '.activityStatus'
+ )
)
);
$this->fieldDescriptors['activityPriority'] = new DoctrineFieldDescriptor(
'name', 'activityPriority', self::$activityPriorityEntityName,
array(
- self::$activityPriorityEntityName => self::$entityName . '.activityPriority'
+ self::$activityPriorityEntityName => new DoctrineJoinDescriptor(
+ self::$activityPriorityEntityName,
+ self::$entityName . '.activityPriority'
+ )
)
);
$this->fieldDescriptors['activityType'] = new DoctrineFieldDescriptor(
'name', 'activityType', self::$activityTypeEntityName,
array(
- self::$activityTypeEntityName => self::$entityName . '.activityType'
+ self::$activityTypeEntityName => new DoctrineJoinDescriptor(
+ self::$activityTypeEntityName,
+ self::$entityName . '.activityType'
+ )
)
);
@@ -101,7 +111,10 @@ class ActivityController extends RestController implements ClassResourceInterfac
$this->fieldDescriptors['account'] = new DoctrineFieldDescriptor(
'id', 'account', self::$accountEntityName,
array(
- self::$accountEntityName => self::$entityName . '.account'
+ self::$accountEntityName => new DoctrineJoinDescriptor(
+ self::$accountEntityName,
+ self::$entityName . '.account'
+ )
),
true
);
@@ -110,7 +123,10 @@ class ActivityController extends RestController implements ClassResourceInterfac
$this->fieldDescriptors['contact'] = new DoctrineFieldDescriptor(
'id', 'contact', self::$contactEntityName . 'contact',
array(
- self::$contactEntityName . 'contact' => self::$entityName . '.contact'
+ self::$contactEntityName . 'contact' => new DoctrineJoinDescriptor(
+ self::$contactEntityName . 'contact',
+ self::$entityName . '.contact'
+ )
),
true
);
@@ -119,7 +135,9 @@ class ActivityController extends RestController implements ClassResourceInterfac
$this->fieldDescriptors['assignedContact'] = new DoctrineFieldDescriptor(
'lastName', 'assignedContact', self::$contactEntityName . 'assignedContact',
array(
- self::$contactEntityName . 'assignedContact' => self::$entityName . '.assignedContact'
+ self::$contactEntityName . 'assignedContact' => new DoctrineJoinDescriptor(
+ self::$contactEntityName . 'assignedContact', self::$entityName . '.assignedContact'
+ )
)
);
    } | Refactored field descriptors to use DoctrineJoinDescriptor for joins | sulu_sulu | train
cb784b1b4c5600b62b9b17ced1f39dcbfd7cee94 | diff --git a/lib/deep_cover/covered_code.rb b/lib/deep_cover/covered_code.rb
index <HASH>..<HASH> 100644
--- a/lib/deep_cover/covered_code.rb
+++ b/lib/deep_cover/covered_code.rb
@@ -1,6 +1,6 @@
module DeepCover
class CoveredCode
- attr_accessor :covered_source, :buffer, :executed, :binding, :tracker_global
+ attr_accessor :covered_source, :buffer, :binding, :tracker_global
@@counter = 0
def initialize(path: nil, source: nil, lineno: nil, tracker_global: '$_cov')
@@ -19,16 +19,14 @@ module DeepCover
end
def execute_code(binding: DeepCover::GLOBAL_BINDING.dup)
- return if @executed
- global = eval("#{tracker_global} ||= {}")
- @cover = global[nb] ||= Array.new(@tracker_count, 0) # The reason for the || is for the case of self coverage, where these are prepared in advance
- @executed = true
+ return if has_executed?
+ global[nb] = Array.new(@tracker_count, 0)
eval(@covered_source, binding, @buffer.name || '<raw_code>', @lineno || 1)
end
def cover
must_have_executed
- @cover
+ @cover ||= global[nb]
end
def line_coverage
@@ -110,9 +108,17 @@ module DeepCover
rewriter.process
end
+ def has_executed?
+ global[nb] != nil
+ end
+
protected
+ def global
+ eval("#{tracker_global} ||= {}")
+ end
+
def must_have_executed
- raise "cover not available, file wasn't executed" unless @executed
+ raise "cover not available, file wasn't executed" unless has_executed?
end
end
end | Deduce execution from existence of trackers | deep-cover_deep-cover | train |
b11896aceee5580ce70f0f53e92b3f31c5da5386 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index <HASH>..<HASH> 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,7 @@
### Enhancements
* Name for GH releases can be optional
+* Cached zip remote contains URL filename in path for better introspection
### Bug fixes
diff --git a/lib/torba/remote_sources/zip.rb b/lib/torba/remote_sources/zip.rb
index <HASH>..<HASH> 100644
--- a/lib/torba/remote_sources/zip.rb
+++ b/lib/torba/remote_sources/zip.rb
@@ -13,14 +13,11 @@ module Torba
class Zip
include Common
- attr_reader :url
+ attr_reader :url, :digest
def initialize(url)
@url = url
- end
-
- def digest
- Torba.digest(url)
+ @digest = "#{File.basename url, '.zip'}-#{Torba.digest(url)}"
end
private
diff --git a/test/remote_sources/zip_test.rb b/test/remote_sources/zip_test.rb
index <HASH>..<HASH> 100644
--- a/test/remote_sources/zip_test.rb
+++ b/test/remote_sources/zip_test.rb
@@ -7,7 +7,7 @@ module Torba
assert_equal "http://jquery.com/jquery.zip", remote.url
end
- def test_digest
+ def test_unique_digest
remote = RemoteSources::Zip.new("http://jquery.com/jquery.zip")
same_remote = RemoteSources::Zip.new("http://jquery.com/jquery.zip")
@@ -18,6 +18,11 @@ module Torba
refute_equal remote.digest, another_remote.digest
end
+ def test_digest_contains_filename
+ remote = RemoteSources::Zip.new("http://jquery.com/jquery.zip")
+ assert_match /^jquery-/, remote.digest
+ end
+
def test_404
exception = assert_raises(Errors::ShellCommandFailed) do
RemoteSources::Zip.new("http://jquery.com/jquery.zip")["*"] | Zip remote: #digest contains URL filename for better introspection | torba-rb_torba | train |
1d987fb655f2a9695b9d28d1bacb742112b6a7de | diff --git a/packages/babel-node/src/_babel-node.js b/packages/babel-node/src/_babel-node.js
index <HASH>..<HASH> 100644
--- a/packages/babel-node/src/_babel-node.js
+++ b/packages/babel-node/src/_babel-node.js
@@ -43,6 +43,10 @@ program.option(
"List of extensions to hook into [.es6,.js,.es,.jsx,.mjs]",
collect,
);
+program.option(
+ "--config-file [path]",
+ "Path to the babel config file to use. Defaults to working directory babel.config.js",
+);
program.option("-w, --plugins [string]", "", collect);
program.option("-b, --presets [string]", "", collect);
@@ -57,6 +61,7 @@ register({
only: program.only,
plugins: program.plugins,
presets: program.presets,
+ configFile: program.configFile,
});
const replPlugin = ({ types: t }) => ({ | Add --config-file option to babel-node, like babel-cli's option. | babel_babel | train |
c4cb8ec9791172b6fce84db9c1ee1212e1fabad7 | diff --git a/lib/rakwik/tracker.rb b/lib/rakwik/tracker.rb
index <HASH>..<HASH> 100644
--- a/lib/rakwik/tracker.rb
+++ b/lib/rakwik/tracker.rb
@@ -45,6 +45,15 @@ module Rakwik
}
header['Accept-Language'] = request.env['HTTP_ACCEPT_LANGUAGE'] unless request.env['HTTP_ACCEPT_LANGUAGE'].nil?
header['DNT'] = request.env['HTTP_DNT'] unless request.env['HTTP_DNT'].nil?
+
+ if c=request.cookies
+ # we'll forward piwik cookies only
+ c.delete_if{ |name, value| !(name =~ /^_pk_id\.|^_pk_ses\./) }
+ unless c.empty?
+ header['Cookie'] = c.map{|k,v| "#{k}=#{v}"}.join(';')
+ end
+ end
+
data = {
'idsite' => piwik_id,
'token_auth' => token_auth, | Forward piwik tracking cookies, if present in the request | datenimperator_rakwik | train |
8baebbb08ee1fa582ac2a7396e990363bcb176ab | diff --git a/luigi/worker.py b/luigi/worker.py
index <HASH>..<HASH> 100644
--- a/luigi/worker.py
+++ b/luigi/worker.py
@@ -75,8 +75,8 @@ class Worker(object):
wait_interval = config.getint('core', 'worker-wait-interval', 1)
self.__wait_interval = wait_interval
- self.__id = worker_id
- self.__scheduler = scheduler
+ self._id = worker_id
+ self._scheduler = scheduler
if (isinstance(scheduler, CentralPlannerScheduler)
and worker_processes != 1):
warnings.warn("Will only use one process when running with local in-process scheduler")
@@ -84,7 +84,7 @@ class Worker(object):
self.worker_processes = worker_processes
self.host = socket.gethostname()
- self.__scheduled_tasks = {}
+ self._scheduled_tasks = {}
# store the previous tasks executed by the same worker
# for debugging reasons
@@ -169,14 +169,17 @@ class Worker(object):
self._log_unexpected_error(task)
self._email_unexpected_error(task, formatted_traceback)
+ def _check_complete(self, task):
+ return task.complete()
+
def _add(self, task):
self._validate_task(task)
- if task.task_id in self.__scheduled_tasks:
+ if task.task_id in self._scheduled_tasks:
return [] # already scheduled
logger.debug("Checking if %s is complete", task)
is_complete = False
try:
- is_complete = task.complete()
+ is_complete = self._check_complete(task)
self._check_complete_value(is_complete)
except KeyboardInterrupt:
raise
@@ -193,7 +196,7 @@ class Worker(object):
if is_complete:
# Not submitting dependencies of finished tasks
- self.__scheduler.add_task(self.__id, task.task_id, status=DONE,
+ self._scheduler.add_task(self._id, task.task_id, status=DONE,
runnable=False)
task.trigger_event(Event.DEPENDENCY_PRESENT, task)
elif task.run == NotImplemented:
@@ -203,8 +206,8 @@ class Worker(object):
return []
def _add_external(self, external_task):
- self.__scheduled_tasks[external_task.task_id] = external_task
- self.__scheduler.add_task(self.__id, external_task.task_id, status=PENDING,
+ self._scheduled_tasks[external_task.task_id] = external_task
+ self._scheduler.add_task(self._id, external_task.task_id, status=PENDING,
runnable=False)
external_task.trigger_event(Event.DEPENDENCY_MISSING, external_task)
logger.warning('Task %s is not complete and run() is not implemented. Probably a missing external dependency.', external_task.task_id)
@@ -216,14 +219,14 @@ class Worker(object):
raise Exception('requires() must return Task objects')
def _add_task_and_deps(self, task):
- self.__scheduled_tasks[task.task_id] = task
+ self._scheduled_tasks[task.task_id] = task
deps = task.deps()
for d in deps:
self._validate_dependency(d)
task.trigger_event(Event.DEPENDENCY_DISCOVERED, task, d)
deps = [d.task_id for d in deps]
- self.__scheduler.add_task(self.__id, task.task_id, status=PENDING,
+ self._scheduler.add_task(self._id, task.task_id, status=PENDING,
deps=deps, runnable=True)
logger.info('Scheduled %s', task.task_id)
@@ -235,7 +238,7 @@ class Worker(object):
raise Exception("Return value of Task.complete() must be boolean (was %r)" % is_complete)
def _run_task(self, task_id):
- task = self.__scheduled_tasks[task_id]
+ task = self._scheduled_tasks[task_id]
logger.info('[pid %s] Running %s', os.getpid(), task_id)
try:
@@ -265,7 +268,7 @@ class Worker(object):
subject = "Luigi: %s FAILED" % task
notifications.send_error_email(subject, error_message)
- self.__scheduler.add_task(self.__id, task_id, status=status,
+ self._scheduler.add_task(self._id, task_id, status=status,
expl=error_message, runnable=None)
return status
@@ -288,7 +291,7 @@ class Worker(object):
def _get_work(self):
logger.debug("Asking scheduler for work...")
- r = self.__scheduler.get_work(worker=self.__id, host=self.host)
+ r = self._scheduler.get_work(worker=self._id, host=self.host)
# Support old version of scheduler
if isinstance(r, tuple) or isinstance(r, list):
n_pending_tasks, task_id = r | Minor refactoring of Worker to allow easier subclassing | spotify_luigi | train |
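A hypothetical subclass showing why the rename matters: the name-mangled `__` attributes could not be reached from subclasses, and the new `_check_complete` hook can now be overridden. The caching behavior here is illustrative, not part of the commit:

```python
from luigi.worker import Worker

class CachingWorker(Worker):
    """Worker that memoizes completeness checks per task id."""

    def __init__(self, *args, **kwargs):
        super(CachingWorker, self).__init__(*args, **kwargs)
        self._complete_cache = {}

    def _check_complete(self, task):
        # avoid re-running potentially expensive complete() checks
        if task.task_id not in self._complete_cache:
            self._complete_cache[task.task_id] = task.complete()
        return self._complete_cache[task.task_id]
```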
345ba2600762028f7b71f8cfff010fefbfe8728e | diff --git a/lib/log.rb b/lib/log.rb
index <HASH>..<HASH> 100644
--- a/lib/log.rb
+++ b/lib/log.rb
@@ -10,7 +10,7 @@ module ErnieBrodeur
@options = {}
@options[:utc] = true
- @options[:level] = ::Logger::WARN
+ @options[:level] = ::Logger::DEBUG
@options[:override_puts] = false
@options[:filename] = STDOUT
@@ -37,12 +37,12 @@ module ErnieBrodeur
end
def level(s)
- level = case s
- when 'fatal' then ::Logger::FATAL
- when 'error' then ::Logger::ERROR
- when 'warn' then ::Logger::WARN
- when 'info' then ::Logger::INFO
- when 'debug' then ::Logger::DEBUG
+ level = case s.to_sym
+ when :fatal then ::Logger::FATAL
+ when :error then ::Logger::ERROR
+ when :warn then ::Logger::WARN
+ when :info then ::Logger::INFO
+ when :debug then ::Logger::DEBUG
else ::Logger::UNKNOWN
end | Changed the default value of the logger to debug. Changed Log#level to take symbols or strings. | erniebrodeur_bini | train
72b35313949619df494bb3eff27e55691cd9f917 | diff --git a/src/com/google/javascript/jscomp/parsing/IRFactory.java b/src/com/google/javascript/jscomp/parsing/IRFactory.java
index <HASH>..<HASH> 100644
--- a/src/com/google/javascript/jscomp/parsing/IRFactory.java
+++ b/src/com/google/javascript/jscomp/parsing/IRFactory.java
@@ -185,8 +185,8 @@ class IRFactory {
static final String INVALID_ES5_STRICT_OCTAL =
"Octal integer literals are not supported in Ecmascript 5 strict mode.";
- static final String INVALID_NUMBER_LITERAL =
- "Invalid number literal.";
+ static final String INVALID_OCTAL_DIGIT =
+ "Invalid octal digit in octal literal.";
static final String STRING_CONTINUATION_ERROR =
"String continuations are not supported in this language mode.";
@@ -2878,7 +2878,6 @@ class IRFactory {
if (value.charAt(0) == '.') {
return Double.valueOf('0' + value);
} else if (value.charAt(0) == '0' && length > 1) {
- // TODO(johnlenz): accept octal numbers in es3 etc.
switch (value.charAt(1)) {
case '.':
case 'e':
@@ -2925,22 +2924,35 @@ class IRFactory {
}
case '0': case '1': case '2': case '3':
case '4': case '5': case '6': case '7':
- errorReporter.warning(INVALID_ES5_STRICT_OCTAL, sourceName,
- lineno(location.start), charno(location.start));
if (!inStrictContext()) {
double v = 0;
int c = 0;
while (++c < length) {
- v = (v * 8) + octaldigit(value.charAt(c));
+ char digit = value.charAt(c);
+ if (isOctalDigit(digit)) {
+ v = (v * 8) + octaldigit(digit);
+ } else {
+ errorReporter.error(INVALID_OCTAL_DIGIT, sourceName,
+ lineno(location.start), charno(location.start));
+ return 0;
+ }
}
+ errorReporter.warning(INVALID_ES5_STRICT_OCTAL, sourceName,
+ lineno(location.start), charno(location.start));
return v;
} else {
+ // TODO(tbreisacher): Make this an error instead of a warning.
+ errorReporter.warning(INVALID_ES5_STRICT_OCTAL, sourceName,
+ lineno(location.start), charno(location.start));
return Double.valueOf(value);
}
- default:
- errorReporter.error(INVALID_NUMBER_LITERAL, sourceName,
- lineno(location.start), charno(location.start));
+ case '8': case '9':
+ errorReporter.error(INVALID_OCTAL_DIGIT, sourceName,
+ lineno(location.start), charno(location.start));
return 0;
+ default:
+ throw new IllegalStateException(
+ "Unexpected character in number literal: " + value.charAt(1));
}
} else {
return Double.valueOf(value);
diff --git a/test/com/google/javascript/jscomp/parsing/NewParserTest.java b/test/com/google/javascript/jscomp/parsing/NewParserTest.java
index <HASH>..<HASH> 100644
--- a/test/com/google/javascript/jscomp/parsing/NewParserTest.java
+++ b/test/com/google/javascript/jscomp/parsing/NewParserTest.java
@@ -1957,15 +1957,21 @@ public final class NewParserTest extends BaseJSTypeTestCase {
public void testInvalidOldStyleOctalLiterals() {
mode = LanguageMode.ECMASCRIPT3;
parseError("08;",
- "Invalid number literal.");
+ "Invalid octal digit in octal literal.");
+ parseError("01238;",
+ "Invalid octal digit in octal literal.");
mode = LanguageMode.ECMASCRIPT5;
parseError("08;",
- "Invalid number literal.");
+ "Invalid octal digit in octal literal.");
+ parseError("01238;",
+ "Invalid octal digit in octal literal.");
mode = LanguageMode.ECMASCRIPT6;
parseError("08;",
- "Invalid number literal.");
+ "Invalid octal digit in octal literal.");
+ parseError("01238;",
+ "Invalid octal digit in octal literal.");
}
public void testGetter() { | Avoid crashing on number literals that appear to be octal but contain an 8 or 9
-------------
Created by MOE: <URL> | google_closure-compiler | train |
296a048f0cf97902eb7e0867056c79d11c0b06dd | diff --git a/lib/moneta/adapters/cassandra.rb b/lib/moneta/adapters/cassandra.rb
index <HASH>..<HASH> 100644
--- a/lib/moneta/adapters/cassandra.rb
+++ b/lib/moneta/adapters/cassandra.rb
@@ -94,6 +94,12 @@ module Moneta
end
self
end
+
+ # (see Proxy#close)
+ def close
+ @backend.disconnect!
+ nil
+ end
end
end
end | disconnect cassandra in #close | moneta-rb_moneta | train |
21c06e753dd47cfd56e62a167a85d74841070679 | diff --git a/brokerlso/qmfv2.py b/brokerlso/qmfv2.py
index <HASH>..<HASH> 100644
--- a/brokerlso/qmfv2.py
+++ b/brokerlso/qmfv2.py
@@ -50,7 +50,7 @@ class RequestCmd:
:param name: Name of exchange to create
:type name: str
:param type_: Type of exchange to create
- Possible values are fanout, ...?
+ Possible values are: direct, fanout, topic?
:type type_: str
:param strict: Whether command should fail when unrecognized properties are provided
Not used by QMFv2
@@ -116,7 +116,9 @@ class RequestCmd:
"""
content = {"_object_id": {"_object_name": self.object_name},
"_method_name": "delete",
- "options": {"type": "queue", "name": name, "options": dict()}}
+ "_arguments": {"type": "queue",
+ "name": name,
+ "options": dict()}}
logger.debug("Message content -> {0}".format(content))
return content, self.method_properties
@@ -131,7 +133,7 @@ class RequestCmd:
"""
content = {"_object_id": {"_object_name": self.object_name},
"_method_name": "delete",
- "options": {"type": "exchange", "name": name, "options": dict()}}
+ "_arguments": {"type": "exchange", "name": name, "options": dict()}}
logger.debug("Message content -> {0}".format(content))
return content, self.method_properties
@@ -146,7 +148,7 @@ class RequestCmd:
"""
content = {"_object_id": {"_object_name": self.object_name},
"_method_name": "delete",
- "options": {"type": "binding", "name": name, "options": dict()}}
+ "_arguments": {"type": "binding", "name": name, "options": dict()}}
logger.debug("Message content -> {0}".format(content))
return content, self.method_properties
diff --git a/tests/test_request_binding.py b/tests/test_request_binding.py
index <HASH>..<HASH> 100644
--- a/tests/test_request_binding.py
+++ b/tests/test_request_binding.py
@@ -46,7 +46,7 @@ class TestRequestBinding:
expected_content = {"_object_id": {"_object_name": "org.apache.qpid.broker:broker:amqp-broker"},
"_method_name": "delete",
- "options": {"type": "binding", "name": binding, "options": dict()}}
+ "_arguments": {"type": "binding", "name": binding, "options": dict()}}
logger.debug("Expected content -> {0}".format(expected_content))
expected_properties = {"x-amqp-0-10.app-id": "qmf2", "qmf.opcode": "_method_request", "method": "request"}
diff --git a/tests/test_request_exchange.py b/tests/test_request_exchange.py
index <HASH>..<HASH> 100644
--- a/tests/test_request_exchange.py
+++ b/tests/test_request_exchange.py
@@ -48,7 +48,7 @@ class TestRequestExchange:
expected_content = {"_object_id": {"_object_name": "org.apache.qpid.broker:broker:amqp-broker"},
"_method_name": "delete",
- "options": {"type": "exchange", "name": exchange, "options": dict()}}
+ "_arguments": {"type": "exchange", "name": exchange, "options": dict()}}
logger.debug("Expected content -> {0}".format(expected_content))
expected_properties = {"x-amqp-0-10.app-id": "qmf2", "qmf.opcode": "_method_request", "method": "request"}
diff --git a/tests/test_request_queue.py b/tests/test_request_queue.py
index <HASH>..<HASH> 100644
--- a/tests/test_request_queue.py
+++ b/tests/test_request_queue.py
@@ -46,7 +46,7 @@ class TestRequestQueue:
expected_content = {"_object_id": {"_object_name": "org.apache.qpid.broker:broker:amqp-broker"},
"_method_name": "delete",
- "options": {"type": "queue", "name": queue, "options": dict()}}
+ "_arguments": {"type": "queue", "name": queue, "options": dict()}}
logger.debug("Expected content -> {0}".format(expected_content))
expected_properties = {"x-amqp-0-10.app-id": "qmf2", "qmf.opcode": "_method_request", "method": "request"} | Fix delete request content & update tests | codeghar_brokerlso | train |
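For reference, the shape of the corrected delete request (the queue name is a placeholder). The key change is `_arguments` replacing `options`, which the broker presumably did not recognize as the method-argument payload:

```python
content = {
    "_object_id": {"_object_name": "org.apache.qpid.broker:broker:amqp-broker"},
    "_method_name": "delete",
    "_arguments": {"type": "queue", "name": "my-queue", "options": dict()},
}
properties = {
    "x-amqp-0-10.app-id": "qmf2",
    "qmf.opcode": "_method_request",
    "method": "request",
}
```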
d0efd35ce25a817415c17e82ee4d734c3f4e17d4 | diff --git a/mythril/mythril.py b/mythril/mythril.py
index <HASH>..<HASH> 100644
--- a/mythril/mythril.py
+++ b/mythril/mythril.py
@@ -335,9 +335,9 @@ class Mythril(object):
verbose_report=False, max_depth=12):
all_issues = []
+ if self.dynld and self.eth is None:
+ self.set_api_rpc_infura()
for contract in (contracts or self.contracts):
- if self.eth is None:
- self.set_api_rpc_infura()
sym = SymExecWrapper(contract, address,
dynloader=DynLoader(self.eth) if self.dynld else None,
max_depth=max_depth) | connect to infura with -l | ConsenSys_mythril-classic | train |
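A toy reconstruction of the hoisted check: the Infura connection is now decided once before the contract loop, and only when dynamic loading (`-l`) is requested without an existing RPC client. Names mirror the diff but the bodies are stubs:

```python
class Analyzer:
    def __init__(self, dynld, eth=None):
        self.dynld = dynld
        self.eth = eth

    def set_api_rpc_infura(self):
        self.eth = object()  # stand-in for a configured Infura RPC client

    def fire_lasers(self, contracts):
        # connect once, up front, instead of once per contract
        if self.dynld and self.eth is None:
            self.set_api_rpc_infura()
        for contract in contracts:
            pass  # symbolic execution of each contract happens here
```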
841dac5c2172e041988153b7a40763f924c6c6b7 | diff --git a/classes/Collectors.php b/classes/Collectors.php
index <HASH>..<HASH> 100644
--- a/classes/Collectors.php
+++ b/classes/Collectors.php
@@ -24,6 +24,7 @@ class QM_Collectors implements IteratorAggregate {
/**
* @return ArrayIterator<string, QM_Collector>
*/
+ #[\ReturnTypeWillChange]
public function getIterator() {
return new ArrayIterator( $this->items );
}
diff --git a/classes/Dispatchers.php b/classes/Dispatchers.php
index <HASH>..<HASH> 100644
--- a/classes/Dispatchers.php
+++ b/classes/Dispatchers.php
@@ -18,6 +18,7 @@ class QM_Dispatchers implements IteratorAggregate {
/**
* @return ArrayIterator<string, QM_Dispatcher>
*/
+ #[\ReturnTypeWillChange]
public function getIterator() {
return new ArrayIterator( $this->items );
}
diff --git a/collectors/db_queries.php b/collectors/db_queries.php
index <HASH>..<HASH> 100644
--- a/collectors/db_queries.php
+++ b/collectors/db_queries.php
@@ -137,7 +137,7 @@ class QM_Collector_DB_Queries extends QM_Collector {
$has_result = false;
$has_trace = false;
$i = 0;
- $request = trim( $wp_the_query->request );
+ $request = trim( $wp_the_query->request ? $wp_the_query->request : '' );
if ( method_exists( $db, 'remove_placeholder_escape' ) ) {
$request = $db->remove_placeholder_escape( $request );
diff --git a/collectors/request.php b/collectors/request.php
index <HASH>..<HASH> 100644
--- a/collectors/request.php
+++ b/collectors/request.php
@@ -177,7 +177,8 @@ class QM_Collector_Request extends QM_Collector {
if ( is_admin() ) {
if ( isset( $_SERVER['REQUEST_URI'] ) ) {
- $home_path = trim( parse_url( home_url(), PHP_URL_PATH ), '/' );
+ $path = parse_url( home_url(), PHP_URL_PATH );
+ $home_path = trim( $path ? $path : '', '/' );
$request = wp_unslash( $_SERVER['REQUEST_URI'] ); // phpcs:ignore
$this->data['request']['request'] = str_replace( "/{$home_path}/", '', $request ); | Avoid some deprecation notices in PHP <I>. | johnbillion_query-monitor | train |
2c10cc574446f0885a592d29e8cebba45d448ebb | diff --git a/andes/io/psse.py b/andes/io/psse.py
index <HASH>..<HASH> 100644
--- a/andes/io/psse.py
+++ b/andes/io/psse.py
@@ -143,21 +143,9 @@ def read(system, file):
return ret
-def read_add(system, file):
+def _read_dyr_dict(file):
"""
- Read an addition PSS/E dyr file.
-
- Parameters
- ----------
- system : System
- System instance to which data will be loaded
- file : str
- Path to the additional `dyr` file
-
- Returns
- -------
- bool
- data parsing status
+ Parse dyr file into a dict where keys are model names and values are dataframes.
"""
with open(file, 'r') as f:
input_list = [line.strip() for line in f]
@@ -190,6 +178,28 @@ def read_add(system, file):
dev_params_num = [([to_number(cell) for cell in row.split()]) for row in all_rows]
dyr_dict[psse_model] = pd.DataFrame(dev_params_num)
+ return dyr_dict
+
+
+def read_add(system, file):
+ """
+    Read an additional PSS/E dyr file.
+
+ Parameters
+ ----------
+ system : System
+ System instance to which data will be loaded
+ file : str
+ Path to the additional `dyr` file
+
+ Returns
+ -------
+ bool
+ data parsing status
+ """
+ dyr_dict = _read_dyr_dict(file)
+ system.dyr_dict = dyr_dict
+
# read yaml and set header for each pss/e model
dirname = os.path.dirname(__file__)
with open(f'{dirname}/psse-dyr.yaml', 'r') as f: | Separated dyr file-to-dict parser from the processor | cuihantao_andes | train |
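Hypothetical use of the split-out helper: with parsing separated from the processor, a dyr file can be inspected as plain DataFrames without a `System` instance (the file path is illustrative):

```python
from andes.io.psse import _read_dyr_dict

dyr = _read_dyr_dict("case.dyr")  # {psse_model_name: DataFrame of device rows}
for model, df in dyr.items():
    print(model, df.shape)
```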
0962a07717313da887a04f6f31346a022138742f | diff --git a/features/support/env.rb b/features/support/env.rb
index <HASH>..<HASH> 100644
--- a/features/support/env.rb
+++ b/features/support/env.rb
@@ -15,9 +15,11 @@ require "phantomjs/poltergeist"
Capybara.app = lambda { |env|
request_path = env["REQUEST_PATH"] || "/"
request_path = "/index.html" if request_path == "/"
-
- [200, {"Content-Type" => "text/html"},
- [File.read(File.join(File.dirname(__FILE__), "../../tmp/aruba/project/coverage", request_path))]]
+ [
+ 200,
+ {"Content-Type" => "text/html"},
+ [File.read(File.join(File.dirname(__FILE__), "../../tmp/aruba/project/coverage", request_path))],
+ ]
}
Capybara.default_driver = Capybara.javascript_driver = :poltergeist | Align the elements of an array literal if they span more than one line | colszowka_simplecov | train |
a69acfd7d55bc354efc1292fbbedef6407e590ae | diff --git a/lib/database_rewinder/active_record_monkey.rb b/lib/database_rewinder/active_record_monkey.rb
index <HASH>..<HASH> 100644
--- a/lib/database_rewinder/active_record_monkey.rb
+++ b/lib/database_rewinder/active_record_monkey.rb
@@ -3,15 +3,15 @@
module DatabaseRewinder
module InsertRecorder
module Execute
- module WithKwargs
- def execute(sql, *, **)
+ module NoKwargs
+ def execute(sql, *)
DatabaseRewinder.record_inserted_table self, sql
super
end
end
- module NoKwargs
- def execute(sql, *)
+ module WithKwargs
+ def execute(sql, *, **)
DatabaseRewinder.record_inserted_table self, sql
super
end | Consistency leads to readability and maintainability | amatsuda_database_rewinder | train |
a7fd4322ea621abca8172d23e55ae56109cf622f | diff --git a/src/main/java/org/workdocx/cryptolite/KeyWrapper.java b/src/main/java/org/workdocx/cryptolite/KeyWrapper.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/workdocx/cryptolite/KeyWrapper.java
+++ b/src/main/java/org/workdocx/cryptolite/KeyWrapper.java
@@ -59,7 +59,7 @@ public class KeyWrapper {
private static final String WRAP_ALGORITHM_ASYMMETRIC = "AES/ECB/PKCS7Padding";
/** The key size for the wrapping key: {@value #WRAP_KEY_SIZE}. */
- private static final int WRAP_KEY_SIZE = 128;
+ private static final int WRAP_KEY_SIZE = Keys.SYMMETRIC_KEY_SIZE;
/**
* The algorithm to use for password-based key derivation, which is used to generate the | Modified the KeyWrapper so that the key size it uses is obtained from
the key size for secret keys in the Keys class. | davidcarboni_cryptolite-java | train
5e33f5ff568b078c0fc83310f64a90f7dabccdf6 | diff --git a/lib/Thelia/Core/DependencyInjection/Compiler/RegisterHookListenersPass.php b/lib/Thelia/Core/DependencyInjection/Compiler/RegisterHookListenersPass.php
index <HASH>..<HASH> 100644
--- a/lib/Thelia/Core/DependencyInjection/Compiler/RegisterHookListenersPass.php
+++ b/lib/Thelia/Core/DependencyInjection/Compiler/RegisterHookListenersPass.php
@@ -87,7 +87,12 @@ class RegisterHookListenersPass implements CompilerPassInterface
if (method_exists($class, 'getSubscribedHooks')) {
foreach ($class::getSubscribedHooks() as $eventName => $attributesArray) {
- $events[] = array_merge($attributesArray, ['event' => $eventName]);
+ if (isset($attributesArray['type'])) {
+ $attributesArray = [$attributesArray];
+ }
+ foreach ($attributesArray as $attributes) {
+ $events[] = array_merge($attributes, ['event' => $eventName]);
+ }
            } | Fix for multiple actions on one hook (#<I>) | thelia_core | train
} | Fix for multiple action on 1 hook (#<I>) | thelia_core | train |
27bd01343feaaf80b04f63c0871682651c8809bd | diff --git a/aioxmpp/node.py b/aioxmpp/node.py
index <HASH>..<HASH> 100644
--- a/aioxmpp/node.py
+++ b/aioxmpp/node.py
@@ -9,6 +9,8 @@ stream based on a presence setting is provided.
Using XMPP
==========
+.. autoclass:: AbstractClient
+
.. autoclass:: PresenceManagedClient
Connecting streams low-level
@@ -206,12 +208,42 @@ class AbstractClient:
provides functionality for connecting the xmlstream as well as signals
which indicate changes in the stream state.
+ The *jid* must be a :class:`~aioxmpp.structs.JID` for which to connect. The
+ *security_layer* is best created using the
+ :func:`~aioxmpp.security_layer.security_layer` function and must provide
+ authentication for the given *jid*.
+
+ The *negotiation_timeout* argument controls the :attr:`negotiation_timeout`
+ attribute.
+
+ If *loop* is given, it must be a :class:`asyncio.BaseEventLoop`
+ instance. If it is not given, the current event loop is used.
+
As a glue between the stanza stream and the XML stream, it also knows about
stream management and performs stream management negotiation. It is
specialized on client operations, which implies that it will try to keep
the stream alive as long as wished by the client.
- .. autoattribute:: local_jid
+ In general, there are no fatal errors (aside from stream negotiation
+ problems) which stop a :class:`AbstractClient` from working. It makes use
+ of stream management as far as possible and abstracts away the gritty low
+ level details. In general, it is sufficient to observe the
+ :attr:`on_stream_established` and :attr:`on_stream_destroyed` events, which
+ notify a user about when a stream becomes available and when it becomes
+ unavailable.
+
+ If authentication fails (or another stream negotiation error occurs), the
+ client fails and :attr:`on_failure` is fired. :attr:`running` becomes false
+ and the client needs to be re-started manually by calling :meth:`start`.
+
+
+ Controlling the client:
+
+ .. automethod:: start
+
+ .. automethod:: stop
+
+ .. autoattribute:: running
.. attribute:: negotiation_timeout = timedelta(seconds=60)
@@ -219,19 +251,17 @@ class AbstractClient:
of negotiating the stream. See the *negotiation_timeout* argument to
:func:`connect_secured_xmlstream`.
- .. attribute:: on_failure
-
- A :class:`~aioxmpp.callbacks.Signal` which is fired when the client
- fails and stops.
+ Connection information:
- .. autoattribute:: running
+ .. autoattribute:: established
+ .. autoattribute:: local_jid
- Exponential backoff on failure:
+ Exponential backoff on interruptions:
.. attribute:: backoff_start
- When connecting a stream fails due to connectivity issues (generic
+ When an underlying XML stream fails due to connectivity issues (generic
:class:`OSError` raised), exponential backoff takes place before
attempting to reconnect.
@@ -248,6 +278,28 @@ class AbstractClient:
The backoff time is capped to :attr:`backoff_cap`, to avoid having
unrealistically high values.
+ Signals:
+
+ .. attribute:: on_failure
+
+ A :class:`~aioxmpp.callbacks.Signal` which is fired when the client
+ fails and stops.
+
+ .. attribute:: on_stream_established
+
+ When the stream is established and resource binding took place, this
+ event is fired. It means that the stream can now be used for XMPP
+ interactions.
+
+ .. attribute:: on_stream_destroyed
+
+ This is called whenever a stream is destroyed. The conditions for this
+ are the same as for :attr:`.StanzaStream.on_stream_destroyed`.
+
+ This event can be used to know when to discard all state about the XMPP
+ connection, such as roster information.
+
+
"""
on_failure = callbacks.Signal()
@@ -273,6 +325,8 @@ class AbstractClient:
self._sm_id = None
self._sm_location = None
+ self._established = False
+
self.negotiation_timeout = negotiation_timeout
self.backoff_start = timedelta(seconds=1)
self.backoff_factor = 1.2
@@ -295,7 +349,9 @@ class AbstractClient:
def _stream_destroyed(self):
self._bind_task.cancel()
- self.on_stream_destroyed()
+ if self._established:
+ self._established = False
+ self.on_stream_destroyed()
def _on_bind_done(self, task):
try:
@@ -402,6 +458,7 @@ class AbstractClient:
self._local_jid = result.payload.jid
+ self._established = True
self.on_stream_established()
@asyncio.coroutine
@@ -428,6 +485,10 @@ class AbstractClient:
self._logger.error("stream failed: %s", exc)
finally:
self.stream.stop()
+ try:
+ yield from self.stream._task
+ except:
+ pass
@asyncio.coroutine
def _main(self):
@@ -462,6 +523,13 @@ class AbstractClient:
def start(self):
+ """
+ Start the client. If it is already :attr:`running`,
+ :class:`RuntimeError` is raised.
+
+ While the client is running, it will try to keep an XMPP connection
+ open to the server associated with :attr:`local_jid`.
+ """
if self.running:
raise RuntimeError("client already running")
@@ -472,8 +540,17 @@ class AbstractClient:
self._main_task.add_done_callback(self._on_main_done)
def stop(self):
+ """
+ Stop the client. This sends a signal to the client's main task, which
+ makes it terminate.
+
+ It may take some cycles through the event loop to stop the client
+ task. To check whether the task has actually stopped, query
+ :attr:`running`.
+ """
if not self.running:
return
+
self._main_task.cancel()
# properties
@@ -498,8 +575,19 @@ class AbstractClient:
@property
def running(self):
+ """
+ true if the client is currently running, false otherwise.
+ """
return self._main_task is not None and not self._main_task.done()
+ @property
+ def established(self):
+ """
+ true if the stream is currently established (as defined in
+ :attr:`on_stream_established`) and false otherwise.
+ """
+ return self._established
+
class ClientStatus(Enum):
DISCONNECTED = 0 | Improve AbstractClient docs and fix minor issue in on_stream_destroyed | horazont_aioxmpp | train |
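
The behavioural fix in this commit, firing on_stream_destroyed only for streams that actually became established, can be isolated in a few lines. A self-contained Python sketch; a plain callback list stands in for aioxmpp's callbacks.Signal, and nothing here is the real AbstractClient:

class MiniClient:
    """Stand-in that mirrors only the _established guard from the diff."""

    def __init__(self):
        self._established = False
        self.on_stream_destroyed = []  # simplified substitute for callbacks.Signal

    def _stream_established(self):
        self._established = True

    def _stream_destroyed(self):
        # The guard added by this commit: a stream that never finished
        # negotiation must not be reported as a destroyed working stream.
        if self._established:
            self._established = False
            for callback in self.on_stream_destroyed:
                callback()

    @property
    def established(self):
        return self._established

client = MiniClient()
client.on_stream_destroyed.append(lambda: print("stream destroyed"))
client._stream_destroyed()    # prints nothing: the stream was never established
client._stream_established()
client._stream_destroyed()    # prints "stream destroyed" exactly once
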
9f93bfab656538cae16ad32b783bb5e2d23e4a9b | diff --git a/src/interfaces/xmlcontenthandler.js b/src/interfaces/xmlcontenthandler.js
index <HASH>..<HASH> 100644
--- a/src/interfaces/xmlcontenthandler.js
+++ b/src/interfaces/xmlcontenthandler.js
@@ -64,7 +64,6 @@
*
* @method gpf.interfaces.IXmlContentHandler#startElement
* @param {String} qName Qualified name, [prefix:]localName
- * @param {String} [uri=""] Namespace URI associated with the name
* @param {Object} [attributes={}] attribute dictionary (string/string)
* @return {Promise} Resolved when ready
*/ | Designing the interface (#<I> & #<I>) | ArnaudBuchholz_gpf-js | train |
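
With the namespace URI dropped from the signature, a handler receives only the qualified name and the attribute dictionary. A hypothetical stand-in for the documented contract, written in Python for consistency with the other sketches (the real interface is JavaScript, and the name parsing shown is an illustration, not gpf-js code):

def start_element(q_name, attributes=None):
    # q_name follows the documented "[prefix:]localName" shape.
    attributes = attributes or {}
    prefix, _, local_name = q_name.rpartition(":")
    return prefix, local_name, attributes

print(start_element("svg:rect", {"width": "10"}))  # ('svg', 'rect', {'width': '10'})
print(start_element("title"))                      # ('', 'title', {})
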
94c38361e73bdd8462e126035460673c4be1213d | diff --git a/job/controller/src/main/java/org/talend/esb/job/controller/internal/ESBProvider.java b/job/controller/src/main/java/org/talend/esb/job/controller/internal/ESBProvider.java
index <HASH>..<HASH> 100644
--- a/job/controller/src/main/java/org/talend/esb/job/controller/internal/ESBProvider.java
+++ b/job/controller/src/main/java/org/talend/esb/job/controller/internal/ESBProvider.java
@@ -22,8 +22,11 @@ package org.talend.esb.job.controller.internal;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
+import javax.annotation.Resource;
import javax.xml.namespace.QName;
import javax.xml.transform.Source;
+import javax.xml.ws.WebServiceContext;
+import javax.xml.ws.handler.MessageContext;
import org.apache.cxf.endpoint.Server;
import org.apache.cxf.jaxws.JaxWsServerFactoryBean;
@@ -42,6 +45,9 @@ class ESBProvider implements javax.xml.ws.Provider<javax.xml.transform.Source> {
private QName serviceName;
private QName portName;
+ @Resource
+ private WebServiceContext context;
+
public ESBProvider(String publishedEndpointUrl,
final QName serviceName,
final QName portName) {
@@ -89,27 +95,41 @@ class ESBProvider implements javax.xml.ws.Provider<javax.xml.transform.Source> {
Thread.currentThread().setContextClassLoader(contextClassLoader);
}
}
-
+
+ // TODO: add dynamic WebMethod
@Override
-// @javax.jws.WebMethod(operationName = "getCustomersByName", action = "http://talend.org/esb/service/job/invoke")
+ @javax.jws.WebMethod(operationName = "getCustomersByName", action = "http://talend.org/esb/service/job/invoke")
// @javax.jws.WebResult(name = "jobOutput", targetNamespace = "http://talend.org/esb/service/job",
// partName = "response")
public Source invoke(Source request) {
- //callbacks.get(key)
+ QName operationName = (QName)context.getMessageContext().get(MessageContext.WSDL_OPERATION);
+ RuntimeESBProviderCallback esbProviderCallback =
+ getESBProviderCallback(operationName.getLocalPart());
+ if(esbProviderCallback == null) {
+ throw new RuntimeException("Handler for operation '" + operationName.getLocalPart() + "' cannot be found");
+ }
try {
org.dom4j.io.DocumentResult docResult = new org.dom4j.io.DocumentResult();
- factory.newTransformer().transform(request,
- docResult);
- org.dom4j.Document requestDoc = docResult
- .getDocument();
- System.out.println("request: " +
- requestDoc.asXML());
+ factory.newTransformer().transform(request, docResult);
+ org.dom4j.Document requestDoc = docResult.getDocument();
+
+ System.out.println("request: " + requestDoc.asXML());
+ Object result = esbProviderCallback.invoke(requestDoc);
+
+ if(result instanceof org.dom4j.Document) {
+ return new org.dom4j.io.DocumentSource(
+ (org.dom4j.Document)result);
+ } else {
+ throw new RuntimeException("Provider return incompatible object: " + result.getClass().getName());
+ }
+
+ } catch (RuntimeException e) {
+ throw e;
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
+ throw new RuntimeException(e);
}
-
- return null;
}
public RuntimeESBProviderCallback createESBProviderCallback(String operationName) { | job controller: added implementation for generic provider | Talend_tesb-rt-se | train |
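
The dispatch pattern this provider now implements: resolve the registered callback by the WSDL operation name pulled from the message context, fail fast when none exists, and reject results of the wrong type. Sketched in Python for consistency with the other examples; the names mirror the Java code, and the dict-based type check merely stands in for the dom4j Document test:

class GenericProvider:
    def __init__(self):
        self._callbacks = {}  # operation name -> callback, like the ConcurrentHashMap above

    def register(self, operation_name, callback):
        self._callbacks[operation_name] = callback

    def invoke(self, operation_name, request_doc):
        callback = self._callbacks.get(operation_name)
        if callback is None:
            raise RuntimeError(
                "Handler for operation '%s' cannot be found" % operation_name)
        result = callback(request_doc)
        if not isinstance(result, dict):  # stand-in for "instanceof org.dom4j.Document"
            raise RuntimeError(
                "Provider returned an incompatible object: %r" % type(result))
        return result

provider = GenericProvider()
provider.register("getCustomersByName", lambda doc: {"customers": []})
print(provider.invoke("getCustomersByName", {"name": "smith"}))
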
7ac0f3f72a217fc36fd3c801242f011ec86e42cf | diff --git a/src/geshi.php b/src/geshi.php
index <HASH>..<HASH> 100644
--- a/src/geshi.php
+++ b/src/geshi.php
@@ -2061,7 +2061,7 @@ class GeSHi {
//All this formats are matched case-insensitively!
static $numbers_format = array(
GESHI_NUMBER_INT_BASIC =>
- '(?:(?<![0-9a-z_\.%])|(?<=\.\.))(?<![\d\.]e[+\-])([1-9]\d*?|0)(?![0-9a-z]|\.(?:[eE][+\-]?)?\d)',
+ '(?:(?<![0-9a-z_\.%$@])|(?<=\.\.))(?<![\d\.]e[+\-])([1-9]\d*?|0)(?![0-9a-z]|\.(?:[eE][+\-]?)?\d)',
GESHI_NUMBER_INT_CSTYLE =>
'(?<![0-9a-z_\.%])(?<![\d\.]e[+\-])([1-9]\d*?|0)l(?![0-9a-z]|\.(?:[eE][+\-]?)?\d)',
GESHI_NUMBER_BIN_SUFFIX =>
@@ -3444,6 +3444,7 @@ class GeSHi {
// Highlight numbers. As of 1.0.8 we support different types of numbers
$numbers_found = false;
+
if ($this->lexic_permissions['NUMBERS'] && preg_match($this->language_data['PARSER_CONTROL']['NUMBERS']['PRECHECK_RX'], $stuff_to_parse )) {
$numbers_found = true; | fix: Disallow $ and @ before integer literals (compatibility with MOS <I>/<I>k asm files) | GeSHi_geshi-1.0 | train |
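
The effect of widening the lookbehind class is easy to verify: the updated GESHI_NUMBER_INT_BASIC expression uses only fixed-width lookbehinds, so Python's re engine accepts it verbatim. The assembler lines below are illustrative; '$' conventionally prefixes hex literals in MOS assemblers, and '@' is used by some dialects as well:

import re

INT_BASIC = re.compile(
    r'(?:(?<![0-9a-z_\.%$@])|(?<=\.\.))(?<![\d\.]e[+\-])'
    r'([1-9]\d*?|0)(?![0-9a-z]|\.(?:[eE][+\-]?)?\d)',
    re.IGNORECASE)  # GeSHi matches these formats case-insensitively

print(INT_BASIC.search("lda 42"))    # still matches '42'
print(INT_BASIC.search("lda $42"))   # None: '$'-prefixed literal is skipped now
print(INT_BASIC.search("ora @17"))   # None: '@'-prefixed literal likewise
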