Columns:
hash: stringlengths 40 to 40
diff: stringlengths 131 to 114k
message: stringlengths 7 to 980
project: stringlengths 5 to 67
split: stringclasses, 1 value
567f8f46eb85909fe540a1ca9ad8cd62d52e8c71
diff --git a/pdb.py b/pdb.py index <HASH>..<HASH> 100644 --- a/pdb.py +++ b/pdb.py @@ -704,8 +704,8 @@ class Pdb(pdb.Pdb, ConfigurableClass, object): and (cmd in self.curframe.f_globals or cmd in self.curframe_locals) ) or arg.startswith("="): cmd, arg, newline = None, None, line - elif cmd == "list" and arg.startswith("("): - # heuristic: handle "list(..." as the builtin. + elif arg.startswith("(") and cmd in ("list", "next"): + # heuristic: handle "list(...", "next(..." etc as builtin. cmd, arg, newline = None, None, line # Fix cmd to not be None when used in completions. diff --git a/testing/test_pdb.py b/testing/test_pdb.py index <HASH>..<HASH> 100644 --- a/testing/test_pdb.py +++ b/testing/test_pdb.py @@ -1071,6 +1071,8 @@ def test_parseline_with_existing_command(): (None, None, 'a = ') # print(pdb.local.GLOBAL_PDB.parseline("list()")) (None, None, 'list()') +# print(pdb.local.GLOBAL_PDB.parseline("next(my_iter)")) +(None, None, 'next(my_iter)') # c 42 # cont
parseline: handle `next()` as builtin also (#<I>) Without this `next(foo)` would call the `do_next` command.
antocuni_pdb
train
5dc1299e9e6808ab20e3b0d8b50b4a8a364b7cea
diff --git a/CHANGELOG.md b/CHANGELOG.md index <HASH>..<HASH> 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,7 +2,8 @@ #### Additions * `AIXM::Component::FATO` -* `AIXM::Component::Helipad#helicopter_class` and `AIXM::Component::Helipad#marking` +* `AIXM::Component::Helipad#helicopter_class` and `AIXM::Component::Helipad#marking` +* `AIXM::XY#seconds?` to detect possibly rounded or estimated coordinates #### Changes * Generate `Airport#id` from region and `Airport#name` diff --git a/lib/aixm/xy.rb b/lib/aixm/xy.rb index <HASH>..<HASH> 100644 --- a/lib/aixm/xy.rb +++ b/lib/aixm/xy.rb @@ -69,6 +69,12 @@ module AIXM end end + # @return [Boolean] +false+ if both longitude and latitude have zero DMS + # seconds which may indicate rounded or estimated coordinates + def seconds? + !(long.to_dms[-6,5].to_f.zero? && lat.to_dms[-6,5].to_f.zero?) + end + # @return [AIXM::Component::Geometry::Point] convert to point def to_point AIXM.point(xy: self) diff --git a/spec/lib/aixm/xy_spec.rb b/spec/lib/aixm/xy_spec.rb index <HASH>..<HASH> 100644 --- a/spec/lib/aixm/xy_spec.rb +++ b/spec/lib/aixm/xy_spec.rb @@ -111,6 +111,17 @@ describe AIXM::XY do end end + describe :seconds? do + it "must detect coordinates with zero DMS seconds" do + AIXM.xy(lat: %q(44°33'00"N), long: %q(004°03'00"E)).wont_be :seconds? + AIXM.xy(lat: %q(44°33'00.01"N), long: %q(004°03'00"E)).must_be :seconds? + AIXM.xy(lat: %q(44°33'00"N), long: %q(004°03'00.01"E)).must_be :seconds? + AIXM.xy(lat: %q(47°29'10"N), long: %q(000°33'15"W)).must_be :seconds? + AIXM.xy(lat: %q(44°36'50"N), long: %q(004°23'50"E)).must_be :seconds? + AIXM.xy(lat: %q(44°48'00"N), long: %q(000°34'27"W)).must_be :seconds? + end + end + describe :to_point do subject do AIXM.xy(lat: %q(44°00'07.63"N), long: %q(004°45'07.81"E))
AIXM::XY#seconds? to detect possibly rounded or estimated coordinates
svoop_aixm
train
b455b2673cf743ea9b4e7ca85f0425d2aaad2f8e
diff --git a/UserAgentParserEnhanced.php b/UserAgentParserEnhanced.php index <HASH>..<HASH> 100644 --- a/UserAgentParserEnhanced.php +++ b/UserAgentParserEnhanced.php @@ -335,17 +335,17 @@ class UserAgentParserEnhanced protected function getOsRegexes() { - return Spyc::YAMLLoad(__DIR__ . self::$regexesDir . self::$osRegexesFile); + return Spyc::YAMLLoad(dirname(__FILE__) . self::$regexesDir . self::$osRegexesFile); } protected function getBrowserRegexes() { - return Spyc::YAMLLoad(__DIR__ . self::$regexesDir . self::$browserRegexesFile); + return Spyc::YAMLLoad(dirname(__FILE__) . self::$regexesDir . self::$browserRegexesFile); } protected function getMobileRegexes() { - return Spyc::YAMLLoad(__DIR__ . self::$regexesDir . self::$mobileRegexesFile); + return Spyc::YAMLLoad(dirname(__FILE__) . self::$regexesDir . self::$mobileRegexesFile); } public function parse()
DeviceDetection compatibility with PHP <I>
matomo-org_device-detector
train
c5853d515ff04ba72d2fc950588c2da1b467f074
diff --git a/core/src/main/java/cj/restspecs/core/RestSpec.java b/core/src/main/java/cj/restspecs/core/RestSpec.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/cj/restspecs/core/RestSpec.java +++ b/core/src/main/java/cj/restspecs/core/RestSpec.java @@ -42,6 +42,7 @@ import java.io.InputStream; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.util.*; +import java.util.concurrent.atomic.AtomicReference; import org.apache.commons.io.IOUtils; import org.codehaus.jackson.JsonNode; @@ -194,7 +195,8 @@ public class RestSpec { } } - return ""; + String parameterNotFoundMessage = String.format("Parameter name '%s' not found in specification.", parameterName); + throw new RuntimeException(parameterNotFoundMessage); } public List<String> queryParameterValues(String parameterName) { @@ -215,7 +217,12 @@ public class RestSpec { } } - return values; + if (values.isEmpty()) { + String parameterNotFoundMessage = String.format("Parameter name '%s' not found in specification.", parameterName); + throw new RuntimeException(parameterNotFoundMessage); + } else { + return values; + } } /** diff --git a/core/src/test/java/cj/restspecifications/core/RestSpecTest.java b/core/src/test/java/cj/restspecifications/core/RestSpecTest.java index <HASH>..<HASH> 100644 --- a/core/src/test/java/cj/restspecifications/core/RestSpecTest.java +++ b/core/src/test/java/cj/restspecifications/core/RestSpecTest.java @@ -390,4 +390,24 @@ public class RestSpecTest { assertThat(spec.queryParameterNames(), equalTo(asList("answer"))); assertThat(spec.queryParameterValues("answer"), equalTo(asList("yes", "no", "maybe"))); } + + @Test + public void whenAskingForAQueryParameterNameThatDoesNotExistThrowAnExceptionBecauseYouProbablyHaveATestLogicError() { + String badSpecJson = "{ \"url\": \"/spelling?mistake=not-me\" }"; + RestSpec spec = new RestSpec("badSpecJson", new StringLoader(badSpecJson)); + + try { + spec.queryParameterValue("misteak"); + fail("Should have thrown an exception invoking queryParameterValue because the parameter 'misteak' does not exist"); + } catch (RuntimeException expected) { + assertThat(expected.getMessage(), equalTo("Parameter name 'misteak' not found in specification.")); + } + + try { + spec.queryParameterValues("misstake"); + fail("Should have thrown an exception invoking queryParameterValues because the parameter 'misstake' does not exist"); + } catch (RuntimeException expected) { + assertThat(expected.getMessage(), equalTo("Parameter name 'misstake' not found in specification.")); + } + } }
When one queries for a parameter name in their test, and it does not exist, we will throw an exception because most likely there is a typo in the test. (pair [dsiefert kramakrishnan])
cjdev_rest-specs
train
161db94caded01ce6633c385e291799f22ca242e
diff --git a/jaide/core.py b/jaide/core.py index <HASH>..<HASH> 100644 --- a/jaide/core.py +++ b/jaide/core.py @@ -116,7 +116,7 @@ class Jaide(): self._scp = "" self.conn_type = connect self._in_cli = False - self._filename = "" + self._filename = None # make the connection to the device if connect: self.connect() @@ -452,7 +452,8 @@ class Jaide(): (float(sent) / float(size) * 100), path.normpath(filename)) output += (' ' * (120 - len(output))) if filename != self._filename: - print('') + if self._filename is not None: + print('') self._filename = filename print(output, end='\r') @@ -767,8 +768,10 @@ class Jaide(): @type src: str @param dest: destination string of where to put the file(s)/dir @type dest: str - @param progress: set to true to have the progress callback be - | returned as the operation is copying. + @param progress: set to `True` to have the progress callback be + | printed as the operation is copying. Can also pass + | a function pointer to handoff the progress callback + | elsewhere. @type progress: bool @param preserve_times: Set to false to have the times of the copied | files set at the time of copy. @@ -777,10 +780,17 @@ class Jaide(): @rtype: bool """ # set up the progress callback if they want to see the process - self._scp._progress = self._copy_status if progress else None + if progress is True: + self._scp._progress = self._copy_status + # redirect to another function + elif hasattr(progress, '__call__'): + self._scp._progress = progress + else: # no progress callback + self._scp._progress = None # retrieve the file(s) self._scp.get(src, dest, recursive=True, preserve_times=preserve_times) - return True + self._filename = None + return False @check_instance def scp_push(self, src, dest, progress=False, preserve_times=True): @@ -790,8 +800,10 @@ class Jaide(): @type src: str @param dest: destination string of where to put the file(s)/dir @type dest: str - @param progress: set to true to have the progress callback be - | returned as the operation is copying. + @param progress: set to `True` to have the progress callback be + | printed as the operation is copying. Can also pass + | a function pointer to handoff the progress callback + | elsewhere. @type progress: bool @param preserve_times: Set to false to have the times of the copied | files set at the time of copy. @@ -800,10 +812,17 @@ class Jaide(): @rtype: bool """ # set up the progress callback if they want to see the process - self._scp._progress = self._copy_status if progress else None + if progress is True: + self._scp._progress = self._copy_status + # redirect to another function + elif hasattr(progress, '__call__'): + self._scp._progress = progress + else: # no progress callback + self._scp._progress = None # push the file(s) self._scp.put(src, dest, recursive=True, preserve_times=preserve_times) - return True + self._filename = None + return False @check_instance def shell_cmd(self, command=""):
SCP function progress callback update SCP functions can now accept a function pointer as the progress parameter, to redirect the callback to a user defined location, rather than to the _copy_status() function.
NetworkAutomation_jaide
train
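The commit above changes Jaide's `scp_pull`/`scp_push` so that `progress` may be a callable instead of just `True`/`False`. A minimal usage sketch of that option follows; the `Jaide` constructor arguments and the `(filename, size, sent)` callback signature are illustrative assumptions, not taken from the diff.

```python
# Hypothetical sketch: constructor arguments and the progress-callback signature
# (filename, size, sent) are assumed, not confirmed by the commit above.
from jaide.core import Jaide

def my_progress(filename, size, sent):
    # Receive copy progress instead of Jaide's built-in _copy_status() printer.
    pct = float(sent) / float(size) * 100 if size else 0.0
    print("{}: {:.1f}%".format(filename, pct))

session = Jaide("198.51.100.1", "user", "password")  # assumed parameters
session.scp_pull("/var/log/messages", "./", progress=my_progress)
```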
9ea62e0c9f64321d56f5e03c5b10d3c5ec771ff5
diff --git a/umbra/ui/highlighters.py b/umbra/ui/highlighters.py index <HASH>..<HASH> 100644 --- a/umbra/ui/highlighters.py +++ b/umbra/ui/highlighters.py @@ -29,7 +29,7 @@ import foundations.common import foundations.core as core import foundations.dataStructures import foundations.exceptions -from foundations.dag import AbstractCompositeNode +from foundations.nodes import AbstractCompositeNode from umbra.globals.constants import Constants from umbra.ui.nodes import DefaultNode from umbra.ui.nodes import FormatNode diff --git a/umbra/ui/models.py b/umbra/ui/models.py index <HASH>..<HASH> 100644 --- a/umbra/ui/models.py +++ b/umbra/ui/models.py @@ -35,7 +35,7 @@ import foundations.core as core import foundations.exceptions import foundations.strings as strings import umbra.ui.nodes -from foundations.dag import AbstractCompositeNode +from foundations.nodes import AbstractCompositeNode from umbra.globals.constants import Constants #********************************************************************************************************************** diff --git a/umbra/ui/nodes.py b/umbra/ui/nodes.py index <HASH>..<HASH> 100644 --- a/umbra/ui/nodes.py +++ b/umbra/ui/nodes.py @@ -25,8 +25,8 @@ from PyQt4.QtCore import Qt #********************************************************************************************************************** import foundations.core as core import foundations.exceptions -from foundations.dag import AbstractCompositeNode -from foundations.dag import Attribute +from foundations.nodes import AbstractCompositeNode +from foundations.nodes import Attribute from umbra.globals.constants import Constants #**********************************************************************************************************************
Implement "Foundations" package changes.
KelSolaar_Umbra
train
ece0cde51e34698a5f172e426e139988151a0289
diff --git a/richtextfx/src/main/java/org/fxmisc/richtext/skin/StyledTextAreaBehavior.java b/richtextfx/src/main/java/org/fxmisc/richtext/skin/StyledTextAreaBehavior.java index <HASH>..<HASH> 100644 --- a/richtextfx/src/main/java/org/fxmisc/richtext/skin/StyledTextAreaBehavior.java +++ b/richtextfx/src/main/java/org/fxmisc/richtext/skin/StyledTextAreaBehavior.java @@ -424,7 +424,8 @@ public class StyledTextAreaBehavior implements Behavior { private void firstLeftPress(CharacterHit hit) { clearTargetCaretOffset(); IndexRange selection = area.getSelection(); - if(selection.getLength() != 0 && + if(area.isEditable() && + selection.getLength() != 0 && hit.getCharacterIndex() >= selection.getStart() && hit.getCharacterIndex() < selection.getEnd()) { // press inside selection
Disallow editing by selection dragging when not editable. Fixes #<I>.
FXMisc_RichTextFX
train
5f982e11345b11b295a73359706de0c452f02222
diff --git a/packages/aws-amplify-vue/__tests__/SignUp.test.js b/packages/aws-amplify-vue/__tests__/SignUp.test.js index <HASH>..<HASH> 100644 --- a/packages/aws-amplify-vue/__tests__/SignUp.test.js +++ b/packages/aws-amplify-vue/__tests__/SignUp.test.js @@ -82,7 +82,7 @@ describe('SignUp', () => { label: 'Username', key: 'username', required: true, - type: 'string', + type: 'text', displayOrder: 1, }, { @@ -96,7 +96,7 @@ describe('SignUp', () => { label: 'Email', key: 'email', required: true, - type: 'string', + type: 'text', displayOrder: 3, }, {
fix(@aws-amplify/aws-amplify-vue): Change the tests to check for the HTML valid attribute value 'text' rather than 'string'
aws-amplify_amplify-js
train
6ea0590805ec24a6bc25165e694419121f5da891
diff --git a/Core/Bundle.php b/Core/Bundle.php index <HASH>..<HASH> 100644 --- a/Core/Bundle.php +++ b/Core/Bundle.php @@ -374,6 +374,9 @@ class Bundle extends \Asgard\Core\BundleLoader { $migrationCreate = new \Asgard\Migration\Commands\CreateCommand($container['kernel']['root'].'/migrations'); $container['console']->add($migrationCreate); + $compile = new \Asgard\Core\Commands\CompileCommand($container['config']['compile'], $container['kernel']->getCompiledFile()); + $container['console']->add($compile); + $showEnv = new \Asgard\Core\Commands\ShowEnvironmentCommand($container['kernel']); $container['console']->add($showEnv); diff --git a/Core/Commands/CompileCommand.php b/Core/Commands/CompileCommand.php index <HASH>..<HASH> 100644 --- a/Core/Commands/CompileCommand.php +++ b/Core/Commands/CompileCommand.php @@ -17,6 +17,27 @@ class CompileCommand extends \Asgard\Console\Command { * {@inheritDoc} */ protected $description = 'Compile classes into one file for better performance'; + /** + * Flag to compile classes. + * @var boolean + */ + protected $compile; + /** + * Compiled classes file path. + * @var string + */ + protected $compiledClassesFile; + + /** + * Constructor. + * @param boolean $compile + * @param string $compiledClassesFile + */ + public function __construct($compile, $compiledClassesFile) { + $this->compile = $compile; + $this->compiledClassesFile = $compiledClassesFile; + parent::__construct(); + } /** * {@inheritDoc} @@ -25,7 +46,12 @@ class CompileCommand extends \Asgard\Console\Command { $this->getApplication()->add(new \ClassPreloader\Command\PreCompileCommand); $container = $this->getContainer(); - $outputPath = $container['kernel']['root'].'/storage/compiled.php'; + if(!$this->compile) { + $this->comment('Do no compile classes because of configuration (compile).'); + return; + } + + $outputPath = $this->compiledClassesFile; $classes = require __DIR__.'/compile/classes.php'; @@ -34,5 +60,6 @@ class CompileCommand extends \Asgard\Console\Command { '--output' => $outputPath, '--strip_comments' => 1, ]); + $this->info('Classes have been compiled into: '.$outputPath.'.'); } } \ No newline at end of file diff --git a/Core/Kernel.php b/Core/Kernel.php index <HASH>..<HASH> 100644 --- a/Core/Kernel.php +++ b/Core/Kernel.php @@ -44,6 +44,11 @@ class Kernel implements \ArrayAccess { * @var array */ protected $onShutdown; + /** + * Compiled classes file path. + * @var string + */ + protected $compiledFile; /** * Constructor. @@ -143,14 +148,16 @@ class Kernel implements \ArrayAccess { register_shutdown_function([$this, 'shutdownFunction']); $this->addShutdownCallback([$errorHandler, 'shutdownFunction']); - if($this->getEnv() === 'prod' && file_exists($this->params['root'].'/storage/compiled.php')) - include_once $this->params['root'].'/storage/compiled.php'; + $compiledFile = $this->getCompiledFile(); + if($compiledFile && file_exists($compiledFile)) + include_once $compiledFile; return $this; } /** * Load the bundles. + * @return Kernel $this */ public function loadBundles() { if($this->loaded) @@ -454,4 +461,23 @@ class Kernel implements \ArrayAccess { return; return $this->params[$offset]; } + + /** + * Set the compiled classes file. + * @return Kernel $this + */ + public function setCompiledFile($compiledFile) { + $this->compiledFile = $compiledFile; + return $this; + } + + /** + * Get the compiled classes file. + * @return string + */ + public function getCompiledFile() { + if($this->compiledFile === null) + $this->compiledFile = $this->params['root'].'/storage/compiled.php';#default path + return $this->compiledFile; + } } \ No newline at end of file
core: add a parameter for compiled classes file path and a configurable option to compile them
asgardphp_asgard
train
d2afc34ba6efe60f9123b03c72f3f7535c83c2a4
diff --git a/validate.js b/validate.js index <HASH>..<HASH> 100644 --- a/validate.js +++ b/validate.js @@ -377,6 +377,7 @@ function checkPathItem(pathItem, openapi, options) { contextAppend(options, o); var op = pathItem[o]; if (o === '$ref') { + should(op).be.ok(); op.should.have.type('string'); should(op.startsWith('#/')).equal(false,'PathItem $refs must be external'); }
validate; hardening last change against nulls i.e. incorrectly quoted $refs looking like yaml comments.
wework_speccy
train
c1594c68bc3ae3a94c824ff2a71033331d30d5a6
diff --git a/pypot/creatures/ik.py b/pypot/creatures/ik.py index <HASH>..<HASH> 100644 --- a/pypot/creatures/ik.py +++ b/pypot/creatures/ik.py @@ -66,7 +66,21 @@ class IKChain(Chain): return self.forward_kinematics(angles)[:3, 0] @property + def pose(self): + """ + Gives the 4x4 affine transformation matrix of the current position + *Used for debug* + :return: 4x4 affine transformation matrix (float) + """ + angles = self.convert_to_ik_angles(self.joints_position) + return self.forward_kinematics(angles) + + @property def rpy(self): + """ + Gives the rpy values of the current position + :return: roll, pitch, yaw (float) + """ angles = self.convert_to_ik_angles(self.joints_position) R = self.forward_kinematics(angles) yaw = arctan2(R[2][1], R[1][1]) @@ -74,8 +88,14 @@ class IKChain(Chain): roll = arctan2(R[3][2], R[3][3]) return roll, pitch, yaw - @staticmethod - def rpy_to_rotation_matrix(r, p, y): + def rpy_to_rotation_matrix(self, r, p, y): + """ + converts rpy to a 3x3 rotation matrix + :param r: roll (float) + :param p: pitch (float) + :param y: yaw (float) + :return: 3x3 rotation matrix + """ return rpy_matrix(r, p, y) def goto(self, position, orientation, duration, wait=False, accurate=False): diff --git a/pypot/server/rest.py b/pypot/server/rest.py index <HASH>..<HASH> 100644 --- a/pypot/server/rest.py +++ b/pypot/server/rest.py @@ -7,7 +7,6 @@ from pathlib import Path class RESTRobot(object): - """ REST API for a Robot. Through the REST API you can currently access: @@ -214,12 +213,26 @@ class RESTRobot(object): return False def ik_endeffector(self, chain): + """ + Gives position & orientation of the end effector + :param chain: name of the IK chain + :return: tuple of strings for position & orientation ("x,y,z", "Rx.x,Rx.y,Rx.z") + """ c = getattr(self.robot, chain) position = ','.join(map(str, list(round(c.position, 4)))) orientation = ','.join(map(str, list(round(c.orientation, 4)))) return position, orientation def ik_goto(self, chain, xyz, rot, duration, wait=False): + """ + goto a position defined by a xyz and/or an orientation + :param chain: name of the IK chain + :param xyz: cartesian coordinates (list of floats, in m) + :param rot: [Rx.x, Rx.y, Rx.z] (see https://www.brainvoyager.com/bv/doc/UsersGuide/CoordsAndTransforms/SpatialTransformationMatrices.html) + :param duration: duration of the movement (float, in s) + :param wait: do we wait the end of the move before giving the answer ? (boolean) + :return: Gives position & orientation of the end effector after the move + """ c = getattr(self.robot, chain) c.goto(xyz, rot, duration, wait) return self.ik_endeffector(chain)
feat(IK): Added comments. IK files were lacking comments to be read by other developers.
poppy-project_pypot
train
2441c168bf16618ec3d750c2b0ef6f2cbe5f9158
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -17,7 +17,10 @@ import sys from os.path import dirname, join -from pip.req import parse_requirements +try: + from pip._internal.req import parse_requirements +except ImportError: + from pip.req import parse_requirements from setuptools import ( find_packages,
fix bug with pip <I>
ricequant_rqalpha
train
0decd46dd069285e95278bc4f6f2a50fd4ba0e3a
diff --git a/savannaclient/api/shell.py b/savannaclient/api/shell.py index <HASH>..<HASH> 100644 --- a/savannaclient/api/shell.py +++ b/savannaclient/api/shell.py @@ -13,8 +13,11 @@ # License for the specific language governing permissions and limitations # under the License. +import argparse +import inspect import json from savannaclient.nova import utils +import sys def _print_list_field(field): @@ -266,7 +269,7 @@ def do_cluster_delete(cs, args): # # node-group-template-show --name <template>|--id <template_id> [--json] # -# TODO(mattf): node-group-template-create +# node-group-template-create [--json <file>] # # node-group-template-delete --name <template>|--id <template_id> # @@ -301,6 +304,22 @@ def do_node_group_template_show(cs, args): _show_node_group_template(template) +@utils.arg('--json', + default=sys.stdin, + type=argparse.FileType('r'), + help='JSON representation of node group template') +def do_node_group_template_create(cs, args): + """Create a node group template.""" + # TODO(mattf): improve template validation, e.g. template w/o name key + template = json.loads(args.json.read()) + valid_args = inspect.getargspec(cs.node_group_templates.create).args + for name in template.keys(): + if name not in valid_args: + # TODO(mattf): make this verbose - bug/1271147 + del template[name] + _show_node_group_template(cs.node_group_templates.create(**template)) + + # TODO(mattf): Add --name #@utils.arg('--name', # metavar='<template>',
Add node-group-template-create to CLI node-group-template-create takes a json data structure and passes it on to the Savanna API. The data structure is read from stdin by default or from a file provided by the --json parameter. Change-Id: I9f<I>df<I>d<I>e<I>e<I>ccafee<I>c<I>c9d Implements: blueprint python-savannaclient-cli
openstack_python-saharaclient
train
7e5c64ca7fddc672339a70229042d7d00933d3a2
diff --git a/provisioner/salt-masterless/provisioner.go b/provisioner/salt-masterless/provisioner.go index <HASH>..<HASH> 100644 --- a/provisioner/salt-masterless/provisioner.go +++ b/provisioner/salt-masterless/provisioner.go @@ -5,11 +5,10 @@ package saltmasterless import ( "errors" "fmt" - "github.com/mitchellh/mapstructure" + "github.com/mitchellh/packer/builder/common" "github.com/mitchellh/packer/packer" "os" "path/filepath" - "sort" "strings" ) @@ -17,7 +16,7 @@ var Ui packer.Ui const DefaultTempConfigDir = "/tmp/salt" -type config struct { +type Config struct { // If true, run the salt-bootstrap script SkipBootstrap bool `mapstructure:"skip_bootstrap"` BootstrapArgs string `mapstructure:"bootstrap_args"` @@ -30,52 +29,29 @@ type config struct { } type Provisioner struct { - config config + config Config } func (p *Provisioner) Prepare(raws ...interface{}) error { - var md mapstructure.Metadata - decoderConfig := &mapstructure.DecoderConfig{ - Metadata: &md, - Result: &p.config, - } - - decoder, err := mapstructure.NewDecoder(decoderConfig) + md, err := common.DecodeConfig(&p.config, raws...) if err != nil { return err } - for _, raw := range raws { - err := decoder.Decode(raw) - if err != nil { - return err - } + if p.config.TempConfigDir == "" { + p.config.TempConfigDir = DefaultTempConfigDir } // Accumulate any errors - errs := make([]error, 0) - - // Unused keys are errors - if len(md.Unused) > 0 { - sort.Strings(md.Unused) - for _, unused := range md.Unused { - if unused != "type" && !strings.HasPrefix(unused, "packer_") { - errs = append( - errs, fmt.Errorf("Unknown configuration key: %s", unused)) - } - } - } + errs := common.CheckUnusedConfig(md) if p.config.LocalStateTree == "" { - errs = append(errs, errors.New("Please specify a local_state_tree")) - } - - if p.config.TempConfigDir == "" { - p.config.TempConfigDir = DefaultTempConfigDir + errs = packer.MultiErrorAppend(errs, + errors.New("Please specify a local_state_tree")) } - if len(errs) > 0 { - return &packer.MultiError{errs} + if errs != nil && len(errs.Errors) > 0 { + return errs } return nil
provisioner/salt-masterless: use common lib for config decoding /cc @rgarcia - Since you opened the pull request, common config decoding stuff has been pulled out into the common Package. This simplifies a lot of code.
hashicorp_packer
train
ec8d8cc901510e44a9404965dfdbf0c3f994136f
diff --git a/lib/puppet/util/colors.rb b/lib/puppet/util/colors.rb index <HASH>..<HASH> 100644 --- a/lib/puppet/util/colors.rb +++ b/lib/puppet/util/colors.rb @@ -79,8 +79,9 @@ module Puppet::Util::Colors # We define console_has_color? at load time since it's checking the # underlying platform which will not change, and we don't want to perform # the check every time we use logging - if Puppet::Util::Platform.windows? - # We're on windows, need win32console for color to work + if Puppet::Util::Platform.windows? && RUBY_VERSION =~ /^1\./ + # We're on windows and using ruby less than v2 + # so we need win32console for color to work begin require 'ffi' require 'win32console' diff --git a/spec/unit/util/colors_spec.rb b/spec/unit/util/colors_spec.rb index <HASH>..<HASH> 100755 --- a/spec/unit/util/colors_spec.rb +++ b/spec/unit/util/colors_spec.rb @@ -66,4 +66,16 @@ describe Puppet::Util::Colors do end end end + + context "on Windows in Ruby 1.x", :if => Puppet.features.microsoft_windows? && RUBY_VERSION =~ /^1./ do + it "should load win32console" do + Gem.loaded_specs["win32console"].should_not be_nil + end + end + + context "on Windows in Ruby 2.x", :if => Puppet.features.microsoft_windows? && RUBY_VERSION =~ /^2./ do + it "should not load win32console" do + Gem.loaded_specs["win32console"].should be_nil + end + end end
(PUP-<I>) Don't use win<I>console if ruby v2 Some of this work was done with PUP-<I> (support bundler workflow). That limits win<I>console down to being installed/depended upon only for ruby 1.x. This builds on top of that to only setup functions for colorizing output when using Ruby 1.x on Windows.
puppetlabs_puppet
train
3d0edb41452077f34c543cc9996e23872ca46938
diff --git a/modules/admin/src/ngrest/base/Plugin.php b/modules/admin/src/ngrest/base/Plugin.php index <HASH>..<HASH> 100644 --- a/modules/admin/src/ngrest/base/Plugin.php +++ b/modules/admin/src/ngrest/base/Plugin.php @@ -77,7 +77,7 @@ abstract class Plugin extends Component throw new Exception("Plugin attributes name, alias and i18n must be configured."); } - $this->addEvent(NgRestModel::EVENT_BEFORE_VALIDATE, 'onSave'); + $this->addEvent(NgRestModel::EVENT_AFTER_VALIDATE, 'onSave'); $this->addEvent(NgRestModel::EVENT_AFTER_FIND, 'onFind'); $this->addEvent(NgRestModel::EVENT_AFTER_NGREST_FIND, 'onListFind'); $this->addEvent(NgRestModel::EVENT_AFTER_NGREST_UPDATE_FIND, 'onExpandFind');
use after validate event for ngrest attribute #<I>
luyadev_luya
train
063683a48df3b1c156955f97e0421e27825a9349
diff --git a/lxd/db/certificates.go b/lxd/db/certificates.go index <HASH>..<HASH> 100644 --- a/lxd/db/certificates.go +++ b/lxd/db/certificates.go @@ -82,8 +82,9 @@ func (cert *Certificate) ToAPIType() string { return api.CertificateTypeUnknown } -// ToAPI converts the database Certificate struct to an api.Certificate entry. -func (cert *Certificate) ToAPI() api.Certificate { +// ToAPI converts the database Certificate struct to an api.Certificate +// entry filling fields from the database as necessary. +func (cert *Certificate) ToAPI(tx *ClusterTx) (*api.Certificate, error) { resp := api.Certificate{} resp.Fingerprint = cert.Fingerprint resp.Certificate = cert.Certificate @@ -91,7 +92,17 @@ func (cert *Certificate) ToAPI() api.Certificate { resp.Restricted = cert.Restricted resp.Type = cert.ToAPIType() - return resp + projects, err := tx.GetCertificateProjects(cert.ID) + if err != nil { + return nil, err + } + + resp.Projects = make([]string, len(projects)) + for i, p := range projects { + resp.Projects[i] = p.Name + } + + return &resp, nil } // CertificateFilter specifies potential query parameter fields.
lxd/db: Use ClusterTx with ToAPI for filling reference fields As we don't necessarily want to query every table associated with an entity each time, pass in a transaction to ToAPI so that the fields can at least be filled for the public facing representations.
lxc_lxd
train
8c83ce4b880dd8c7a15e46416f72e77a54f4deaf
diff --git a/hazelcast/src/main/java/com/hazelcast/client/impl/ClientEndpointImpl.java b/hazelcast/src/main/java/com/hazelcast/client/impl/ClientEndpointImpl.java index <HASH>..<HASH> 100644 --- a/hazelcast/src/main/java/com/hazelcast/client/impl/ClientEndpointImpl.java +++ b/hazelcast/src/main/java/com/hazelcast/client/impl/ClientEndpointImpl.java @@ -282,8 +282,12 @@ public final class ClientEndpointImpl implements ClientEndpoint { public void provideDynamicMetrics(MetricDescriptor descriptor, MetricsCollectionContext context) { ClientStatistics clientStatistics = statsRef.get(); if (clientStatistics != null && clientStatistics.metricsBlob() != null) { - long timestamp = clientStatistics.timestamp(); byte[] metricsBlob = clientStatistics.metricsBlob(); + if (metricsBlob.length == 0) { + // zero length means that the client does not support the new format + return; + } + long timestamp = clientStatistics.timestamp(); MetricConsumer consumer = new MetricConsumer() { @Override public void consumeLong(MetricDescriptor descriptor, long value) {
Remove warnings for old client stats format (#<I>)
hazelcast_hazelcast
train
0f69b924fbda6a442d721b10ece38ccfc6b67275
diff --git a/src/transformers/models/auto/tokenization_auto.py b/src/transformers/models/auto/tokenization_auto.py index <HASH>..<HASH> 100644 --- a/src/transformers/models/auto/tokenization_auto.py +++ b/src/transformers/models/auto/tokenization_auto.py @@ -225,6 +225,13 @@ else: None, ), ), + ( + "xglm", + ( + "XGLMTokenizer" if is_sentencepiece_available() else None, + "XGLMTokenizerFast" if is_tokenizers_available() else None, + ), + ), ] ) diff --git a/src/transformers/models/xglm/__init__.py b/src/transformers/models/xglm/__init__.py index <HASH>..<HASH> 100644 --- a/src/transformers/models/xglm/__init__.py +++ b/src/transformers/models/xglm/__init__.py @@ -18,14 +18,22 @@ from typing import TYPE_CHECKING # rely on isort to merge the imports -from ...file_utils import _LazyModule, is_flax_available, is_tokenizers_available, is_torch_available +from ...file_utils import ( + _LazyModule, + is_flax_available, + is_sentencepiece_available, + is_tokenizers_available, + is_torch_available, +) _import_structure = { "configuration_xglm": ["XGLM_PRETRAINED_CONFIG_ARCHIVE_MAP", "XGLMConfig"], - "tokenization_xglm": ["XGLMTokenizer"], } +if is_sentencepiece_available(): + _import_structure["tokenization_xglm"] = ["XGLMTokenizer"] + if is_tokenizers_available(): _import_structure["tokenization_xglm_fast"] = ["XGLMTokenizerFast"] @@ -48,7 +56,9 @@ if is_flax_available(): if TYPE_CHECKING: from .configuration_xglm import XGLM_PRETRAINED_CONFIG_ARCHIVE_MAP, XGLMConfig - from .tokenization_xglm import XGLMTokenizer + + if is_sentencepiece_available(): + from .tokenization_xglm import XGLMTokenizer if is_tokenizers_available(): from .tokenization_xglm_fast import XGLMTokenizerFast
[XGLMTokenizer] fix init and add in AutoTokenizer (#<I>)
huggingface_pytorch-pretrained-BERT
train
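The commit above registers XGLM with `AutoTokenizer` and guards the slow tokenizer behind the sentencepiece availability check. A minimal sketch of what that enables follows; the checkpoint name is an illustrative assumption, not taken from the diff, and either sentencepiece or tokenizers must be installed.

```python
# Illustrative only: the checkpoint name below is an assumption, not part of the commit.
from transformers import AutoTokenizer

# With this change, an "xglm" model type resolves to XGLMTokenizer (sentencepiece)
# or XGLMTokenizerFast (tokenizers) through AutoTokenizer.
tokenizer = AutoTokenizer.from_pretrained("facebook/xglm-564M")
print(tokenizer("Hello world").input_ids)
```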
95e1c45249ac4d719830fc4fbb8d107ec77d866c
diff --git a/tests/Http/HttpTestingFileFactoryTest.php b/tests/Http/HttpTestingFileFactoryTest.php index <HASH>..<HASH> 100644 --- a/tests/Http/HttpTestingFileFactoryTest.php +++ b/tests/Http/HttpTestingFileFactoryTest.php @@ -7,11 +7,17 @@ use PHPUnit\Framework\TestCase; /** * @requires extension gd + * + * @link https://www.php.net/manual/en/function.gd-info.php */ class HttpTestingFileFactoryTest extends TestCase { public function testImagePng() { + if (! $this->isGDSupported('PNG Support')) { + $this->markTestSkipped('Requires PNG support.'); + } + $image = (new FileFactory)->image('test.png', 15, 20); $info = getimagesize($image->getRealPath()); @@ -23,6 +29,10 @@ class HttpTestingFileFactoryTest extends TestCase public function testImageJpeg() { + if (! $this->isGDSupported('JPEG Support')) { + $this->markTestSkipped('Requires JPEG support.'); + } + $jpeg = (new FileFactory)->image('test.jpeg', 15, 20); $jpg = (new FileFactory)->image('test.jpg'); @@ -39,6 +49,10 @@ class HttpTestingFileFactoryTest extends TestCase public function testImageGif() { + if (! $this->isGDSupported('GIF Create Support')) { + $this->markTestSkipped('Requires GIF Create support.'); + } + $image = (new FileFactory)->image('test.gif'); $this->assertSame( @@ -49,6 +63,10 @@ class HttpTestingFileFactoryTest extends TestCase public function testImageWebp() { + if (! $this->isGDSupported('WebP Support')) { + $this->markTestSkipped('Requires Webp support.'); + } + $image = (new FileFactory)->image('test.webp'); $this->assertSame( @@ -59,6 +77,10 @@ class HttpTestingFileFactoryTest extends TestCase public function testImageWbmp() { + if (! $this->isGDSupported('WBMP Support')) { + $this->markTestSkipped('Requires WBMP support.'); + } + $image = (new FileFactory)->image('test.wbmp'); $this->assertSame( @@ -93,4 +115,19 @@ class HttpTestingFileFactoryTest extends TestCase (new FileFactory)->create('someaudio.webm')->getMimeType() ); } + + /** + * @param string $driver + * @return bool + */ + private function isGDSupported(string $driver = 'GD Version'): bool + { + $gdInfo = gd_info(); + + if (isset($gdInfo[$driver])) { + return $gdInfo[$driver]; + } + + return false; + } }
[9.x] skip image create tests if gd library is not supported (#<I>) * [9.x] skip image create test if gd library is not supported * fix styling * replace constants with direct usage
laravel_framework
train
d91ec18f3a268446354b2655ecb2d6588997fb1d
diff --git a/src/styles.js b/src/styles.js index <HASH>..<HASH> 100644 --- a/src/styles.js +++ b/src/styles.js @@ -36,20 +36,16 @@ module.exports = { margin: '0 15px 0 0', }, clearBtn: { - background: 'rgba(255, 255, 255, 0.498039)', - borderBottom: 'none', - borderImage: 'initial', - borderLeft: '1px solid rgba(0, 0, 0, 0.2)', - borderRight: 'none', - borderRadius: '4px 0px 0px', - borderTop: '1px solid rgba(0, 0, 0, 0.2)', + background: 'none', + border: 'none', bottom: '0px', color: 'orange', fontSize: '.9em', outline: 'none', padding: '5px 10px', - position: 'absolute', - right: '0px' + position: 'fixed', + right: '.8em', + bottom: '.8em' }, logIcon: { display: 'inline-block',
css changes to clean button
rafaelrozon_react-storybook-console
train
02ebfa06f78a590b607f86a44878c02b7a0f322f
diff --git a/presto-main/src/main/java/com/facebook/presto/memory/ClusterMemoryManager.java b/presto-main/src/main/java/com/facebook/presto/memory/ClusterMemoryManager.java index <HASH>..<HASH> 100644 --- a/presto-main/src/main/java/com/facebook/presto/memory/ClusterMemoryManager.java +++ b/presto-main/src/main/java/com/facebook/presto/memory/ClusterMemoryManager.java @@ -62,6 +62,7 @@ import static com.facebook.presto.SystemSessionProperties.getQueryMaxMemory; import static com.facebook.presto.SystemSessionProperties.resourceOvercommit; import static com.facebook.presto.memory.LocalMemoryManager.GENERAL_POOL; import static com.facebook.presto.memory.LocalMemoryManager.RESERVED_POOL; +import static com.facebook.presto.memory.LocalMemoryManager.SYSTEM_POOL; import static com.facebook.presto.spi.NodeState.ACTIVE; import static com.facebook.presto.spi.NodeState.SHUTTING_DOWN; import static com.facebook.presto.spi.StandardErrorCode.CLUSTER_OUT_OF_MEMORY; @@ -77,7 +78,10 @@ import static java.util.Objects.requireNonNull; public class ClusterMemoryManager implements ClusterMemoryPoolManager { + private static final Set<MemoryPoolId> POOLS = ImmutableSet.of(GENERAL_POOL, RESERVED_POOL, SYSTEM_POOL); + private static final Logger log = Logger.get(ClusterMemoryManager.class); + private final ExecutorService listenerExecutor = Executors.newSingleThreadExecutor(); private final InternalNodeManager nodeManager; private final LocationFactory locationFactory; @@ -100,7 +104,7 @@ public class ClusterMemoryManager private final Map<MemoryPoolId, List<Consumer<MemoryPoolInfo>>> changeListeners = new HashMap<>(); @GuardedBy("this") - private final Map<MemoryPoolId, ClusterMemoryPool> pools = new HashMap<>(); + private final Map<MemoryPoolId, ClusterMemoryPool> pools; @GuardedBy("this") private long lastTimeNotOutOfMemory = System.nanoTime(); @@ -133,6 +137,20 @@ public class ClusterMemoryManager this.coordinatorId = queryIdGenerator.getCoordinatorId(); this.enabled = serverConfig.isCoordinator(); this.killOnOutOfMemoryDelay = config.getKillOnOutOfMemoryDelay(); + + ImmutableMap.Builder<MemoryPoolId, ClusterMemoryPool> builder = ImmutableMap.builder(); + for (MemoryPoolId poolId : POOLS) { + ClusterMemoryPool pool = new ClusterMemoryPool(poolId); + builder.put(poolId, pool); + String objectName = ObjectNames.builder(ClusterMemoryPool.class, poolId.toString()).build(); + try { + exporter.export(objectName, pool); + } + catch (JmxException e) { + log.error(e, "Error exporting memory pool %s", poolId); + } + } + this.pools = builder.build(); } @Override @@ -364,37 +382,11 @@ public class ClusterMemoryManager .sum(); clusterMemoryBytes.set(totalClusterMemory); - Set<MemoryPoolId> activePoolIds = nodeMemoryInfos.stream() - .flatMap(info -> info.getPools().keySet().stream()) - .collect(toImmutableSet()); - - // Make a copy to materialize the set difference - Set<MemoryPoolId> removedPools = ImmutableSet.copyOf(difference(pools.keySet(), activePoolIds)); - for (MemoryPoolId removed : removedPools) { - unexport(pools.get(removed)); - pools.remove(removed); - if (changeListeners.containsKey(removed)) { - for (Consumer<MemoryPoolInfo> listener : changeListeners.get(removed)) { - listenerExecutor.execute(() -> listener.accept(new MemoryPoolInfo(0, 0, 0, ImmutableMap.of(), ImmutableMap.of()))); - } - } - } - for (MemoryPoolId id : activePoolIds) { - ClusterMemoryPool pool = pools.computeIfAbsent(id, poolId -> { - ClusterMemoryPool newPool = new ClusterMemoryPool(poolId); - String objectName = ObjectNames.builder(ClusterMemoryPool.class, newPool.getId().toString()).build(); - try { - exporter.export(objectName, newPool); - } - catch (JmxException e) { - log.error(e, "Error exporting memory pool %s", poolId); - } - return newPool; - }); + for (ClusterMemoryPool pool : pools.values()) { pool.update(nodeMemoryInfos, queryCounts.getOrDefault(pool.getId(), 0)); - if (changeListeners.containsKey(id)) { + if (changeListeners.containsKey(pool.getId())) { MemoryPoolInfo info = pool.getInfo(); - for (Consumer<MemoryPoolInfo> listener : changeListeners.get(id)) { + for (Consumer<MemoryPoolInfo> listener : changeListeners.get(pool.getId())) { listenerExecutor.execute(() -> listener.accept(info)); } } @@ -408,7 +400,6 @@ public class ClusterMemoryManager for (ClusterMemoryPool pool : pools.values()) { unexport(pool); } - pools.clear(); } finally { listenerExecutor.shutdownNow();
Remove redundant code from ClusterMemoryManager Presto memory pools are not dynamic, so ClusterMemoryManager can assert that the GENERAL, RESERVED and SYSTEM pools exist and act accordingly.
prestodb_presto
train
498970147b3ca5f7444507d11c2bd70539b7a80e
diff --git a/ConfigurationReader/GeneratorOptions.php b/ConfigurationReader/GeneratorOptions.php index <HASH>..<HASH> 100755 --- a/ConfigurationReader/GeneratorOptions.php +++ b/ConfigurationReader/GeneratorOptions.php @@ -50,7 +50,12 @@ class GeneratorOptions extends AbstractYamlReader */ protected function parseOptions($filename) { - $this->options = $this->loadYaml($filename); + $options = $this->loadYaml($filename); + if (is_array($options)) { + $this->options = $options; + } else { + throw new \InvalidArgumentException(sprintf('Settings contained by "%s" are not valid as the settings are not contained by an array: "%s"', $filename, gettype($options))); + } } /** * Returns the option value
improve method with sanity check
WsdlToPhp_PackageGenerator
train
1e09e783f637b6c9057662d9ee2cc6d6f7aca011
diff --git a/phypno/ioeeg/blackrock.py b/phypno/ioeeg/blackrock.py index <HASH>..<HASH> 100644 --- a/phypno/ioeeg/blackrock.py +++ b/phypno/ioeeg/blackrock.py @@ -219,7 +219,7 @@ def _read_neuralsg(filename): hdr['SamplingFreq'] = int(hdr['TimeRes'] / unpack('<I', f.read(4))[0]) n_chan = unpack('<I', f.read(4))[0] hdr['ChannelCount'] = n_chan - hdr['ChannelID'] = unpack('<I' * n_chan, f.read(4 * n_chan)) + hdr['ChannelID'] = unpack('<' + 'I' * n_chan, f.read(4 * n_chan)) BOData = f.tell() f.seek(0, SEEK_END)
bugfix: unpack had the wrong format with multiple channels
wonambi-python_wonambi
train
2b2b1f89bb46243740d5b36a8bdecd5ba3633be7
diff --git a/openquake/hazardlib/geo/line.py b/openquake/hazardlib/geo/line.py index <HASH>..<HASH> 100644 --- a/openquake/hazardlib/geo/line.py +++ b/openquake/hazardlib/geo/line.py @@ -34,7 +34,7 @@ class Line(object): This class represents a geographical line, which is basically a sequence of geographical points. - A line is defined by at least one point. + A line is defined by at least two points. :param points: The sequence of points defining this line. @@ -43,11 +43,9 @@ class Line(object): """ def __init__(self, points): - self.points = utils.clean_points(points) - - if len(self.points) < 1: - raise ValueError("One point needed to create a line!") - + self.points = utils.clean_points(points) # can remove points! + if len(self.points) < 2: + raise ValueError("At least two points are needed for a line!") self.coo = np.array([[p.longitude, p.latitude] for p in self.points]) def __eq__(self, other):
A line must have at least 2 points
gem_oq-engine
train
7c1acf7d15eec01f75833f6415bc07d8245c2788
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -4,7 +4,7 @@ from setuptools import setup,find_packages setup( name='synapse', - version='0.0.16', # sync with synapse.version! + version='0.0.17', # sync with synapse.version! description='Synapse Distributed Key-Value Hypergraph Analysis Framework', author='Invisigoth Kenshoto', author_email='invisigoth.kenshoto@gmail.com', diff --git a/synapse/__init__.py b/synapse/__init__.py index <HASH>..<HASH> 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -14,7 +14,7 @@ if msgpack.version < (0,4,2): if tornado.version_info < (3,2,2): raise Exception('synapse requires tornado >= 3.2.2') -version = (0,0,16) +version = (0,0,17) verstring = '.'.join([ str(x) for x in version ]) import synapse.lib.modules as s_modules
update versions in prep for <I> tag
vertexproject_synapse
train
29ff72ad7dfa568882d5ccb09ad3a01c140c4b8e
diff --git a/ruby_event_store/lib/ruby_event_store/mappers/default.rb b/ruby_event_store/lib/ruby_event_store/mappers/default.rb index <HASH>..<HASH> 100644 --- a/ruby_event_store/lib/ruby_event_store/mappers/default.rb +++ b/ruby_event_store/lib/ruby_event_store/mappers/default.rb @@ -1,5 +1,4 @@ require 'yaml' -require 'active_support' module RubyEventStore module Mappers @@ -20,7 +19,7 @@ module RubyEventStore def serialized_record_to_event(record) event_type = @events_class_remapping.fetch(record.event_type) { record.event_type } - ActiveSupport::Inflector.constantize(event_type).new( + Object.const_get(event_type).new( event_id: record.event_id, metadata: @serializer.load(record.metadata), data: @serializer.load(record.data) diff --git a/ruby_event_store/lib/ruby_event_store/mappers/protobuf.rb b/ruby_event_store/lib/ruby_event_store/mappers/protobuf.rb index <HASH>..<HASH> 100644 --- a/ruby_event_store/lib/ruby_event_store/mappers/protobuf.rb +++ b/ruby_event_store/lib/ruby_event_store/mappers/protobuf.rb @@ -17,7 +17,7 @@ module RubyEventStore def serialized_record_to_event(record) event_type = events_class_remapping.fetch(record.event_type) { record.event_type } - ActiveSupport::Inflector.constantize(event_type).decode(record.data) + Object.const_get(event_type).decode(record.data) end def add_metadata(event, key, value) diff --git a/ruby_event_store/ruby_event_store.gemspec b/ruby_event_store/ruby_event_store.gemspec index <HASH>..<HASH> 100644 --- a/ruby_event_store/ruby_event_store.gemspec +++ b/ruby_event_store/ruby_event_store.gemspec @@ -20,7 +20,6 @@ Gem::Specification.new do |spec| spec.require_paths = ['lib'] - spec.add_dependency 'activesupport' spec.add_dependency 'concurrent-ruby', '~> 1.0' spec.add_development_dependency 'bundler', '~> 1.15' spec.add_development_dependency 'rake', '~> 10.0'
Remove ActiveSupport dependency from `ruby_event_store` gem
RailsEventStore_rails_event_store
train
16033e4924261d8c90b7dbc67b3d20767b16225c
diff --git a/__tests__/actions/__snapshots__/api.js.snap b/__tests__/actions/__snapshots__/api.js.snap index <HASH>..<HASH> 100644 --- a/__tests__/actions/__snapshots__/api.js.snap +++ b/__tests__/actions/__snapshots__/api.js.snap @@ -11,6 +11,15 @@ Array [ "type": "ROUTING_REQUEST", }, ], + Array [ + Object { + "payload": Object { + "error": [Error: Received error from server], + "searchId": 2, + }, + "type": "ROUTING_ERROR", + }, + ], ] `; @@ -25,6 +34,17 @@ Array [ "type": "ROUTING_REQUEST", }, ], + Array [ + Object { + "payload": Object { + "response": Object { + "fake": "response", + }, + "searchId": 1, + }, + "type": "ROUTING_RESPONSE", + }, + ], ] `; diff --git a/lib/components/narrative/printable/printable-itinerary.js b/lib/components/narrative/printable/printable-itinerary.js index <HASH>..<HASH> 100644 --- a/lib/components/narrative/printable/printable-itinerary.js +++ b/lib/components/narrative/printable/printable-itinerary.js @@ -12,7 +12,7 @@ export default class PrintableItinerary extends Component { } render () { - const { itinerary, companies, timeFormat } = this.props + const { itinerary, timeFormat } = this.props const timeOptions = { format: timeFormat, diff --git a/lib/util/itinerary.js b/lib/util/itinerary.js index <HASH>..<HASH> 100644 --- a/lib/util/itinerary.js +++ b/lib/util/itinerary.js @@ -197,9 +197,9 @@ export function getLegModeLabel (leg) { * the icon will be attempted to be used from that lookup of icons. Otherwise, * a ModeIcon element will be returned. * - * @param {String} iconId A string with the desired icon ID. This icon can + * @param {string} iconId A string with the desired icon ID. This icon can * include modes or companies or anything that is defined in the customIcons. - * @param {[Map<String, React.Element>]} customIcons A customized lookup of + * @param {[Map<string, React.Element>]} customIcons A customized lookup of * icons. These are defined as part of the implementing webapp. If this lookup * is not defined, then the ModeIcon class will be used instead. * @return {React.Element}
test: fix linting and testing errors
opentripplanner_otp-react-redux
train
c68957b14ffb17eeaae4f733847f93c845281e81
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -1,41 +1,6 @@ var _ = require('lodash'); /** - * Check that a compatible version of gulp is available in the project - */ - -function fatal() { - var msg = '\n\n'; - for (var i = 0; i < arguments.length; i++) { - msg += arguments[i] + '\n\n'; - } - console.log(msg); - process.exit(1); -} - -try { - var projectGulpVersion = require(module.parent.paths[0] + '/gulp/package.json').version; -} catch(e) { - // If we can't find gulp in the parent project, it's a fatal problem. - fatal( - 'You do not seem to have Gulp installed in your project.', - 'Please add gulp ^' + packageGulpVersion + ' to your package.json, npm install and try again.' - ); -} -try { - // Check to make sure the local gulp and the project gulp match. - var packageGulpVersion = require('./node_modules/gulp/package.json').version; - if (!semver.satisfies(projectGulpVersion, '^' + packageGulpVersion)) { - fatal( - 'You have an incompatible version of Gulp installed (' + projectGulpVersion + ').', - 'Please add gulp ^' + packageGulpVersion + ' to your package.json, npm install and try again.' - ); - } -} catch(e) { - // Assume gulp has been loaded from ../node_modules and it matches the requirements. -} - -/** * Helper method to extract metadata from package.json */ @@ -81,7 +46,7 @@ function initTasks(gulp, config) { if (!config.component.name) { config.component.name = _.capitalize(_.camelCase(config.component.pkgName)); } - + if (!config.aliasify) { config.aliasify = pkg.aliasify; }
remove global gulp support, gulp version check Again, this wasn't working anyway due to `semver` being undefined
JedWatson_react-component-gulp-tasks
train
e2a6ce1968236da4a642a6f17a1ff1deeeadf911
diff --git a/Resources/Private/JavaScript/Guest/Components/Editors/CKEditorAdaptor/CreateCKEditorInstance/index.js b/Resources/Private/JavaScript/Guest/Components/Editors/CKEditorAdaptor/CreateCKEditorInstance/index.js index <HASH>..<HASH> 100644 --- a/Resources/Private/JavaScript/Guest/Components/Editors/CKEditorAdaptor/CreateCKEditorInstance/index.js +++ b/Resources/Private/JavaScript/Guest/Components/Editors/CKEditorAdaptor/CreateCKEditorInstance/index.js @@ -15,6 +15,34 @@ const createButtonCreator = (ckApi, editor) => (icon, command) => ({ } }); +const createDropDown = (...items) => ({ + type: 'DropDown', + options: { + items + } +}); + +const createDropDownItemCreator = (ckApi, editor) => (icon, label, styleDefinition) => { + const Style = ckApi.style; + const style = new Style(styleDefinition); + const isActive = () => editor.elementPath() && style.checkActive(editor.elementPath(), editor); + + return { + icon, + label, + isActive, + isEnabled: () => true, + onSelect: createSignal( + () => { + const op = isActive(editor) ? 'removeStyle' : 'applyStyle'; + + editor[op](style); + editor.fire('change'); + } + ) + }; +}; + export default (ckApi, editorApi, dom, getSelectionData) => { const editor = ckApi.inline(dom, { removePlugins: 'toolbar', @@ -22,9 +50,20 @@ export default (ckApi, editorApi, dom, getSelectionData) => { }); const createButton = createButtonCreator(ckApi, editor); + const createDropDownItem = createDropDownItemCreator(ckApi, editor); const updateToolbarConfiguration = debounce( editorApi.registerToolbar({ components: [ + createDropDown( + createDropDownItem('paragraph', 'Paragraph', {element: 'p'}), + createDropDownItem('header', 'Headline 1', {element: 'h1'}), + createDropDownItem('header', 'Headline 2', {element: 'h2'}), + createDropDownItem('header', 'Headline 3', {element: 'h3'}), + createDropDownItem('header', 'Headline 4', {element: 'h4'}), + createDropDownItem('header', 'Headline 5', {element: 'h5'}), + createDropDownItem('header', 'Headline 6', {element: 'h6'}), + createDropDownItem('font', 'Preformatted Text', {element: 'pre'}) + ), createButton('bold', 'bold'), createButton('italic', 'italic'), createButton('underline', 'underline'), diff --git a/Resources/Private/JavaScript/Guest/Containers/EditorToolbar/index.js b/Resources/Private/JavaScript/Guest/Containers/EditorToolbar/index.js index <HASH>..<HASH> 100644 --- a/Resources/Private/JavaScript/Guest/Containers/EditorToolbar/index.js +++ b/Resources/Private/JavaScript/Guest/Containers/EditorToolbar/index.js @@ -3,7 +3,7 @@ import {connect} from 'react-redux'; import mergeClassNames from 'classnames'; import {$get} from 'plow-js'; -import {IconButton} from 'Components/index'; +import {IconButton, DropDown, Icon} from 'Components/index'; import {actions} from 'Guest/Redux/index'; import processConfiguration from './ProcessConfiguration/index'; @@ -46,6 +46,35 @@ export default class Toolbar extends Component { hoverStyle="brand" /> } + + if (component.type === 'DropDown') { + return <DropDown> + <DropDown.Header className={style.dropDown__btn}> + {component.options.items.filter( + item => item.isActive + ).map(item => + [ + <Icon icon={item.icon} />, + item.label + ] + )} + </DropDown.Header> + <DropDown.Contents> + {component.options.items.filter( + item => item.isEnabled + ).map( + item => ( + <li> + <button type="button" onClick={() => dispatchEditorSignal(item.onSelect)}> + <Icon icon={item.icon} /> + {item.label} + </button> + </li> + ) + )} + </DropDown.Contents> + </DropDown> + } } )} </div>
TASK: Re-create format drop down for new editor toolbar
neos_neos-ui
train
270050d6fc6ddc9d8bb338dee9daa30f4a2f4780
diff --git a/api/api_v1.py b/api/api_v1.py index <HASH>..<HASH> 100644 --- a/api/api_v1.py +++ b/api/api_v1.py @@ -491,8 +491,8 @@ def get_slack_users(): def upload_data(): data = json.loads(request.data) - file_url = s3_upload_file('blockstack', data['value'], data['key'], - public=True) + file_url = s3_upload_file( + 'blockstack', data['value'], 'staging/' + data['key'], public=True) if file_url is not None: resp = {
make sure all file uploads are placed in a staging folder
blockstack_blockstack-core
train
2bd40836abdcae4be3946ed4642e80954f4fe777
diff --git a/set/dict_test.go b/set/dict_test.go index <HASH>..<HASH> 100644 --- a/set/dict_test.go +++ b/set/dict_test.go @@ -118,6 +118,19 @@ func TestFlattenCaches(t *testing.T) { } } +func TestFlattenCaches_CacheReturn(t *testing.T) { + set := New() + item := `test` + set.Add(item) + + flatten1 := set.Flatten() + flatten2 := set.Flatten() + + if !reflect.DeepEqual(flatten1, flatten2) { + t.Errorf(`Flatten cache is not the same as original result. Got %+v, expected %+v`, 1, flatten2, flatten1) + } +} + func TestAddClearsCache(t *testing.T) { set := New() item := `test`
Set: Add unit test to confirm that Flatten returns the same cached result as before
Workiva_go-datastructures
train
2d3b63c3f5008741ad7aa8ae1c0bdd522c1887e9
diff --git a/src/Illuminate/Collections/Collection.php b/src/Illuminate/Collections/Collection.php index <HASH>..<HASH> 100644 --- a/src/Illuminate/Collections/Collection.php +++ b/src/Illuminate/Collections/Collection.php @@ -1056,25 +1056,31 @@ class Collection implements ArrayAccess, Enumerable * Get the first item in the collection, but only if exactly * item exists. Otherwise, throw an exception. * - * @param callable|null $callback + * @param mixed $key + * @param mixed $operator + * @param mixed $value * @return mixed * * @throws \Illuminate\Collections\ItemNotFoundException * @throws \Illuminate\Collections\MultipleItemsFoundException */ - public function sole(callable $callback = null) + public function sole($key = null, $operator = null, $value = null) { - $items = $this->when($callback)->filter($callback); + if (func_num_args() <= 1) { + $items = $this->when($key)->filter($key); - if ($items->isEmpty()) { - throw new ItemNotFoundException; - } + if ($items->isEmpty()) { + throw new ItemNotFoundException; + } + + if ($items->count() > 1) { + throw new MultipleItemsFoundException; + } - if ($items->count() > 1) { - throw new MultipleItemsFoundException; + return $items->first(); } - return $items->first(); + return $this->sole($this->operatorForWhere(...func_get_args())); } /** diff --git a/src/Illuminate/Collections/LazyCollection.php b/src/Illuminate/Collections/LazyCollection.php index <HASH>..<HASH> 100644 --- a/src/Illuminate/Collections/LazyCollection.php +++ b/src/Illuminate/Collections/LazyCollection.php @@ -1014,20 +1014,26 @@ class LazyCollection implements Enumerable * Get the first item in the collection, but only if exactly * item exists. Otherwise, throw an exception. * - * @param callable|null $callback + * @param mixed $key + * @param mixed $operator + * @param mixed $value * @return mixed * * @throws \Illuminate\Collections\ItemNotFoundException * @throws \Illuminate\Collections\MultipleItemsFoundException */ - public function sole(callable $callback = null) + public function sole($key = null, $operator = null, $value = null) { - return $this - ->when($callback) - ->filter($callback) - ->take(2) - ->collect() - ->sole(); + if (func_num_args() <= 1) { + return $this + ->when($key) + ->filter($key) + ->take(2) + ->collect() + ->sole(); + } + + return $this->sole($this->operatorForWhere(...func_get_args())); } /** diff --git a/tests/Support/SupportCollectionTest.php b/tests/Support/SupportCollectionTest.php index <HASH>..<HASH> 100755 --- a/tests/Support/SupportCollectionTest.php +++ b/tests/Support/SupportCollectionTest.php @@ -79,6 +79,8 @@ class SupportCollectionTest extends TestCase ]); $this->assertSame(['name' => 'foo'], $collection->where('name', 'foo')->sole()); + $this->assertSame(['name' => 'foo'], $collection->sole('name', '=', 'foo')); + $this->assertSame(['name' => 'foo'], $collection->sole('name', 'foo')); } /**
Added operator support for the sole() method.
laravel_framework
train
4beadef8b5a254a08e03ab80ae1f32a5c07d5278
diff --git a/neutronclient/neutron/v2_0/fw/firewallrule.py b/neutronclient/neutron/v2_0/fw/firewallrule.py index <HASH>..<HASH> 100644 --- a/neutronclient/neutron/v2_0/fw/firewallrule.py +++ b/neutronclient/neutron/v2_0/fw/firewallrule.py @@ -97,7 +97,8 @@ class CreateFirewallRule(neutronv20.CreateCommand): 'a:b).')) parser.add_argument( '--enabled', - dest='enabled', choices=['True', 'False'], + dest='enabled', metavar='{True,False}', + choices=['True', 'true', 'False', 'false'], help=_('Whether to enable or disable this rule.'), default=argparse.SUPPRESS) parser.add_argument( diff --git a/neutronclient/neutron/v2_0/nec/packetfilter.py b/neutronclient/neutron/v2_0/nec/packetfilter.py index <HASH>..<HASH> 100644 --- a/neutronclient/neutron/v2_0/nec/packetfilter.py +++ b/neutronclient/neutron/v2_0/nec/packetfilter.py @@ -71,7 +71,8 @@ class PacketFilterOptionMixin(object): help=_('Set Admin State Up to false')) else: parser.add_argument( - '--admin-state', choices=['True', 'False'], + '--admin-state', metavar='{True,False}', + choices=['True', 'true', 'False', 'false'], help=_('Set a value of Admin State Up')) parser.add_argument( diff --git a/neutronclient/neutron/v2_0/router.py b/neutronclient/neutron/v2_0/router.py index <HASH>..<HASH> 100644 --- a/neutronclient/neutron/v2_0/router.py +++ b/neutronclient/neutron/v2_0/router.py @@ -74,8 +74,8 @@ class CreateRouter(neutronV20.CreateCommand): help=_('Create a distributed router.')) parser.add_argument( '--ha', - dest='ha', - choices=['True', 'False'], + dest='ha', metavar='{True,False}', + choices=['True', 'true', 'false', 'False'], default=argparse.SUPPRESS, help=_('Create a highly available router.')) diff --git a/neutronclient/tests/unit/fw/test_cli20_firewallrule.py b/neutronclient/tests/unit/fw/test_cli20_firewallrule.py index <HASH>..<HASH> 100644 --- a/neutronclient/tests/unit/fw/test_cli20_firewallrule.py +++ b/neutronclient/tests/unit/fw/test_cli20_firewallrule.py @@ -46,6 +46,12 @@ class CLITestV20FirewallRuleJSON(test_cli20.CLITestV20Base): protocol=protocol, action=action, enabled=enabled, tenant_id=tenant_id) + def test_create_enabled_firewall_rule_with_mandatory_params_lcase(self): + self._test_create_firewall_rule_with_mandatory_params(enabled='true') + + def test_create_disabled_firewall_rule_with_mandatory_params_lcase(self): + self._test_create_firewall_rule_with_mandatory_params(enabled='false') + def test_create_enabled_firewall_rule_with_mandatory_params(self): self._test_create_firewall_rule_with_mandatory_params(enabled='True') diff --git a/neutronclient/tests/unit/test_cli20_router.py b/neutronclient/tests/unit/test_cli20_router.py index <HASH>..<HASH> 100644 --- a/neutronclient/tests/unit/test_cli20_router.py +++ b/neutronclient/tests/unit/test_cli20_router.py @@ -87,8 +87,17 @@ class CLITestV20RouterJSON(test_cli20.CLITestV20Base): """Create router: --distributed=True.""" self._create_router_distributed_or_ha(distributed='True') - def test_create_router_ha(self): - self._create_router_distributed_or_ha(ha=True) + def test_create_router_ha_with_True(self): + self._create_router_distributed_or_ha(ha='True') + + def test_create_router_ha_with_true(self): + self._create_router_distributed_or_ha(ha='true') + + def test_create_router_ha_with_False(self): + self._create_router_distributed_or_ha(ha='False') + + def test_create_router_ha_with_false(self): + self._create_router_distributed_or_ha(ha='false') def test_create_router_distributed_False(self): """Create router: --distributed=False."""
Fix True/False to accept Camel and Lower case. There are a couple of inconsistencies in using camel case and lower case for 'True/False' options in python-neutronclient. With this fix it will be consistent across all CLI commands. Change-Id: Ifc<I>ac<I>e<I>f<I>f<I>b<I>a<I>fa0
rackerlabs_rackspace-python-neutronclient
train
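A minimal, standalone sketch of the argparse pattern used in the rackspace-python-neutronclient change above — widening `choices` to accept both capitalizations while `metavar` keeps the help text short. The option name and parser below are illustrative only, not part of the original CLI:

import argparse

# Accept 'True', 'true', 'False' and 'false' on the command line, but show
# only {True,False} in --help output via metavar.
parser = argparse.ArgumentParser(description='illustrative example only')
parser.add_argument(
    '--enabled',
    metavar='{True,False}',
    choices=['True', 'true', 'False', 'false'],
    help='Whether to enable or disable this rule.')

args = parser.parse_args(['--enabled', 'true'])
print(args.enabled)  # 'true'; callers can normalize with args.enabled.lower() == 'true'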
7f696c0b50ff18faa948ac0c33064bd1e4c17292
diff --git a/core/server/lib/security/password.js b/core/server/lib/security/password.js index <HASH>..<HASH> 100644 --- a/core/server/lib/security/password.js +++ b/core/server/lib/security/password.js @@ -1,3 +1,5 @@ +const Promise = require('bluebird'); + module.exports.hash = function hash(plainPassword) { const bcrypt = require('bcryptjs'), bcryptGenSalt = Promise.promisify(bcrypt.genSalt),
Fixed missing Bluebird require in `security/password.js` (#<I>) no issue
TryGhost_Ghost
train
4a6b8ba7ced6bb841000a59bdef7f9879fb6578d
diff --git a/setuptools/tests/test_dist.py b/setuptools/tests/test_dist.py index <HASH>..<HASH> 100644 --- a/setuptools/tests/test_dist.py +++ b/setuptools/tests/test_dist.py @@ -263,3 +263,16 @@ def test_maintainer_author(name, attrs, tmpdir): else: line = '%s: %s' % (fkey, val) assert line in pkg_lines_set + + +def test_provides_extras_deterministic_order(): + attrs = dict(extras_require=dict( + a=['foo'], + b=['bar'], + )) + dist = Distribution(attrs) + assert dist.metadata.provides_extras == ['a', 'b'] + attrs['extras_require'] = dict( + reversed(list(attrs['extras_require'].items()))) + dist = Distribution(attrs) + assert dist.metadata.provides_extras == ['b', 'a']
Add test capturing expectation that provides_extras are ordered.
pypa_setuptools
train
d4ec3b5dfc7fded539b22b431f380df87491caa7
diff --git a/plugins/providers/hyperv/action/read_state.rb b/plugins/providers/hyperv/action/read_state.rb index <HASH>..<HASH> 100644 --- a/plugins/providers/hyperv/action/read_state.rb +++ b/plugins/providers/hyperv/action/read_state.rb @@ -12,8 +12,15 @@ module VagrantPlugins def call(env) if env[:machine].id options = { VmId: env[:machine].id } - response = env[:machine].provider.driver.execute('get_vm_status.ps1', options) + response = env[:machine].provider.driver.execute( + "get_vm_status.ps1", options) env[:machine_state_id] = response["state"].downcase.to_sym + + # If the machine isn't created, then our ID is stale, so just + # mark it as not created. + if env[:machine_state_id] == :not_created + env[:machine].id = nil + end else env[:machine_state_id] = :not_created end
providers/hyperv: reset machine ID if machine deleted outside
hashicorp_vagrant
train
6a9d3f4eaa1ea67f0058c73abc8af81293b4a377
diff --git a/src/Fixer/PhpUnit/PhpUnitNoExpectationAnnotationFixer.php b/src/Fixer/PhpUnit/PhpUnitNoExpectationAnnotationFixer.php index <HASH>..<HASH> 100644 --- a/src/Fixer/PhpUnit/PhpUnitNoExpectationAnnotationFixer.php +++ b/src/Fixer/PhpUnit/PhpUnitNoExpectationAnnotationFixer.php @@ -264,8 +264,12 @@ final class MyTest extends \PHPUnit_Framework_TestCase { $tag = $annotation->getTag()->getName(); - Preg::match('/@'.$tag.'\s+(.+)$/s', $annotation->getContent(), $matches); + if (1 !== Preg::match('/@'.$tag.'\s+(.+)$/s', $annotation->getContent(), $matches)) { + return ''; + } + $content = $matches[1]; + if (Preg::match('/\R/u', $content)) { $content = Preg::replace('/\s*\R+\s*\*\s*/u', ' ', $content); } diff --git a/tests/Fixer/PhpUnit/PhpUnitNoExpectationAnnotationFixerTest.php b/tests/Fixer/PhpUnit/PhpUnitNoExpectationAnnotationFixerTest.php index <HASH>..<HASH> 100644 --- a/tests/Fixer/PhpUnit/PhpUnitNoExpectationAnnotationFixerTest.php +++ b/tests/Fixer/PhpUnit/PhpUnitNoExpectationAnnotationFixerTest.php @@ -41,6 +41,32 @@ final class PhpUnitNoExpectationAnnotationFixerTest extends AbstractFixerTestCas public function provideTestFixCases() { return [ + 'empty exception message' => [ + '<?php + final class MyTest extends \PHPUnit_Framework_TestCase + { + /** + */ + public function testFnc() + { + $this->setExpectedException(\FooException::class, \'\'); + + aaa(); + } + }', + '<?php + final class MyTest extends \PHPUnit_Framework_TestCase + { + /** + * @expectedException FooException + * @expectedExceptionMessage + */ + public function testFnc() + { + aaa(); + } + }', + ], 'expecting exception' => [ '<?php final class MyTest extends \PHPUnit_Framework_TestCase
Undefined offset: 1 error when exception message is empty. An edge case I had with an empty @expectedExceptionMessage annotation uncovered an unchecked return resulting in the undefined offset. The fix is to fall back to an empty string, which also solves the fixer for me.
FriendsOfPHP_PHP-CS-Fixer
train
a148b79017dd1d930214786b670b8a80cedf5d9c
diff --git a/lib/nicorepo.rb b/lib/nicorepo.rb index <HASH>..<HASH> 100644 --- a/lib/nicorepo.rb +++ b/lib/nicorepo.rb @@ -13,6 +13,7 @@ class Nicorepo @agent = Mechanize.new @agent.ssl_version = 'SSLv3' @agent.request_headers = { 'accept-language' => 'ja-JP', 'content-language' => 'ja-JP' } + @parser = Parser.new(@agent) end def login(mail, pass) @@ -22,15 +23,15 @@ class Nicorepo def all(req_num = PER_PAGE) page_nest_max = req_num / PER_PAGE + 1 - Reports.new(@agent).fetch(req_num, page_nest_max) + Reports.new(@parser).fetch(req_num, page_nest_max) end def videos(req_num = 3, page_nest_max = 5) - VideoReports.new(@agent).fetch(req_num, page_nest_max) + VideoReports.new(@parser).fetch(req_num, page_nest_max) end def lives(req_num = 3, page_nest_max = 5) - LiveReports.new(@agent).fetch(req_num, page_nest_max) + LiveReports.new(@parser).fetch(req_num, page_nest_max) end end diff --git a/lib/nicorepo/reports.rb b/lib/nicorepo/reports.rb index <HASH>..<HASH> 100644 --- a/lib/nicorepo/reports.rb +++ b/lib/nicorepo/reports.rb @@ -12,11 +12,8 @@ class Nicorepo attr_reader :reports def_delegators :@reports, :size - def initialize(agent) - # TODO: agentの代わりにparserを渡すようにする - # agentやcssの情報に左右されずに取得できるようにしたい - @agent = agent - @parser = Parser.new(@agent, TOP_URL) + def initialize(parser) + @parser = parser @reports = [] end
Remove mechanize agent from Reports
upinetree_nicorepo
train
83d07d11da281b7c14a53450000cc4e6524fb4e7
diff --git a/wmi.go b/wmi.go index <HASH>..<HASH> 100644 --- a/wmi.go +++ b/wmi.go @@ -209,14 +209,22 @@ func loadEntity(dst interface{}, src *ole.IDispatch) (errFieldMismatch error) { } defer prop.Clear() - switch val := prop.Value(); reflect.ValueOf(val).Kind() { - case reflect.Int64: - iv := val.(int64) + switch val := prop.Value().(type) { + case int, int64: + var v int64 + switch val := val.(type) { + case int: + v = int64(val) + case int64: + v = val + default: + panic("unexpected type") + } switch f.Kind() { case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - f.SetInt(iv) + f.SetInt(v) case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: - f.SetUint(uint64(iv)) + f.SetUint(uint64(v)) default: return &ErrFieldMismatch{ StructType: f.Type(), @@ -224,12 +232,11 @@ func loadEntity(dst interface{}, src *ole.IDispatch) (errFieldMismatch error) { Reason: "not an integer class", } } - case reflect.String: - sv := val.(string) - iv, err := strconv.ParseInt(sv, 10, 64) + case string: + iv, err := strconv.ParseInt(val, 10, 64) switch f.Kind() { case reflect.String: - f.SetString(sv) + f.SetString(val) case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: if err != nil { return err @@ -243,21 +250,20 @@ func loadEntity(dst interface{}, src *ole.IDispatch) (errFieldMismatch error) { case reflect.Struct: switch f.Type() { case timeType: - if len(sv) == 25 { - sv = sv[:22] + "0" + sv[22:] + if len(val) == 25 { + val = val[:22] + "0" + val[22:] } - t, err := time.Parse("20060102150405.000000-0700", sv) + t, err := time.Parse("20060102150405.000000-0700", val) if err != nil { return err } f.Set(reflect.ValueOf(t)) } } - case reflect.Bool: - bv := val.(bool) + case bool: switch f.Kind() { case reflect.Bool: - f.SetBool(bv) + f.SetBool(val) default: return &ErrFieldMismatch{ StructType: f.Type(),
Refactor type assertions to switch on type
StackExchange_wmi
train
2fa6465d19840cb78371d481f59f4e782e8f95f0
diff --git a/cutil/__init__.py b/cutil/__init__.py index <HASH>..<HASH> 100644 --- a/cutil/__init__.py +++ b/cutil/__init__.py @@ -336,7 +336,11 @@ def parse_price(price): except IndexError: # Price is 99 or less with no cents pass - found_price[key] = float(new_value) + + if new_value != '': + found_price[key] = float(new_value) + else: + found_price[key] = None return found_price diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -4,7 +4,7 @@ from distutils.core import setup setup( name='cutil', packages=['cutil'], - version='2.6.4', + version='2.6.5', description='A collection of useful functions', author='Eddy Hintze', author_email="eddy.hintze@gmail.com",
Updated parse_price to ignore empty strings as high value
xtream1101_cutil
train
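A small sketch of the guard introduced by the cutil change above: an empty numeric field is stored as None instead of being passed to float(), which would raise. The function name is illustrative, not cutil's API:

def parse_number(raw):
    # float('') raises ValueError, so treat an empty string as "no value"
    return float(raw) if raw != '' else None

print(parse_number('19.99'))  # 19.99
print(parse_number(''))       # None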
e31100fc3afeb1c53349b6017fa97461ca410d5a
diff --git a/src/main/java/org/codehaus/groovy/runtime/DefaultGroovyStaticMethods.java b/src/main/java/org/codehaus/groovy/runtime/DefaultGroovyStaticMethods.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/codehaus/groovy/runtime/DefaultGroovyStaticMethods.java +++ b/src/main/java/org/codehaus/groovy/runtime/DefaultGroovyStaticMethods.java @@ -180,23 +180,6 @@ public class DefaultGroovyStaticMethods { sleepImpl(milliseconds, onInterrupt); } - @Deprecated - public static Date parse(Date self, String format, String input) throws ParseException { - return new SimpleDateFormat(format).parse(input); - } - - @Deprecated - public static Date parse(Date self, String format, String input, TimeZone zone) throws ParseException { - SimpleDateFormat sdf = new SimpleDateFormat(format); - sdf.setTimeZone(zone); - return sdf.parse(input); - } - - @Deprecated - public static Date parseToStringDate(Date self, String dateToString) throws ParseException { - return new SimpleDateFormat("EEE MMM dd HH:mm:ss zzz yyyy", Locale.US).parse(dateToString); - } - /** * Works exactly like ResourceBundle.getBundle(String). This is needed * because the java method depends on a particular stack configuration that @@ -248,18 +231,18 @@ public class DefaultGroovyStaticMethods { } catch (IOException ioe) { if (ioe.getMessage().startsWith("Access is denied")) { accessDeniedCounter++; - try { Thread.sleep(100); } catch (InterruptedException e) {} + try { + Thread.sleep(100); + } catch (InterruptedException ignore) { + } } - if (i==MAXTRIES-1) { - if (accessDeniedCounter==MAXTRIES) { - String msg = - "Access is denied.\nWe tried " + - + accessDeniedCounter+ - " times to create a temporary directory"+ - " and failed each time. If you are on Windows"+ - " you are possibly victim to"+ - " http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6325169. "+ - " this is no bug in Groovy."; + if (i == MAXTRIES - 1) { + if (accessDeniedCounter == MAXTRIES) { + String msg = "Access is denied.\nWe tried " + accessDeniedCounter + + " times to create a temporary directory and failed each time." + + " If you are on Windows, you are possibly victim to" + + " http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6325169." + + " This is not a bug in Groovy."; throw new IOException(msg); } else { throw ioe;
remove some long-deprecated methods
apache_groovy
train
5b3a3fb9e4ccf4003f69272c1d935317578362fc
diff --git a/tests/system/Database/Builder/WhereTest.php b/tests/system/Database/Builder/WhereTest.php index <HASH>..<HASH> 100644 --- a/tests/system/Database/Builder/WhereTest.php +++ b/tests/system/Database/Builder/WhereTest.php @@ -148,9 +148,7 @@ final class WhereTest extends CIUnitTestCase // Closure $builder = $this->db->table('neworder'); - $builder->where('advance_amount <', static function (BaseBuilder $builder) { - return $builder->select('MAX(advance_amount)', false)->from('orders')->where('id >', 2); - }); + $builder->where('advance_amount <', static fn (BaseBuilder $builder) => $builder->select('MAX(advance_amount)', false)->from('orders')->where('id >', 2)); $this->assertSame($expectedSQL, str_replace("\n", ' ', $builder->getCompiledSelect())); diff --git a/tests/system/Helpers/HTMLHelperTest.php b/tests/system/Helpers/HTMLHelperTest.php index <HASH>..<HASH> 100755 --- a/tests/system/Helpers/HTMLHelperTest.php +++ b/tests/system/Helpers/HTMLHelperTest.php @@ -24,7 +24,7 @@ final class HTMLHelperTest extends CIUnitTestCase /** * @var string Path to the test file for img_data */ - private $imgPath = SUPPORTPATH . 'Images' . DIRECTORY_SEPARATOR . 'ci-logo.gif'; + private string $imgPath = SUPPORTPATH . 'Images' . DIRECTORY_SEPARATOR . 'ci-logo.gif'; /** * @var string Expected base64 encoding of img path
refactor: fix rector errors
codeigniter4_CodeIgniter4
train
90f5b5d13a4f1afd54460676929d2694c7039d85
diff --git a/amazon/ion/core.py b/amazon/ion/core.py index <HASH>..<HASH> 100644 --- a/amazon/ion/core.py +++ b/amazon/ion/core.py @@ -19,7 +19,15 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function -from collections import MutableMapping, MutableSequence, OrderedDict +# in Python 3.10, abstract collections have moved into their own module +# for compatibility with 3.10+, first try imports from the new location +# if that fails, try from the pre-3.10 location +try: + from collections.abc import MutableMapping, MutableSequence + from collections import OrderedDict +except: + from collections import MutableMapping, MutableSequence, OrderedDict + from datetime import datetime, timedelta, tzinfo from decimal import Decimal, ROUND_FLOOR, Context, Inexact from math import isnan diff --git a/amazon/ion/simple_types.py b/amazon/ion/simple_types.py index <HASH>..<HASH> 100644 --- a/amazon/ion/simple_types.py +++ b/amazon/ion/simple_types.py @@ -24,7 +24,14 @@ from __future__ import division from __future__ import print_function from decimal import Decimal -from collections import MutableMapping + +# in Python 3.10, abstract collections have moved into their own module +# for compatibility with 3.10+, first try imports from the new location +# if that fails, try from the pre-3.10 location +try: + from collections.abc import MutableMapping +except: + from collections import MutableMapping import six
for <I> compatibility, attempt import of abstract collections from collections.abc and then from collections (for pre <I>)
amzn_ion-python
train
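A minimal sketch of the compatibility import used in the ion-python change above, which tries collections.abc first (the only valid location from Python 3.10 onward) and falls back to collections for older interpreters. This sketch catches ImportError rather than the bare except used in the patch:

try:
    # Python 3.3+ location; required on Python 3.10 and later
    from collections.abc import MutableMapping, MutableSequence
except ImportError:
    # fallback for old interpreters, where the ABCs lived directly in collections
    from collections import MutableMapping, MutableSequence

print(issubclass(dict, MutableMapping))   # True
print(issubclass(list, MutableSequence))  # True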
c4621c00a261a58e191ebefe367288ce3edd838f
diff --git a/tests/unit/Palladium/Mapper/IdentityTest.php b/tests/unit/Palladium/Mapper/IdentityTest.php index <HASH>..<HASH> 100644 --- a/tests/unit/Palladium/Mapper/IdentityTest.php +++ b/tests/unit/Palladium/Mapper/IdentityTest.php @@ -108,4 +108,38 @@ final class IdentityTest extends TestCase $instance = new Identity($pdo, 'table'); $instance->fetch($identity); } + + + public function test_Retrieving_Identity_by_Token_withg_Payload() + { + $statement = $this + ->getMockBuilder(PDOStatement::class) + ->disableOriginalConstructor() + ->getMock(); + $statement + ->method('bindValue') + ->withConsecutive( + [$this->equalTo(':token'), $this->equalTo('12345678901234567890123456789012'), null], + [$this->equalTo(':action'), $this->equalTo(Entity\Identity::ACTION_VERIFY), null], + [$this->equalTo(':expires'), $this->equalTo(1493377286), null] + ); + $statement + ->expects($this->once())->method('fetch') + ->will($this->returnValue(['id' => '8', 'tokenPayload' => '[]'])); + + $pdo = $this + ->getMockBuilder(PDO::class) + ->disableOriginalConstructor() + ->getMock(); + $pdo->expects($this->once())->method('prepare')->will($this->returnValue($statement)); + + + $identity = new Entity\Identity; + $identity->setToken('12345678901234567890123456789012'); + $identity->setTokenAction(Entity\Identity::ACTION_VERIFY); + $identity->setTokenEndOfLife(1493377286); + + $instance = new Identity($pdo, 'table'); + $instance->fetch($identity); + } }
Test covering the case where the payload actually contains JSON
teresko_palladium
train
3d19862a7cae65457c97c8a1dfa00c82ecf5af6d
diff --git a/telemetry/telemetry/core/tab.py b/telemetry/telemetry/core/tab.py index <HASH>..<HASH> 100644 --- a/telemetry/telemetry/core/tab.py +++ b/telemetry/telemetry/core/tab.py @@ -25,7 +25,7 @@ class Tab(web_contents.WebContents): """ def __init__(self, inspector_backend): super(Tab, self).__init__(inspector_backend) - self._previous_tab_contents_bounding_box = None + self._tab_contents_bounding_box = None @property def browser(self): @@ -138,7 +138,7 @@ class Tab(web_contents.WebContents): self.browser.platform.StartVideoCapture(min_bitrate_mbps) self.ClearHighlight(bitmap.WEB_PAGE_TEST_ORANGE) - def _FindHighlightBoundingBox(self, bmp, color, bounds_tolerance=4, + def _FindHighlightBoundingBox(self, bmp, color, bounds_tolerance=8, color_tolerance=8): """Returns the bounding box of the content highlight of the given color. @@ -162,27 +162,26 @@ class Tab(web_contents.WebContents): raise BoundingBoxNotFoundException( 'Low count of pixels in tab contents matching expected color.') - # Since Telemetry doesn't know how to resize the window, we assume - # that we should always get the same content box for a tab. If this - # fails, it means either that assumption has changed or something is - # awry with our bounding box calculation. If this assumption changes, - # this can be removed. + # Since we allow some fuzziness in bounding box finding, we want to make + # sure that the bounds are always stable across a run. So we cache the + # first box, whatever it may be. # - # TODO(tonyg): This assert doesn't seem to work. - if (self._previous_tab_contents_bounding_box and - self._previous_tab_contents_bounding_box != content_box): - # Check if there's just a minor variation on the bounding box. If it's - # just a few pixels, we can assume it's probably due to - # compression artifacts. - for i in xrange(len(content_box)): - bounds_difference = abs(content_box[i] - - self._previous_tab_contents_bounding_box[i]) - if bounds_difference > bounds_tolerance: - raise BoundingBoxNotFoundException( - 'Unexpected change in tab contents box.') - self._previous_tab_contents_bounding_box = content_box - - return content_box + # This relies on the assumption that since Telemetry doesn't know how to + # resize the window, we should always get the same content box for a tab. + # If this assumption changes, this caching needs to be reworked. + if not self._tab_contents_bounding_box: + self._tab_contents_bounding_box = content_box + + # Verify that there is only minor variation in the bounding box. If it's + # just a few pixels, we can assume it's due to compression artifacts. + for x, y in zip(self._tab_contents_bounding_box, content_box): + if abs(x - y) > bounds_tolerance: + # If this fails, it means either that either the above assumption has + # changed or something is awry with our bounding box calculation. + raise BoundingBoxNotFoundException( + 'Unexpected change in tab contents box.') + + return self._tab_contents_bounding_box def StopVideoCapture(self): """Stops recording video of the tab's contents.
[Telemetry] Only calculate tab contents bounding box once. Cache it and use the first calculated box. This helps Speed Index be more stable and should avoid some asserts we were seeing. BUG= Review URL: <URL>
catapult-project_catapult
train
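A rough, standalone illustration of the caching-with-tolerance idea in the catapult change above: remember the first bounding box that is computed and only allow a few pixels of drift afterwards. The class and names below are illustrative, not Telemetry's actual API:

class BoxCache(object):
    def __init__(self, tolerance=8):
        self._box = None
        self._tolerance = tolerance

    def check(self, box):
        # Cache the first box seen; later boxes may only drift by a few
        # pixels (e.g. compression artifacts), otherwise something is wrong.
        if self._box is None:
            self._box = box
        for cached, new in zip(self._box, box):
            if abs(cached - new) > self._tolerance:
                raise ValueError('Unexpected change in contents box.')
        return self._box

cache = BoxCache()
print(cache.check((10, 20, 300, 200)))  # caches and returns the first box
print(cache.check((12, 21, 301, 199)))  # small drift -> still the cached box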
a81380d6ffee7a6c7a278b2be1741a250dd9d3c9
diff --git a/examples/specification-importer/utils.js b/examples/specification-importer/utils.js index <HASH>..<HASH> 100644 --- a/examples/specification-importer/utils.js +++ b/examples/specification-importer/utils.js @@ -9,7 +9,7 @@ var _ = require('lodash'); var unitCodesByUnitFamily = { 'Energy': [ 'WattHours', 'KilowattHours', 'MegawattHours', 'Btus', 'KiloBtus', 'MegaBtus', 'GramsCO2Gas', 'KilogramsCO2Gas', 'TonsCO2Gas', 'GramsCO2Oil', 'KilogramsCO2Oil', 'TonsCO2Oil' ], - 'Pressure': [ 'Bars' ], + 'Pressure': [ 'Bars', 'PoundsForcePerSquareInch' ], 'Temperature': [ 'DegreesCelsius', 'DegreesFahrenheit' ], 'Time': [ 'Seconds', 'Minutes', 'Hours', 'Days' ], 'Volume': [ 'Liters', 'CubicMeters', 'Gallons' ], @@ -56,6 +56,7 @@ var unitsByUnitCode = { 'None': [ '', ' ', null ], 'Ohms': [ ' Ω', ' Ohm' ], 'Percent': [ '%', ' %' ], + 'PoundsForcePerSquareInch': [ ' psi' ], 'Seconds': [ ' s', ' sec' ], 'SquareMeters': [ ' m²' ], 'TonsCO2Gas': [ ' t CO₂ (Gas)' ],
Add pressure unit `psi` to VBus specification importer.
danielwippermann_resol-vbus
train
cdd244e68017736f8240e1e18631a1820ea1f1b5
diff --git a/lib/bigbio/db/emitters/orf_emitter.rb b/lib/bigbio/db/emitters/orf_emitter.rb index <HASH>..<HASH> 100644 --- a/lib/bigbio/db/emitters/orf_emitter.rb +++ b/lib/bigbio/db/emitters/orf_emitter.rb @@ -59,12 +59,17 @@ module Bio end def stopstop? + found?({ | codon | STOP_CODONS.include?(codon) }, + { | codon | STOP_CODONS.include?(codon) }) + end + + def found? &func1, &func2 codons = added_codons codon1 = 0 if @start == nil # look for first STOP codon codons.each_with_index { | codon, idx | - if STOP_CODONS.include? codon + if func1.call(codon) codon1 = idx @start = idx * 3 + @c_pos break @@ -74,7 +79,7 @@ module Bio if @start != nil and @stop == nil # look for 2nd STOP codon codons[codon1+1..-1].each_with_index { | codon, idx | - if STOP_CODONS.include? codon + if func2.call(codon) # p [idx,codon] @stop = (codon1 + 1 + idx)*3 + @c_pos break
Find multiple ORFs in partial sequence
pjotrp_bigbio
train
f520a015a91eb1bf2666892d3ef362607a74396f
diff --git a/src/Message/AbstractRequest.php b/src/Message/AbstractRequest.php index <HASH>..<HASH> 100644 --- a/src/Message/AbstractRequest.php +++ b/src/Message/AbstractRequest.php @@ -314,7 +314,9 @@ abstract class AbstractRequest extends \Omnipay\Common\Message\AbstractRequest public function sendData($data) { $url = $this->getEndpoint().'?'.http_build_query($data, '', '&'); - $httpResponse = $this->httpClient->get($url)->send(); + $httpRequest = $this->httpClient->get($url); + $httpRequest->getCurlOptions()->set(CURLOPT_SSLVERSION, 6); // CURL_SSLVERSION_TLSv1_2 + $httpResponse = $httpRequest->send(); return $this->createResponse($httpResponse->getBody()); }
Add support for TLS <I> for operating systems without TLS <I> bundled by default (CentOS <I>, for example)
thephpleague_omnipay-paypal
train
36627aabdfebcd7f38698cb0990ff24acc6769fc
diff --git a/src/create/aws/create-table-index/_get-global-secondary-indexes.js b/src/create/aws/create-table-index/_get-global-secondary-indexes.js index <HASH>..<HASH> 100644 --- a/src/create/aws/create-table-index/_get-global-secondary-indexes.js +++ b/src/create/aws/create-table-index/_get-global-secondary-indexes.js @@ -9,8 +9,8 @@ module.exports = function _getGsi(name, attr) { ProjectionType: 'ALL' }, ProvisionedThroughput: { - ReadCapacityUnits: 5, - WriteCapacityUnits: 5 + ReadCapacityUnits: 1, + WriteCapacityUnits: 1 } } }] diff --git a/src/create/aws/create-tables/_create-table.js b/src/create/aws/create-tables/_create-table.js index <HASH>..<HASH> 100644 --- a/src/create/aws/create-tables/_create-table.js +++ b/src/create/aws/create-tables/_create-table.js @@ -35,8 +35,8 @@ module.exports = function _createTable(name, attr, callback) { AttributeDefinitions: getAttributeDefinitions(clean(attr)), KeySchema: getKeySchema(attr, keys), ProvisionedThroughput: { - ReadCapacityUnits: 5, - WriteCapacityUnits: 5 + ReadCapacityUnits: 1, + WriteCapacityUnits: 1 } }, callback) },
fix arc-repos/arc-workflows#<I>
architect_architect
train
e61b21f4c87d4df6c3b7c60e738092ef8f5f80bb
diff --git a/lib/test/ServiceTest.js b/lib/test/ServiceTest.js index <HASH>..<HASH> 100644 --- a/lib/test/ServiceTest.js +++ b/lib/test/ServiceTest.js @@ -1,5 +1,11 @@ var oo = require('@carbon-io/carbon-core').atom.oo(module) var tt = require('@carbon-io/carbon-core').testtube + +function sleep(ms, cb) { + setTimeout(function() { + cb() + }, ms) +} /****************************************************************************** * @class ServiceTest @@ -30,35 +36,11 @@ module.exports = oo({ * setup */ setup: function() { - function sleep(ms, cb) { - setTimeout(function() { - cb() - }, ms) - } - if (this.suppressServiceLogging) { this.service.verbosity = 'fatal' } - for (var i=0; i<3; i++) { - try { - this.service.start() - break - } catch (e) { - if (i === 2) { - throw new Error( - 'Failed to start server. Port ' + - this.service.port + - ' already bound.') - } - if (e.message.includes('EADDRINUSE')) { - console.warn('caught EADDRINUSE, will try again in 1 second...') - sleep.sync(1000) - } else { - throw e - } - } - } + this.service.start() // XXX: Service.sslOptions is not initialized until start is called var sslOptions = this.service.sslOptions @@ -72,6 +54,7 @@ module.exports = oo({ teardown: function() { try { this.service.stop() + sleep.sync(1000) } catch (e) { console.error('Error encountered stopping test service: ' + e.toString()) }
testing if sleeping after shutdown fixes travis
carbon-io_carbond
train
b4b430776dee5cf09518b8403de1d5f6011092e0
diff --git a/pyqode/cobol/widgets/code_edit.py b/pyqode/cobol/widgets/code_edit.py index <HASH>..<HASH> 100644 --- a/pyqode/cobol/widgets/code_edit.py +++ b/pyqode/cobol/widgets/code_edit.py @@ -128,6 +128,9 @@ class CobolCodeEdit(api.CodeEdit): self.backspace_mode = self.modes.append( modes.SmartBackSpaceMode() ) + self.extended_selection_mode = self.modes.append( + modes.ExtendedSelectionMode() + ) def _setup_panels(self): self.folding_panel = self.panels.append(
Add extended mode selection. Particularly useful for cobol since this mode allow to select entire word, even if there is a dash in the middle ( FOO-BAR -> FOO, - and BAR will be selected by a double click).
pyQode_pyqode.cobol
train
6145b896e2d57652804cb25400484043582976a1
diff --git a/src/services/popup.js b/src/services/popup.js index <HASH>..<HASH> 100644 --- a/src/services/popup.js +++ b/src/services/popup.js @@ -80,7 +80,9 @@ ngeo.Popup.prototype.setTitle = function(title) { /** * Set the popup's content. - * @param {string} content The content. + * Note: the type of the `content` param is `*` instead of `string`, this + * is because the content may be trusted using `$sce.trustAsHtml`. + * @param {*} content The content. */ ngeo.Popup.prototype.setContent = function(content) { this.scope_['content'] = content;
Change param type for ngeo.Popup#setContent
camptocamp_ngeo
train
ecbbf7a2eae12d6a1fbd78e7f77aa72dddebbf3a
diff --git a/validator/sawtooth_validator/consensus/proxy.py b/validator/sawtooth_validator/consensus/proxy.py index <HASH>..<HASH> 100644 --- a/validator/sawtooth_validator/consensus/proxy.py +++ b/validator/sawtooth_validator/consensus/proxy.py @@ -137,12 +137,12 @@ class ConsensusProxy: result = [] for setting in settings: try: - value = settings_view.get_setting(key) + value = settings_view.get_setting(setting) except KeyError: # if the key is missing, leave it out of the response continue - result.append((key, value)) + result.append((setting, value)) return result
Fix consensus proxy for settings_get unused variable
hyperledger_sawtooth-core
train
98ff98e2b3d535f2f9fe70f8778a092a19463442
diff --git a/src/Controller/Controller.php b/src/Controller/Controller.php index <HASH>..<HASH> 100644 --- a/src/Controller/Controller.php +++ b/src/Controller/Controller.php @@ -173,6 +173,24 @@ class Controller implements EventListener { public $viewClass = 'Cake\View\View'; /** + * The path to this controllers view templates. + * Example `Articles` + * + * Set automatically using conventions in Controller::__construct(). + * + * @var string + */ + public $viewPath; + +/** + * The name of the view file to render. The name specified + * is the filename in /app/Template/<SubFolder> without the .ctp extension. + * + * @var string + */ + public $view = null; + +/** * Instance of the View created during rendering. Won't be set until after * Controller::render() is called. * @@ -214,16 +232,6 @@ class Controller implements EventListener { public $methods = array(); /** - * The path to this controllers view templates. - * Example `Articles` - * - * Set automatically using conventions in Controller::__construct(). - * - * @var string - */ - public $viewPath; - -/** * Constructor. * * Sets a number of properties based on conventions if they are empty. To override the
Add missing property to Controller. This property was lost in the shuffle but should exist on Controller, as it is modified by methods like setRequest().
cakephp_cakephp
train
0fe3b345153d3e614f33ada538632b6c66f9222a
diff --git a/lib/natural/inflectors/noun_inflector.js b/lib/natural/inflectors/noun_inflector.js index <HASH>..<HASH> 100755 --- a/lib/natural/inflectors/noun_inflector.js +++ b/lib/natural/inflectors/noun_inflector.js @@ -61,6 +61,7 @@ var NounInflector = function() { this.addIrregular("foot", "feet"); this.addIrregular("tooth", "teeth"); this.addIrregular("goose", "geese"); + this.addIrregular("ephemeris", "ephemerides"); // see if it is possible to unify the creation of both the singular and // plural regexes or maybe even just have one list. with a complete list @@ -69,9 +70,9 @@ var NounInflector = function() { this.pluralForms.regularForms.push([/ife$/i, 'ives']); this.pluralForms.regularForms.push([/(antenn|formul|nebul|vertebr|vit)a$/i, '$1ae']); this.pluralForms.regularForms.push([/(octop|vir|radi|nucle|fung|cact|stimul)us$/i, '$1i']); - this.pluralForms.regularForms.push([/(buffal|tomat)o$/i, '$1oes']); + this.pluralForms.regularForms.push([/(buffal|tomat|tornad)o$/i, '$1oes']); this.pluralForms.regularForms.push([/(sis)$/i, 'ses']); - this.pluralForms.regularForms.push([/(matr|vert|ind)(ix|ex)$/i, '$1ices']); + this.pluralForms.regularForms.push([/(matr|vert|ind|cort)(ix|ex)$/i, '$1ices']); this.pluralForms.regularForms.push([/(x|ch|ss|sh|s|z)$/i, '$1es']); this.pluralForms.regularForms.push([/^(?!talis|.*hu)(.*)man$/i, '$1men']); this.pluralForms.regularForms.push([/(.*)/i, '$1s']); @@ -80,9 +81,9 @@ var NounInflector = function() { this.singularForms.regularForms.push([/ives$/i, 'ife']); this.singularForms.regularForms.push([/(antenn|formul|nebul|vertebr|vit)ae$/i, '$1a']); this.singularForms.regularForms.push([/(octop|vir|radi|nucle|fung|cact|stimul)(i)$/i, '$1us']); - this.singularForms.regularForms.push([/(buffal|tomat)(oes)$/i, '$1o']); + this.singularForms.regularForms.push([/(buffal|tomat|tornad)(oes)$/i, '$1o']); this.singularForms.regularForms.push([/(analy|naly|synop|parenthe|diagno|the)ses$/i, '$1sis']); - this.singularForms.regularForms.push([/(vert|ind)(ices)$/i, '$1ex']); + this.singularForms.regularForms.push([/(vert|ind|cort)(ices)$/i, '$1ex']); // our pluralizer won''t cause this form of appendix (appendicies) // but we should handle it this.singularForms.regularForms.push([/(matr|append)(ices)$/i, '$1ix']);
corrected some plural forms support for tornadoes, ephemerides and cortices nouns
NaturalNode_natural
train
6171e078c8fdd2250e331f1c225db2b4e3be8809
diff --git a/processing/src/main/java/io/druid/query/dimension/LookupDimensionSpec.java b/processing/src/main/java/io/druid/query/dimension/LookupDimensionSpec.java index <HASH>..<HASH> 100644 --- a/processing/src/main/java/io/druid/query/dimension/LookupDimensionSpec.java +++ b/processing/src/main/java/io/druid/query/dimension/LookupDimensionSpec.java @@ -26,10 +26,10 @@ import com.google.common.base.Preconditions; import com.google.common.base.Strings; import com.metamx.common.StringUtils; import io.druid.query.extraction.ExtractionFn; +import io.druid.query.filter.DimFilterCacheHelper; import io.druid.query.lookup.LookupExtractionFn; import io.druid.query.lookup.LookupExtractor; import io.druid.query.lookup.LookupReferencesManager; -import io.druid.query.filter.DimFilterCacheHelper; import io.druid.segment.DimensionSelector; import javax.annotation.Nullable; @@ -129,10 +129,11 @@ public class LookupDimensionSpec implements DimensionSpec final LookupExtractor lookupExtractor = Strings.isNullOrEmpty(name) ? this.lookup : Preconditions.checkNotNull( - this.lookupReferencesManager.get(name).get(), - "can not find lookup with name [%s]", + lookupReferencesManager.get(name), + "Lookup [%s] not found", name - ); + ).get(); + return new LookupExtractionFn( lookupExtractor, retainMissingValue, @@ -140,7 +141,6 @@ public class LookupDimensionSpec implements DimensionSpec lookupExtractor.isOneToOne(), optimize ); - } @Override
Improve NPE message in LookupDimensionSpec when lookup does not exist. (#<I>) The message used to be empty, which made things hard to debug.
apache_incubator-druid
train
99c62543798608506fad776d62297eaab7c1f16b
diff --git a/spec/signore/executable/settings_spec.rb b/spec/signore/executable/settings_spec.rb index <HASH>..<HASH> 100644 --- a/spec/signore/executable/settings_spec.rb +++ b/spec/signore/executable/settings_spec.rb @@ -3,6 +3,12 @@ require_relative '../../spec_helper' require_relative '../../../lib/signore/executable/settings' module Signore class Executable; describe Settings do + describe '#action' do + it 'is defined by the first argument' do + Settings.new(['prego']).action.must_equal 'prego' + end + end + describe '#db_path' do it 'defaults to $XDG_DATA_HOME/signore/signatures.yml' do begin
spec Executable::Settings#action
chastell_signore
train
9cb181e9fff00340b25510ab859ff8641371965a
diff --git a/chess/__init__.py b/chess/__init__.py index <HASH>..<HASH> 100644 --- a/chess/__init__.py +++ b/chess/__init__.py @@ -3582,9 +3582,6 @@ class SquareSet(object): def symmetric_difference(self, other): return self ^ other - def copy(self): - return type(self)(self.mask) - def update(self, other): self |= other @@ -3597,9 +3594,12 @@ class SquareSet(object): def symmetric_difference_update(self, other): self ^= other + def copy(self): + return type(self)(self.mask) + def add(self, square): """Add a square to the set.""" - self |= BB_SQUARES[square] + self.mask |= BB_SQUARES[square] def remove(self, square): """ @@ -3615,7 +3615,7 @@ class SquareSet(object): def discard(self, square): """Discards a square from the set.""" - self &= ~BB_SQUARES[square] + self.mask &= ~BB_SQUARES[square] def pop(self): """ @@ -3644,7 +3644,7 @@ class SquareSet(object): def __ne__(self, other): try: - return int(self) != int(other) + return self.mask != int(other) except ValueError: return NotImplemented @@ -3754,9 +3754,6 @@ class SquareSet(object): import chess.svg return chess.svg.board(squares=self) - def __hash__(self): - return hash(self.mask) - @classmethod def from_square(cls, square): """
SquareSet should not be hashable
niklasf_python-chess
train
ff67b5ba849e55f2bfa70e1665038e856e7ebd42
diff --git a/forms/DropdownField.php b/forms/DropdownField.php index <HASH>..<HASH> 100644 --- a/forms/DropdownField.php +++ b/forms/DropdownField.php @@ -13,8 +13,8 @@ * * public function getCMSFields() { * $fields = parent::getCMSFields(); - * $field = DropdownField::create('GalleryID', 'Gallery', Gallery::get()->map('ID', 'Title')); - * $field->setEmptyString('(Select one)'); + * $field = DropdownField::create('GalleryID', 'Gallery', Gallery::get()->map('ID', 'Title')) + * ->setEmptyString('(Select one)'); * $fields->addFieldToTab('Root.Content', $field, 'Content'); * </code> * @@ -33,7 +33,7 @@ * * Example instantiation: * <code> - * new DropdownField( + * DropdownField::create( * 'Country', * 'Country', * array( @@ -59,7 +59,7 @@ * * Field construction: * <code> - * new DropdownField( + * DropdownField::create( * 'Country', * 'Country', * singleton('MyObject')->dbObject('Country')->enumValues()
Update DropdownField.php Update to DropdownField::create, and in one instance set the Empty Title on the instantiation, rather than afterwards.
silverstripe_silverstripe-framework
train
6c93b8a74cd75709484642b0dd281ce25df19b66
diff --git a/test/test_plotting.py b/test/test_plotting.py index <HASH>..<HASH> 100644 --- a/test/test_plotting.py +++ b/test/test_plotting.py @@ -12,25 +12,43 @@ import networkx as nx import pandas as pd import matplotlib.pyplot as plt +try: + import cartopy + cartopy_present = True +except ImportError as e: + cartopy_present = False + -@pytest.mark.parametrize("geomap", (True, False)) @pytest.mark.parametrize("margin", (None, 0.1)) @pytest.mark.parametrize("jitter", (None, 1)) -def test_plot_standard_params(ac_dc_network, geomap, margin, jitter): +def test_plot_standard_params_wo_geomap(ac_dc_network, margin, jitter): n = ac_dc_network + n.plot(geomap=False, margin=margin, jitter=jitter) + plt.close() + - n.plot(geomap=geomap, margin=margin, jitter=jitter) +@pytest.mark.skipif(not cartopy_present, reason="Cartopy not installed") +@pytest.mark.parametrize("margin", (None, 0.1)) +@pytest.mark.parametrize("jitter", (None, 1)) +def test_plot_standard_params_w_geomap(ac_dc_network, margin, jitter): + n = ac_dc_network + n.plot(geomap=True, margin=margin, jitter=jitter) + plt.close() -def test_plot_on_axis(ac_dc_network): +def test_plot_on_axis_wo_geomap(ac_dc_network): n = ac_dc_network fig, ax = plt.subplots() n.plot(ax=ax, geomap=False) plt.close() +@pytest.mark.skipif(not cartopy_present, reason="Cartopy not installed") +def test_plot_on_axis_w_geomap(ac_dc_network): + n = ac_dc_network + fig, ax = plt.subplots() with pytest.raises(AssertionError): n.plot(ax=ax, geomap=True) - + plt.close() def test_plot_bus_circles(ac_dc_network): n = ac_dc_network @@ -38,7 +56,7 @@ def test_plot_bus_circles(ac_dc_network): bus_sizes = n.generators.groupby(["bus", "carrier"]).p_nom.mean() bus_sizes[:] = 1 bus_colors = pd.Series(["blue", "red", "green"], index=n.carriers.index) - n.plot(bus_sizes=bus_sizes, bus_colors=bus_colors) + n.plot(bus_sizes=bus_sizes, bus_colors=bus_colors, geomap=False) plt.close() # Retrieving the colors from carriers also should work @@ -52,7 +70,7 @@ def test_plot_with_bus_cmap(ac_dc_network): buses = n.buses.index colors = pd.Series(np.random.rand(len(buses)), buses) - n.plot(bus_colors=colors, bus_cmap="coolwarm") + n.plot(bus_colors=colors, bus_cmap="coolwarm", geomap=False) plt.close() @@ -61,14 +79,14 @@ def test_plot_with_line_cmap(ac_dc_network): lines = n.lines.index colors = pd.Series(np.random.rand(len(lines)), lines) - n.plot(line_colors=colors, line_cmap="coolwarm") + n.plot(line_colors=colors, line_cmap="coolwarm", geomap=False) plt.close() def test_plot_layouter(ac_dc_network): n = ac_dc_network - n.plot(layouter=nx.layout.planar_layout) + n.plot(layouter=nx.layout.planar_layout, geomap=False) plt.close() @@ -77,13 +95,13 @@ def test_plot_map_flow(ac_dc_network): branches = n.branches() flow = pd.Series(range(len(branches)), index=branches.index) - n.plot(flow=flow) + n.plot(flow=flow, geomap=False) plt.close() n.lines_t.p0.loc[:, flow.Line.index] = 0 n.lines_t.p0 += flow.Line - n.plot(flow="mean") + n.plot(flow="mean", geomap=False) plt.close() - n.plot(flow=n.snapshots[0]) + n.plot(flow=n.snapshots[0], geomap=False) plt.close()
test_plotting: skip tests if cartopy not present
PyPSA_PyPSA
train
3405b606f9262656ddb8a26fca3cd528ffa7d83c
diff --git a/growler/http/responder.py b/growler/http/responder.py index <HASH>..<HASH> 100644 --- a/growler/http/responder.py +++ b/growler/http/responder.py @@ -5,12 +5,14 @@ The Growler class responsible for responding to HTTP requests. """ -import asyncio - from .parser import Parser from .request import HTTPRequest from .response import HTTPResponse +from .errors import ( + HTTPErrorBadRequest +) + class GrowlerHTTPResponder(): """ @@ -97,12 +99,12 @@ class GrowlerHTTPResponder(): try: maxlen = self.headers['CONTENT-LENGTH'] except KeyError: - raise BadHTTPRequest + raise HTTPErrorBadRequest try: self.content_length += len(data) except AttributeError: - raise BadHTTPRequest + raise HTTPErrorBadRequest if self.content_length > maxlen: raise HTTPErrorBadRequest
Fixed some incorrect error names in Responder
pyGrowler_Growler
train
bd68d7ea7656ba4d3098a62cf16c52faa46b2a9c
diff --git a/_config/config-default.js b/_config/config-default.js index <HASH>..<HASH> 100644 --- a/_config/config-default.js +++ b/_config/config-default.js @@ -10,7 +10,9 @@ */ // Resolve the path to the validation module, via xtc's lib/configure.js -var check = require(require('path').join(module.parent.filename.split('/lib')[0], 'lib/validator')); +var path = require('path') + ,check = require(path.join(module.parent.filename.split(path.sep +'lib')[0], 'lib/validator')) +; module.exports = {
Fix another hardcoded forward slash that could bite us on Windows.
MarcDiethelm_xtc
train
d0647ad6a44d1a0785f29f40bd30412f8592e291
diff --git a/extensions/guacamole-auth-jdbc/modules/guacamole-auth-jdbc-base/src/main/java/org/apache/guacamole/auth/jdbc/tunnel/RestrictedGuacamoleTunnelService.java b/extensions/guacamole-auth-jdbc/modules/guacamole-auth-jdbc-base/src/main/java/org/apache/guacamole/auth/jdbc/tunnel/RestrictedGuacamoleTunnelService.java index <HASH>..<HASH> 100644 --- a/extensions/guacamole-auth-jdbc/modules/guacamole-auth-jdbc-base/src/main/java/org/apache/guacamole/auth/jdbc/tunnel/RestrictedGuacamoleTunnelService.java +++ b/extensions/guacamole-auth-jdbc/modules/guacamole-auth-jdbc-base/src/main/java/org/apache/guacamole/auth/jdbc/tunnel/RestrictedGuacamoleTunnelService.java @@ -191,9 +191,22 @@ public class RestrictedGuacamoleTunnelService public int compare(ModeledConnection a, ModeledConnection b) { logger.trace("Comparing {} to {}.", a.getName(), b.getName()); - return getActiveConnections(a).size() - - getActiveConnections(b).size(); - + int cw = 0; + + try { + if(a.getConnectionWeight() > 0 && b.getConnectionWeight() > 0) + cw = (int)(a.getConnectionWeight()/getActiveConnections(a).size() - b.getConnectionWeight()/getActiveConnections(b).size()); + else + cw = getActiveConnections(a).size() - getActiveConnections(b).size(); + + } + catch (GuacamoleException e) { + logger.error("Could not compare connections.", e.getMessage()); + logger.debug("Could not compare connections.", e); + } + + return cw; + } });
GUACAMOLE-<I>: Initial stab at a WLC algorithm.
glyptodon_guacamole-client
train
46ddfd1c9f3efa57a171287719c44216020ca3be
diff --git a/lib/commands/run.js b/lib/commands/run.js index <HASH>..<HASH> 100644 --- a/lib/commands/run.js +++ b/lib/commands/run.js @@ -99,7 +99,9 @@ function run(scriptPath, options) { context.script.config.payload = results; return callback(null, context); }); - } else if (context.script.config.payload && _.isObject(context.script.config.payload)) { + } else if (context.script.config.payload && + _.isObject(context.script.config.payload) && + options.payload) { let csvdata = fs.readFileSync(options.payload, 'utf-8'); csv(csvdata, function(err, payload) { @@ -119,6 +121,7 @@ function run(scriptPath, options) { 'WARNING: payload file not set, but payload is configured in %s\n', scriptPath); } + context.payload = [[]]; return callback(null, context); }
Warn if payload file not set on the command line
artilleryio_artillery
train
1598a02508305c9b42442742318d3995687812b3
diff --git a/src-setup/org/opencms/setup/CmsUpdateBean.java b/src-setup/org/opencms/setup/CmsUpdateBean.java index <HASH>..<HASH> 100644 --- a/src-setup/org/opencms/setup/CmsUpdateBean.java +++ b/src-setup/org/opencms/setup/CmsUpdateBean.java @@ -882,28 +882,33 @@ public class CmsUpdateBean extends CmsSetupBean { // 5) because the setup bean implements I_CmsShellCommands, the shell constructor can pass the shell's CmsObject back to the setup bean // 6) thus, the setup bean can do things with the Cms - if ((m_cms != null) && (m_installModules != null)) { + if (m_cms != null) { + I_CmsReport report = new CmsShellReport(m_cms.getRequestContext().getLocale()); - Set<String> utdModules = new HashSet<String>(getUptodateModules()); + // remove obsolete modules in any case for (String moduleToRemove : getModulesToDelete()) { removeModule(moduleToRemove, report); } - List<String> installList = Lists.newArrayList(m_installModules); - for (String name : installList) { - if (!utdModules.contains(name)) { - String filename = m_moduleFilenames.get(name); - try { - updateModule(name, filename, report); - } catch (Exception e) { - // log a exception during module import, but make sure the next module is still imported - e.printStackTrace(System.err); + // check if there are any modules to install + if (m_installModules != null) { + Set<String> utdModules = new HashSet<String>(getUptodateModules()); + List<String> installList = Lists.newArrayList(m_installModules); + for (String name : installList) { + if (!utdModules.contains(name)) { + String filename = m_moduleFilenames.get(name); + try { + updateModule(name, filename, report); + } catch (Exception e) { + // log a exception during module import, but make sure the next module is still imported + e.printStackTrace(System.err); + } + } else { + report.println( + Messages.get().container(Messages.RPT_MODULE_UPTODATE_1, name), + I_CmsReport.FORMAT_HEADLINE); } - } else { - report.println( - Messages.get().container(Messages.RPT_MODULE_UPTODATE_1, name), - I_CmsReport.FORMAT_HEADLINE); } } } @@ -1036,11 +1041,9 @@ public class CmsUpdateBean extends CmsSetupBean { protected List<String> getModulesToDelete() { List<String> result = new ArrayList<String>(); - if (m_installModules.contains("org.opencms.ade.config")) { - for (int i = 0; i < OBSOLETE_MODULES.length; i++) { - if (OpenCms.getModuleManager().hasModule(OBSOLETE_MODULES[i])) { - result.add(OBSOLETE_MODULES[i]); - } + for (int i = 0; i < OBSOLETE_MODULES.length; i++) { + if (OpenCms.getModuleManager().hasModule(OBSOLETE_MODULES[i])) { + result.add(OBSOLETE_MODULES[i]); } } return result;
Fixing issue where obsolete modules were not deleted during the update.
alkacon_opencms-core
train
2814efad6bfc967e9522fed86b5112bc8e9076d7
diff --git a/lib/hipbot/adapters/hipchat/initializer.rb b/lib/hipbot/adapters/hipchat/initializer.rb index <HASH>..<HASH> 100644 --- a/lib/hipbot/adapters/hipchat/initializer.rb +++ b/lib/hipbot/adapters/hipchat/initializer.rb @@ -53,7 +53,7 @@ module Hipbot end def clean_other_objects klass, object_ids - klass.to_a.select{ |r| !object_ids.include?(r.id) }.each(&:destroy) + klass.all.select{ |r| !object_ids.include?(r.id) }.each(&:destroy) end def initialize_bot_user diff --git a/lib/hipbot/storages/hash.rb b/lib/hipbot/storages/hash.rb index <HASH>..<HASH> 100644 --- a/lib/hipbot/storages/hash.rb +++ b/lib/hipbot/storages/hash.rb @@ -43,6 +43,10 @@ module Hipbot module ClassMethods include Cache + def all + collection.values + end + def create params collection[params[:id]] = new(params) end @@ -76,7 +80,7 @@ module Hipbot protected def method_missing name, *args, &block - return collection.values.public_send(name, *args, &block) if Array.instance_methods.include?(name) + return all.public_send(name, *args, &block) if Array.instance_methods.include?(name) super end end
add "all" method to hash storage
pewniak747_hipbot
train
6886d87cff3035b2e021cc141b7890e248f25920
diff --git a/src/index.js b/src/index.js index <HASH>..<HASH> 100644 --- a/src/index.js +++ b/src/index.js @@ -57,14 +57,14 @@ async function resolveAsDirectory(request: string, parent: string, config: Confi async function resolveModulePath(request: string, parent: string, config: Config): Promise<string> { const chunks = getChunks(request) const moduleName = chunks.shift() - const packageRoots = [] + const packageRoots = new Set() const localRoot = getLocalPackageRoot(Path.dirname(parent), config) if (localRoot) { - packageRoots.push(localRoot) + packageRoots.add(localRoot) } if (typeof config.root === 'string') { - packageRoots.push(config.root) + packageRoots.add(config.root) } const absoluteModuleDirectories = config.moduleDirectories.filter(i => Path.isAbsolute(i))
:new: Use Set instead of Array for packageRoots
steelbrain_resolve
train
fef678fe9bd7f285967a0e68c8109999e187a66b
diff --git a/edisgo/edisgo.py b/edisgo/edisgo.py index <HASH>..<HASH> 100755 --- a/edisgo/edisgo.py +++ b/edisgo/edisgo.py @@ -823,13 +823,22 @@ class EDisGo: "be set as active power time series was not provided." ) else: - self.set_time_series_reactive_power_control( - **{ + other_comps = [ + _ + for _ in ["generator", "load", "storage_unit"] + if _ != comp_type + ] + parameter_dict = { + f"{t}s_parametrisation": None for t in other_comps + } + parameter_dict.update( + { f"{comp_type}s_parametrisation": _get_q_default_df( comp_name ) } ) + self.set_time_series_reactive_power_control(**parameter_dict) if comp_type == "bus": comp_name = self.topology.add_bus(**kwargs) diff --git a/tests/test_edisgo.py b/tests/test_edisgo.py index <HASH>..<HASH> 100755 --- a/tests/test_edisgo.py +++ b/tests/test_edisgo.py @@ -184,6 +184,13 @@ class TestEDisGo: self.edisgo.timeseries.loads_reactive_power.loc[:, load_name] == dummy_ts * np.tan(np.arccos(0.9)) ).all() + # check that reactive power time series were not all set to default + assert ( + self.edisgo.timeseries.loads_active_power.loc[ + :, "Conventional_Load_MVGrid_1_residential_4" + ] + == dummy_ts + ).all() # Test add generator (without time series) num_gens = len(self.edisgo.topology.generators_df)
Bug fix - do not overwrite other reactive power time series
openego_eDisGo
train
84a791bfdcf8220ea7c603faec281e1ec93c3dc0
diff --git a/salt/client/ssh/shell.py b/salt/client/ssh/shell.py index <HASH>..<HASH> 100644 --- a/salt/client/ssh/shell.py +++ b/salt/client/ssh/shell.py @@ -6,6 +6,7 @@ Manage transport commands via ssh # Import python libs import re import os +import json import time import logging import subprocess @@ -313,52 +314,38 @@ class Shell(object): sent_passwd = 0 ret_stdout = '' ret_stderr = '' - while True: - stdout, stderr = term.recv() - if stdout: - ret_stdout += stdout - if stderr: - ret_stderr += stderr - if stdout and SSH_PASSWORD_PROMPT_RE.search(stdout): - if len(stdout) > 256: - pass - elif not self.passwd: - try: - term.close(terminate=True, kill=True) - except salt.utils.vt.TerminalException: - pass - return '', 'Permission denied, no authentication information', 254 - if sent_passwd < passwd_retries: - term.sendline(self.passwd) - sent_passwd += 1 - continue - else: - # asking for a password, and we can't seem to send it - try: - term.close(terminate=True, kill=True) - except salt.utils.vt.TerminalException: - pass - return '', 'Password authentication failed', 254 - elif stdout and KEY_VALID_RE.search(stdout): - if key_accept: - term.sendline('yes') - continue - else: - term.sendline('no') - ret_stdout = ('The host key needs to be accepted, to ' - 'auto accept run salt-ssh with the -i ' - 'flag:\n{0}').format(stdout) - return ret_stdout, '', 254 - if not term.isalive(): - while True: - stdout, stderr = term.recv() - if stdout: - ret_stdout += stdout - if stderr: - ret_stderr += stderr - if stdout is None and stderr is None: - break - term.close(terminate=True, kill=True) - break - time.sleep(0.5) - return ret_stdout, ret_stderr, term.exitstatus + + try: + while term.has_unread_data: + stdout, stderr = term.recv() + if stdout: + ret_stdout += stdout + if stderr: + ret_stderr += stderr + if stdout and SSH_PASSWORD_PROMPT_RE.search(stdout): + if not self.passwd: + return '', 'Permission denied, no authentication information', 254 + if sent_passwd < passwd_retries: + term.sendline(self.passwd) + sent_passwd += 1 + continue + else: + # asking for a password, and we can't seem to send it + return '', 'Password authentication failed', 254 + elif stdout and KEY_VALID_RE.search(stdout): + if key_accept: + term.sendline('yes') + continue + else: + term.sendline('no') + ret_stdout = ('The host key needs to be accepted, to ' + 'auto accept run salt-ssh with the -i ' + 'flag:\n{0}').format(stdout) + return ret_stdout, '', 254 + elif stdout and stdout.endswith('_||ext_mods||_'): + mods_raw = json.dumps(self.mods, separators=(',', ':')) + '|_E|0|' + term.sendline(mods_raw) + time.sleep(0.5) + return ret_stdout, ret_stderr, term.exitstatus + finally: + term.close(terminate=True, kill=True)
VT now consumes all of its data when looping using its `has_unread_data` attribute
saltstack_salt
train
a63d12f1c5df036f818274706d098bad30cbb044
diff --git a/src/main/java/com/authlete/jaxrs/BaseEndpoint.java b/src/main/java/com/authlete/jaxrs/BaseEndpoint.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/authlete/jaxrs/BaseEndpoint.java +++ b/src/main/java/com/authlete/jaxrs/BaseEndpoint.java @@ -17,8 +17,16 @@ package com.authlete.jaxrs; +import java.security.cert.CertificateEncodingException; +import java.security.cert.X509Certificate; +import java.util.ArrayList; +import java.util.List; + +import javax.servlet.http.HttpServletRequest; import javax.ws.rs.WebApplicationException; +import org.apache.commons.codec.binary.Base64; + /** * A base class for endpoints. @@ -29,6 +37,20 @@ import javax.ws.rs.WebApplicationException; */ public class BaseEndpoint { + /** + * Headers to check for certificate path with proxy-forwarded certificate + * information; the first entry is the client's certificate itself + */ + private String[] clientCertificatePathHeaders = { + "X-Ssl-Cert", // the client's certificate + "X-Ssl-Cert-Chain-1", "X-Ssl-Cert-Chain-2", "X-Ssl-Cert-Chain-3", "X-Ssl-Cert-Chain-4" // the intermediate certificate path, not including the client's certificate or root + }; + + /* + * Used for handling PEM format certificates. + */ + private Base64 base64 = new Base64(Base64.PEM_CHUNK_SIZE, "\n".getBytes()); + /** * Called when the internal request handler raises an exception. * The default implementation of this method calls {@code @@ -42,4 +64,77 @@ public class BaseEndpoint { exception.printStackTrace(); } + + protected String[] extractClientCertificateChain(HttpServletRequest request) + { + // try to get the certificates from the servlet context directly + X509Certificate[] certs = (X509Certificate[]) request.getAttribute("javax.servlet.request.X509Certificate"); + + if (certs == null || certs.length == 0) + { + // we didn't find any certificates in the servlet request, try extracting them from the headers instead + List<String> headerCerts = new ArrayList<>(); + + for (String headerName : clientCertificatePathHeaders) + { + String header = request.getHeader(headerName); + if (header != null) + { + headerCerts.add(header); + } + } + + if (headerCerts.isEmpty()) + { + return null; + } + else + { + return headerCerts.toArray(new String[] {}); + } + } + else + { + String[] pemEncoded = new String[certs.length]; + + try + { + for (int i = 0; i < certs.length; i++) + { + // encode each certificate in PEM format + StringBuilder sb = new StringBuilder(); + sb.append("-----BEGIN CERTIFICATE-----\n"); + sb.append(base64.encode(certs[i].getEncoded())); + sb.append("\n-----END CERTIFICATE-----\n"); + + pemEncoded[i] = sb.toString(); + + } + } catch (CertificateEncodingException e) + { + // TODO What should be done with this error? + e.printStackTrace(); + return null; + } + + return pemEncoded; + + } + + } + + protected String extractClientCertificate(HttpServletRequest request) + { + String[] certs = extractClientCertificateChain(request); + + if (certs != null && certs.length > 0) + { + return certs[0]; + } + else + { + return null; + } + } + }
abstracted certificate extraction to base endpoint
authlete_authlete-java-jaxrs
train
71c40d15e12405987fda3f1580522eba582fa2dc
diff --git a/lib/hasu/window.rb b/lib/hasu/window.rb index <HASH>..<HASH> 100644 --- a/lib/hasu/window.rb +++ b/lib/hasu/window.rb @@ -15,7 +15,7 @@ module Hasu def initialize(*) super - reset + reset unless Hasu.error end def self.run
Don't reset during initialization if there's a Hasu error
michaelfairley_hasu
train
eb2dee4bbc7c43ed85cf25945480dd4d375b28fa
diff --git a/src/ChangeStream.php b/src/ChangeStream.php index <HASH>..<HASH> 100644 --- a/src/ChangeStream.php +++ b/src/ChangeStream.php @@ -117,7 +117,26 @@ class ChangeStream implements Iterator */ public function rewind() { - $this->csIt->rewind(); + $resumable = false; + try { + $this->csIt->rewind(); + if ($this->valid()) { + $this->extractResumeToken($this->csIt->current()); + } + } catch (RuntimeException $e) { + if (strpos($e->getMessage(), "not master") !== false) { + $resumable = true; + } + if ($e->getCode() === self::CURSOR_NOT_FOUND) { + $resumable = true; + } + if ($e instanceof ConnectionTimeoutException) { + $resumable = true; + } + } + if ($resumable) { + $this->resume(); + } } /**
PHPLIB-<I>: Add resume logic to ChangeStream::rewind()
mongodb_mongo-php-library
train
4ae7c9616e9194d8d49d90de63e0c3fce3135b94
diff --git a/src/main/java/org/craftercms/core/service/impl/ContentStoreServiceImpl.java b/src/main/java/org/craftercms/core/service/impl/ContentStoreServiceImpl.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/craftercms/core/service/impl/ContentStoreServiceImpl.java +++ b/src/main/java/org/craftercms/core/service/impl/ContentStoreServiceImpl.java @@ -30,7 +30,6 @@ import org.apache.commons.io.FilenameUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.craftercms.commons.config.ConfigurationException; import org.craftercms.commons.config.ConfigurationProvider; import org.craftercms.commons.file.blob.BlobStore; import org.craftercms.commons.file.blob.BlobStoreResolver; @@ -228,7 +227,7 @@ public class ContentStoreServiceImpl extends AbstractCachedContentStoreService { BlobStore store = blobStoreResolver.getById( new ConfigurationProviderImpl(cachingOptions, context), blob.getStoreId()); return new ResourceBasedContent(store.getResource(url, blob)); - } catch (IOException | ConfigurationException e) { + } catch (Exception e) { throw new StoreException("Error reading blob file at " + blobUrl, e); } } @@ -583,8 +582,8 @@ public class ContentStoreServiceImpl extends AbstractCachedContentStoreService { */ private class ConfigurationProviderImpl implements ConfigurationProvider { - private CachingOptions cachingOptions; - private Context context; + private final CachingOptions cachingOptions; + private final Context context; public ConfigurationProviderImpl(CachingOptions cachingOptions, Context context) { this.cachingOptions = cachingOptions;
Improve logging for blob stores
craftercms_core
train
2ac6a23fa755d7fa5770aabaf3b5878ea8e0a325
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -5,7 +5,7 @@ with open("README.md", "r") as fh: setup( name='pyfmg', - version='0.7.5', + version='0.8.01', packages=find_packages(), url='https://github.com/p4r4n0y1ng/pyfmg', license='Apache 2.0',
Modified version to <I> for submission to pypi
p4r4n0y1ng_pyfmg
train
34dcf968a9c9b4a3653049bd702f451ea1812fdf
diff --git a/src/bosh-director/lib/bosh/director/errand/instance_group_manager.rb b/src/bosh-director/lib/bosh/director/errand/instance_group_manager.rb index <HASH>..<HASH> 100644 --- a/src/bosh-director/lib/bosh/director/errand/instance_group_manager.rb +++ b/src/bosh-director/lib/bosh/director/errand/instance_group_manager.rb @@ -33,7 +33,7 @@ module Bosh::Director bound_instance_plans.each do |instance_plan| unless instance_plan.already_detached? - DeploymentPlan::Steps::UnmountInstanceDisksStep.new(instance_plan).perform + DeploymentPlan::Steps::UnmountInstanceDisksStep.new(instance_plan.instance).perform end @vm_deleter.delete_for_instance(instance_plan.instance.model) diff --git a/src/bosh-director/spec/unit/errand/instance_group_manager_spec.rb b/src/bosh-director/spec/unit/errand/instance_group_manager_spec.rb index <HASH>..<HASH> 100644 --- a/src/bosh-director/spec/unit/errand/instance_group_manager_spec.rb +++ b/src/bosh-director/spec/unit/errand/instance_group_manager_spec.rb @@ -108,7 +108,7 @@ module Bosh::Director context 'when there are instance plans' do before do - allow(DeploymentPlan::Steps::UnmountInstanceDisksStep).to receive(:new).with(instance_plan1).and_return(unmount_step) + allow(DeploymentPlan::Steps::UnmountInstanceDisksStep).to receive(:new).with(instance1).and_return(unmount_step) allow(Config).to receive_message_chain(:current_job, :event_manager).and_return(Api::EventManager.new({})) allow(Config).to receive_message_chain(:current_job, :username).and_return('user') allow(Config).to receive_message_chain(:current_job, :task_id).and_return('task-1', 'task-2')
Pass in instance_model as expected for unmounts * Previously took instance plans [#<I>](<URL>)
cloudfoundry_bosh
train
6dc2199d8085e2de87046b7b56bacdcf5914db25
diff --git a/doc/source/whatsnew/v0.15.1.txt b/doc/source/whatsnew/v0.15.1.txt index <HASH>..<HASH> 100644 --- a/doc/source/whatsnew/v0.15.1.txt +++ b/doc/source/whatsnew/v0.15.1.txt @@ -118,7 +118,8 @@ Bug Fixes -- Bug in numeric index operations of add/sub with Float/Index Index with numpy arrays (:issue:`8608`) +- Bug in numeric index operations of add/sub with Float/Index Index with numpy arrays (:issue:`8608` +- Bug in setitem with empty indexer and unwanted coercion of dtypes (:issue:`8669`) diff --git a/pandas/core/internals.py b/pandas/core/internals.py index <HASH>..<HASH> 100644 --- a/pandas/core/internals.py +++ b/pandas/core/internals.py @@ -549,10 +549,31 @@ class Block(PandasObject): "different length than the value") try: + + def _is_scalar_indexer(indexer): + # treat a len 0 array like a scalar + # return True if we are all scalar indexers + + if arr_value.ndim == 1: + if not isinstance(indexer, tuple): + indexer = tuple([indexer]) + + def _is_ok(idx): + + if np.isscalar(idx): + return True + elif isinstance(idx, slice): + return False + return len(idx) == 0 + + return all([ _is_ok(idx) for idx in indexer ]) + return False + + # setting a single element for each dim and with a rhs that could be say a list - # GH 6043 - if arr_value.ndim == 1 and ( - np.isscalar(indexer) or (isinstance(indexer, tuple) and all([ np.isscalar(idx) for idx in indexer ]))): + # or empty indexers (so no astyping) + # GH 6043, 8669 (empty) + if _is_scalar_indexer(indexer): values[indexer] = value # if we are an exact match (ex-broadcasting), diff --git a/pandas/tests/test_indexing.py b/pandas/tests/test_indexing.py index <HASH>..<HASH> 100644 --- a/pandas/tests/test_indexing.py +++ b/pandas/tests/test_indexing.py @@ -1043,6 +1043,13 @@ class TestIndexing(tm.TestCase): expected = DataFrame(dict(A = Series(val1,index=keys1), B = Series(val2,index=keys2))).reindex(index=index) assert_frame_equal(df, expected) + # GH 8669 + # invalid coercion of nan -> int + df = DataFrame({'A' : [1,2,3], 'B' : np.nan }) + df.loc[df.B > df.A, 'B'] = df.A + expected = DataFrame({'A' : [1,2,3], 'B' : np.nan}) + assert_frame_equal(df, expected) + # GH 6546 # setting with mixed labels df = DataFrame({1:[1,2],2:[3,4],'a':['a','b']}) @@ -1055,7 +1062,6 @@ class TestIndexing(tm.TestCase): df.loc[0,[1,2]] = [5,6] assert_frame_equal(df, expected) - def test_loc_setitem_frame_multiples(self): # multiple setting df = DataFrame({ 'A' : ['foo','bar','baz'],
BUG: Bug in setitem with empty indexer and unwanted coercion of dtypes (GH<I>)
pandas-dev_pandas
train
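The regression covered by the pandas commit above can be reproduced straight from the test it adds (GH 8669). A short sketch, assuming a pandas release that contains the fix; the assertion helper is spelled pd.testing.assert_frame_equal in current versions.

import numpy as np
import pandas as pd

# Column B is all-NaN float; the boolean mask selects no rows, so the
# assignment uses an empty indexer and must not coerce B's dtype.
df = pd.DataFrame({'A': [1, 2, 3], 'B': np.nan})
df.loc[df.B > df.A, 'B'] = df.A

expected = pd.DataFrame({'A': [1, 2, 3], 'B': np.nan})
pd.testing.assert_frame_equal(df, expected)
print(df.dtypes['B'])  # float64, i.e. no unwanted coercion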
35c9e753f4b6d6a77255e638e05cb1214d0c0cb1
diff --git a/modules/backend/widgets/Table.php b/modules/backend/widgets/Table.php index <HASH>..<HASH> 100644 --- a/modules/backend/widgets/Table.php +++ b/modules/backend/widgets/Table.php @@ -158,6 +158,14 @@ class Table extends WidgetBase $option = trans($option); } + if (isset($data['validation'])) { + foreach ($data['validation'] as &$validation) { + if (isset($validation['message'])) { + $validation['message'] = trans($validation['message']); + } + } + } + $result[] = $data; } diff --git a/modules/backend/widgets/table/assets/js/build-min.js b/modules/backend/widgets/table/assets/js/build-min.js index <HASH>..<HASH> 100644 --- a/modules/backend/widgets/table/assets/js/build-min.js +++ b/modules/backend/widgets/table/assets/js/build-min.js @@ -697,11 +697,14 @@ CheckboxProcessor.prototype.onFocus=function(cellElement,isClick){cellElement.qu CheckboxProcessor.prototype.onKeyDown=function(ev){if(ev.keyCode==32) this.onClick(ev)} CheckboxProcessor.prototype.onClick=function(ev){var target=this.tableObj.getEventTarget(ev,'DIV') -if(target.getAttribute('data-checkbox-element')){this.changeState(target)}} +if(target.getAttribute('data-checkbox-element')){var container=this.getCheckboxContainerNode(target) +if(container.getAttribute('data-column')!==this.columnName){return} +this.changeState(target)}} CheckboxProcessor.prototype.changeState=function(divElement){var cell=divElement.parentNode.parentNode if(divElement.getAttribute('class')=='checked'){divElement.setAttribute('class','') this.tableObj.setCellValue(cell,0)}else{divElement.setAttribute('class','checked') this.tableObj.setCellValue(cell,1)}} +CheckboxProcessor.prototype.getCheckboxContainerNode=function(checkbox){return checkbox.parentNode.parentNode} $.oc.table.processor.checkbox=CheckboxProcessor;}(window.jQuery);+function($){"use strict";if($.oc.table===undefined) throw new Error("The $.oc.table namespace is not defined. Make sure that the table.js script is loaded.");if($.oc.table.processor===undefined) throw new Error("The $.oc.table.processor namespace is not defined. Make sure that the table.processor.base.js script is loaded.");var Base=$.oc.table.processor.base,BaseProto=Base.prototype diff --git a/modules/backend/widgets/table/assets/js/table.processor.checkbox.js b/modules/backend/widgets/table/assets/js/table.processor.checkbox.js index <HASH>..<HASH> 100644 --- a/modules/backend/widgets/table/assets/js/table.processor.checkbox.js +++ b/modules/backend/widgets/table/assets/js/table.processor.checkbox.js @@ -79,6 +79,13 @@ var target = this.tableObj.getEventTarget(ev, 'DIV') if (target.getAttribute('data-checkbox-element')) { + // The method is called for all processors, but we should + // update only the checkbox in the clicked column. + var container = this.getCheckboxContainerNode(target) + if (container.getAttribute('data-column') !== this.columnName) { + return + } + this.changeState(target) } } @@ -95,5 +102,9 @@ } } + CheckboxProcessor.prototype.getCheckboxContainerNode = function(checkbox) { + return checkbox.parentNode.parentNode + } + $.oc.table.processor.checkbox = CheckboxProcessor; }(window.jQuery); \ No newline at end of file
Table validation messages are translatable now. Fixed a bug where clicking a checkbox in a table row didn't change its status.
octobercms_october
train
25ac7f1f3b2e56fdf85ffa444d25a2a28ad0d640
diff --git a/pypuppetdb/api.py b/pypuppetdb/api.py index <HASH>..<HASH> 100644 --- a/pypuppetdb/api.py +++ b/pypuppetdb/api.py @@ -424,7 +424,7 @@ class BaseAPI(object): "command": command, "version": COMMAND_VERSION[command], "certname": payload['certname'], - "checksum": hashlib.sha1(str(payload).encode('utf-8')).hexdigest() + "checksum": hashlib.sha1(str(payload).encode('utf-8')).hexdigest() #nosec } if not self.token:
Add #nosec to sha1 line - erroneous bandit error
voxpupuli_pypuppetdb
train
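For context on the commit above: bandit flags any use of hashlib.sha1 as potentially insecure, and a trailing nosec marker tells it to skip that line. A small sketch of the pattern — the payload below is made-up example data, not pypuppetdb's real command payload.

import hashlib

payload = {'certname': 'node01.example.com', 'producer': 'master01'}  # example data

# sha1 is used here only as a payload checksum, not for anything
# security-sensitive, so the bandit warning is suppressed on this line.
checksum = hashlib.sha1(str(payload).encode('utf-8')).hexdigest()  # nosec

print(checksum)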
37cbb2b07ceceb7cb8976ed9379a9b1886af2d97
diff --git a/sdl/endian.go b/sdl/endian.go index <HASH>..<HASH> 100644 --- a/sdl/endian.go +++ b/sdl/endian.go @@ -4,6 +4,7 @@ package sdl import "C" const ( + BYTEORDER = C.SDL_BYTEORDER LIL_ENDIAN = C.SDL_LIL_ENDIAN BIG_ENDIAN = C.SDL_BIG_ENDIAN )
sdl: endian: Add BYTEORDER
veandco_go-sdl2
train
2aa859796a2506cf19f70f7caed0ada2cdedf0aa
diff --git a/lib/query/model_with.js b/lib/query/model_with.js index <HASH>..<HASH> 100644 --- a/lib/query/model_with.js +++ b/lib/query/model_with.js @@ -127,6 +127,7 @@ module.exports = Mixin.create(/** @lends ModelQuery# */ { * @private * @return {String} Relation key path. */ + // TODO: update docs (include the fact that unique results are returned) _expandedPrefetches: function() { var self = this; var associations = _.keys(this._prefetches); @@ -155,8 +156,6 @@ module.exports = Mixin.create(/** @lends ModelQuery# */ { * completed. */ _prefetchAssociation: promiseMethod(function(association, records, cache) { - if (cache[association]) { return cache[association]; } - var relation = this._findPrefetchRelation(association); var prevAssociation = _.initial(association.split('.')).join('.'); var prevRecords = records;
Removed code that is not needed.
wbyoung_azul
train
a4c9341232885292e4e9aa55b6449fcea9aa7144
diff --git a/tests/TestCase/Validation/ValidationTest.php b/tests/TestCase/Validation/ValidationTest.php index <HASH>..<HASH> 100644 --- a/tests/TestCase/Validation/ValidationTest.php +++ b/tests/TestCase/Validation/ValidationTest.php @@ -83,6 +83,8 @@ class ValidationTest extends TestCase $this->assertTrue(Validation::notBlank('π')); $this->assertTrue(Validation::notBlank('0')); $this->assertTrue(Validation::notBlank(0)); + $this->assertTrue(Validation::notBlank(0.0)); + $this->assertTrue(Validation::notBlank('0.0')); $this->assertFalse(Validation::notBlank("\t ")); $this->assertFalse(Validation::notBlank("")); }
Failing test to prove there's a problem
cakephp_cakephp
train
b4b1acbdc9bcb40930ae65e40ed53c5469e9a180
diff --git a/pgpy/packet/packets.py b/pgpy/packet/packets.py index <HASH>..<HASH> 100644 --- a/pgpy/packet/packets.py +++ b/pgpy/packet/packets.py @@ -12,6 +12,11 @@ def PGPPacket(packetblob): if header.tag == Header.Tag.Signature: return Signature(packetblob) + if header.tag == Header.Tag.PubKey: + return PubKey(packetblob) + + return Packet(packetblob) + class Packet(object): def __init__(self, packet): @@ -100,4 +105,7 @@ class Signature(Packet): _bytes += self.hash2 _bytes += self.signature.__bytes__() - return _bytes \ No newline at end of file + return _bytes + +class PubKey(Packet): + pass \ No newline at end of file
added PubKey to the PGPPacket factory; added stub for class PubKey(Packet)
SecurityInnovation_PGPy
train
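The PGPy commit above turns PGPPacket into a small dispatch-on-tag factory with Packet as the fallback. A simplified sketch of that shape in plain Python — the classes below are stand-ins for illustration; the real PGPy classes parse actual OpenPGP packet bytes.

class Packet:
    def __init__(self, blob):
        self.blob = blob

class Signature(Packet):
    pass

class PubKey(Packet):
    pass

# Tag-to-class dispatch with a generic fallback, mirroring the factory's
# if/elif/return-Packet structure in the diff.
_TAG_MAP = {'signature': Signature, 'pubkey': PubKey}

def pgp_packet(tag, blob):
    return _TAG_MAP.get(tag, Packet)(blob)

print(type(pgp_packet('pubkey', b'...')).__name__)   # PubKey
print(type(pgp_packet('unknown', b'...')).__name__)  # Packet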
04e6b46371d43315408a53bfd49141aae26be808
diff --git a/clusterlensing/clusters/cofm.py b/clusterlensing/clusters/cofm.py index <HASH>..<HASH> 100644 --- a/clusterlensing/clusters/cofm.py +++ b/clusterlensing/clusters/cofm.py @@ -11,11 +11,10 @@ from astropy.cosmology import Planck13 as cosmo h = cosmo.h Om_M = cosmo.Om0 Om_L = 1. - Om_M -H0 = (100. * h) * 10**3 # [m/s/Mpc] -def test_Inputs(z, m): - +def check_inputs(z, m): + """Check inputs are arrays of same length or array and a scalar.""" try: nz = len(z) except TypeError: @@ -47,13 +46,18 @@ def c_Prada(z, m, h=h, Om_M=Om_M, Om_L=Om_L): ---------- z : float or array_like Redshift(s) of halos. - m : float or array_like Mass(es) of halos (m200 definition), in units of solar masses. + h : float, optional + Hubble parameter. Default is from Planck13. + Om_M : float, optional + Matter density parameter. Default is from Planck13. + Om_L : float, optional + Cosmological constant density parameter. Default is from Planck13. Returns ---------- - numpy.ndarray + ndarray Concentration values (c200) for halos. Notes @@ -72,8 +76,7 @@ def c_Prada(z, m, h=h, Om_M=Om_M, Om_L=Om_L): 423, Issue 4, pp. 3018-3030, 2012. """ - # check compatibility of inputs, convert to numpy arrays if necessary - z, m = test_Inputs(z, m) + z, m = check_inputs(z, m) # EQ 13 x = (1. / (1. + z)) * (Om_L / Om_M)**(1. / 3.) @@ -139,13 +142,14 @@ def c_DuttonMaccio(z, m, h=h): ---------- z : float or array_like Redshift(s) of halos. - m : float or array_like Mass(es) of halos (m200 definition), in units of solar masses. + h : float, optional + Hubble parameter. Default is from Planck13. Returns ---------- - numpy.ndarray + ndarray Concentration values (c200) for halos. References @@ -158,8 +162,7 @@ def c_DuttonMaccio(z, m, h=h): p.3359-3374, 2014. """ - # check compatibility of inputs, convert to numpy arrays if necessary - z, m = test_Inputs(z, m) + z, m = check_inputs(z, m) a = 0.52 + 0.385 * np.exp(-0.617 * (z**1.21)) # EQ 10 b = -0.101 + 0.026 * z # EQ 11 @@ -178,13 +181,14 @@ def c_Duffy(z, m, h=h): ---------- z : float or array_like Redshift(s) of halos. - m : float or array_like Mass(es) of halos (m200 definition), in units of solar masses. + h : float, optional + Hubble parameter. Default is from Planck13. Returns ---------- - numpy.ndarray + ndarray Concentration values (c200) for halos. References @@ -202,8 +206,7 @@ def c_Duffy(z, m, h=h): -0.084,-0.47) in Table 1 of Duffy et al. (2008). """ - # check compatibility of inputs, convert to numpy arrays if necessary - z, m = test_Inputs(z, m) + z, m = check_inputs(z, m) M_pivot = 2.e12 / h # [M_solar]
update docstrings with keyword params
jesford_cluster-lensing
train
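The check_inputs helper renamed in the commit above normalizes a scalar-plus-array call into two equal-length arrays before the concentration formulas run. A rough sketch of that behaviour; the real helper also validates mismatched lengths and may differ in detail.

import numpy as np

def check_inputs(z, m):
    # Accept scalars or array-likes; broadcast a scalar against the other
    # argument so both come back as equal-length float arrays.
    z = np.atleast_1d(np.asarray(z, dtype=float))
    m = np.atleast_1d(np.asarray(m, dtype=float))
    if z.size == 1 and m.size > 1:
        z = np.full(m.shape, z[0])
    elif m.size == 1 and z.size > 1:
        m = np.full(z.shape, m[0])
    elif z.shape != m.shape:
        raise ValueError('z and m must be scalars or arrays of the same length')
    return z, m

print(check_inputs(0.2, [1e14, 5e14, 1e15]))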
27dcd3375154177aff90fd37b1e3833e137c1883
diff --git a/src/main/java/stormpot/whirlpool/Request.java b/src/main/java/stormpot/whirlpool/Request.java index <HASH>..<HASH> 100644 --- a/src/main/java/stormpot/whirlpool/Request.java +++ b/src/main/java/stormpot/whirlpool/Request.java @@ -1,9 +1,10 @@ package stormpot.whirlpool; public class Request { + private static Request request = new Request(); public static Request get() { - return new Request(); + return request; } } diff --git a/src/test/java/stormpot/whirlpool/RequestTest.java b/src/test/java/stormpot/whirlpool/RequestTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/stormpot/whirlpool/RequestTest.java +++ b/src/test/java/stormpot/whirlpool/RequestTest.java @@ -6,9 +6,17 @@ import org.junit.Test; public class RequestTest { @Test public void - getMustReturnRequestObject() { + getMustReturnRequestRequest() { assertNotNull(Request.get()); } - // TODO get must return existing active object - // TODO get must allocate new object if existing is inactive + + @Test public void + getMustReturnExistingRequest() { + Request r1 = Request.get(); + Request r2 = Request.get(); + assertTrue( r1 == r2); + } + // TODO request must be thread local + // TODO get must return active request + // TODO get must allocate new request if existing is inactive }
Request.get must return existing object.
chrisvest_stormpot
train
ebd5591e365e77278ba74c359533ffcfa9768be8
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,13 +1,9 @@ """setup.py file.""" -import uuid - from setuptools import setup, find_packages -from pip.req import parse_requirements - +with open("requirements.txt", "r") as fs: + reqs = [r for r in fs.read().splitlines() if (len(r) > 0 and not r.startswith("#"))] -install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1()) -reqs = [str(ir.req) for ir in install_reqs] __author__ = 'David Barroso <dbarrosop@dravetech.com>'
Fix setup.py issue when using pip <I>. (#<I>) Fixes #<I> pip<I> compatibility issue
napalm-automation_napalm
train
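The replacement pattern in the napalm commit above — read requirements.txt yourself instead of importing pip internals (pip.req.parse_requirements went away in pip 10) — is reusable as-is. A sketch of it factored into a helper:

def read_requirements(path='requirements.txt'):
    # Keep non-empty lines that are not comments, exactly as the new
    # setup.py does, without depending on pip's private parse_requirements.
    with open(path, 'r') as fh:
        return [line for line in fh.read().splitlines()
                if line and not line.startswith('#')]

# Typical use inside setup.py:
# setup(..., install_requires=read_requirements())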
b3d7665d4b5892642df445232af0e557a047f5f3
diff --git a/wfe2/wfe.go b/wfe2/wfe.go index <HASH>..<HASH> 100644 --- a/wfe2/wfe.go +++ b/wfe2/wfe.go @@ -1617,17 +1617,6 @@ func (wfe *WebFrontEndImpl) finalizeOrder( return } - // The account must have agreed to the subscriber agreement to finalize an - // order since it will result in the issuance of a certificate. - // Any version of the agreement is acceptable here. Version match is enforced in - // wfe.Registration when agreeing the first time. Agreement updates happen - // by mailing subscribers and don't require a registration update. - if acct.Agreement == "" { - wfe.sendError(response, logEvent, - probs.Unauthorized("Must agree to subscriber agreement before any further actions"), nil) - return - } - // If the order's status is not pending we can not finalize it and must // return an error if *order.Status != string(core.StatusPending) { diff --git a/wfe2/wfe_test.go b/wfe2/wfe_test.go index <HASH>..<HASH> 100644 --- a/wfe2/wfe_test.go +++ b/wfe2/wfe_test.go @@ -1934,14 +1934,6 @@ func TestFinalizeOrder(t *testing.T) { ExpectedBody: `{"type":"` + probs.V2ErrorNS + `malformed","detail":"No order found for account ID 2","status":404}`, }, { - Name: "Account without Subscriber agreement", - // mocks/mocks.go's StorageAuthority's GetRegistration mock treats ID 6 - // as an account without the agreement set. Order ID 6 is mocked to belong - // to it. - Request: signAndPost(t, "6/6/finalize-order", "http://localhost/6/6/finalize-order", "{}", 6, wfe.nonceService), - ExpectedBody: `{"type":"` + probs.V2ErrorNS + `unauthorized","detail":"Must agree to subscriber agreement before any further actions","status":403}`, - }, - { Name: "Order ID is invalid", Request: signAndPost(t, "1/okwhatever/finalize-order", "http://localhost/1/okwhatever/finalize-order", "{}", 1, wfe.nonceService), ExpectedBody: `{"type":"` + probs.V2ErrorNS + `malformed","detail":"Invalid order ID","status":400}`,
WFE2: Remove secondary ToS check. (#<I>) `terms-of-service-agreed` is checked at initial signup and doesn't need to be rechecked. Worse, since the V2 registration only accepts a bool, the "Agreement" field is never set and checking it against != "" will always fail for v2 accounts. This was already done for Pebble[0] but was missed in the Boulder WFE2. [0] - <URL>
letsencrypt_boulder
train
1f1f7dab833a9c2fb2939e71e2f47995f52736a4
diff --git a/test/__init__.py b/test/__init__.py index <HASH>..<HASH> 100644 --- a/test/__init__.py +++ b/test/__init__.py @@ -107,7 +107,8 @@ class TestEnvironment(object): def setup_sync_cx(self): """Get a synchronous PyMongo MongoClient and determine SSL config.""" - connectTimeoutMS = socketTimeoutMS = 30 * 1000 + connectTimeoutMS = 100 + socketTimeoutMS = 30 * 1000 try: self.sync_cx = pymongo.MongoClient( host, port,
More quickly determine if mongod is running SSL.
mongodb_motor
train
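For reference on the motor commit above: both timeouts are plain MongoClient options expressed in milliseconds. A sketch of a fast-failing probe with a modern PyMongo; hostname, port, and serverSelectionTimeoutMS here are placeholders/assumptions, not taken from motor's test harness.

from pymongo import MongoClient
from pymongo.errors import ConnectionFailure

# A short connect timeout makes a misconfigured (e.g. SSL-only) server fail
# fast, while the socket timeout still allows slow operations once connected.
client = MongoClient(
    'localhost', 27017,                 # placeholder host/port
    connectTimeoutMS=100,
    socketTimeoutMS=30 * 1000,
    serverSelectionTimeoutMS=500,
)
try:
    client.admin.command('ping')
    print('connected')
except ConnectionFailure:
    print('could not connect within the short timeout')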
6cc6c8e0dec4e814c1a2c668ccbca08cf6fd08a7
diff --git a/script/build-table.js b/script/build-table.js index <HASH>..<HASH> 100644 --- a/script/build-table.js +++ b/script/build-table.js @@ -1,12 +1,17 @@ +/** + * @typedef {import('mdast').Root} Root + * @typedef {import('mdast').TableRow} TableRow + */ + import {headingRange} from 'mdast-util-heading-range' import {u} from 'unist-builder' import {patterns} from '../lib/en.js' -/** @type {import('unified').Plugin<[]>} */ +/** @type {import('unified').Plugin<[], Root>} */ export default function table() { return (tree) => { headingRange(tree, 'list of rules', (start, _, end) => { - /** @type {import('mdast').TableRow[]} */ + /** @type {TableRow[]} */ const rows = [ u('tableRow', [ u('tableCell', [u('text', 'id')]),
Fix internal types for changes in `@types/unist`
retextjs_retext-equality
train
7b206e61e6a72afe3faa5420ba90b1ff0d1da26b
diff --git a/source/object.js b/source/object.js index <HASH>..<HASH> 100644 --- a/source/object.js +++ b/source/object.js @@ -10,30 +10,44 @@ * The array of key-value pairs to be mapped. * * @param {Object} [options] - * - {Number} [depth=0] + * - {Number} [depth=1] * The depth to which the `array`'s pairs should be traversed. Set it to `Infinity` to map the * whole structure. * * @returns {Object} * A new object mapped from the array. */ -export default function asObject (array, options, _depthLeft) { - var pair, value; +export default function asObject (array, options) { + // Parse options. + var depth = + ( !options || typeof options == "undefined" + ? 1 + : options.depth + ); - if (!options) options = {}; - if (_depthLeft === void null && options.depth) _depthLeft = options.depth; + // End recursion if we've reached a depth of 0. + if (!depth) return array; + // Create an empty `result` object. var result = {}; - var i = 0; var l = array.length; while (i < l) { - pair = array[i++]; + // For every `pair` of the given `array`: + var i = 0; var l = array.length; + while (i < l) { let pair = array[i++]; + // - skip the `pair` if it has no `key` if (!pair || !pair.hasOwnProperty("key")) continue; - value = pair.value; - if (_depthLeft && value instanceof Array) { - value = asObject(value, options, _depthLeft - 1); + // - save `pair.value` as `value` + let value = pair.value; + + // - recurse if the `value` is an array + if (value instanceof Array) { + value = asObject(value, {depth: depth - 1}); } + + // - save `value` as `result[pair.key]` result[pair.key] = value; } + // Return the `result`. return result; } diff --git a/test/array-object.js b/test/array-object.js index <HASH>..<HASH> 100644 --- a/test/array-object.js +++ b/test/array-object.js @@ -50,8 +50,8 @@ test("as/array >> as/object", function (tape) { , [ asObject(asArray ( {a: "b", c: "d", e: {f: "g", h: {i: "j"}}} - , {depth: 1} - ), {depth: 1}) + , {depth: 2} + ), {depth: 2}) , {a: "b", c: "d", e: {f: "g", h: {i: "j"}}} , "shouldn't change a nested object mapped to a specific depth" ] diff --git a/test/object.js b/test/object.js index <HASH>..<HASH> 100644 --- a/test/object.js +++ b/test/object.js @@ -75,10 +75,10 @@ test("as/object", function (tape) { ( [ {key: "a", value: "b"} , {key: "c", value: [{key: "d", value: "e"}]} ] - , {depth: 1} + , {depth: 2} ) , {a: "b", c: {d: "e"}} - , "should map one level deep" + , "should map two levels deep" ] , [ asObject @@ -87,10 +87,10 @@ test("as/object", function (tape) { , {key: "f", value: [{key: "g", value: "h"}]} ]} ] - , {depth: 1} + , {depth: 2} ) , {a: "b", c: {d: "e", f: [{key: "g", value: "h"}]}} - , "should map only one level deep" + , "should map only two levels deep" ] , [ asObject
Change meaning of `options.depth` in asObject
architectcodes_as
train
b8572849a98639ca3c0abd997198e3f2a34f8dd4
diff --git a/drivers/i2c/ina3221_driver_test.go b/drivers/i2c/ina3221_driver_test.go index <HASH>..<HASH> 100644 --- a/drivers/i2c/ina3221_driver_test.go +++ b/drivers/i2c/ina3221_driver_test.go @@ -4,6 +4,7 @@ import ( "testing" "errors" + "gobot.io/x/gobot" "gobot.io/x/gobot/gobottest" ) @@ -55,3 +56,111 @@ func TestINA3221Driver_Halt(t *testing.T) { d := initTestINA3221Driver() gobottest.Assert(t, d.Halt(), nil) } + +func TestINA3221DriverGetBusVoltage(t *testing.T) { + d, a := initTestINA3221DriverWithStubbedAdaptor() + gobottest.Assert(t, d.Start(), nil) + + a.i2cReadImpl = func(b []byte) (int, error) { + // TODO: return test data as read from actual sensor + copy(b, []byte{0x22, 0x33}) + return 2, nil + } + + v, err := d.GetBusVoltage(INA3221Channel1) + gobottest.Assert(t, v, float64(8.755)) + gobottest.Assert(t, err, nil) +} + +func TestINA3221DriverGetBusVoltageReadError(t *testing.T) { + d, a := initTestINA3221DriverWithStubbedAdaptor() + gobottest.Assert(t, d.Start(), nil) + + a.i2cReadImpl = func(b []byte) (int, error) { + return 0, errors.New("read error") + } + + _, err := d.GetBusVoltage(INA3221Channel1) + gobottest.Assert(t, err, errors.New("read error")) +} + +func TestINA3221DriverGetShuntVoltage(t *testing.T) { + d, a := initTestINA3221DriverWithStubbedAdaptor() + gobottest.Assert(t, d.Start(), nil) + + a.i2cReadImpl = func(b []byte) (int, error) { + // TODO: return test data as read from actual sensor + copy(b, []byte{0x22, 0x33}) + return 2, nil + } + + v, err := d.GetShuntVoltage(INA3221Channel1) + gobottest.Assert(t, v, float64(43.775)) + gobottest.Assert(t, err, nil) +} + +func TestINA3221DriverGetShuntVoltageReadError(t *testing.T) { + d, a := initTestINA3221DriverWithStubbedAdaptor() + gobottest.Assert(t, d.Start(), nil) + + a.i2cReadImpl = func(b []byte) (int, error) { + return 0, errors.New("read error") + } + + _, err := d.GetShuntVoltage(INA3221Channel1) + gobottest.Assert(t, err, errors.New("read error")) +} + +func TestINA3221DriverGetCurrent(t *testing.T) { + d, a := initTestINA3221DriverWithStubbedAdaptor() + gobottest.Assert(t, d.Start(), nil) + + a.i2cReadImpl = func(b []byte) (int, error) { + // TODO: return test data as read from actual sensor + copy(b, []byte{0x22, 0x33}) + return 2, nil + } + + v, err := d.GetCurrent(INA3221Channel1) + gobottest.Assert(t, v, float64(437.74999999999994)) + gobottest.Assert(t, err, nil) +} + +func TestINA3221DriverCurrentReadError(t *testing.T) { + d, a := initTestINA3221DriverWithStubbedAdaptor() + gobottest.Assert(t, d.Start(), nil) + + a.i2cReadImpl = func(b []byte) (int, error) { + return 0, errors.New("read error") + } + + _, err := d.GetCurrent(INA3221Channel1) + gobottest.Assert(t, err, errors.New("read error")) +} + +func TestINA3221DriverGetLoadVoltage(t *testing.T) { + d, a := initTestINA3221DriverWithStubbedAdaptor() + gobottest.Assert(t, d.Start(), nil) + + a.i2cReadImpl = func(b []byte) (int, error) { + // TODO: return test data as read from actual sensor + copy(b, []byte{0x22, 0x33}) + return 2, nil + } + + v, err := d.GetLoadVoltage(INA3221Channel2) + gobottest.Assert(t, v, float64(8.798775000000001)) + gobottest.Assert(t, err, nil) +} + +func TestINA3221DriverGetLoadVoltageReadError(t *testing.T) { + d, a := initTestINA3221DriverWithStubbedAdaptor() + gobottest.Assert(t, d.Start(), nil) + + a.i2cReadImpl = func(b []byte) (int, error) { + return 0, errors.New("read error") + } + + _, err := d.GetLoadVoltage(INA3221Channel2) + gobottest.Assert(t, err, errors.New("read error")) +}
i2c: increase test coverage for INA<I>Driver
hybridgroup_gobot
train
5d241fd1b5d708abe96a93b73971ffc47f1de236
diff --git a/pyicloud/base.py b/pyicloud/base.py index <HASH>..<HASH> 100644 --- a/pyicloud/base.py +++ b/pyicloud/base.py @@ -57,7 +57,6 @@ class PyiCloudService(object): self.session = requests.Session() self.session.verify = verify self.session.headers.update({ - 'Host': 'setup.icloud.com', 'Origin': self._home_endpoint, 'Referer': '%s/' % self._home_endpoint, 'User-Agent': 'Opera/9.52 (X11; Linux i686; U; en)' diff --git a/pyicloud/services/calendar.py b/pyicloud/services/calendar.py index <HASH>..<HASH> 100644 --- a/pyicloud/services/calendar.py +++ b/pyicloud/services/calendar.py @@ -57,8 +57,6 @@ class CalendarService(object): Fetches a single event's details by specifying a pguid (a calendar) and a guid (an event's ID). """ - host = self._service_root.split('//')[1].split(':')[0] - self.session.headers.update({'host': host}) params = dict(self.params) params.update({'lang': 'en-us', 'usertz': self.get_system_tz()}) url = '%s/%s/%s' % (self._calendar_event_detail_url, pguid, guid) @@ -78,8 +76,6 @@ class CalendarService(object): from_dt = datetime(today.year, today.month, first_day) if not to_dt: to_dt = datetime(today.year, today.month, last_day) - host = self._service_root.split('//')[1].split(':')[0] - self.session.headers.update({'host': host}) params = dict(self.params) params.update({ 'lang': 'en-us', diff --git a/pyicloud/services/contacts.py b/pyicloud/services/contacts.py index <HASH>..<HASH> 100644 --- a/pyicloud/services/contacts.py +++ b/pyicloud/services/contacts.py @@ -22,8 +22,6 @@ class ContactsService(object): Refreshes the ContactsService endpoint, ensuring that the contacts data is up-to-date. """ - host = self._service_root.split('//')[1].split(':')[0] - self.session.headers.update({'host': host}) params_contacts = dict(self.params) params_contacts.update({ 'clientVersion': '2.1', diff --git a/pyicloud/services/findmyiphone.py b/pyicloud/services/findmyiphone.py index <HASH>..<HASH> 100644 --- a/pyicloud/services/findmyiphone.py +++ b/pyicloud/services/findmyiphone.py @@ -33,8 +33,6 @@ class FindMyiPhoneServiceManager(object): This ensures that the location data is up-to-date. """ - host = self._service_root.split('//')[1].split(':')[0] - self.session.headers.update({'host': host}) req = self.session.post( self._fmip_refresh_url, params=self.params, diff --git a/pyicloud/services/ubiquity.py b/pyicloud/services/ubiquity.py index <HASH>..<HASH> 100644 --- a/pyicloud/services/ubiquity.py +++ b/pyicloud/services/ubiquity.py @@ -13,9 +13,6 @@ class UbiquityService(object): self._service_root = service_root self._node_url = '/ws/%s/%s/%s' - host = self._service_root.split('//')[1].split(':')[0] - self.session.headers.update({'host': host}) - def get_node_url(self, id, variant='item'): return self._service_root + self._node_url % ( self.params['dsid'],
Don't set Host header manually. It's taken care of by the requests module, based on the URL we're posting to, so no need to juggle the Host header ourselves.
picklepete_pyicloud
train
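The lines deleted in the pyicloud commit above derived the host by string-splitting the service URL; since requests fills in the Host header from the request URL itself, none of that is needed. If host extraction were still required elsewhere, Python 3's standard-library parser is the sturdier option — a quick comparison (the URL is a made-up example):

from urllib.parse import urlparse

service_root = 'https://p12-fmipweb.icloud.com:443/fmipservice'  # example URL

# What the removed code did:
host_by_split = service_root.split('//')[1].split(':')[0]

# Equivalent, but tolerant of odd URLs, via the stdlib parser:
host_by_urlparse = urlparse(service_root).hostname

print(host_by_split, host_by_urlparse)  # both: p12-fmipweb.icloud.com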
953db2b39d85c6cec1fe75b407e7f98650d280f8
diff --git a/src/js/bootstrap-datetimepicker.js b/src/js/bootstrap-datetimepicker.js index <HASH>..<HASH> 100644 --- a/src/js/bootstrap-datetimepicker.js +++ b/src/js/bootstrap-datetimepicker.js @@ -2376,6 +2376,7 @@ options = options || {}; var args = Array.prototype.slice.call(arguments, 1), + isInstance = true, thisMethods = ['destroy', 'hide', 'show', 'toggle'], returnValue; @@ -2391,17 +2392,16 @@ } else if (typeof options === 'string') { this.each(function () { var $this = $(this), - instance = $this.data('DateTimePicker'), - result; + instance = $this.data('DateTimePicker'); if (!instance) { - throw new Error('The bootstrap-datetimepicker("' + options + '") method was called on an element that is not using DateTimePicker'); + throw new Error('bootstrap-datetimepicker("' + options + '") method was called on an element that is not using DateTimePicker'); } - result = instance[options].apply(instance, args); - returnValue = result === instance ? this : result; + returnValue = instance[options].apply(instance, args); + isInstance = returnValue === instance; }); - if ($.inArray(options, thisMethods) > -1) { + if (isInstance || $.inArray(options, thisMethods) > -1) { return this; }
fixed issue where wrong jQuery object could be returned if multiple elements were selected
Eonasdan_bootstrap-datetimepicker
train
36069bf7679c1c2965a0dfd72d34836de7a9f94f
diff --git a/restclients/models/grad.py b/restclients/models/grad.py index <HASH>..<HASH> 100644 --- a/restclients/models/grad.py +++ b/restclients/models/grad.py @@ -1,10 +1,11 @@ +import datetime from django.db import models def get_datetime_str(datetime_obj): if datetime_obj is None: return None - return datetime_obj.strftime("%Y-%m-%d %H:%M") + return datetime_obj.isoformat() class GradTerm(models.Model): @@ -121,7 +122,7 @@ class GradLeave(models.Model): def json_data(self): data = { 'reason': self.reason, - 'submit_date': self.submit_date, + 'submit_date': get_datetime_str(self.submit_date), 'status': self.status, 'terms': [], }
fix a bug in the json_data function of GradLeave class - a datetime object didn't convert to a string.
uw-it-aca_uw-restclients
train
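The point of the fix above is that a datetime object cannot go straight into JSON; it has to be rendered as a string first, and isoformat() is the standard round-trippable form. A minimal illustration:

import datetime
import json

dt = datetime.datetime(2015, 3, 14, 9, 26)

print(dt.strftime('%Y-%m-%d %H:%M'))  # '2015-03-14 09:26'   (old format)
print(dt.isoformat())                 # '2015-03-14T09:26:00' (new format)

# Only the string form is JSON-serializable:
print(json.dumps({'submit_date': dt.isoformat()}))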
a15e1ff0b1766db3cf47c25bfe006c7c5c2b8089
diff --git a/README b/README index <HASH>..<HASH> 100644 --- a/README +++ b/README @@ -91,6 +91,8 @@ Currently Supported Actions * Logoff * ListCategories * ListCommands +* MailboxCount +* MailboxStatus * Originate * Redirect * Reload diff --git a/docs/examples/quickstart/example.php b/docs/examples/quickstart/example.php index <HASH>..<HASH> 100644 --- a/docs/examples/quickstart/example.php +++ b/docs/examples/quickstart/example.php @@ -62,6 +62,8 @@ use PAMI\Message\Action\DAHDIDialOffHookAction; use PAMI\Message\Action\DAHDIDNDOnAction; use PAMI\Message\Action\DAHDIDNDOffAction; use PAMI\Message\Action\AgentsAction; +use PAMI\Message\Action\MailboxStatusAction; +use PAMI\Message\Action\MailboxCountAction; class A implements IEventListener { @@ -95,6 +97,8 @@ try var_dump($a->send(new GetConfigJSONAction('extensions.conf'))); var_dump($a->send(new DAHDIShowChannelsAction())); var_dump($a->send(new AgentsAction())); + var_dump($a->send(new MailboxStatusAction('marcelog@netlabs'))); + var_dump($a->send(new MailboxCountAction('marcelog@netlabs'))); //var_dump($a->send(new CreateConfigAction('foo.conf'))); //var_dump($a->send(new DAHDIDNDOnAction('1'))); //var_dump($a->send(new DAHDIDNDOffAction('1'))); diff --git a/src/mg/PAMI/Message/Action/MailboxCountAction.php b/src/mg/PAMI/Message/Action/MailboxCountAction.php index <HASH>..<HASH> 100644 --- a/src/mg/PAMI/Message/Action/MailboxCountAction.php +++ b/src/mg/PAMI/Message/Action/MailboxCountAction.php @@ -1 +1,43 @@ <?php +/** + * MailboxCount action message. + * + * PHP Version 5 + * + * @category Pami + * @package Message + * @subpackage Action + * @author Marcelo Gornstein <marcelog@gmail.com> + * @license http://www.noneyet.ar/ Apache License 2.0 + * @version SVN: $Id$ + * @link http://www.noneyet.ar/ + */ +namespace PAMI\Message\Action; + +/** + * MailboxCount action message. + * + * PHP Version 5 + * + * @category Pami + * @package Message + * @subpackage Action + * @author Marcelo Gornstein <marcelog@gmail.com> + * @license http://www.noneyet.ar/ Apache License 2.0 + * @link http://www.noneyet.ar/ + */ +class MailboxCountAction extends ActionMessage +{ + /** + * Constructor. + * + * @param string $mailbox MailboxId (mailbox@vm-context) + * + * @return void + */ + public function __construct($mailbox) + { + parent::__construct('MailboxCount'); + $this->setKey('Mailbox', $mailbox); + } +} \ No newline at end of file diff --git a/src/mg/PAMI/Message/Action/MailboxStatusAction.php b/src/mg/PAMI/Message/Action/MailboxStatusAction.php index <HASH>..<HASH> 100644 --- a/src/mg/PAMI/Message/Action/MailboxStatusAction.php +++ b/src/mg/PAMI/Message/Action/MailboxStatusAction.php @@ -1 +1,43 @@ <?php +/** + * MailboxStatus action message. + * + * PHP Version 5 + * + * @category Pami + * @package Message + * @subpackage Action + * @author Marcelo Gornstein <marcelog@gmail.com> + * @license http://www.noneyet.ar/ Apache License 2.0 + * @version SVN: $Id$ + * @link http://www.noneyet.ar/ + */ +namespace PAMI\Message\Action; + +/** + * MailboxStatus action message. + * + * PHP Version 5 + * + * @category Pami + * @package Message + * @subpackage Action + * @author Marcelo Gornstein <marcelog@gmail.com> + * @license http://www.noneyet.ar/ Apache License 2.0 + * @link http://www.noneyet.ar/ + */ +class MailboxStatusAction extends ActionMessage +{ + /** + * Constructor. 
+ * + * @param string $mailbox MailboxId (mailbox@vm-context) + * + * @return void + */ + public function __construct($mailbox) + { + parent::__construct('MailboxStatus'); + $this->setKey('Mailbox', $mailbox); + } +} \ No newline at end of file
added mailboxcount and mailboxstatus actions
marcelog_PAMI
train
33a9b5aae46706eabd5728bf0fe9c3e1f58366fe
diff --git a/h2quic/client.go b/h2quic/client.go index <HASH>..<HASH> 100644 --- a/h2quic/client.go +++ b/h2quic/client.go @@ -24,7 +24,8 @@ import ( type Client struct { mutex sync.RWMutex - config *quic.Config + dialAddr func(hostname string, config *quic.Config) (quic.Session, error) + config *quic.Config t *QuicRoundTripper @@ -46,6 +47,7 @@ var _ h2quicClient = &Client{} func NewClient(t *QuicRoundTripper, tlsConfig *tls.Config, hostname string) *Client { return &Client{ t: t, + dialAddr: quic.DialAddr, hostname: authorityAddr("https", hostname), responses: make(map[protocol.StreamID]chan *http.Response), encryptionLevel: protocol.EncryptionUnencrypted, @@ -60,7 +62,7 @@ func NewClient(t *QuicRoundTripper, tlsConfig *tls.Config, hostname string) *Cli // Dial dials the connection func (c *Client) Dial() error { var err error - c.session, err = quic.DialAddr(c.hostname, c.config) + c.session, err = c.dialAddr(c.hostname, c.config) if err != nil { return err } diff --git a/h2quic/client_test.go b/h2quic/client_test.go index <HASH>..<HASH> 100644 --- a/h2quic/client_test.go +++ b/h2quic/client_test.go @@ -5,7 +5,6 @@ import ( "compress/gzip" "crypto/tls" "errors" - "net" "net/http" "golang.org/x/net/http2" @@ -52,16 +51,45 @@ var _ = Describe("Client", func() { }) It("dials", func() { - udpConn, err := net.ListenUDP("udp", &net.UDPAddr{IP: net.IPv4(127, 0, 0, 1), Port: 0}) - Expect(err).ToNot(HaveOccurred()) - client = NewClient(quicTransport, nil, udpConn.LocalAddr().String()) - go client.Dial() - data := make([]byte, 100) - _, err = udpConn.Read(data) - hdr, err := quic.ParsePublicHeader(bytes.NewReader(data), protocol.PerspectiveClient) + client = NewClient(quicTransport, nil, "localhost") + session.streamToOpen = &mockStream{id: 3} + client.dialAddr = func(hostname string, conf *quic.Config) (quic.Session, error) { + return session, nil + } + err := client.Dial() Expect(err).ToNot(HaveOccurred()) - Expect(hdr.VersionFlag).To(BeTrue()) - Expect(hdr.ConnectionID).ToNot(BeNil()) + Expect(client.session).To(Equal(session)) + }) + + It("errors when dialing fails", func() { + testErr := errors.New("handshake error") + client = NewClient(quicTransport, nil, "localhost") + client.dialAddr = func(hostname string, conf *quic.Config) (quic.Session, error) { + return nil, testErr + } + err := client.Dial() + Expect(err).To(MatchError(testErr)) + }) + + It("errors if the header stream has the wrong stream ID", func() { + client = NewClient(quicTransport, nil, "localhost") + session.streamToOpen = &mockStream{id: 2} + client.dialAddr = func(hostname string, conf *quic.Config) (quic.Session, error) { + return session, nil + } + err := client.Dial() + Expect(err).To(MatchError("h2quic Client BUG: StreamID of Header Stream is not 3")) + }) + + It("errors if it can't open a stream", func() { + testErr := errors.New("you shall not pass") + client = NewClient(quicTransport, nil, "localhost") + session.streamOpenErr = testErr + client.dialAddr = func(hostname string, conf *quic.Config) (quic.Session, error) { + return session, nil + } + err := client.Dial() + Expect(err).To(MatchError(testErr)) }) Context("Doing requests", func() {
add more tests for Dial in the h2quic client
lucas-clemente_quic-go
train
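The testability trick in the quic-go commit above — store the dial function on the client so tests can swap in a stub — is language-agnostic. An analogous sketch in Python; the names are invented for illustration, the real client is Go.

class Client:
    def __init__(self, hostname, dial=None):
        # The dial function is injectable; production code uses the real
        # one, tests pass a stub and never touch the network.
        self.hostname = hostname
        self._dial = dial if dial is not None else self._real_dial
        self.session = None

    @staticmethod
    def _real_dial(hostname):
        raise NotImplementedError('real network dialing omitted in this sketch')

    def connect(self):
        self.session = self._dial(self.hostname)
        return self.session

# In a test, inject a fake dialer instead of patching module globals:
client = Client('example.com', dial=lambda host: 'fake-session-to-' + host)
assert client.connect() == 'fake-session-to-example.com'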
4fd7a6ca7339bcbbfa6feda266dcca96684b81c6
diff --git a/lib/rules/key-spacing.js b/lib/rules/key-spacing.js index <HASH>..<HASH> 100644 --- a/lib/rules/key-spacing.js +++ b/lib/rules/key-spacing.js @@ -427,19 +427,7 @@ module.exports = { * @returns {void} */ function report(property, side, whitespace, expected, mode) { - const diff = whitespace.length - expected, - nextColon = getNextColon(property.key), - tokenBeforeColon = sourceCode.getTokenBefore(nextColon, { includeComments: true }), - tokenAfterColon = sourceCode.getTokenAfter(nextColon, { includeComments: true }), - isKeySide = side === "key", - isExtra = diff > 0, - diffAbs = Math.abs(diff), - spaces = Array(diffAbs + 1).join(" "); - - const locStart = isKeySide ? tokenBeforeColon.loc.end : nextColon.loc.start; - const locEnd = isKeySide ? nextColon.loc.start : tokenAfterColon.loc.start; - const missingLoc = isKeySide ? tokenBeforeColon.loc : tokenAfterColon.loc; - const loc = isExtra ? { start: locStart, end: locEnd } : missingLoc; + const diff = whitespace.length - expected; if (( diff && mode === "strict" || @@ -447,6 +435,19 @@ module.exports = { diff > 0 && !expected && mode === "minimum") && !(expected && containsLineTerminator(whitespace)) ) { + const nextColon = getNextColon(property.key), + tokenBeforeColon = sourceCode.getTokenBefore(nextColon, { includeComments: true }), + tokenAfterColon = sourceCode.getTokenAfter(nextColon, { includeComments: true }), + isKeySide = side === "key", + isExtra = diff > 0, + diffAbs = Math.abs(diff), + spaces = Array(diffAbs + 1).join(" "); + + const locStart = isKeySide ? tokenBeforeColon.loc.end : nextColon.loc.start; + const locEnd = isKeySide ? nextColon.loc.start : tokenAfterColon.loc.start; + const missingLoc = isKeySide ? tokenBeforeColon.loc : tokenAfterColon.loc; + const loc = isExtra ? { start: locStart, end: locEnd } : missingLoc; + let fix; if (isExtra) {
perf: don't prepare a fix for valid code in key-spacing (#<I>)
eslint_eslint
train
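The eslint optimization above boils down to "don't do the expensive lookups until you know a report is needed". The same shape in Python, purely illustrative — the real rule operates on ESLint tokens and also handles line terminators, which this sketch omits.

def report_spacing(whitespace, expected, mode):
    diff = len(whitespace) - expected
    # Cheap decision first: bail out for valid code before doing any work.
    needs_report = ((diff != 0 and mode == 'strict') or
                    (diff < 0 and mode == 'minimum') or
                    (diff > 0 and not expected and mode == 'minimum'))
    if not needs_report:
        return None
    # Only now pay for the costly part (token lookups in the real rule).
    spaces = ' ' * abs(diff)
    side = 'extra' if diff > 0 else 'missing'
    return '{} {} space(s)'.format(len(spaces), side)

print(report_spacing('   ', 1, 'strict'))  # 2 extra space(s)
print(report_spacing(' ', 1, 'strict'))    # None (valid, no work done)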
223f2b8912b857ca9505a6447876dbaf6d323d9a
diff --git a/ChangeLog b/ChangeLog index <HASH>..<HASH> 100644 --- a/ChangeLog +++ b/ChangeLog @@ -11,6 +11,7 @@ * Button * Div * Span + * Table * Selenium support for using index for the following elements: * Link * TextField @@ -22,6 +23,7 @@ * Button * Div * Span + * Table * Added [] method to SelectList to index Options * Added options method to Select List * Added support for the following elements diff --git a/features/span.feature b/features/span.feature index <HASH>..<HASH> 100644 --- a/features/span.feature +++ b/features/span.feature @@ -27,7 +27,7 @@ Feature: Span | search_by | | name | - Scenario Outline: Locating divs using multiple parameters + Scenario Outline: Locating span using multiple parameters When I search for the span by "<param1>" and "<param2>" Then the text should be "My alert" diff --git a/features/step_definitions/accessor_steps.rb b/features/step_definitions/accessor_steps.rb index <HASH>..<HASH> 100644 --- a/features/step_definitions/accessor_steps.rb +++ b/features/step_definitions/accessor_steps.rb @@ -180,6 +180,10 @@ When /^I retrieve a table element by "([^\"]*)"$/ do |how| @element = @page.send "table_#{how}_table" end +When /^I retrieve a table element by "([^"]*)" and "([^"]*)"$/ do |param1, param2| + @element = @page.send "table_#{param1}_#{param2}_table" +end + When /^I get the image element$/ do @element = @page.image_id_image end diff --git a/features/support/page.rb b/features/support/page.rb index <HASH>..<HASH> 100644 --- a/features/support/page.rb +++ b/features/support/page.rb @@ -100,6 +100,8 @@ class Page table(:table_class, :class => 'table_class') table(:table_index, :index => 0) table(:table_xpath, :xpath => '//table') + table(:table_class_index, :class => "table_class", :index => 0) + table(:table_name_index, :name => "table_name", :index => 0) cell(:cell_id, :id => 'cell_id') cell(:cell_name, :name => 'cell_name') diff --git a/features/table.feature b/features/table.feature index <HASH>..<HASH> 100644 --- a/features/table.feature +++ b/features/table.feature @@ -33,18 +33,8 @@ Feature: Table | id | | class | | xpath | - - - @watir_only - Scenario Outline: Locating table cells on the Page with watir - When I retrieve a table element by "<search_by>" - Then the data for row "1" should be "Data1" and "Data2" - - Scenarios: - | search_by | | index | - @selenium_only Scenario Outline: Locating table cells on the Page with selenium When I retrieve a table element by "<search_by>" @@ -53,3 +43,12 @@ Feature: Table Scenarios: | search_by | | name | + + Scenario Outline: Locating table using multiple parameters + When I retrieve a table element by "<param1>" and "<param2>" + Then the data for row "1" should be "Data1" and "Data2" + + Scenarios: + | param1 | param2 | + | class | index | + | name | index | diff --git a/lib/page-object/accessors.rb b/lib/page-object/accessors.rb index <HASH>..<HASH> 100644 --- a/lib/page-object/accessors.rb +++ b/lib/page-object/accessors.rb @@ -354,7 +354,7 @@ module PageObject # def table(name, identifier=nil, &block) define_method("#{name}_table") do - block ? block.call(browser) : platform.table_for(identifier) + block ? 
block.call(browser) : platform.table_for(identifier.clone) end end diff --git a/lib/page-object/selenium_page_object.rb b/lib/page-object/selenium_page_object.rb index <HASH>..<HASH> 100755 --- a/lib/page-object/selenium_page_object.rb +++ b/lib/page-object/selenium_page_object.rb @@ -328,6 +328,7 @@ module PageObject # See PageObject::Accessors#table # def table_for(identifier) + identifier = add_tagname_if_needed identifier, 'table' how, what = Elements::Table.selenium_identifier_for identifier element = @browser.find_element(how, what) PageObject::Elements::Table.new(element, :platform => :selenium) diff --git a/spec/page-object/elements/table_spec.rb b/spec/page-object/elements/table_spec.rb index <HASH>..<HASH> 100644 --- a/spec/page-object/elements/table_spec.rb +++ b/spec/page-object/elements/table_spec.rb @@ -11,7 +11,7 @@ describe PageObject::Elements::Table do end it "should map selenium types to same" do - [:class, :id, :name, :xpath].each do |t| + [:class, :id, :index, :name, :xpath].each do |t| key, value = PageObject::Elements::Table.selenium_identifier_for t => 'value' key.should == t end
multiple identifiers and index for Table with Selenium
cheezy_page-object
train
238f3af4bda818b8ec7de4c798299396c724b9ce
diff --git a/search/highlighter_simple.go b/search/highlighter_simple.go index <HASH>..<HASH> 100644 --- a/search/highlighter_simple.go +++ b/search/highlighter_simple.go @@ -106,6 +106,11 @@ OUTER: formattedFragments[i] = s.sep + s.formatter.Format(fragment, dm.Locations[field]) + s.sep } + if dm.Fragments == nil { + dm.Fragments = make(FieldFragmentMap, 0) + } + dm.Fragments[field] = formattedFragments + return formattedFragments } diff --git a/search/search.go b/search/search.go index <HASH>..<HASH> 100644 --- a/search/search.go +++ b/search/search.go @@ -18,13 +18,28 @@ type Locations []*Location type TermLocationMap map[string]Locations +func (t TermLocationMap) AddLocation(term string, location *Location) { + existingLocations, exists := t[term] + if exists { + existingLocations = append(existingLocations, location) + t[term] = existingLocations + } else { + locations := make(Locations, 1) + locations[0] = location + t[term] = locations + } +} + type FieldTermLocationMap map[string]TermLocationMap +type FieldFragmentMap map[string][]string + type DocumentMatch struct { ID string `json:"id"` Score float64 `json:"score"` Expl *Explanation `json:"explanation,omitempty"` Locations FieldTermLocationMap `json:"locations,omitempty"` + Fragments FieldFragmentMap `json:"fragments,omitempty"` } type DocumentMatchCollection []*DocumentMatch
change highlight api to store in document match
blevesearch_bleve
train
565a8a47d867be751ccea09805ba776869e3e35d
diff --git a/assets/service_broker/run_all_cases.rb b/assets/service_broker/run_all_cases.rb index <HASH>..<HASH> 100644 --- a/assets/service_broker/run_all_cases.rb +++ b/assets/service_broker/run_all_cases.rb @@ -6,6 +6,8 @@ require 'benchmark' require 'securerandom' require 'optparse' +DEFAULT_BROKER_URL = 'http://async-broker.10.244.0.34.xip.io' + def get_config raw_config = File.read('data.json') JSON.parse(raw_config) @@ -26,12 +28,16 @@ def get_second_plan config['behaviors']['catalog']['body']['services'].first['plans'][1]['name'] end +def execute(cmd) + `#{cmd}` +end + class ProvisionCommand def setup(instance_name) end def run(instance_name) - `cf create-service #{get_service} #{get_plan} #{instance_name}` + execute "cf create-service #{get_service} #{get_plan} #{instance_name}" end def cleanup(instance_name) @@ -40,11 +46,11 @@ end class UpdateCommand def setup(instance_name) - `cf create-service #{get_service} #{get_plan} #{instance_name}` + execute "cf create-service #{get_service} #{get_plan} #{instance_name}" end def run(instance_name) - `cf update-service #{instance_name} -p #{get_second_plan}` + execute "cf update-service #{instance_name} -p #{get_second_plan}" end def cleanup(instance_name) @@ -53,11 +59,11 @@ end class DeprovisionCommand def setup(instance_name) - `cf create-service #{get_service} #{get_plan} #{instance_name}` + execute "cf create-service #{get_service} #{get_plan} #{instance_name}" end def run(instance_name) - `cf delete-service #{instance_name} -f` + execute "cf delete-service #{instance_name} -f" end def cleanup(instance_name) @@ -65,8 +71,9 @@ class DeprovisionCommand end class CleanupCommandWrapper - def initialize(command) + def initialize(command, broker_url) @command = command + @broker_url = broker_url end def setup(instance_name) @@ -79,30 +86,21 @@ class CleanupCommandWrapper def cleanup(instance_name) @command.cleanup(instance_name) - if attempt_delete(instance_name) - -> { - until attempt_delete(instance_name) - end - } - end + -> { + execute "curl -s #{@broker_url}/config/reset -X POST" + until attempt_delete(instance_name) + end + } end private def attempt_delete(instance_name) - output = `cf delete-service #{instance_name} -f` + output = execute "cf delete-service #{instance_name} -f" !output.include?('Another operation for this service instance is in progress') end end -action_to_cmd_mapping = { - provision: CleanupCommandWrapper.new(ProvisionCommand.new), - update: CleanupCommandWrapper.new(UpdateCommand.new), - deprovision: CleanupCommandWrapper.new(DeprovisionCommand.new), -} - -DEFAULT_BROKER_URL = 'http://async-broker.10.244.0.34.xip.io' - def write_output_file(output_file, rows) CSV.open(output_file, 'w') do |csv| csv << rows[0].headers @@ -113,11 +111,17 @@ def write_output_file(output_file, rows) end def delete_leftover_instances(deferred_deletions) - STDOUT.write("Cleaning up service instances... ") + count = deferred_deletions.compact.count + STDOUT.write("Cleaning up service instances ... 0 / #{count}") STDOUT.flush + i = 0 deferred_deletions.compact.each do |callback| callback.call + i += 1 + STDOUT.write("\rCleaning up service instances ... 
#{i} / #{count}") + STDOUT.flush end + puts puts "Done" end @@ -159,12 +163,12 @@ def configure_broker_endpoint(action, body, broker_url, row, status) } } - `curl -s #{broker_url}/config/reset -X POST` - `curl -s #{broker_url}/config -d '#{json_config.to_json}'` + execute "curl -s #{broker_url}/config/reset -X POST" + execute "curl -s #{broker_url}/config -d '#{json_config.to_json}'" end -def run_command(command, deferred_deletions, cleanup) - instance_name = "si-#{SecureRandom.uuid}" +def run_command(command, deferred_deletions, cleanup, line_number) + instance_name = "si-#{line_number}-#{SecureRandom.uuid}" command.setup(instance_name) output = command.run(instance_name) @@ -177,8 +181,16 @@ rows = [] broker_url, input_file, output_file, cleanup = parse_parameters +action_to_cmd_mapping = { + provision: CleanupCommandWrapper.new(ProvisionCommand.new, broker_url), + update: CleanupCommandWrapper.new(UpdateCommand.new, broker_url), + deprovision: CleanupCommandWrapper.new(DeprovisionCommand.new, broker_url), +} + report = Benchmark.measure do + i = 0 CSV.foreach(input_file, headers: true) do |row| + i += 1 rows << row action, status, body = row['action'], row['status'], row['body'] @@ -189,7 +201,7 @@ report = Benchmark.measure do configure_broker_endpoint(action, body, broker_url, row, status) - output = run_command(command, deferred_deletions, cleanup) + output = run_command(command, deferred_deletions, cleanup, i) row['output'] = output STDOUT.write('.') STDOUT.flush
Properly clean up service instances for acceptance tests [fixes #<I>]
cloudfoundry_cf-acceptance-tests
train