content_type: stringclasses (8 values)
main_lang: stringclasses (7 values)
message: stringlengths (1 to 50)
sha: stringlengths (40 to 40)
patch: stringlengths (52 to 962k)
file_count: int64 (1 to 300)
PHP
PHP
add a plugin parsing test
baf1ce93f2c44b49c0d592034d7b543febaba785
<ide><path>lib/Cake/Test/Case/Console/Command/TestShellTest.php <ide> public function testMapAppFileToCase() { <ide> $this->assertSame('My/File/Is/Here', $return); <ide> } <ide> <add>/** <add> * testMapPluginFileToCategory <add> * <add> * @return void <add> */ <add> public function testMapPluginFileToCategory() { <add> $this->Shell->startup(); <add> <add> $return = $this->Shell->mapFileToCategory('Plugins/Awesome/Controller/ExampleController.php'); <add> $this->assertSame('awesome', $return); <add> <add> $return = $this->Shell->mapFileToCategory('plugins/Awesome/Controller/ExampleController.php'); <add> $this->assertSame('awesome', $return); <add> <add> $return = $this->Shell->mapFileToCategory('app/Plugins/Awesome/Controller/ExampleController.php'); <add> $this->assertSame('awesome', $return); <add> <add> $return = $this->Shell->mapFileToCategory('app/plugins/Awesome/Controller/ExampleController.php'); <add> $this->assertSame('awesome', $return); <add> <add> } <add> <add>/** <add> * testMapPluginFileToCase <add> * <add> * @return void <add> */ <add> public function testMapPluginFileToCase() { <add> $this->Shell->startup(); <add> <add> $return = $this->Shell->mapFileToCase('Plugins/Awesome/Controller/ExampleController.php', 'awesome', false); <add> $this->assertSame('Controller/ExampleController', $return); <add> <add> $return = $this->Shell->mapFileToCase('plugins/Awesome/Controller/ExampleController.php', 'awesome', false); <add> $this->assertSame('Controller/ExampleController', $return); <add> <add> $return = $this->Shell->mapFileToCase('app/Plugins/Awesome/Controller/ExampleController.php', 'awesome', false); <add> $this->assertSame('Controller/ExampleController', $return); <add> <add> $return = $this->Shell->mapFileToCase('app/plugins/Awesome/Controller/ExampleController.php', 'awesome', false); <add> $this->assertSame('Controller/ExampleController', $return); <add> } <add> <ide> /** <ide> * test available list of test cases for an empty category <ide> *
1
Python
Python
fix coding style for project euler problem 39
261be28120712f66a5bdb643585659eb4e2f932a
<ide><path>project_euler/problem_39/sol1.py <ide> """ <add>Problem 39: https://projecteuler.net/problem=39 <add> <ide> If p is the perimeter of a right angle triangle with integral length sides, <ide> {a,b,c}, there are exactly three solutions for p = 120. <ide> {20,48,52}, {24,45,51}, {30,40,50} <ide> <ide> from __future__ import annotations <ide> <add>import typing <ide> from collections import Counter <ide> <ide> <del>def pythagorean_triple(max_perimeter: int) -> dict: <add>def pythagorean_triple(max_perimeter: int) -> typing.Counter[int]: <ide> """ <ide> Returns a dictionary with keys as the perimeter of a right angled triangle <ide> and value as the number of corresponding triplets. <ide> def pythagorean_triple(max_perimeter: int) -> dict: <ide> >>> pythagorean_triple(50) <ide> Counter({12: 1, 30: 1, 24: 1, 40: 1, 36: 1, 48: 1}) <ide> """ <del> triplets = Counter() <add> triplets: typing.Counter[int] = Counter() <ide> for base in range(1, max_perimeter + 1): <ide> for perpendicular in range(base, max_perimeter + 1): <ide> hypotenuse = (base * base + perpendicular * perpendicular) ** 0.5 <del> if hypotenuse == int((hypotenuse)): <add> if hypotenuse == int(hypotenuse): <ide> perimeter = int(base + perpendicular + hypotenuse) <ide> if perimeter > max_perimeter: <ide> continue <del> else: <del> triplets[perimeter] += 1 <add> triplets[perimeter] += 1 <ide> return triplets <ide> <ide> <add>def solution(n: int = 1000) -> int: <add> """ <add> Returns perimeter with maximum solutions. <add> >>> solution(100) <add> 90 <add> >>> solution(200) <add> 180 <add> >>> solution(1000) <add> 840 <add> """ <add> triplets = pythagorean_triple(n) <add> return triplets.most_common(1)[0][0] <add> <add> <ide> if __name__ == "__main__": <del> triplets = pythagorean_triple(1000) <del> print(f"{triplets.most_common()[0][0] = }") <add> print(f"Perimeter {solution()} has maximum solutions")
1
PHP
PHP
add test for getroutesbyname()
e64f5b195626f8e88f2afbd201cc48060ca9cd45
<ide><path>tests/Routing/RouteCollectionTest.php <ide> public function testRouteCollectionCanGetAllRoutes() <ide> $this->assertEquals($allRoutes, $this->routeCollection->getRoutes()); <ide> } <ide> <add> public function testRouteCollectionCanGetRoutesByName() <add> { <add> $routesByName = [ <add> 'foo_index' => new Route('GET', 'foo/index', [ <add> 'uses' => 'FooController@index', <add> 'as' => 'foo_index', <add> ]), <add> 'foo_show' => new Route('GET', 'foo/show', [ <add> 'uses' => 'FooController@show', <add> 'as' => 'foo_show', <add> ]), <add> 'bar_create' => new Route('POST', 'bar', [ <add> 'uses' => 'BarController@create', <add> 'as' => 'bar_create', <add> ]), <add> ]; <add> <add> $this->routeCollection->add($routesByName['foo_index']); <add> $this->routeCollection->add($routesByName['foo_show']); <add> $this->routeCollection->add($routesByName['bar_create']); <add> <add> $this->assertSame($routesByName, $this->routeCollection->getRoutesByName()); <add> } <add> <ide> public function testRouteCollectionCleansUpOverwrittenRoutes() <ide> { <ide> // Create two routes with the same path and method.
1
Javascript
Javascript
fix an issue with the vhea/vmtx tables
5cb2a07b3d224e40874bb9d96173eb6ce2fdc6ab
<ide><path>fonts.js <ide> var Font = (function Font() { <ide> var end = denseRange[1]; <ide> var index = firstCode; <ide> for (var j = start; j <= end; j++) { <del> var code = j - firstCode - 1; <add> var code = glyphs[j - start]; <ide> var mapping = encoding[index] || {}; <del> mapping.unicode = glyphs[code].unicode; <add> mapping.unicode = code.unicode; <ide> encoding[index++] = mapping; <ide> } <ide> return cmap.data = createCMapTable(glyphs); <ide> var Font = (function Font() { <ide> return cmap.data; <ide> }; <ide> <add> function sanitizeMetrics(font, header, metrics, numGlyphs) { <add> if (!header && !metrics) <add> return; <add> <add> // The vhea/vmtx tables are not required, so it happens that <add> // some fonts embed a vmtx table without a vhea table. In this <add> // situation the sanitizer assume numOfLongVerMetrics = 1. As <add> // a result it tries to read numGlyphs - 1 SHORT from the vmtx <add> // table, and if it is not possible, the font is rejected. <add> // So remove the vmtx table if there is no vhea table. <add> if (!header && metrics) { <add> metrics.data = null; <add> return; <add> } <add> <add> font.pos = (font.start ? font.start : 0) + header.offset; <add> font.pos += header.length - 2; <add> var numOfMetrics = int16(font.getBytes(2)); <add> <add> var numOfSidebearings = numGlyphs - numOfMetrics; <add> var numMissing = numOfSidebearings - <add> ((hmtx.length - numOfMetrics * 4) >> 1); <add> if (numMissing > 0) { <add> font.pos = (font.start ? font.start : 0) + metrics.offset; <add> var entries = ''; <add> for (var i = 0; i < hmtx.length; i++) <add> entries += String.fromCharCode(font.getByte()); <add> for (var i = 0; i < numMissing; i++) <add> entries += '\x00\x00'; <add> metrics.data = stringToArray(entries); <add> } <add> }; <add> <ide> // Check that required tables are present <ide> var requiredTables = ['OS/2', 'cmap', 'head', 'hhea', <ide> 'hmtx', 'maxp', 'name', 'post']; <ide> <ide> var header = readOpenTypeHeader(font); <ide> var numTables = header.numTables; <ide> <del> var cmap, maxp, hhea, hmtx; <add> var cmap, maxp, hhea, hmtx, vhea, vmtx; <ide> var tables = []; <ide> for (var i = 0; i < numTables; i++) { <ide> var table = readTableEntry(font); <ide> var Font = (function Font() { <ide> hmtx = table; <ide> <ide> requiredTables.splice(index, 1); <add> } else { <add> if (table.tag == 'vmtx') <add> vmtx = table; <add> else if (table.tag == 'vhea') <add> vhea = table; <ide> } <ide> tables.push(table); <ide> } <ide> var Font = (function Font() { <ide> }); <ide> } <ide> <del> // Ensure the hmtx tables contains an advance width and a sidebearing <del> // for the number of glyphs declared in the maxp table <add> // Ensure the [h/v]mtx tables contains the advance width and <add> // sidebearings information for numGlyphs in the maxp table <ide> font.pos = (font.start ? font.start : 0) + maxp.offset; <ide> var version = int16(font.getBytes(4)); <ide> var numGlyphs = int16(font.getBytes(2)); <ide> <del> font.pos = (font.start ? font.start : 0) + hhea.offset; <del> font.pos += hhea.length - 2; <del> var numOfHMetrics = int16(font.getBytes(2)); <del> <del> var numOfSidebearings = numGlyphs - numOfHMetrics; <del> var numMissing = numOfSidebearings - <del> ((hmtx.length - numOfHMetrics * 4) >> 1); <del> if (numMissing > 0) { <del> font.pos = (font.start ? 
font.start : 0) + hmtx.offset; <del> var metrics = ''; <del> for (var i = 0; i < hmtx.length; i++) <del> metrics += String.fromCharCode(font.getByte()); <del> for (var i = 0; i < numMissing; i++) <del> metrics += '\x00\x00'; <del> hmtx.data = stringToArray(metrics); <del> } <add> sanitizeMetrics(font, hhea, hmtx, numGlyphs); <add> sanitizeMetrics(font, vhea, vmtx, numGlyphs); <ide> <ide> // Sanitizer reduces the glyph advanceWidth to the maxAdvanceWidth <ide> // Sometimes it's 0. That needs to be fixed
1
Ruby
Ruby
add salt to blocklist
d530cf68078d3b60f12032a606e5f319a4052288
<ide><path>Library/Homebrew/utils/pypi.rb <ide> module PyPI <ide> dxpy <ide> ipython <ide> molecule <add> salt <ide> xonsh <ide> ].freeze <ide>
1
PHP
PHP
add contract to let dispatcher use pipelines
fb40aba786421581781ae59be98132e13724ed42
<ide><path>src/Illuminate/Contracts/Bus/PipingDispatcher.php <add><?php namespace Illuminate\Contracts\Bus; <add> <add>interface PipingDispatcher extends Dispatcher { <add> <add> /** <add> * Set the pipes commands should be piped through before dispatching. <add> * <add> * @param array $pipes <add> * @return $this <add> */ <add> public function pipeThrough(array $pipes); <add> <add>}
1
Javascript
Javascript
use .chunk when calling adapters's writev
54bc3c9afaf6d3b28552536d6aac2d1887fbe8df
<ide><path>lib/internal/webstreams/adapters.js <ide> function newStreamWritableFromWritableStream(writableStream, options = {}) { <ide> <ide> writev(chunks, callback) { <ide> function done(error) { <add> error = error.filter((e) => e); <ide> try { <del> callback(error); <add> callback(error.length === 0 ? undefined : error); <ide> } catch (error) { <ide> // In a next tick because this is happening within <ide> // a promise context, and if there are any errors <ide> function newStreamWritableFromWritableStream(writableStream, options = {}) { <ide> PromiseAll( <ide> ArrayPrototypeMap( <ide> chunks, <del> (chunk) => writer.write(chunk))), <add> (data) => writer.write(data.chunk))), <ide> done, <ide> done); <ide> }, <ide> function newStreamDuplexFromReadableWritablePair(pair = {}, options = {}) { <ide> <ide> writev(chunks, callback) { <ide> function done(error) { <add> error = error.filter((e) => e); <ide> try { <del> callback(error); <add> callback(error.length === 0 ? undefined : error); <ide> } catch (error) { <ide> // In a next tick because this is happening within <ide> // a promise context, and if there are any errors <ide> function newStreamDuplexFromReadableWritablePair(pair = {}, options = {}) { <ide> PromiseAll( <ide> ArrayPrototypeMap( <ide> chunks, <del> (chunk) => writer.write(chunk))), <add> (data) => writer.write(data.chunk))), <ide> done, <ide> done); <ide> }, <ide><path>test/parallel/test-whatwg-webstreams-adapters-to-streamduplex.js <ide> const { <ide> finished(duplex, common.mustCall()); <ide> pipeline(readable, duplex, writable, common.mustCall()); <ide> } <add> <add>{ <add> const transform = new TransformStream(); <add> const duplex = newStreamDuplexFromReadableWritablePair(transform); <add> duplex.setEncoding('utf-8'); <add> duplex.on('data', common.mustCall((data) => { <add> assert.strictEqual(data, 'hello'); <add> }, 5)); <add> <add> duplex.write(Buffer.from('hello')); <add> duplex.write(Buffer.from('hello')); <add> duplex.write(Buffer.from('hello')); <add> duplex.write(Buffer.from('hello')); <add> duplex.write(Buffer.from('hello')); <add> <add> duplex.end(); <add>} <ide><path>test/parallel/test-whatwg-webstreams-adapters-to-streamwritable.js <ide> class TestSource { <ide> <ide> { <ide> const writableStream = new WritableStream({ <del> write: common.mustCall(2), <add> write: common.mustCall(5), <ide> close: common.mustCall(), <ide> }); <ide> const writable = newStreamWritableFromWritableStream(writableStream); <ide> <ide> finished(writable, common.mustCall()); <ide> <ide> writable.write('hello'); <add> writable.write('hello'); <add> writable.write('hello'); <add> writable.write('world'); <ide> writable.write('world'); <ide> writable.end(); <ide> }
3
Text
Text
update layer documentation
0c48b89f0b5808bb4653759535469294105d5190
<ide><path>official/nlp/modeling/layers/README.md <ide> assemble new `tf.keras` layers or models. <ide> of self multi-head attention, cross multi-head attention and feedforward <ide> network. <ide> <add>* [RandomFeatureGaussianProcess](gaussian_process.py) implements random <add> feature-based Gaussian process described in ["Random Features for <add> Large-Scale Kernel Machines"](https://people.eecs.berkeley.edu/~brecht/papers/07.rah.rec.nips.pdf). <add> <ide> * [ReZeroTransformer](rezero_transformer.py) implements Transformer with <ide> ReZero described in <ide> ["ReZero is All You Need: Fast Convergence at Large Depth"](https://arxiv.org/abs/2003.04887). <ide> assemble new `tf.keras` layers or models. <ide> * [SelfAttentionMask](self_attention_mask.py) creates a 3D attention mask from <ide> a 2D tensor mask. <ide> <add>* [SpectralNormalization](spectral_normalization.py) implements a tf.Wrapper <add> that applies spectral normalization regularization to a given layer. See <add> [Spectral Norm Regularization for Improving the Generalizability of <add> Deep Learning](https://arxiv.org/abs/1705.10941) <add> <ide> * [MaskedSoftmax](masked_softmax.py) implements a softmax with an optional <ide> masking input. If no mask is provided to this layer, it performs a standard <ide> softmax; however, if a mask tensor is applied (which should be 1 in <ide> assemble new `tf.keras` layers or models. <ide> * [ClassificationHead](cls_head.py) A pooling head over a sequence of <ide> embeddings, commonly used by classification tasks. <ide> <add>* [GaussianProcessClassificationHead](cls_head.py) A spectral-normalized <add> neural Gaussian process (SNGP)-based classification head as described in <add> ["Simple and Principled Uncertainty Estimation with Deterministic Deep <add> Learning via Distance Awareness"](https://arxiv.org/abs/2006.10108). <add> <ide> * [GatedFeedforward](gated_feedforward.py) implements the gated linear layer <ide> feedforward as described in <ide> ["GLU Variants Improve Transformer"](https://arxiv.org/abs/2002.05202).
1
Javascript
Javascript
remove duplicate of bordercolorpicker
4e51e845bb1fd3d044ccc2b7a121584d172f5ebd
<ide><path>client/src/components/helpers/AvatarRenderer.js <ide> import PropTypes from 'prop-types'; <ide> import { Image } from '@freecodecamp/react-bootstrap'; <ide> import DefaultAvatar from '../../assets/icons/DefaultAvatar'; <ide> import { defaultUserImage } from '../../../../config/misc'; <add>import { borderColorPicker } from '../helpers'; <ide> <ide> const propTypes = { <ide> isDonating: PropTypes.bool, <ide> const propTypes = { <ide> userName: PropTypes.string.isRequired <ide> }; <ide> <del>function borderColorPicker(isDonating, isTopContributor) { <del> if (isDonating && isTopContributor) return 'purple-border'; <del> else if (isTopContributor) return 'green-border'; <del> else if (isDonating) return 'gold-border'; <del> else return 'default-border'; <del>} <del> <ide> function AvatarRenderer({ picture, userName, isDonating, isTopContributor }) { <ide> let borderColor = borderColorPicker(isDonating, isTopContributor); <ide> let isPlaceHolderImage =
1
Go
Go
use imagebuildoptions in builder
5190794f1d85d5406611eb69c270df62ac1cdc7f
<ide><path>api/client/build.go <ide> import ( <ide> <ide> "github.com/docker/docker/api" <ide> "github.com/docker/docker/api/types" <add> "github.com/docker/docker/api/types/container" <ide> "github.com/docker/docker/builder/dockerignore" <ide> Cli "github.com/docker/docker/cli" <ide> "github.com/docker/docker/opts" <ide> func (cli *DockerCli) CmdBuild(args ...string) error { <ide> } <ide> } <ide> <add> var shmSize int64 <add> if *flShmSize != "" { <add> shmSize, err = units.RAMInBytes(*flShmSize) <add> if err != nil { <add> return err <add> } <add> } <add> <ide> var remoteContext string <ide> if isRemote { <ide> remoteContext = cmd.Arg(0) <ide> func (cli *DockerCli) CmdBuild(args ...string) error { <ide> Remove: *rm, <ide> ForceRemove: *forceRm, <ide> PullParent: *pull, <del> Isolation: *isolation, <add> IsolationLevel: container.IsolationLevel(*isolation), <ide> CPUSetCPUs: *flCPUSetCpus, <ide> CPUSetMems: *flCPUSetMems, <ide> CPUShares: *flCPUShares, <ide> CPUQuota: *flCPUQuota, <ide> CPUPeriod: *flCPUPeriod, <ide> CgroupParent: *flCgroupParent, <del> ShmSize: *flShmSize, <ide> Dockerfile: relDockerfile, <add> ShmSize: shmSize, <ide> Ulimits: flUlimits.GetList(), <del> BuildArgs: flBuildArg.GetAll(), <add> BuildArgs: runconfigopts.ConvertKVStringsToMap(flBuildArg.GetAll()), <ide> AuthConfigs: cli.configFile.AuthConfigs, <ide> } <ide> <ide><path>api/client/lib/image_build.go <ide> import ( <ide> <ide> "github.com/docker/docker/api/types" <ide> "github.com/docker/docker/api/types/container" <del> "github.com/docker/go-units" <ide> ) <ide> <ide> var headerRegexp = regexp.MustCompile(`\ADocker/.+\s\((.+)\)\z`) <ide> func imageBuildOptionsToQuery(options types.ImageBuildOptions) (url.Values, erro <ide> query.Set("pull", "1") <ide> } <ide> <del> if !container.IsolationLevel.IsDefault(container.IsolationLevel(options.Isolation)) { <del> query.Set("isolation", options.Isolation) <add> if !container.IsolationLevel.IsDefault(options.IsolationLevel) { <add> query.Set("isolation", string(options.IsolationLevel)) <ide> } <ide> <ide> query.Set("cpusetcpus", options.CPUSetCPUs) <ide> func imageBuildOptionsToQuery(options types.ImageBuildOptions) (url.Values, erro <ide> query.Set("memory", strconv.FormatInt(options.Memory, 10)) <ide> query.Set("memswap", strconv.FormatInt(options.MemorySwap, 10)) <ide> query.Set("cgroupparent", options.CgroupParent) <del> <del> if options.ShmSize != "" { <del> parsedShmSize, err := units.RAMInBytes(options.ShmSize) <del> if err != nil { <del> return query, err <del> } <del> query.Set("shmsize", strconv.FormatInt(parsedShmSize, 10)) <del> } <del> <add> query.Set("shmsize", strconv.FormatInt(options.ShmSize, 10)) <ide> query.Set("dockerfile", options.Dockerfile) <ide> <ide> ulimitsJSON, err := json.Marshal(options.Ulimits) <ide> func imageBuildOptionsToQuery(options types.ImageBuildOptions) (url.Values, erro <ide> } <ide> query.Set("ulimits", string(ulimitsJSON)) <ide> <del> buildArgs := convertKVStringsToMap(options.BuildArgs) <del> buildArgsJSON, err := json.Marshal(buildArgs) <add> buildArgsJSON, err := json.Marshal(options.BuildArgs) <ide> if err != nil { <ide> return query, err <ide> } <ide><path>api/server/router/build/build_routes.go <ide> func sanitizeRepoAndTags(names []string) ([]reference.Named, error) { <ide> return repoAndTags, nil <ide> } <ide> <add>func newImageBuildOptions(ctx context.Context, r *http.Request) (*types.ImageBuildOptions, error) { <add> version := httputils.VersionFromContext(ctx) <add> options := &types.ImageBuildOptions{} <add> if 
httputils.BoolValue(r, "forcerm") && version.GreaterThanOrEqualTo("1.12") { <add> options.Remove = true <add> } else if r.FormValue("rm") == "" && version.GreaterThanOrEqualTo("1.12") { <add> options.Remove = true <add> } else { <add> options.Remove = httputils.BoolValue(r, "rm") <add> } <add> if httputils.BoolValue(r, "pull") && version.GreaterThanOrEqualTo("1.16") { <add> options.PullParent = true <add> } <add> <add> options.Dockerfile = r.FormValue("dockerfile") <add> options.SuppressOutput = httputils.BoolValue(r, "q") <add> options.NoCache = httputils.BoolValue(r, "nocache") <add> options.ForceRemove = httputils.BoolValue(r, "forcerm") <add> options.MemorySwap = httputils.Int64ValueOrZero(r, "memswap") <add> options.Memory = httputils.Int64ValueOrZero(r, "memory") <add> options.CPUShares = httputils.Int64ValueOrZero(r, "cpushares") <add> options.CPUPeriod = httputils.Int64ValueOrZero(r, "cpuperiod") <add> options.CPUQuota = httputils.Int64ValueOrZero(r, "cpuquota") <add> options.CPUSetCPUs = r.FormValue("cpusetcpus") <add> options.CPUSetMems = r.FormValue("cpusetmems") <add> options.CgroupParent = r.FormValue("cgroupparent") <add> <add> if r.Form.Get("shmsize") != "" { <add> shmSize, err := strconv.ParseInt(r.Form.Get("shmsize"), 10, 64) <add> if err != nil { <add> return nil, err <add> } <add> options.ShmSize = shmSize <add> } <add> <add> if i := container.IsolationLevel(r.FormValue("isolation")); i != "" { <add> if !container.IsolationLevel.IsValid(i) { <add> return nil, fmt.Errorf("Unsupported isolation: %q", i) <add> } <add> options.IsolationLevel = i <add> } <add> <add> var buildUlimits = []*units.Ulimit{} <add> ulimitsJSON := r.FormValue("ulimits") <add> if ulimitsJSON != "" { <add> if err := json.NewDecoder(strings.NewReader(ulimitsJSON)).Decode(&buildUlimits); err != nil { <add> return nil, err <add> } <add> options.Ulimits = buildUlimits <add> } <add> <add> var buildArgs = map[string]string{} <add> buildArgsJSON := r.FormValue("buildargs") <add> if buildArgsJSON != "" { <add> if err := json.NewDecoder(strings.NewReader(buildArgsJSON)).Decode(&buildArgs); err != nil { <add> return nil, err <add> } <add> options.BuildArgs = buildArgs <add> } <add> return options, nil <add>} <add> <ide> func (br *buildRouter) postBuild(ctx context.Context, w http.ResponseWriter, r *http.Request, vars map[string]string) error { <ide> var ( <ide> authConfigs = map[string]types.AuthConfig{} <ide> authConfigsEncoded = r.Header.Get("X-Registry-Config") <del> buildConfig = &dockerfile.Config{} <ide> notVerboseBuffer = bytes.NewBuffer(nil) <ide> ) <ide> <ide> func (br *buildRouter) postBuild(ctx context.Context, w http.ResponseWriter, r * <ide> <ide> w.Header().Set("Content-Type", "application/json") <ide> <del> version := httputils.VersionFromContext(ctx) <ide> output := ioutils.NewWriteFlusher(w) <ide> defer output.Close() <ide> sf := streamformatter.NewJSONStreamFormatter() <ide> errf := func(err error) error { <del> if !buildConfig.Verbose && notVerboseBuffer.Len() > 0 { <add> if httputils.BoolValue(r, "q") && notVerboseBuffer.Len() > 0 { <ide> output.Write(notVerboseBuffer.Bytes()) <ide> } <ide> // Do not write the error in the http output if it's still empty. 
<ide> func (br *buildRouter) postBuild(ctx context.Context, w http.ResponseWriter, r * <ide> return nil <ide> } <ide> <del> if httputils.BoolValue(r, "forcerm") && version.GreaterThanOrEqualTo("1.12") { <del> buildConfig.Remove = true <del> } else if r.FormValue("rm") == "" && version.GreaterThanOrEqualTo("1.12") { <del> buildConfig.Remove = true <del> } else { <del> buildConfig.Remove = httputils.BoolValue(r, "rm") <del> } <del> if httputils.BoolValue(r, "pull") && version.GreaterThanOrEqualTo("1.16") { <del> buildConfig.Pull = true <add> buildOptions, err := newImageBuildOptions(ctx, r) <add> if err != nil { <add> return errf(err) <ide> } <ide> <ide> repoAndTags, err := sanitizeRepoAndTags(r.Form["t"]) <ide> if err != nil { <ide> return errf(err) <ide> } <ide> <del> buildConfig.DockerfileName = r.FormValue("dockerfile") <del> buildConfig.Verbose = !httputils.BoolValue(r, "q") <del> buildConfig.UseCache = !httputils.BoolValue(r, "nocache") <del> buildConfig.ForceRemove = httputils.BoolValue(r, "forcerm") <del> buildConfig.MemorySwap = httputils.Int64ValueOrZero(r, "memswap") <del> buildConfig.Memory = httputils.Int64ValueOrZero(r, "memory") <del> buildConfig.CPUShares = httputils.Int64ValueOrZero(r, "cpushares") <del> buildConfig.CPUPeriod = httputils.Int64ValueOrZero(r, "cpuperiod") <del> buildConfig.CPUQuota = httputils.Int64ValueOrZero(r, "cpuquota") <del> buildConfig.CPUSetCpus = r.FormValue("cpusetcpus") <del> buildConfig.CPUSetMems = r.FormValue("cpusetmems") <del> buildConfig.CgroupParent = r.FormValue("cgroupparent") <del> <del> if r.Form.Get("shmsize") != "" { <del> shmSize, err := strconv.ParseInt(r.Form.Get("shmsize"), 10, 64) <del> if err != nil { <del> return errf(err) <del> } <del> buildConfig.ShmSize = &shmSize <del> } <del> <del> if i := container.IsolationLevel(r.FormValue("isolation")); i != "" { <del> if !container.IsolationLevel.IsValid(i) { <del> return errf(fmt.Errorf("Unsupported isolation: %q", i)) <del> } <del> buildConfig.Isolation = i <del> } <del> <del> var buildUlimits = []*units.Ulimit{} <del> ulimitsJSON := r.FormValue("ulimits") <del> if ulimitsJSON != "" { <del> if err := json.NewDecoder(strings.NewReader(ulimitsJSON)).Decode(&buildUlimits); err != nil { <del> return errf(err) <del> } <del> buildConfig.Ulimits = buildUlimits <del> } <del> <del> var buildArgs = map[string]string{} <del> buildArgsJSON := r.FormValue("buildargs") <del> if buildArgsJSON != "" { <del> if err := json.NewDecoder(strings.NewReader(buildArgsJSON)).Decode(&buildArgs); err != nil { <del> return errf(err) <del> } <del> buildConfig.BuildArgs = buildArgs <del> } <del> <ide> remoteURL := r.FormValue("remote") <ide> <ide> // Currently, only used if context is from a remote url. <ide> // Look at code in DetectContextFromRemoteURL for more information. 
<ide> createProgressReader := func(in io.ReadCloser) io.ReadCloser { <ide> progressOutput := sf.NewProgressOutput(output, true) <del> if !buildConfig.Verbose { <add> if buildOptions.SuppressOutput { <ide> progressOutput = sf.NewProgressOutput(notVerboseBuffer, true) <ide> } <ide> return progress.NewProgressReader(in, progressOutput, r.ContentLength, "Downloading context", remoteURL) <ide> func (br *buildRouter) postBuild(ctx context.Context, w http.ResponseWriter, r * <ide> logrus.Debugf("[BUILDER] failed to remove temporary context: %v", err) <ide> } <ide> }() <add> if len(dockerfileName) > 0 { <add> buildOptions.Dockerfile = dockerfileName <add> } <ide> <ide> uidMaps, gidMaps := br.backend.GetUIDGIDMaps() <ide> defaultArchiver := &archive.Archiver{ <ide> Untar: chrootarchive.Untar, <ide> UIDMaps: uidMaps, <ide> GIDMaps: gidMaps, <ide> } <add> <ide> docker := &daemonbuilder.Docker{ <ide> Daemon: br.backend, <ide> OutOld: output, <ide> AuthConfigs: authConfigs, <ide> Archiver: defaultArchiver, <ide> } <del> if !buildConfig.Verbose { <add> if buildOptions.SuppressOutput { <ide> docker.OutOld = notVerboseBuffer <ide> } <ide> <del> b, err := dockerfile.NewBuilder(buildConfig, docker, builder.DockerIgnoreContext{ModifiableContext: context}, nil) <add> b, err := dockerfile.NewBuilder( <add> buildOptions, // result of newBuildConfig <add> docker, <add> builder.DockerIgnoreContext{ModifiableContext: context}, <add> nil) <ide> if err != nil { <ide> return errf(err) <ide> } <ide> b.Stdout = &streamformatter.StdoutFormatter{Writer: output, StreamFormatter: sf} <ide> b.Stderr = &streamformatter.StderrFormatter{Writer: output, StreamFormatter: sf} <del> if !buildConfig.Verbose { <add> if buildOptions.SuppressOutput { <ide> b.Stdout = &streamformatter.StdoutFormatter{Writer: notVerboseBuffer, StreamFormatter: sf} <ide> b.Stderr = &streamformatter.StderrFormatter{Writer: notVerboseBuffer, StreamFormatter: sf} <ide> } <ide> func (br *buildRouter) postBuild(ctx context.Context, w http.ResponseWriter, r * <ide> }() <ide> } <ide> <del> if len(dockerfileName) > 0 { <del> b.DockerfileName = dockerfileName <del> } <del> <ide> imgID, err := b.Build() <ide> if err != nil { <ide> return errf(err) <ide> func (br *buildRouter) postBuild(ctx context.Context, w http.ResponseWriter, r * <ide> <ide> // Everything worked so if -q was provided the output from the daemon <ide> // should be just the image ID and we'll print that to stdout. <del> if !buildConfig.Verbose { <add> if buildOptions.SuppressOutput { <ide> stdout := &streamformatter.StdoutFormatter{Writer: output, StreamFormatter: sf} <ide> fmt.Fprintf(stdout, "%s\n", string(imgID)) <ide> } <ide><path>api/types/client.go <ide> type ImageBuildOptions struct { <ide> Remove bool <ide> ForceRemove bool <ide> PullParent bool <del> Isolation string <add> IsolationLevel container.IsolationLevel <ide> CPUSetCPUs string <ide> CPUSetMems string <ide> CPUShares int64 <ide> type ImageBuildOptions struct { <ide> Memory int64 <ide> MemorySwap int64 <ide> CgroupParent string <del> ShmSize string <add> ShmSize int64 <ide> Dockerfile string <ide> Ulimits []*units.Ulimit <del> BuildArgs []string <add> BuildArgs map[string]string <ide> AuthConfigs map[string]AuthConfig <ide> Context io.Reader <ide> } <ide><path>api/types/container/host_config.go <ide> type HostConfig struct { <ide> SecurityOpt []string // List of string values to customize labels for MLS systems, such as SELinux. 
<ide> Tmpfs map[string]string `json:",omitempty"` // List of tmpfs (mounts) used for the container <ide> UTSMode UTSMode // UTS namespace to use for the container <del> ShmSize *int64 // Total shm memory usage <add> ShmSize int64 // Total shm memory usage <ide> <ide> // Applicable to Windows <ide> ConsoleSize [2]int // Initial console size <ide><path>builder/dockerfile/builder.go <ide> import ( <ide> "sync" <ide> <ide> "github.com/Sirupsen/logrus" <add> "github.com/docker/docker/api/types" <ide> "github.com/docker/docker/api/types/container" <ide> "github.com/docker/docker/builder" <ide> "github.com/docker/docker/builder/dockerfile/parser" <ide> "github.com/docker/docker/pkg/stringid" <del> "github.com/docker/go-units" <ide> ) <ide> <ide> var validCommitCommands = map[string]bool{ <ide> var BuiltinAllowedBuildArgs = map[string]bool{ <ide> "no_proxy": true, <ide> } <ide> <del>// Config constitutes the configuration for a Dockerfile builder. <del>type Config struct { <del> // only used if Dockerfile has to be extracted from Context <del> DockerfileName string <del> <del> Verbose bool <del> UseCache bool <del> Remove bool <del> ForceRemove bool <del> Pull bool <del> BuildArgs map[string]string // build-time args received in build context for expansion/substitution and commands in 'run'. <del> Isolation container.IsolationLevel <del> <del> // resource constraints <del> // TODO: factor out to be reused with Run ? <del> <del> Memory int64 <del> MemorySwap int64 <del> ShmSize *int64 <del> CPUShares int64 <del> CPUPeriod int64 <del> CPUQuota int64 <del> CPUSetCpus string <del> CPUSetMems string <del> CgroupParent string <del> Ulimits []*units.Ulimit <del>} <del> <ide> // Builder is a Dockerfile builder <ide> // It implements the builder.Backend interface. <ide> type Builder struct { <del> *Config <add> options *types.ImageBuildOptions <ide> <ide> Stdout io.Writer <ide> Stderr io.Writer <ide> type Builder struct { <ide> // NewBuilder creates a new Dockerfile builder from an optional dockerfile and a Config. <ide> // If dockerfile is nil, the Dockerfile specified by Config.DockerfileName, <ide> // will be read from the Context passed to Build(). <del>func NewBuilder(config *Config, docker builder.Backend, context builder.Context, dockerfile io.ReadCloser) (b *Builder, err error) { <add>func NewBuilder(config *types.ImageBuildOptions, backend builder.Backend, context builder.Context, dockerfile io.ReadCloser) (b *Builder, err error) { <ide> if config == nil { <del> config = new(Config) <add> config = new(types.ImageBuildOptions) <ide> } <ide> if config.BuildArgs == nil { <ide> config.BuildArgs = make(map[string]string) <ide> } <ide> b = &Builder{ <del> Config: config, <add> options: config, <ide> Stdout: os.Stdout, <ide> Stderr: os.Stderr, <del> docker: docker, <add> docker: backend, <ide> context: context, <ide> runConfig: new(container.Config), <ide> tmpContainers: map[string]struct{}{}, <ide> func (b *Builder) Build() (string, error) { <ide> // Not cancelled yet, keep going... <ide> } <ide> if err := b.dispatch(i, n); err != nil { <del> if b.ForceRemove { <add> if b.options.ForceRemove { <ide> b.clearTmp() <ide> } <ide> return "", err <ide> } <ide> shortImgID = stringid.TruncateID(b.image) <ide> fmt.Fprintf(b.Stdout, " ---> %s\n", shortImgID) <del> if b.Remove { <add> if b.options.Remove { <ide> b.clearTmp() <ide> } <ide> } <ide> <ide> // check if there are any leftover build-args that were passed but not <ide> // consumed during build. Return an error, if there are any. 
<ide> leftoverArgs := []string{} <del> for arg := range b.BuildArgs { <add> for arg := range b.options.BuildArgs { <ide> if !b.isBuildArgAllowed(arg) { <ide> leftoverArgs = append(leftoverArgs, arg) <ide> } <ide><path>builder/dockerfile/dispatchers.go <ide> func from(b *Builder, args []string, attributes map[string]bool, original string <ide> err error <ide> ) <ide> // TODO: don't use `name`, instead resolve it to a digest <del> if !b.Pull { <add> if !b.options.PullParent { <ide> image, err = b.docker.GetImage(name) <ide> // TODO: shouldn't we error out if error is different from "not found" ? <ide> } <ide> func run(b *Builder, args []string, attributes map[string]bool, original string) <ide> // lookup for same image built with same build time environment. <ide> cmdBuildEnv := []string{} <ide> configEnv := runconfigopts.ConvertKVStringsToMap(b.runConfig.Env) <del> for key, val := range b.BuildArgs { <add> for key, val := range b.options.BuildArgs { <ide> if !b.isBuildArgAllowed(key) { <ide> // skip build-args that are not in allowed list, meaning they have <ide> // not been defined by an "ARG" Dockerfile command yet. <ide> func arg(b *Builder, args []string, attributes map[string]bool, original string) <ide> // If there is a default value associated with this arg then add it to the <ide> // b.buildArgs if one is not already passed to the builder. The args passed <ide> // to builder override the default value of 'arg'. <del> if _, ok := b.BuildArgs[name]; !ok && hasDefault { <del> b.BuildArgs[name] = value <add> if _, ok := b.options.BuildArgs[name]; !ok && hasDefault { <add> b.options.BuildArgs[name] = value <ide> } <ide> <ide> return b.commit("", b.runConfig.Cmd, fmt.Sprintf("ARG %s", arg)) <ide><path>builder/dockerfile/evaluator.go <ide> func (b *Builder) dispatch(stepN int, ast *parser.Node) error { <ide> // a subsequent one. So, putting the buildArgs list after the Config.Env <ide> // list, in 'envs', is safe. <ide> envs := b.runConfig.Env <del> for key, val := range b.BuildArgs { <add> for key, val := range b.options.BuildArgs { <ide> if !b.isBuildArgAllowed(key) { <ide> // skip build-args that are not in allowed list, meaning they have <ide> // not been defined by an "ARG" Dockerfile command yet. <ide><path>builder/dockerfile/internals.go <ide> func (b *Builder) processImageFrom(img builder.Image) error { <ide> // If there is any error, it returns `(false, err)`. <ide> func (b *Builder) probeCache() (bool, error) { <ide> c, ok := b.docker.(builder.ImageCache) <del> if !ok || !b.UseCache || b.cacheBusted { <add> if !ok || b.options.NoCache || b.cacheBusted { <ide> return false, nil <ide> } <ide> cache, err := c.GetCachedImage(b.image, b.runConfig) <ide> func (b *Builder) create() (string, error) { <ide> b.runConfig.Image = b.image <ide> <ide> resources := container.Resources{ <del> CgroupParent: b.CgroupParent, <del> CPUShares: b.CPUShares, <del> CPUPeriod: b.CPUPeriod, <del> CPUQuota: b.CPUQuota, <del> CpusetCpus: b.CPUSetCpus, <del> CpusetMems: b.CPUSetMems, <del> Memory: b.Memory, <del> MemorySwap: b.MemorySwap, <del> Ulimits: b.Ulimits, <add> CgroupParent: b.options.CgroupParent, <add> CPUShares: b.options.CPUShares, <add> CPUPeriod: b.options.CPUPeriod, <add> CPUQuota: b.options.CPUQuota, <add> CpusetCpus: b.options.CPUSetCPUs, <add> CpusetMems: b.options.CPUSetMems, <add> Memory: b.options.Memory, <add> MemorySwap: b.options.MemorySwap, <add> Ulimits: b.options.Ulimits, <ide> } <ide> <ide> // TODO: why not embed a hostconfig in builder? 
<ide> hostConfig := &container.HostConfig{ <del> Isolation: b.Isolation, <del> ShmSize: b.ShmSize, <add> Isolation: b.options.IsolationLevel, <add> ShmSize: b.options.ShmSize, <ide> Resources: resources, <ide> } <ide> <ide> func (b *Builder) readDockerfile() error { <ide> // If no -f was specified then look for 'Dockerfile'. If we can't find <ide> // that then look for 'dockerfile'. If neither are found then default <ide> // back to 'Dockerfile' and use that in the error message. <del> if b.DockerfileName == "" { <del> b.DockerfileName = api.DefaultDockerfileName <del> if _, _, err := b.context.Stat(b.DockerfileName); os.IsNotExist(err) { <del> lowercase := strings.ToLower(b.DockerfileName) <add> if b.options.Dockerfile == "" { <add> b.options.Dockerfile = api.DefaultDockerfileName <add> if _, _, err := b.context.Stat(b.options.Dockerfile); os.IsNotExist(err) { <add> lowercase := strings.ToLower(b.options.Dockerfile) <ide> if _, _, err := b.context.Stat(lowercase); err == nil { <del> b.DockerfileName = lowercase <add> b.options.Dockerfile = lowercase <ide> } <ide> } <ide> } <ide> <del> f, err := b.context.Open(b.DockerfileName) <add> f, err := b.context.Open(b.options.Dockerfile) <ide> if err != nil { <ide> if os.IsNotExist(err) { <del> return fmt.Errorf("Cannot locate specified Dockerfile: %s", b.DockerfileName) <add> return fmt.Errorf("Cannot locate specified Dockerfile: %s", b.options.Dockerfile) <ide> } <ide> return err <ide> } <ide> func (b *Builder) readDockerfile() error { <ide> return fmt.Errorf("Unexpected error reading Dockerfile: %v", err) <ide> } <ide> if fi.Size() == 0 { <del> return fmt.Errorf("The Dockerfile (%s) cannot be empty", b.DockerfileName) <add> return fmt.Errorf("The Dockerfile (%s) cannot be empty", b.options.Dockerfile) <ide> } <ide> } <ide> b.dockerfile, err = parser.Parse(f) <ide> func (b *Builder) readDockerfile() error { <ide> // Note that this assumes the Dockerfile has been read into memory and <ide> // is now safe to be removed. <ide> if dockerIgnore, ok := b.context.(builder.DockerIgnoreContext); ok { <del> dockerIgnore.Process([]string{b.DockerfileName}) <add> dockerIgnore.Process([]string{b.options.Dockerfile}) <ide> } <ide> return nil <ide> } <ide><path>daemon/container_operations_unix.go <ide> func (daemon *Daemon) setupIpcDirs(c *container.Container) error { <ide> } <ide> <ide> shmSize := container.DefaultSHMSize <del> if c.HostConfig.ShmSize != nil { <del> shmSize = *c.HostConfig.ShmSize <add> if c.HostConfig.ShmSize != 0 { <add> shmSize = c.HostConfig.ShmSize <ide> } <ide> shmproperty := "mode=1777,size=" + strconv.FormatInt(shmSize, 10) <ide> if err := syscall.Mount("shm", shmPath, "tmpfs", uintptr(syscall.MS_NOEXEC|syscall.MS_NOSUID|syscall.MS_NODEV), label.FormatMountLabel(shmproperty, c.GetMountLabel())); err != nil { <ide><path>daemon/daemon_unix.go <ide> func (daemon *Daemon) adaptContainerSettings(hostConfig *containertypes.HostConf <ide> // By default, MemorySwap is set to twice the size of Memory. <ide> hostConfig.MemorySwap = hostConfig.Memory * 2 <ide> } <del> if hostConfig.ShmSize == nil { <del> shmSize := container.DefaultSHMSize <del> hostConfig.ShmSize = &shmSize <add> if hostConfig.ShmSize == 0 { <add> hostConfig.ShmSize = container.DefaultSHMSize <ide> } <ide> var err error <ide> if hostConfig.SecurityOpt == nil { <ide> func verifyPlatformContainerSettings(daemon *Daemon, hostConfig *containertypes. <ide> } <ide> warnings = append(warnings, w...) 
<ide> <del> if hostConfig.ShmSize != nil && *hostConfig.ShmSize <= 0 { <add> if hostConfig.ShmSize < 0 { <ide> return warnings, fmt.Errorf("SHM size must be greater then 0") <ide> } <ide> <ide><path>integration-cli/docker_api_containers_test.go <ide> func (s *DockerSuite) TestPostContainersCreateShmSizeNegative(c *check.C) { <ide> c.Assert(string(body), checker.Contains, "SHM size must be greater then 0") <ide> } <ide> <del>func (s *DockerSuite) TestPostContainersCreateShmSizeZero(c *check.C) { <del> config := map[string]interface{}{ <del> "Image": "busybox", <del> "HostConfig": map[string]interface{}{"ShmSize": 0}, <del> } <del> <del> status, body, err := sockRequest("POST", "/containers/create", config) <del> c.Assert(err, check.IsNil) <del> c.Assert(status, check.Equals, http.StatusInternalServerError) <del> c.Assert(string(body), checker.Contains, "SHM size must be greater then 0") <del>} <del> <ide> func (s *DockerSuite) TestPostContainersCreateShmSizeHostConfigOmitted(c *check.C) { <ide> var defaultSHMSize int64 = 67108864 <ide> config := map[string]interface{}{ <ide> func (s *DockerSuite) TestPostContainersCreateShmSizeHostConfigOmitted(c *check. <ide> var containerJSON types.ContainerJSON <ide> c.Assert(json.Unmarshal(body, &containerJSON), check.IsNil) <ide> <del> c.Assert(*containerJSON.HostConfig.ShmSize, check.Equals, defaultSHMSize) <add> c.Assert(containerJSON.HostConfig.ShmSize, check.Equals, defaultSHMSize) <ide> <ide> out, _ := dockerCmd(c, "start", "-i", containerJSON.ID) <ide> shmRegexp := regexp.MustCompile(`shm on /dev/shm type tmpfs(.*)size=65536k`) <ide> func (s *DockerSuite) TestPostContainersCreateShmSizeOmitted(c *check.C) { <ide> var containerJSON types.ContainerJSON <ide> c.Assert(json.Unmarshal(body, &containerJSON), check.IsNil) <ide> <del> c.Assert(*containerJSON.HostConfig.ShmSize, check.Equals, int64(67108864)) <add> c.Assert(containerJSON.HostConfig.ShmSize, check.Equals, int64(67108864)) <ide> <ide> out, _ := dockerCmd(c, "start", "-i", containerJSON.ID) <ide> shmRegexp := regexp.MustCompile(`shm on /dev/shm type tmpfs(.*)size=65536k`) <ide> func (s *DockerSuite) TestPostContainersCreateWithShmSize(c *check.C) { <ide> var containerJSON types.ContainerJSON <ide> c.Assert(json.Unmarshal(body, &containerJSON), check.IsNil) <ide> <del> c.Assert(*containerJSON.HostConfig.ShmSize, check.Equals, int64(1073741824)) <add> c.Assert(containerJSON.HostConfig.ShmSize, check.Equals, int64(1073741824)) <ide> <ide> out, _ := dockerCmd(c, "start", "-i", containerJSON.ID) <ide> shmRegex := regexp.MustCompile(`shm on /dev/shm type tmpfs(.*)size=1048576k`) <ide><path>runconfig/opts/parse.go <ide> func Parse(cmd *flag.FlagSet, args []string) (*container.Config, *container.Host <ide> return nil, nil, cmd, fmt.Errorf("Invalid value: %d. 
Valid memory swappiness range is 0-100", swappiness) <ide> } <ide> <del> var parsedShm *int64 <add> var shmSize int64 <ide> if *flShmSize != "" { <del> shmSize, err := units.RAMInBytes(*flShmSize) <add> shmSize, err = units.RAMInBytes(*flShmSize) <ide> if err != nil { <ide> return nil, nil, cmd, err <ide> } <del> parsedShm = &shmSize <ide> } <ide> <ide> var binds []string <ide> func Parse(cmd *flag.FlagSet, args []string) (*container.Config, *container.Host <ide> LogConfig: container.LogConfig{Type: *flLoggingDriver, Config: loggingOpts}, <ide> VolumeDriver: *flVolumeDriver, <ide> Isolation: container.IsolationLevel(*flIsolation), <del> ShmSize: parsedShm, <add> ShmSize: shmSize, <ide> Resources: resources, <ide> Tmpfs: tmpfs, <ide> } <ide><path>runconfig/opts/parse_test.go <ide> func TestParseModes(t *testing.T) { <ide> if err != nil { <ide> t.Fatal(err) <ide> } <del> if *hostconfig.ShmSize != 134217728 { <del> t.Fatalf("Expected a valid ShmSize, got %d", *hostconfig.ShmSize) <add> if hostconfig.ShmSize != 134217728 { <add> t.Fatalf("Expected a valid ShmSize, got %d", hostconfig.ShmSize) <ide> } <ide> } <ide>
14
PHP
PHP
use container call
8529ee504f8caf7abd3776e62a3029c1ea437e82
<ide><path>src/Illuminate/Routing/RouteServiceProvider.php <ide> class RouteServiceProvider extends ServiceProvider { <ide> */ <ide> public function boot() <ide> { <del> $this->before(); <add> $this->app->call([$this, 'before']); <ide> <ide> if ($this->app->routesAreCached()) <ide> { <ide> public function boot() <ide> } <ide> else <ide> { <del> $this->map(); <add> $this->app->call([$this, 'map']); <ide> } <ide> } <ide> <ide> public function boot() <ide> */ <ide> public function register() {} <ide> <del> /** <del> * Called before routes are registered. <del> * <del> * Register any model bindings or pattern based filters. <del> * <del> * @return void <del> */ <del> public function before() {} <del> <del> /** <del> * Define the routes for the application. <del> * <del> * @return void <del> */ <del> public function map() {} <del> <ide> /** <ide> * Register the given Closure with the "group" function namespace set. <ide> *
1
Go
Go
move httputils error helpers to errdefs package
2a9c987e5a72549775ffa4dc31595ceff4f06a78
<ide><path>api/server/httputils/errors_deprecated.go <add>package httputils // import "github.com/docker/docker/api/server/httputils" <add>import "github.com/docker/docker/errdefs" <add> <add>// GetHTTPErrorStatusCode retrieves status code from error message. <add>// <add>// Deprecated: use errdefs.GetHTTPErrorStatusCode <add>func GetHTTPErrorStatusCode(err error) int { <add> return errdefs.GetHTTPErrorStatusCode(err) <add>} <ide><path>api/server/httputils/httputils.go <ide> import ( <ide> "net/http" <ide> "strings" <ide> <add> "github.com/docker/docker/api/types" <add> "github.com/docker/docker/api/types/versions" <ide> "github.com/docker/docker/errdefs" <add> "github.com/gorilla/mux" <ide> "github.com/pkg/errors" <ide> "github.com/sirupsen/logrus" <add> "google.golang.org/grpc/status" <ide> ) <ide> <ide> // APIVersionKey is the client's requested API version. <ide> func VersionFromContext(ctx context.Context) string { <ide> return "" <ide> } <ide> <add>// MakeErrorHandler makes an HTTP handler that decodes a Docker error and <add>// returns it in the response. <add>func MakeErrorHandler(err error) http.HandlerFunc { <add> return func(w http.ResponseWriter, r *http.Request) { <add> statusCode := errdefs.GetHTTPErrorStatusCode(err) <add> vars := mux.Vars(r) <add> if apiVersionSupportsJSONErrors(vars["version"]) { <add> response := &types.ErrorResponse{ <add> Message: err.Error(), <add> } <add> WriteJSON(w, statusCode, response) <add> } else { <add> http.Error(w, status.Convert(err).Message(), statusCode) <add> } <add> } <add>} <add> <add>func apiVersionSupportsJSONErrors(version string) bool { <add> const firstAPIVersionWithJSONErrors = "1.23" <add> return version == "" || versions.GreaterThan(version, firstAPIVersionWithJSONErrors) <add>} <add> <ide> // matchesContentType validates the content type against the expected one <ide> func matchesContentType(contentType, expectedType string) bool { <ide> mimetype, _, err := mime.ParseMediaType(contentType) <ide><path>api/server/router/container/container_routes.go <ide> func (s *containerRouter) postContainersAttach(ctx context.Context, w http.Respo <ide> // Remember to close stream if error happens <ide> conn, _, errHijack := hijacker.Hijack() <ide> if errHijack == nil { <del> statusCode := httputils.GetHTTPErrorStatusCode(err) <add> statusCode := errdefs.GetHTTPErrorStatusCode(err) <ide> statusText := http.StatusText(statusCode) <ide> fmt.Fprintf(conn, "HTTP/1.1 %d %s\r\nContent-Type: application/vnd.docker.raw-stream\r\n\r\n%s\r\n", statusCode, statusText, err.Error()) <ide> httputils.CloseStreams(conn) <ide><path>api/server/server.go <ide> import ( <ide> "github.com/docker/docker/api/server/router" <ide> "github.com/docker/docker/api/server/router/debug" <ide> "github.com/docker/docker/dockerversion" <add> "github.com/docker/docker/errdefs" <ide> "github.com/gorilla/mux" <ide> "github.com/sirupsen/logrus" <ide> ) <ide> func (s *Server) makeHTTPHandler(handler httputils.APIFunc) http.HandlerFunc { <ide> } <ide> <ide> if err := handlerFunc(ctx, w, r, vars); err != nil { <del> statusCode := httputils.GetHTTPErrorStatusCode(err) <add> statusCode := errdefs.GetHTTPErrorStatusCode(err) <ide> if statusCode >= 500 { <ide> logrus.Errorf("Handler for %s %s returned error: %v", r.Method, r.URL.Path, err) <ide> } <ide><path>client/ping.go <ide> import ( <ide> "net/http" <ide> "path" <ide> <del> "github.com/docker/docker/api/server/httputils" <ide> "github.com/docker/docker/api/types" <add> "github.com/docker/docker/errdefs" <ide> ) <ide> <ide> // Ping 
pings the server and returns the value of the "Docker-Experimental", <ide> func parsePingResponse(cli *Client, resp serverResponse) (types.Ping, error) { <ide> var ping types.Ping <ide> if resp.header == nil { <ide> err := cli.checkResponseErr(resp) <del> return ping, httputils.FromStatusCode(err, resp.statusCode) <add> return ping, errdefs.FromStatusCode(err, resp.statusCode) <ide> } <ide> ping.APIVersion = resp.header.Get("API-Version") <ide> ping.OSType = resp.header.Get("OSType") <ide> func parsePingResponse(cli *Client, resp serverResponse) (types.Ping, error) { <ide> ping.BuilderVersion = types.BuilderVersion(bv) <ide> } <ide> err := cli.checkResponseErr(resp) <del> return ping, httputils.FromStatusCode(err, resp.statusCode) <add> return ping, errdefs.FromStatusCode(err, resp.statusCode) <ide> } <ide><path>client/request.go <ide> import ( <ide> "os" <ide> "strings" <ide> <del> "github.com/docker/docker/api/server/httputils" <ide> "github.com/docker/docker/api/types" <ide> "github.com/docker/docker/api/types/versions" <add> "github.com/docker/docker/errdefs" <ide> "github.com/pkg/errors" <ide> ) <ide> <ide> func (cli *Client) sendRequest(ctx context.Context, method, path string, query u <ide> } <ide> resp, err := cli.doRequest(ctx, req) <ide> if err != nil { <del> return resp, httputils.FromStatusCode(err, resp.statusCode) <add> return resp, errdefs.FromStatusCode(err, resp.statusCode) <ide> } <ide> err = cli.checkResponseErr(resp) <del> return resp, httputils.FromStatusCode(err, resp.statusCode) <add> return resp, errdefs.FromStatusCode(err, resp.statusCode) <ide> } <ide> <ide> func (cli *Client) doRequest(ctx context.Context, req *http.Request) (serverResponse, error) { <add><path>errdefs/http_helpers.go <del><path>api/server/httputils/errors.go <del>package httputils // import "github.com/docker/docker/api/server/httputils" <add>package errdefs // import "github.com/docker/docker/errdefs" <ide> <ide> import ( <ide> "fmt" <ide> "net/http" <ide> <ide> "github.com/docker/distribution/registry/api/errcode" <del> "github.com/docker/docker/api/types" <del> "github.com/docker/docker/api/types/versions" <del> "github.com/docker/docker/errdefs" <del> "github.com/gorilla/mux" <ide> "github.com/sirupsen/logrus" <ide> "google.golang.org/grpc/codes" <ide> "google.golang.org/grpc/status" <ide> ) <ide> <del>type causer interface { <del> Cause() error <del>} <del> <ide> // GetHTTPErrorStatusCode retrieves status code from error message. <ide> func GetHTTPErrorStatusCode(err error) int { <ide> if err == nil { <ide> func GetHTTPErrorStatusCode(err error) int { <ide> <ide> // Note that the below functions are already checking the error causal chain for matches. 
<ide> switch { <del> case errdefs.IsNotFound(err): <add> case IsNotFound(err): <ide> statusCode = http.StatusNotFound <del> case errdefs.IsInvalidParameter(err): <add> case IsInvalidParameter(err): <ide> statusCode = http.StatusBadRequest <del> case errdefs.IsConflict(err) || errdefs.IsAlreadyExists(err): <add> case IsConflict(err) || IsAlreadyExists(err): <ide> statusCode = http.StatusConflict <del> case errdefs.IsUnauthorized(err): <add> case IsUnauthorized(err): <ide> statusCode = http.StatusUnauthorized <del> case errdefs.IsUnavailable(err): <add> case IsUnavailable(err): <ide> statusCode = http.StatusServiceUnavailable <del> case errdefs.IsForbidden(err): <add> case IsForbidden(err): <ide> statusCode = http.StatusForbidden <del> case errdefs.IsNotModified(err): <add> case IsNotModified(err): <ide> statusCode = http.StatusNotModified <del> case errdefs.IsNotImplemented(err): <add> case IsNotImplemented(err): <ide> statusCode = http.StatusNotImplemented <del> case errdefs.IsSystem(err) || errdefs.IsUnknown(err) || errdefs.IsDataLoss(err) || errdefs.IsDeadline(err) || errdefs.IsCancelled(err): <add> case IsSystem(err) || IsUnknown(err) || IsDataLoss(err) || IsDeadline(err) || IsCancelled(err): <ide> statusCode = http.StatusInternalServerError <ide> default: <ide> statusCode = statusCodeFromGRPCError(err) <ide> func GetHTTPErrorStatusCode(err error) int { <ide> return statusCode <ide> } <ide> <del>// FromStatusCode creates an errdef error, based on the provided status-code <add>// FromStatusCode creates an errdef error, based on the provided HTTP status-code <ide> func FromStatusCode(err error, statusCode int) error { <ide> if err == nil { <ide> return err <ide> } <ide> switch statusCode { <ide> case http.StatusNotFound: <del> err = errdefs.NotFound(err) <add> err = NotFound(err) <ide> case http.StatusBadRequest: <del> err = errdefs.InvalidParameter(err) <add> err = InvalidParameter(err) <ide> case http.StatusConflict: <del> err = errdefs.Conflict(err) <add> err = Conflict(err) <ide> case http.StatusUnauthorized: <del> err = errdefs.Unauthorized(err) <add> err = Unauthorized(err) <ide> case http.StatusServiceUnavailable: <del> err = errdefs.Unavailable(err) <add> err = Unavailable(err) <ide> case http.StatusForbidden: <del> err = errdefs.Forbidden(err) <add> err = Forbidden(err) <ide> case http.StatusNotModified: <del> err = errdefs.NotModified(err) <add> err = NotModified(err) <ide> case http.StatusNotImplemented: <del> err = errdefs.NotImplemented(err) <add> err = NotImplemented(err) <ide> case http.StatusInternalServerError: <del> if !errdefs.IsSystem(err) && !errdefs.IsUnknown(err) && !errdefs.IsDataLoss(err) && !errdefs.IsDeadline(err) && !errdefs.IsCancelled(err) { <del> err = errdefs.System(err) <add> if !IsSystem(err) && !IsUnknown(err) && !IsDataLoss(err) && !IsDeadline(err) && !IsCancelled(err) { <add> err = System(err) <ide> } <ide> default: <ide> logrus.WithFields(logrus.Fields{ <ide> func FromStatusCode(err error, statusCode int) error { <ide> case statusCode >= 200 && statusCode < 400: <ide> // it's a client error <ide> case statusCode >= 400 && statusCode < 500: <del> err = errdefs.InvalidParameter(err) <add> err = InvalidParameter(err) <ide> case statusCode >= 500 && statusCode < 600: <del> err = errdefs.System(err) <add> err = System(err) <ide> default: <del> err = errdefs.Unknown(err) <add> err = Unknown(err) <ide> } <ide> } <ide> return err <ide> } <ide> <del>func apiVersionSupportsJSONErrors(version string) bool { <del> const firstAPIVersionWithJSONErrors = "1.23" 
<del> return version == "" || versions.GreaterThan(version, firstAPIVersionWithJSONErrors) <del>} <del> <del>// MakeErrorHandler makes an HTTP handler that decodes a Docker error and <del>// returns it in the response. <del>func MakeErrorHandler(err error) http.HandlerFunc { <del> return func(w http.ResponseWriter, r *http.Request) { <del> statusCode := GetHTTPErrorStatusCode(err) <del> vars := mux.Vars(r) <del> if apiVersionSupportsJSONErrors(vars["version"]) { <del> response := &types.ErrorResponse{ <del> Message: err.Error(), <del> } <del> WriteJSON(w, statusCode, response) <del> } else { <del> http.Error(w, status.Convert(err).Message(), statusCode) <del> } <del> } <del>} <del> <ide> // statusCodeFromGRPCError returns status code according to gRPC error <ide> func statusCodeFromGRPCError(err error) int { <ide> switch status.Code(err) { <add><path>errdefs/http_helpers_test.go <del><path>api/server/httputils/errors_test.go <del>package httputils <add>package errdefs <ide> <ide> import ( <ide> "fmt" <ide> "net/http" <ide> "testing" <ide> <del> "github.com/docker/docker/errdefs" <ide> "gotest.tools/assert" <ide> ) <ide> <ide> func TestFromStatusCode(t *testing.T) { <ide> { <ide> err: testErr, <ide> status: http.StatusNotFound, <del> check: errdefs.IsNotFound, <add> check: IsNotFound, <ide> }, <ide> { <ide> err: testErr, <ide> status: http.StatusBadRequest, <del> check: errdefs.IsInvalidParameter, <add> check: IsInvalidParameter, <ide> }, <ide> { <ide> err: testErr, <ide> status: http.StatusConflict, <del> check: errdefs.IsConflict, <add> check: IsConflict, <ide> }, <ide> { <ide> err: testErr, <ide> status: http.StatusUnauthorized, <del> check: errdefs.IsUnauthorized, <add> check: IsUnauthorized, <ide> }, <ide> { <ide> err: testErr, <ide> status: http.StatusServiceUnavailable, <del> check: errdefs.IsUnavailable, <add> check: IsUnavailable, <ide> }, <ide> { <ide> err: testErr, <ide> status: http.StatusForbidden, <del> check: errdefs.IsForbidden, <add> check: IsForbidden, <ide> }, <ide> { <ide> err: testErr, <ide> status: http.StatusNotModified, <del> check: errdefs.IsNotModified, <add> check: IsNotModified, <ide> }, <ide> { <ide> err: testErr, <ide> status: http.StatusNotImplemented, <del> check: errdefs.IsNotImplemented, <add> check: IsNotImplemented, <ide> }, <ide> { <ide> err: testErr, <ide> status: http.StatusInternalServerError, <del> check: errdefs.IsSystem, <add> check: IsSystem, <ide> }, <ide> { <del> err: errdefs.Unknown(testErr), <add> err: Unknown(testErr), <ide> status: http.StatusInternalServerError, <del> check: errdefs.IsUnknown, <add> check: IsUnknown, <ide> }, <ide> { <del> err: errdefs.DataLoss(testErr), <add> err: DataLoss(testErr), <ide> status: http.StatusInternalServerError, <del> check: errdefs.IsDataLoss, <add> check: IsDataLoss, <ide> }, <ide> { <del> err: errdefs.Deadline(testErr), <add> err: Deadline(testErr), <ide> status: http.StatusInternalServerError, <del> check: errdefs.IsDeadline, <add> check: IsDeadline, <ide> }, <ide> { <del> err: errdefs.Cancelled(testErr), <add> err: Cancelled(testErr), <ide> status: http.StatusInternalServerError, <del> check: errdefs.IsCancelled, <add> check: IsCancelled, <ide> }, <ide> } <ide>
8
Ruby
Ruby
fix `page_headers` method for multiple headers
10b5548eace342387afc97f5a809792dbd94b4b5
<ide><path>Library/Homebrew/livecheck/strategy.rb <ide> def from_url(url, livecheck_strategy: nil, regex_provided: nil) <ide> <ide> def self.page_headers(url) <ide> @headers ||= {} <del> @headers[url] ||= curl_output("--head", "--location", url).stdout <del> .split("\r\n\r\n", 2).first <del> .split("\r\n").drop(1) <del> .map { |header| header.split(/:\s*/, 2) } <del> .to_h.transform_keys(&:downcase) <add> <add> return @headers[url] if @headers.key?(url) <add> <add> stdout, _, status = curl_output( <add> "--head", "--request", "GET", "--silent", "--location", <add> "--connect-timeout", 5, "--retry-max-time", 15, "--max-time", 10, <add> url <add> ) <add> return {} unless status.success? <add> <add> headers = {} <add> <add> while stdout.match?(/\AHTTP.*\r$/) <add> h, stdout = stdout.split("\r\n\r\n", 2) <add> <add> headers = headers.merge(h.split("\r\n").drop(1) <add> .map { |header| header.split(/:\s*/, 2) } <add> .to_h.transform_keys(&:downcase)) <add> end <add> <add> @headers[url] = headers <ide> end <ide> <ide> def self.page_contents(url)
1
Python
Python
add v3 info to project docs [ci skip]
a4c51f0f18e0eea6d994b53de9571e07fa077ded
<ide><path>spacy/cli/project/document.py <ide> <ide> <ide> DOCS_URL = "https://nightly.spacy.io" <add>INTRO = f"""> ⚠️ This project template uses the new [**spaCy v3.0**]({DOCS_URL}), which <add>> is currently available as a nightly pre-release. You can install it from pip as `spacy-nightly`: <add>> `pip install spacy-nightly`. Make sure to use a fresh virtual environment.""" <ide> INTRO_PROJECT = f"""The [`{PROJECT_FILE}`]({PROJECT_FILE}) defines the data assets required by the <ide> project, as well as the available commands and workflows. For details, see the <ide> [spaCy projects documentation]({DOCS_URL}/usage/projects).""" <ide> def project_document( <ide> title = config.get("title") <ide> description = config.get("description") <ide> md.add(md.title(1, f"spaCy Project{f': {title}' if title else ''}", "🪐")) <add> md.add(INTRO) <ide> if description: <ide> md.add(description) <ide> md.add(md.title(2, PROJECT_FILE, "📋"))
1
PHP
PHP
remove check for items in file cache driver
de19b8db40190804e0703c3c8254cd39c9961fd1
<ide><path>system/cache/driver/file.php <ide> public function has($key) <ide> */ <ide> public function get($key) <ide> { <del> if (array_key_exists($key, $this->items)) <del> { <del> return $this->items[$key]; <del> } <del> <ide> if ( ! file_exists(APP_PATH.'storage/cache/'.$key)) <ide> { <ide> return null;
1
Go
Go
handle push/pull of repositories
11c4294846b55914ec5c5b475012a2234be9a9ff
<ide><path>auth/auth.go <ide> import ( <ide> const CONFIGFILE = "/var/lib/docker/.dockercfg" <ide> <ide> // the registry server we want to login against <del>const REGISTRY_SERVER = "https://registry.docker.io" <add>//const REGISTRY_SERVER = "https://registry.docker.io" <add>const REGISTRY_SERVER = "http://192.168.56.1:5000" <ide> <ide> type AuthConfig struct { <ide> Username string `json:"username"` <ide><path>commands.go <ide> func (srv *Server) CmdImport(stdin io.ReadCloser, stdout io.Writer, args ...stri <ide> } <ide> <ide> func (srv *Server) CmdPush(stdin io.ReadCloser, stdout io.Writer, args ...string) error { <del> cmd := rcli.Subcmd(stdout, "push", "[OPTIONS] IMAGE", "Push an image to the registry") <add> cmd := rcli.Subcmd(stdout, "push", "[OPTIONS] IMAGE", "Push an image or a repository to the registry") <add> user := cmd.String("u", "", "specify the user for the repository") <ide> if err := cmd.Parse(args); err != nil { <ide> return nil <ide> } <ide> func (srv *Server) CmdPush(stdin io.ReadCloser, stdout io.Writer, args ...string <ide> return nil <ide> } <ide> <add> // Try to get the image <add> // FIXME: Handle lookup <add> // FIXME: Also push the tags in case of ./docker push myrepo:mytag <add> // img, err := srv.runtime.LookupImage(cmd.Arg(0)) <ide> img, err := srv.runtime.graph.Get(cmd.Arg(0)) <ide> if err != nil { <del> return err <add> if *user == "" { <add> return fmt.Errorf("Not logged in and no user specified\n") <add> } <add> // If it fails, try to get the repository <add> if repo, exists := srv.runtime.repositories.Repositories[cmd.Arg(0)]; exists { <add> if err := srv.runtime.graph.PushRepository(*user, cmd.Arg(0), repo); err != nil { <add> return err <add> } <add> } else { <add> return err <add> } <add> return nil <ide> } <del> // FIXME: Handle repositories, etc. Not jist images <ide> return srv.runtime.graph.PushImage(img) <ide> } <ide> <ide> func (srv *Server) CmdPull(stdin io.ReadCloser, stdout io.Writer, args ...string) error { <del> cmd := rcli.Subcmd(stdout, "pull", "[OPTIONS] IMAGE", "Pull an image from the registry") <add> cmd := rcli.Subcmd(stdout, "pull", "[OPTIONS] IMAGE", "Pull an image or a repository from the registry") <add> user := cmd.String("u", "", "specify the user for the repository") <ide> if err := cmd.Parse(args); err != nil { <ide> return nil <ide> } <ide> if cmd.NArg() == 0 { <ide> cmd.Usage() <ide> return nil <ide> } <del> // FIXME: Handle repositories, etc. 
Not jist images <del> return srv.runtime.graph.PullImage(cmd.Arg(0)) <add> <add> if srv.runtime.graph.LookupRemoteImage(cmd.Arg(0)) { <add> return srv.runtime.graph.PullImage(cmd.Arg(0)) <add> } <add> if *user == "" { <add> return fmt.Errorf("Not loggin and no user specified\n") <add> } <add> // FIXME: Allow pull repo:tag <add> return srv.runtime.graph.PullRepository(*user, cmd.Arg(0), "", srv.runtime.repositories) <ide> } <ide> <ide> func (srv *Server) CmdImages(stdin io.ReadCloser, stdout io.Writer, args ...string) error { <ide><path>registry.go <ide> package docker <ide> import ( <ide> "encoding/json" <ide> "fmt" <add> "github.com/dotcloud/docker/auth" <ide> "io" <ide> "io/ioutil" <ide> "net/http" <ide> import ( <ide> <ide> //FIXME: Set the endpoint in a conf file or via commandline <ide> //const REGISTRY_ENDPOINT = "http://registry-creack.dotcloud.com/v1" <del>const REGISTRY_ENDPOINT = "http://192.168.56.1:5000/v1" <add>const REGISTRY_ENDPOINT = auth.REGISTRY_SERVER + "/v1" <ide> <ide> // Build an Image object from raw json data <ide> func NewImgJson(src []byte) (*Image, error) { <ide> func (graph *Graph) getRemoteHistory(imgId string) ([]*Image, error) { <ide> return history, nil <ide> } <ide> <add>// Check if an image exists in the Registry <add>func (graph *Graph) LookupRemoteImage(imgId string) bool { <add> res, err := http.Get(REGISTRY_ENDPOINT + "/images/" + imgId + "/json") <add> if err != nil { <add> return false <add> } <add> return res.StatusCode == 307 <add>} <add> <ide> // Retrieve an image from the Registry. <ide> // Returns the Image object as well as the layer as an Archive (io.Reader) <ide> func (graph *Graph) getRemoteImage(imgId string) (*Image, Archive, error) { <ide> func (graph *Graph) PullImage(imgId string) error { <ide> return nil <ide> } <ide> <add>// FIXME: Handle the askedTag parameter <add>func (graph *Graph) PullRepository(user, repoName, askedTag string, repositories *TagStore) error { <add> client := &http.Client{} <add> <add> req, err := http.NewRequest("GET", REGISTRY_ENDPOINT+"/users/"+user+"/"+repoName, nil) <add> if err != nil { <add> return err <add> } <add> authStruct, err := auth.LoadConfig() <add> if err != nil { <add> return err <add> } <add> <add> req.SetBasicAuth(authStruct.Username, authStruct.Password) <add> res, err := client.Do(req) <add> if err != nil { <add> return err <add> } <add> rawJson, err := ioutil.ReadAll(res.Body) <add> if err != nil { <add> return err <add> } <add> t := map[string]string{} <add> if err = json.Unmarshal(rawJson, &t); err != nil { <add> return err <add> } <add> for tag, rev := range t { <add> if err = graph.PullImage(rev); err != nil { <add> return err <add> } <add> if err = repositories.Set(repoName, tag, rev); err != nil { <add> return err <add> } <add> } <add> if err = repositories.Save(); err != nil { <add> return err <add> } <add> return nil <add>} <add> <ide> // Push a local image to the registry with its history if needed <ide> func (graph *Graph) PushImage(imgOrig *Image) error { <ide> client := &http.Client{} <ide> func (graph *Graph) PushImage(imgOrig *Image) error { <ide> if err != nil { <ide> return fmt.Errorf("Error while retreiving the path for {%s}: %s", img.Id, err) <ide> } <del> // FIXME: try json with URF8 <add> // FIXME: try json with UTF8 <ide> jsonData := strings.NewReader(string(jsonRaw)) <ide> req, err := http.NewRequest("PUT", REGISTRY_ENDPOINT+"/images/"+img.Id+"/json", jsonData) <add> if err != nil { <add> return err <add> } <ide> res, err := client.Do(req) <ide> if err != nil || 
res.StatusCode != 200 { <ide> if res == nil { <ide> func (graph *Graph) PushImage(imgOrig *Image) error { <ide> } <ide> return nil <ide> } <add> <add>func (graph *Graph) pushTag(user, repo, revision, tag string) error { <add> <add> if tag == "" { <add> tag = "lastest" <add> } <add> <add> revision = "\"" + revision + "\"" <add> <add> client := &http.Client{} <add> req, err := http.NewRequest("PUT", REGISTRY_ENDPOINT+"/users/"+user+"/"+repo+"/"+tag, strings.NewReader(revision)) <add> req.Header.Add("Content-type", "application/json") <add> res, err := client.Do(req) <add> if err != nil { <add> return err <add> } <add> fmt.Printf("Result of push tag: %d\n", res.StatusCode) <add> switch res.StatusCode { <add> default: <add> return fmt.Errorf("Error %d\n", res.StatusCode) <add> case 200: <add> case 201: <add> } <add> return nil <add>} <add> <add>func (graph *Graph) PushRepository(user, repoName string, repo Repository) error { <add> for tag, imgId := range repo { <add> fmt.Printf("tag: %s, imgId: %s\n", tag, imgId) <add> img, err := graph.Get(imgId) <add> if err != nil { <add> return err <add> } <add> if err = graph.PushImage(img); err != nil { <add> return err <add> } <add> if err = graph.pushTag(user, repoName, imgId, tag); err != nil { <add> return err <add> } <add> } <add> return nil <add>}
3
PHP
PHP
`cache:clear` command fails
ad670c8423a5fdfc929644f63db1b7ca39a8ba59
<ide><path>src/Illuminate/Cache/Console/ClearCommand.php <ide> public function handle() <ide> 'cache:clearing', [$this->argument('store'), $this->tags()] <ide> ); <ide> <del> $this->cache()->flush(); <add> if (! $this->cache()->flush()) { <add> return $this->error('Failed to clear cache. Make sure you have appropriate rights.'); <add> } <ide> <ide> $this->flushFacades(); <ide>
1
Ruby
Ruby
remove duplicates after autoloading modules
d92fb27885ddcb0a92ac67f69bf0eb8c912f4dc7
<ide><path>activesupport/lib/active_support/dependencies.rb <ide> def autoload_module!(into, const_name, qualified_name, path_suffix) <ide> mod = Module.new <ide> into.const_set const_name, mod <ide> autoloaded_constants << qualified_name unless autoload_once_paths.include?(base_path) <add> autoloaded_constants.uniq! <ide> mod <ide> end <ide>
1
Javascript
Javascript
fix incorrect copy-paste in test
08e69f65b44f5e5501c796be6f7b272441ebd0e6
<ide><path>packages/react-dom/src/events/__tests__/DOMPluginEventSystem-test.internal.js <ide> describe('DOMPluginEventSystem', () => { <ide> <div ref={middleDivRef}> <ide> <div <ide> ref={divRef} <del> onClick={onFocus} <del> onClickCapture={onFocusCapture} <add> onFocus={onFocus} <add> onFocusCapture={onFocusCapture} <ide> tabIndex={0}> <ide> Click me! <ide> </div> <ide> describe('DOMPluginEventSystem', () => { <ide> React.useLayoutEffect(() => { <ide> // This should prevent the portalElement listeners from <ide> // capturing the events in the bubble phase. <del> middleDivRef.current.addEventListener('click', e => { <add> middleDivRef.current.addEventListener('focusin', e => { <ide> e.stopPropagation(); <ide> }); <ide> }); <ide> describe('DOMPluginEventSystem', () => { <ide> const divElement = divRef.current; <ide> divElement.focus(); <ide> expect(onFocus).toHaveBeenCalledTimes(1); <del> expect(onFocusCapture).toHaveBeenCalledTimes(1); <add> expect(onFocusCapture).toHaveBeenCalledTimes(3); <ide> <ide> document.body.removeChild(portalElement); <ide> });
1
Text
Text
limit markdown file to 80 chars per line
84df9351b07f2384ee0f49cce8a789f1adbccbac
<ide><path>official/vision/beta/projects/yolo/README.md <ide> [![Paper](http://img.shields.io/badge/Paper-arXiv.1804.02767-B3181B?logo=arXiv)](https://arxiv.org/abs/1804.02767) <ide> [![Paper](http://img.shields.io/badge/Paper-arXiv.2004.10934-B3181B?logo=arXiv)](https://arxiv.org/abs/2004.10934) <ide> <del>This repository is the unofficial implementation of the following papers. However, we spent painstaking hours ensuring that every aspect that we constructed was the exact same as the original paper and the original repository. <add>This repository is the unofficial implementation of the following papers. <add>However, we spent painstaking hours ensuring that every aspect that we <add>constructed was the exact same as the original paper and the original <add>repository. <ide> <ide> * YOLOv3: An Incremental Improvement: [YOLOv3: An Incremental Improvement](https://arxiv.org/abs/1804.02767) <ide> <ide> This repository is the unofficial implementation of the following papers. Howeve <ide> <ide> > * need to add run code for demos <ide> <del>Yolo v1 the original implementation was released in 2015 providing a ground breaking algorithm that would quickly process images, and locate objects in a single pass through the detector. The original implementation based used a backbone derived from state of the art object classifier of the time, like [GoogLeNet](https://arxiv.org/abs/1409.4842) and [VGG](https://arxiv.org/abs/1409.1556). More attention was given to the novel Yolo Detection head that allowed for Object Detection with a single pass of an image. Though limited, the network could predict up to 90 bounding boxes per image, and was tested for about 80 classes per box. Also, the model could only make prediction at one scale. These attributes caused yolo v1 to be more limited, and less versatile, so as the year passed, the Developers continued to update and develop this model. <del> <del>Yolo v3 and v4 serve as the most up to date and capable versions of the Yolo network group. These model uses a custom backbone called Darknet53 that uses knowledge gained from the ResNet paper to improve its predictions. The new backbone also allows for objects to be detected at multiple scales. As for the new detection head, the model now predicts the bounding boxes using a set of anchor box priors (Anchor Boxes) as suggestions. The multiscale predictions in combination with the Anchor boxes allows for the network to make up to 1000 object predictions on a single image. Finally, the new loss function forces the network to make better prediction by using Intersection Over Union (IOU) to inform the model's confidence rather than relying on the mean squared error for the entire output. <add>Yolo v1 the original implementation was released in 2015 providing a ground <add>breaking algorithm that would quickly process images, and locate objects in a <add>single pass through the detector. The original implementation based used a <add>backbone derived from state of the art object classifier of the time, like <add>[GoogLeNet](https://arxiv.org/abs/1409.4842) and <add>[VGG](https://arxiv.org/abs/1409.1556). More attention was given to the novel <add>Yolo Detection head that allowed for Object Detection with a single pass of an <add>image. Though limited, the network could predict up to 90 bounding boxes per <add>image, and was tested for about 80 classes per box. Also, the model could only <add>make prediction at one scale. 
These attributes caused yolo v1 to be more <add>limited, and less versatile, so as the year passed, the Developers continued to <add>update and develop this model. <add> <add>Yolo v3 and v4 serve as the most up to date and capable versions of the Yolo <add>network group. These model uses a custom backbone called Darknet53 that uses <add>knowledge gained from the ResNet paper to improve its predictions. The new <add>backbone also allows for objects to be detected at multiple scales. As for the <add>new detection head, the model now predicts the bounding boxes using a set of <add>anchor box priors (Anchor Boxes) as suggestions. The multiscale predictions in <add>combination with the Anchor boxes allows for the network to make up to 1000 <add>object predictions on a single image. Finally, the new loss function forces the <add>network to make better prediction by using Intersection Over Union (IOU) to <add>inform the model's confidence rather than relying on the mean squared error for <add>the entire output. <ide> <ide> ## Authors <ide> <ide> Yolo v3 and v4 serve as the most up to date and capable versions of the Yolo net <ide> <ide> ## Our Goal <ide> <del>Our goal with this model conversion is to provide implementations of the Backbone and Yolo Head. We have built the model in such a way that the Yolo head could be connected to a new, more powerful backbone if a person chose to. <add>Our goal with this model conversion is to provide implementations of the <add>Backbone and Yolo Head. We have built the model in such a way that the Yolo <add>head could be connected to a new, more powerful backbone if a person chose to. <ide> <ide> ## Models in the library <ide>
1
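The README prose in the YOLO record above leans on Intersection over Union (IoU) as the quantity that informs box confidence in the v3/v4 loss. As a reader aid, here is a minimal sketch of the plain IoU computation; the function and variable names are illustrative only and are not taken from the YOLO codebase or the patch above.

```python
def iou(box_a, box_b):
    # Boxes are [x_min, y_min, x_max, y_max]; corners of the intersection rectangle:
    x1, y1 = max(box_a[0], box_b[0]), max(box_a[1], box_b[1])
    x2, y2 = min(box_a[2], box_b[2]), min(box_a[3], box_b[3])
    inter = max(0.0, x2 - x1) * max(0.0, y2 - y1)
    # Union = area(A) + area(B) - intersection.
    area_a = (box_a[2] - box_a[0]) * (box_a[3] - box_a[1])
    area_b = (box_b[2] - box_b[0]) * (box_b[3] - box_b[1])
    return inter / (area_a + area_b - inter)

print(iou([0, 0, 2, 2], [1, 1, 3, 3]))  # overlap 1 over union 7 -> ~0.143
```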
Python
Python
move get_non_empty_box_indices to box_utils
48693cad45ed57441ee2b4a3f77f355102e2b9ee
<ide><path>official/vision/detection/dataloader/maskrcnn_parser.py <ide> def _parse_train_data(self, data): <ide> boxes, image_scale, (image_height, image_width), offset) <ide> <ide> # Filters out ground truth boxes that are all zeros. <del> indices = input_utils.get_non_empty_box_indices(boxes) <add> indices = box_utils.get_non_empty_box_indices(boxes) <ide> boxes = tf.gather(boxes, indices) <ide> classes = tf.gather(classes, indices) <ide> if self._include_mask: <ide><path>official/vision/detection/dataloader/retinanet_parser.py <ide> def _parse_train_data(self, data): <ide> boxes = input_utils.resize_and_crop_boxes( <ide> boxes, image_scale, (image_height, image_width), offset) <ide> # Filters out ground truth boxes that are all zeros. <del> indices = input_utils.get_non_empty_box_indices(boxes) <add> indices = box_utils.get_non_empty_box_indices(boxes) <ide> boxes = tf.gather(boxes, indices) <ide> classes = tf.gather(classes, indices) <ide> <ide> def _parse_eval_data(self, data): <ide> boxes = input_utils.resize_and_crop_boxes( <ide> boxes, image_scale, (image_height, image_width), offset) <ide> # Filters out ground truth boxes that are all zeros. <del> indices = input_utils.get_non_empty_box_indices(boxes) <add> indices = box_utils.get_non_empty_box_indices(boxes) <ide> boxes = tf.gather(boxes, indices) <ide> classes = tf.gather(classes, indices) <ide> <ide> def _parse_predict_data(self, data): <ide> boxes = input_utils.resize_and_crop_boxes( <ide> boxes, image_scale, (image_height, image_width), offset) <ide> # Filters out ground truth boxes that are all zeros. <del> indices = input_utils.get_non_empty_box_indices(boxes) <add> indices = box_utils.get_non_empty_box_indices(boxes) <ide> boxes = tf.gather(boxes, indices) <ide> <ide> # Assigns anchors. <ide><path>official/vision/detection/dataloader/shapemask_parser.py <ide> def _parse_train_data(self, data): <ide> boxes, image_scale, self._output_size, offset) <ide> <ide> # Filters out ground truth boxes that are all zeros. <del> indices = input_utils.get_non_empty_box_indices(boxes) <add> indices = box_utils.get_non_empty_box_indices(boxes) <ide> boxes = tf.gather(boxes, indices) <ide> classes = tf.gather(classes, indices) <ide> masks = tf.gather(masks, indices) <ide> def _parse_predict_data(self, data): <ide> tf.expand_dims(masks, axis=-1), image_scale, self._output_size, offset) <ide> <ide> # Filters out ground truth boxes that are all zeros. <del> indices = input_utils.get_non_empty_box_indices(boxes) <add> indices = box_utils.get_non_empty_box_indices(boxes) <ide> boxes = tf.gather(boxes, indices) <ide> classes = tf.gather(classes, indices) <ide> <ide><path>official/vision/detection/utils/box_utils.py <ide> def bbox_overlap(boxes, gt_boxes): <ide> iou = tf.where(padding_mask, -tf.ones_like(iou), iou) <ide> <ide> return iou <add> <add> <add>def get_non_empty_box_indices(boxes): <add> """Get indices for non-empty boxes.""" <add> # Selects indices if box height or width is 0. 
<add> height = boxes[:, 2] - boxes[:, 0] <add> width = boxes[:, 3] - boxes[:, 1] <add> indices = tf.where(tf.logical_and(tf.greater(height, 0), <add> tf.greater(width, 0))) <add> return indices[:, 0] <ide><path>official/vision/detection/utils/input_utils.py <ide> def resize_and_crop_masks(masks, <ide> def random_horizontal_flip(image, boxes=None, masks=None): <ide> """Randomly flips input image and bounding boxes.""" <ide> return preprocessor.random_horizontal_flip(image, boxes, masks) <del> <del> <del>def get_non_empty_box_indices(boxes): <del> """Get indices for non-empty boxes.""" <del> # Selects indices if box height or width is 0. <del> height = boxes[:, 2] - boxes[:, 0] <del> width = boxes[:, 3] - boxes[:, 1] <del> indices = tf.where(tf.logical_and(tf.greater(height, 0), <del> tf.greater(width, 0))) <del> return indices[:, 0]
5
Javascript
Javascript
fix scripting test related to keystroke event
880ac6037c91b62b2c6c75b0112eefc0540c5bab
<ide><path>test/unit/scripting_spec.js <ide> describe("Scripting", function () { <ide> value: "3F?", <ide> change: "0", <ide> name: "Keystroke", <del> willCommit: true, <add> willCommit: false, <ide> selStart: 3, <ide> selEnd: 3, <ide> }); <ide> expect(send_queue.has(refId)).toEqual(false); <add> <add> await sandbox.dispatchEventInSandbox({ <add> id: refId, <add> value: "3F?0", <add> name: "Keystroke", <add> willCommit: true, <add> selStart: 4, <add> selEnd: 4, <add> }); <add> expect(send_queue.has(refId)).toEqual(true); <add> expect(send_queue.get(refId)).toEqual({ <add> id: refId, <add> valueAsString: "3F?0", <add> }); <ide> }); <ide> }); <ide>
1
Javascript
Javascript
fix redundant styles on refreshcontrol for android
39c18186e1a5bcc4cc5c5f42a9193c1a8b86cd30
<ide><path>Libraries/Components/ScrollView/ScrollView.js <ide> const ScrollView = React.createClass({ <ide> // On Android wrap the ScrollView with a AndroidSwipeRefreshLayout. <ide> // Since the ScrollView is wrapped add the style props to the <ide> // AndroidSwipeRefreshLayout and use flex: 1 for the ScrollView. <add> // Note: we should only apply props.style on the wrapper <add> // however, the ScrollView still needs the baseStyle to be scrollable <add> <ide> return React.cloneElement( <ide> refreshControl, <ide> {style: props.style}, <del> <ScrollViewClass {...props} ref={this._setScrollViewRef}> <add> <ScrollViewClass {...props} style={baseStyle} ref={this._setScrollViewRef}> <ide> {contentContainer} <ide> </ScrollViewClass> <ide> );
1
Javascript
Javascript
add missing article
c414ece5cf9749dd6924b4b00cb5773d2d7088bb
<ide><path>src/ng/sce.js <ide> function $SceDelegateProvider() { <ide> * You can ensure your document is in standards mode and not quirks mode by adding `<!doctype html>` <ide> * to the top of your HTML document. <ide> * <del> * SCE assists in writing code in way that (a) is secure by default and (b) makes auditing for <add> * SCE assists in writing code in a way that (a) is secure by default and (b) makes auditing for <ide> * security vulnerabilities such as XSS, clickjacking, etc. a lot easier. <ide> * <ide> * Here's an example of a binding in a privileged context:
1
Text
Text
remove outdated documentation for completion (#99)
e0264302c7cca1b6b472aba912bfe100489bd698
<ide><path>share/doc/homebrew/Tips-N'-Tricks.md <ide> Use `brew info $FORMULA` to check what versions are installed but not currently <ide> ./configure --prefix=/usr/local/Cellar/foo/1.2 && make && make install && brew link foo <ide> ``` <ide> <del>## Command tab-completion <del> <del>### Bash <del>Add to your `~/.bashrc` or `~/.bash_profile` (whichever you have configured to run on shell startup): <del> <del>```bash <del>source $(brew --repository)/Library/Contributions/brew_bash_completion.sh <del>``` <del> <del>### Zsh <del>Run in terminal (may require `sudo`): <del> <del>```zsh <del>ln -s "$(brew --prefix)/Library/Contributions/brew_zsh_completion.zsh" /usr/local/share/zsh/site-functions/_brew <del>``` <del> <ide> ## Pre-downloading a file for a formula <ide> <ide> Sometimes it's faster to download a file via means other than those
1
Ruby
Ruby
relocate virtualenv orig-prefix.txt
22d3a67b73214727bf2deed06f90799e688c6af0
<ide><path>Library/Homebrew/keg_relocate.rb <ide> def text_files <ide> files = Set.new path.find.reject { |pn| <ide> next true if pn.symlink? <ide> next true if pn.directory? <add> next false if pn.basename.to_s == "orig-prefix.txt" # for python virtualenvs <ide> next true if Metafiles::EXTENSIONS.include?(pn.extname) <ide> if pn.text_executable? <ide> text_files << pn
1
Text
Text
fix typo in doc
3af40ba58457e5000fe0c2c07a6eb99fa27dc4b3
<ide><path>docs/sources/layers/core.md <ide> keras.layers.core.ActivityRegularization(l1=0., l2=0.) <ide> <ide> Leaves the input unchanged, but adds a term to the loss function based on the input activity. L1 and L2 regularization supported. <ide> <del>This layer can be use, for instance, to induce activation sparsity in the previous layer. <add>This layer can be used, for instance, to induce activation sparsity in the previous layer. <ide> <ide> --- <ide>
1
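The docstring fix above concerns Keras' ActivityRegularization layer, which passes its input through unchanged while adding an L1/L2 penalty on the activations to the loss. A minimal usage sketch follows, written against the tf.keras API rather than the older keras.layers.core path shown in the doc; the layer sizes and penalty weights are arbitrary illustrations.

```python
import tensorflow as tf

model = tf.keras.Sequential([
    tf.keras.layers.Dense(64, activation="relu", input_shape=(32,)),
    # Identity on the forward pass; adds l1 * sum(|activations|) (plus an l2 term
    # if set) to the model loss, nudging the Dense activations toward sparsity.
    tf.keras.layers.ActivityRegularization(l1=1e-4, l2=0.0),
    tf.keras.layers.Dense(10, activation="softmax"),
])
model.compile(optimizer="adam", loss="categorical_crossentropy")
```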
Python
Python
fix mypy errors for databricks provider.
cad39274d9a8eceba2845dc39e8c870959746478
<ide><path>airflow/providers/databricks/hooks/databricks.py <ide> """ <ide> import time <ide> from time import sleep <add>from typing import Dict <ide> from urllib.parse import urlparse <ide> <ide> import requests <ide> def __init__( <ide> ) -> None: <ide> super().__init__() <ide> self.databricks_conn_id = databricks_conn_id <del> self.databricks_conn = None <ide> self.timeout_seconds = timeout_seconds <ide> if retry_limit < 1: <ide> raise ValueError('Retry limit must be greater than equal to 1') <ide> self.retry_limit = retry_limit <ide> self.retry_delay = retry_delay <del> self.aad_tokens = {} <add> self.aad_tokens: Dict[str, dict] = {} <ide> self.aad_timeout_seconds = 10 <add> self.databricks_conn = self.get_connection(self.databricks_conn_id) <add> if 'host' in self.databricks_conn.extra_dejson: <add> self.host = self._parse_host(self.databricks_conn.extra_dejson['host']) <add> else: <add> self.host = self._parse_host(self.databricks_conn.host) <ide> <ide> @staticmethod <ide> def _parse_host(host: str) -> str: <ide> def _get_aad_token(self, resource: str) -> str: <ide> f'Response: {e.response.content}, Status Code: {e.response.status_code}' <ide> ) <ide> <del> self._log_request_error(attempt_num, e) <add> self._log_request_error(attempt_num, e.strerror) <ide> <ide> if attempt_num == self.retry_limit: <ide> raise AirflowException(f'API requests to Azure failed {self.retry_limit} times. Giving up.') <ide> def _do_api_call(self, endpoint_info, json): <ide> """ <ide> method, endpoint = endpoint_info <ide> <del> if self.databricks_conn is None: <del> self.databricks_conn = self.get_connection(self.databricks_conn_id) <del> <del> if 'host' in self.databricks_conn.extra_dejson: <del> self.host = self._parse_host(self.databricks_conn.extra_dejson['host']) <del> else: <del> self.host = self._parse_host(self.databricks_conn.host) <del> <ide> url = f'https://{self.host}/{endpoint}' <ide> <ide> aad_headers = self._get_aad_headers()
1
Ruby
Ruby
use one 'be'
9ff2e928ef90411c5bd5dc6a61dfe735f3002cb1
<ide><path>activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb <ide> def create_table(table_name, options = {}) <ide> end <ide> <ide> # Creates a new join table with the name created using the lexical order of the first two <del> # arguments. These arguments can be be a String or a Symbol. <add> # arguments. These arguments can be a String or a Symbol. <ide> # <ide> # # Creates a table called 'assemblies_parts' with no id. <ide> # create_join_table(:assemblies, :parts)
1
PHP
PHP
add typehints to collection classes
a0c8826dafa33b8db980d5a1e8e166973fcf8762
<ide><path>src/Collection/Collection.php <ide> public function unserialize($collection) <ide> * <ide> * @return int <ide> */ <del> public function count() <add> public function count(): int <ide> { <ide> $traversable = $this->optimizeUnwrap(); <ide> <ide> public function count() <ide> * <ide> * @return int <ide> */ <del> public function countKeys() <add> public function countKeys(): int <ide> { <ide> return count($this->toArray()); <ide> } <ide><path>src/Collection/CollectionInterface.php <ide> <ide> use Iterator; <ide> use JsonSerializable; <add>use Traversable; <ide> <ide> /** <ide> * Describes the methods a Collection should implement. A collection is an immutable <ide> interface CollectionInterface extends Iterator, JsonSerializable <ide> * in this collection <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function each(callable $c); <add> public function each(callable $c): CollectionInterface; <ide> <ide> /** <ide> * Looks through each value in the collection, and returns another collection with <ide> public function each(callable $c); <ide> * If left null, a callback that filters out falsey values will be used. <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function filter(callable $c = null); <add> public function filter(callable $c = null): CollectionInterface; <ide> <ide> /** <ide> * Looks through each value in the collection, and returns another collection with <ide> public function filter(callable $c = null); <ide> * returns true whether or not they should be out of the resulting collection. <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function reject(callable $c); <add> public function reject(callable $c): CollectionInterface; <ide> <ide> /** <ide> * Returns true if all values in this collection pass the truth test provided <ide> public function reject(callable $c); <ide> * @return bool true if for all elements in this collection the provided <ide> * callback returns true, false otherwise. <ide> */ <del> public function every(callable $c); <add> public function every(callable $c): bool; <ide> <ide> /** <ide> * Returns true if any of the values in this collection pass the truth test <ide> public function every(callable $c); <ide> * @return bool true if the provided callback returns true for any element in this <ide> * collection, false otherwise <ide> */ <del> public function some(callable $c); <add> public function some(callable $c): bool; <ide> <ide> /** <ide> * Returns true if $value is present in this collection. Comparisons are made <ide> public function some(callable $c); <ide> * @param mixed $value The value to check for <ide> * @return bool true if $value is present in this collection <ide> */ <del> public function contains($value); <add> public function contains($value): bool; <ide> <ide> /** <ide> * Returns another collection after modifying each of the values in this one using <ide> public function contains($value); <ide> * returns the new value for the key that is being iterated <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function map(callable $c); <add> public function map(callable $c): CollectionInterface; <ide> <ide> /** <ide> * Folds the values in this collection to a single value, as the result of <ide> public function reduce(callable $c, $zero = null); <ide> * inside the hierarchy of each value so that the column can be extracted. 
<ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function extract($matcher); <add> public function extract($matcher): CollectionInterface; <ide> <ide> /** <ide> * Returns the top element in this collection after being sorted by a property. <ide> public function median($matcher = null); <ide> * SORT_NUMERIC or SORT_NATURAL <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function sortBy($callback, $dir = SORT_DESC, $type = \SORT_NUMERIC); <add> public function sortBy($callback, $dir = SORT_DESC, $type = \SORT_NUMERIC): CollectionInterface; <ide> <ide> /** <ide> * Splits a collection into sets, grouped by the result of running each value <ide> public function sortBy($callback, $dir = SORT_DESC, $type = \SORT_NUMERIC); <ide> * or a function returning the grouping key out of the provided element <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function groupBy($callback); <add> public function groupBy($callback): CollectionInterface; <ide> <ide> /** <ide> * Given a list and a callback function that returns a key for each element <ide> public function groupBy($callback); <ide> * or a function returning the indexing key out of the provided element <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function indexBy($callback); <add> public function indexBy($callback): CollectionInterface; <ide> <ide> /** <ide> * Sorts a list into groups and returns a count for the number of elements <ide> public function indexBy($callback); <ide> * or a function returning the indexing key out of the provided element <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function countBy($callback); <add> public function countBy($callback): CollectionInterface; <ide> <ide> /** <ide> * Returns the total sum of all the values extracted with $matcher <ide> public function sumOf($matcher = null); <ide> * <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function shuffle(); <add> public function shuffle(): CollectionInterface; <ide> <ide> /** <ide> * Returns a new collection with maximum $size random elements <ide> public function shuffle(); <ide> * take from this collection <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function sample($size = 10); <add> public function sample($size = 10): CollectionInterface; <ide> <ide> /** <ide> * Returns a new collection with maximum $size elements in the internal <ide> public function sample($size = 10); <ide> * @param int $from A positional offset from where to take the elements <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function take($size = 1, $from = 0); <add> public function take($size = 1, $from = 0): CollectionInterface; <ide> <ide> /** <ide> * Returns a new collection that will skip the specified amount of elements <ide> public function take($size = 1, $from = 0); <ide> * @param int $howMany The number of elements to skip. 
<ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function skip($howMany); <add> public function skip($howMany): CollectionInterface; <ide> <ide> /** <ide> * Looks through each value in the list, returning a Collection of all the <ide> public function skip($howMany); <ide> * and the value the condition against with each element will be matched <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function match(array $conditions); <add> public function match(array $conditions): CollectionInterface; <ide> <ide> /** <ide> * Returns the first result matching all of the key-value pairs listed in <ide> public function last(); <ide> * @param array|\Traversable $items Items list. <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function append($items); <add> public function append($items): CollectionInterface; <add> <add> /** <add> * Append a single item creating a new collection. <add> * <add> * @param mixed $item The item to append. <add> * @param mixed $key The key to append the item with. If null a key will be generated. <add> * @return \Cake\Collection\CollectionInterface <add> */ <add> public function appendItem($item, $key = null): CollectionInterface; <add> <add> /** <add> * Prepend a set of items to a collection creating a new collection <add> * <add> * @param mixed $items The items to prepend. <add> * @return \Cake\Collection\CollectionInterface <add> */ <add> public function prepend($items): CollectionInterface; <add> <add> /** <add> * Prepend a single item creating a new collection. <add> * <add> * @param mixed $item The item to prepend. <add> * @param mixed $key The key to prepend the item with. If null a key will be generated. <add> * @return \Cake\Collection\CollectionInterface <add> */ <add> public function prependItem($item, $key = null): CollectionInterface; <ide> <ide> /** <ide> * Returns a new collection where the values extracted based on a value path <ide> public function append($items); <ide> * grouping key or a function returning the key out of the provided element <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function combine($keyPath, $valuePath, $groupPath = null); <add> public function combine($keyPath, $valuePath, $groupPath = null): CollectionInterface; <ide> <ide> /** <ide> * Returns a new collection where the values are nested in a tree-like structure <ide> public function combine($keyPath, $valuePath, $groupPath = null); <ide> * @param string $nestingKey The key name under which children are nested <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function nest($idPath, $parentPath, $nestingKey = 'children'); <add> public function nest($idPath, $parentPath, $nestingKey = 'children'): CollectionInterface; <ide> <ide> /** <ide> * Returns a new collection containing each of the elements found in `$values` as <ide> public function nest($idPath, $parentPath, $nestingKey = 'children'); <ide> * values are matched with the elements in this collection by its positional index. <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function insert($path, $values); <add> public function insert($path, $values): CollectionInterface; <ide> <ide> /** <ide> * Returns an array representation of the results <ide> public function insert($path, $values); <ide> * can help getting all items if keys are not important in the result. 
<ide> * @return array <ide> */ <del> public function toArray($preserveKeys = true); <add> public function toArray($preserveKeys = true): array; <ide> <ide> /** <ide> * Returns an numerically-indexed array representation of the results. <ide> * This is equivalent to calling `toArray(false)` <ide> * <ide> * @return array <ide> */ <del> public function toList(); <add> public function toList(): array; <ide> <ide> /** <ide> * Convert a result set into JSON. <ide> public function jsonSerialize(); <ide> * can help getting all items if keys are not important in the result. <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function compile($preserveKeys = true); <add> public function compile($preserveKeys = true): CollectionInterface; <ide> <ide> /** <ide> * Returns a new collection where the operations performed by this collection. <ide> public function compile($preserveKeys = true); <ide> * <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function buffered(); <add> public function buffered(): CollectionInterface; <ide> <ide> /** <ide> * Returns a new collection with each of the elements of this collection <ide> public function buffered(); <ide> * or a callable function that will return the children list <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function listNested($dir = 'desc', $nestingKey = 'children'); <add> public function listNested($dir = 'desc', $nestingKey = 'children'): CollectionInterface; <ide> <ide> /** <ide> * Creates a new collection that when iterated will stop yielding results if <ide> public function listNested($dir = 'desc', $nestingKey = 'children'); <ide> * and the value the condition against with each element will be matched. <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function stopWhen($condition); <add> public function stopWhen($condition): CollectionInterface; <ide> <ide> /** <ide> * Creates a new collection where the items are the <ide> public function stopWhen($condition); <ide> * the items in the collection and should return an array or Traversable object <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function unfold(callable $transformer = null); <add> public function unfold(callable $transformer = null): CollectionInterface; <ide> <ide> /** <ide> * Passes this collection through a callable as its first argument. <ide> public function unfold(callable $transformer = null); <ide> * this collection as first argument. <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function through(callable $handler); <add> public function through(callable $handler): CollectionInterface; <ide> <ide> /** <ide> * Combines the elements of this collection with each of the elements of the <ide> public function through(callable $handler); <ide> * @param array|\Traversable ...$items The collections to zip. <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function zip($items); <add> public function zip($items): CollectionInterface; <ide> <ide> /** <ide> * Combines the elements of this collection with each of the elements of the <ide> public function zip($items); <ide> * @param callable $callable The function to use for zipping the elements together. 
<ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function zipWith($items, $callable); <add> public function zipWith($items, $callable): CollectionInterface; <ide> <ide> /** <ide> * Breaks the collection into smaller arrays of the given size. <ide> public function zipWith($items, $callable); <ide> * @param int $chunkSize The maximum size for each chunk <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function chunk($chunkSize); <add> public function chunk($chunkSize): CollectionInterface; <ide> <ide> /** <ide> * Breaks the collection into smaller arrays of the given size. <ide> public function chunk($chunkSize); <ide> * @param bool $preserveKeys If the keys of the array should be preserved <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function chunkWithKeys($chunkSize, $preserveKeys = true); <add> public function chunkWithKeys($chunkSize, $preserveKeys = true): CollectionInterface; <ide> <ide> /** <ide> * Returns whether or not there are elements in this collection <ide> public function chunkWithKeys($chunkSize, $preserveKeys = true); <ide> * <ide> * @return bool <ide> */ <del> public function isEmpty(); <add> public function isEmpty(): bool; <ide> <ide> /** <ide> * Returns the closest nested iterator that can be safely traversed without <ide> public function isEmpty(); <ide> * <ide> * @return \Traversable <ide> */ <del> public function unwrap(); <add> public function unwrap(): Traversable; <ide> <ide> /** <ide> * Transpose rows and columns into columns and rows <ide> public function unwrap(); <ide> * <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function transpose(); <add> public function transpose(): CollectionInterface; <ide> <ide> /** <ide> * Returns the amount of elements in the collection. <ide> public function transpose(); <ide> * <ide> * @return int <ide> */ <del> public function count(); <add> public function count(): int; <ide> <ide> /** <ide> * Returns the number of unique keys in this iterator. This is, the number of <ide> public function count(); <ide> * @see \Cake\Collection\CollectionInterface::count() <ide> * @return int <ide> */ <del> public function countKeys(); <add> public function countKeys(): int; <add> <add> /** <add> * Create a new collection that is the cartesian product of the current collection <add> * <add> * In order to create a carteisan product a collection must contain a single dimension <add> * of data. <add> * <add> * ### Example <add> * <add> * ``` <add> * $collection = new Collection([['A', 'B', 'C'], [1, 2, 3]]); <add> * $result = $collection->cartesianProduct()->toArray(); <add> * $expected = [ <add> * ['A', 1], <add> * ['A', 2], <add> * ['A', 3], <add> * ['B', 1], <add> * ['B', 2], <add> * ['B', 3], <add> * ['C', 1], <add> * ['C', 2], <add> * ['C', 3], <add> * ]; <add> * ``` <add> * <add> * @param callable|null $operation A callable that allows you to customize the product result. <add> * @param callable|null $filter A filtering callback that must return true for a result to be part <add> * of the final results. 
<add> * @return \Cake\Collection\CollectionInterface <add> */ <add> public function cartesianProduct(callable $operation = null, callable $filter = null): CollectionInterface; <ide> } <ide><path>src/Collection/CollectionTrait.php <ide> <ide> use AppendIterator; <ide> use ArrayIterator; <add>use Cake\Collection\CollectionInterface; <ide> use Cake\Collection\Iterator\BufferedIterator; <ide> use Cake\Collection\Iterator\ExtractIterator; <ide> use Cake\Collection\Iterator\FilterIterator; <ide> trait CollectionTrait <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function each(callable $c) <add> public function each(callable $c): CollectionInterface <ide> { <ide> foreach ($this->optimizeUnwrap() as $k => $v) { <ide> $c($v, $k); <ide> public function each(callable $c) <ide> /** <ide> * {@inheritDoc} <ide> * <del> * @return \Cake\Collection\Iterator\FilterIterator <add> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function filter(callable $c = null) <add> public function filter(callable $c = null): CollectionInterface <ide> { <ide> if ($c === null) { <ide> $c = function ($v) { <ide> public function filter(callable $c = null) <ide> /** <ide> * {@inheritDoc} <ide> * <del> * @return \Cake\Collection\Iterator\FilterIterator <add> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function reject(callable $c) <add> public function reject(callable $c): CollectionInterface <ide> { <ide> return new FilterIterator($this->unwrap(), function ($key, $value, $items) use ($c) { <ide> return !$c($key, $value, $items); <ide> public function reject(callable $c) <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function every(callable $c) <add> public function every(callable $c): bool <ide> { <ide> foreach ($this->optimizeUnwrap() as $key => $value) { <ide> if (!$c($value, $key)) { <ide> public function every(callable $c) <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function some(callable $c) <add> public function some(callable $c): bool <ide> { <ide> foreach ($this->optimizeUnwrap() as $key => $value) { <ide> if ($c($value, $key) === true) { <ide> public function some(callable $c) <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function contains($value) <add> public function contains($value): bool <ide> { <ide> foreach ($this->optimizeUnwrap() as $v) { <ide> if ($value === $v) { <ide> public function contains($value) <ide> /** <ide> * {@inheritDoc} <ide> * <del> * @return \Cake\Collection\Iterator\ReplaceIterator <add> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function map(callable $c) <add> public function map(callable $c): CollectionInterface <ide> { <ide> return new ReplaceIterator($this->unwrap(), $c); <ide> } <ide> public function reduce(callable $c, $zero = null) <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function extract($matcher) <add> public function extract($matcher): CollectionInterface <ide> { <ide> $extractor = new ExtractIterator($this->unwrap(), $matcher); <ide> if (is_string($matcher) && strpos($matcher, '{*}') !== false) { <ide> public function median($matcher = null) <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function sortBy($callback, $dir = \SORT_DESC, $type = \SORT_NUMERIC) <add> public function sortBy($callback, $dir = \SORT_DESC, $type = \SORT_NUMERIC): CollectionInterface <ide> { <ide> return new SortIterator($this->unwrap(), $callback, $dir, $type); <ide> } <ide> <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function groupBy($callback) <add> public function 
groupBy($callback): CollectionInterface <ide> { <ide> $callback = $this->_propertyExtractor($callback); <ide> $group = []; <ide> public function groupBy($callback) <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function indexBy($callback) <add> public function indexBy($callback): CollectionInterface <ide> { <ide> $callback = $this->_propertyExtractor($callback); <ide> $group = []; <ide> public function indexBy($callback) <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function countBy($callback) <add> public function countBy($callback): CollectionInterface <ide> { <ide> $callback = $this->_propertyExtractor($callback); <ide> <ide> public function countBy($callback) <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function sumOf($matcher = null) <add> public function sumOf($matcher = null): int <ide> { <ide> if ($matcher === null) { <ide> return array_sum($this->toList()); <ide> public function sumOf($matcher = null) <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function shuffle() <add> public function shuffle(): CollectionInterface <ide> { <ide> $elements = $this->toArray(); <ide> shuffle($elements); <ide> public function shuffle() <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function sample($size = 10) <add> public function sample($size = 10): CollectionInterface <ide> { <ide> return new Collection(new LimitIterator($this->shuffle(), 0, $size)); <ide> } <ide> <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function take($size = 1, $from = 0) <add> public function take($size = 1, $from = 0): CollectionInterface <ide> { <ide> return new Collection(new LimitIterator($this, $from, $size)); <ide> } <ide> <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function skip($howMany) <add> public function skip($howMany): CollectionInterface <ide> { <ide> return new Collection(new LimitIterator($this, $howMany)); <ide> } <ide> <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function match(array $conditions) <add> public function match(array $conditions): CollectionInterface <ide> { <ide> return $this->filter($this->_createMatcherFilter($conditions)); <ide> } <ide> public function last() <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function append($items) <add> public function append($items): CollectionInterface <ide> { <ide> $list = new AppendIterator(); <ide> $list->append($this->unwrap()); <ide> public function append($items) <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function appendItem($item, $key = null) <add> public function appendItem($item, $key = null): CollectionInterface <ide> { <ide> if ($key !== null) { <ide> $data = [$key => $item]; <ide> public function appendItem($item, $key = null) <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function prepend($items) <add> public function prepend($items): CollectionInterface <ide> { <ide> return (new Collection($items))->append($this); <ide> } <ide> <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function prependItem($item, $key = null) <add> public function prependItem($item, $key = null): CollectionInterface <ide> { <ide> if ($key !== null) { <ide> $data = [$key => $item]; <ide> public function prependItem($item, $key = null) <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function combine($keyPath, $valuePath, $groupPath = null) <add> public function combine($keyPath, $valuePath, $groupPath = null): CollectionInterface <ide> { <ide> $options = [ <ide> 'keyPath' => $this->_propertyExtractor($keyPath), <ide> public function combine($keyPath, $valuePath, 
$groupPath = null) <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function nest($idPath, $parentPath, $nestingKey = 'children') <add> public function nest($idPath, $parentPath, $nestingKey = 'children'): CollectionInterface <ide> { <ide> $parents = []; <ide> $idPath = $this->_propertyExtractor($idPath); <ide> public function nest($idPath, $parentPath, $nestingKey = 'children') <ide> /** <ide> * {@inheritDoc} <ide> * <del> * @return \Cake\Collection\Iterator\InsertIterator <add> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function insert($path, $values) <add> public function insert($path, $values): CollectionInterface <ide> { <ide> return new InsertIterator($this->unwrap(), $path, $values); <ide> } <ide> <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function toArray($preserveKeys = true) <add> public function toArray($preserveKeys = true): array <ide> { <ide> $iterator = $this->unwrap(); <ide> if ($iterator instanceof ArrayIterator) { <ide> public function toArray($preserveKeys = true) <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function toList() <add> public function toList(): array <ide> { <ide> return $this->toArray(false); <ide> } <ide> public function jsonSerialize() <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function compile($preserveKeys = true) <add> public function compile($preserveKeys = true): CollectionInterface <ide> { <ide> return new Collection($this->toArray($preserveKeys)); <ide> } <ide> <ide> /** <ide> * {@inheritDoc} <ide> * <del> * @return \Cake\Collection\Iterator\BufferedIterator <add> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function buffered() <add> public function buffered(): CollectionInterface <ide> { <ide> return new BufferedIterator($this->unwrap()); <ide> } <ide> <ide> /** <ide> * {@inheritDoc} <ide> * <del> * @return \Cake\Collection\Iterator\TreeIterator <add> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function listNested($dir = 'desc', $nestingKey = 'children') <add> public function listNested($dir = 'desc', $nestingKey = 'children'): CollectionInterface <ide> { <ide> $dir = strtolower($dir); <ide> $modes = [ <ide> public function listNested($dir = 'desc', $nestingKey = 'children') <ide> * <ide> * @return \Cake\Collection\Iterator\StoppableIterator <ide> */ <del> public function stopWhen($condition) <add> public function stopWhen($condition): CollectionInterface <ide> { <ide> if (!is_callable($condition)) { <ide> $condition = $this->_createMatcherFilter($condition); <ide> public function stopWhen($condition) <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function unfold(callable $transformer = null) <add> public function unfold(callable $transformer = null): CollectionInterface <ide> { <ide> if ($transformer === null) { <ide> $transformer = function ($item) { <ide> public function unfold(callable $transformer = null) <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function through(callable $handler) <add> public function through(callable $handler): CollectionInterface <ide> { <ide> $result = $handler($this); <ide> <ide> public function through(callable $handler) <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function zip($items) <add> public function zip($items): CollectionInterface <ide> { <ide> return new ZipIterator(array_merge([$this->unwrap()], func_get_args())); <ide> } <ide> <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function zipWith($items, $callable) <add> public function zipWith($items, $callable): 
CollectionInterface <ide> { <ide> if (func_num_args() > 2) { <ide> $items = func_get_args(); <ide> public function zipWith($items, $callable) <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function chunk($chunkSize) <add> public function chunk($chunkSize): CollectionInterface <ide> { <ide> return $this->map(function ($v, $k, $iterator) use ($chunkSize) { <ide> $values = [$v]; <ide> public function chunk($chunkSize) <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function chunkWithKeys($chunkSize, $preserveKeys = true) <add> public function chunkWithKeys($chunkSize, $preserveKeys = true): CollectionInterface <ide> { <ide> return $this->map(function ($v, $k, $iterator) use ($chunkSize, $preserveKeys) { <ide> $key = 0; <ide> public function chunkWithKeys($chunkSize, $preserveKeys = true) <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function isEmpty() <add> public function isEmpty(): bool <ide> { <ide> foreach ($this as $el) { <ide> return false; <ide> public function isEmpty() <ide> /** <ide> * {@inheritDoc} <ide> */ <del> public function unwrap() <add> public function unwrap(): Traversable <ide> { <ide> $iterator = $this; <ide> while (get_class($iterator) === 'Cake\Collection\Collection') { <ide> public function unwrap() <ide> * <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function cartesianProduct(callable $operation = null, callable $filter = null) <add> public function cartesianProduct(callable $operation = null, callable $filter = null): CollectionInterface <ide> { <ide> if ($this->isEmpty()) { <ide> return new Collection([]); <ide> public function cartesianProduct(callable $operation = null, callable $filter = <ide> * <ide> * @return \Cake\Collection\CollectionInterface <ide> */ <del> public function transpose() <add> public function transpose(): CollectionInterface <ide> { <ide> $arrayValue = $this->toList(); <ide> $length = count(current($arrayValue)); <ide> public function transpose() <ide> * <ide> * @return int <ide> */ <del> public function count() <add> public function count(): int <ide> { <ide> $traversable = $this->optimizeUnwrap(); <ide> <ide> public function count() <ide> * <ide> * @return int <ide> */ <del> public function countKeys() <add> public function countKeys(): int <ide> { <ide> return count($this->toArray()); <ide> } <ide><path>src/Collection/ExtractTrait.php <ide> trait ExtractTrait <ide> * of doing that. <ide> * @return callable <ide> */ <del> protected function _propertyExtractor($callback) <add> protected function _propertyExtractor($callback): callable <ide> { <ide> if (!is_string($callback)) { <ide> return $callback; <ide> protected function _simpleExtract($data, $path) <ide> * value to be compared the item with. 
<ide> * @return callable <ide> */ <del> protected function _createMatcherFilter(array $conditions) <add> protected function _createMatcherFilter(array $conditions): callable <ide> { <ide> $matchers = []; <ide> foreach ($conditions as $property => $value) { <ide><path>src/Collection/Iterator/BufferedIterator.php <ide> public function next() <ide> * <ide> * @return int <ide> */ <del> public function count() <add> public function count(): int <ide> { <ide> if (!$this->_started) { <ide> $this->rewind(); <ide><path>src/Collection/Iterator/ExtractIterator.php <ide> use ArrayIterator; <ide> use Cake\Collection\Collection; <ide> use Cake\Collection\CollectionInterface; <add>use Traversable; <ide> <ide> /** <ide> * Creates an iterator from another iterator that extract the requested column <ide> public function current() <ide> * We perform here some strictness analysis so that the <ide> * iterator logic is bypassed entirely. <ide> * <del> * @return \Iterator <add> * @return \Traversable <ide> */ <del> public function unwrap() <add> public function unwrap(): Traversable <ide> { <ide> $iterator = $this->getInnerIterator(); <ide> <ide><path>src/Collection/Iterator/FilterIterator.php <ide> use Cake\Collection\CollectionInterface; <ide> use CallbackFilterIterator; <ide> use Iterator; <add>use Traversable; <ide> <ide> /** <ide> * Creates a filtered iterator from another iterator. The filtering is done by <ide> public function __construct($items, callable $callback) <ide> * We perform here some strictness analysis so that the <ide> * iterator logic is bypassed entirely. <ide> * <del> * @return \Iterator <add> * @return \Traversable <ide> */ <del> public function unwrap() <add> public function unwrap(): Traversable <ide> { <ide> $filter = $this->getInnerIterator(); <ide> $iterator = $filter->getInnerIterator(); <ide> public function unwrap() <ide> <ide> // ArrayIterator can be traversed strictly. <ide> // Let's do that for performance gains <del> <ide> $callback = $this->_callback; <ide> $res = []; <ide> <ide><path>src/Collection/Iterator/ReplaceIterator.php <ide> use ArrayIterator; <ide> use Cake\Collection\Collection; <ide> use Cake\Collection\CollectionInterface; <add>use Traversable; <ide> <ide> /** <ide> * Creates an iterator from another iterator that will modify each of the values <ide> public function current() <ide> * We perform here some strictness analysis so that the <ide> * iterator logic is bypassed entirely. <ide> * <del> * @return \Iterator <add> * @return \Traversable <ide> */ <del> public function unwrap() <add> public function unwrap(): Traversable <ide> { <ide> $iterator = $this->_innerIterator; <ide> <ide><path>src/Collection/Iterator/SortIterator.php <ide> <ide> use Cake\Collection\Collection; <ide> use DateTimeInterface; <add>use Traversable; <ide> <ide> /** <ide> * An iterator that will return the passed items in order. 
The order is given by <ide> public function __construct($items, $callback, $dir = \SORT_DESC, $type = \SORT_ <ide> /** <ide> * {@inheritDoc} <ide> * <del> * @return \Iterator <add> * @return \Traversable <ide> */ <del> public function unwrap() <add> public function unwrap(): Traversable <ide> { <ide> return $this->getInnerIterator(); <ide> } <ide><path>src/Collection/Iterator/StoppableIterator.php <ide> use ArrayIterator; <ide> use Cake\Collection\Collection; <ide> use Cake\Collection\CollectionInterface; <add>use Traversable; <ide> <ide> /** <ide> * Creates an iterator from another iterator that will verify a condition on each <ide> public function valid() <ide> * We perform here some strictness analysis so that the <ide> * iterator logic is bypassed entirely. <ide> * <del> * @return \Iterator <add> * @return \Traversable <ide> */ <del> public function unwrap() <add> public function unwrap(): Traversable <ide> { <ide> $iterator = $this->_innerIterator; <ide> <ide><path>src/Collection/Iterator/TreeIterator.php <ide> */ <ide> namespace Cake\Collection\Iterator; <ide> <add>use Cake\Collection\CollectionInterface; <ide> use Cake\Collection\CollectionTrait; <ide> use RecursiveIterator; <ide> use RecursiveIteratorIterator; <ide> * A Recursive iterator used to flatten nested structures and also exposes <ide> * all Collection methods <ide> */ <del>class TreeIterator extends RecursiveIteratorIterator <add>class TreeIterator extends RecursiveIteratorIterator implements CollectionInterface <ide> { <ide> <ide> use CollectionTrait;
11
Python
Python
fix syntax error
3a5335f09f58439f8e3c0bddbed8e4c7eeb32482
<ide><path>rest_framework/fields.py <ide> def enforce_timezone(self, value): <ide> return value <ide> <ide> def to_internal_value(self, value): <del> if (isinstance(value, datetime.date) and not isinstance(value, datetime.datetime): <add> if isinstance(value, datetime.date) and not isinstance(value, datetime.datetime): <ide> self.fail('date') <ide> <ide> if isinstance(value, datetime.datetime):
1
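The syntax fix in the patch above only removes an unbalanced parenthesis; the guard it restores tells plain dates apart from datetimes (datetime.datetime is a subclass of datetime.date, so both checks are needed). A minimal standalone sketch of that check — the helper name is illustrative and not part of the DRF patch:

```python
import datetime

def is_plain_date(value):
    # True for a date that is not a datetime; in the patched field this is
    # the case that makes to_internal_value reject the value as a 'date'.
    return isinstance(value, datetime.date) and not isinstance(value, datetime.datetime)

print(is_plain_date(datetime.date(2024, 1, 1)))          # True
print(is_plain_date(datetime.datetime(2024, 1, 1, 12)))  # False
```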
Text
Text
add additional information on layer 7
8be46155c1d68a87edea1cde02665c20250bae81
<ide><path>guide/english/network-engineering/osi-layers/index.md <ide> In the diagram above, to the extreme left is the unit of data that is used in ea <ide> <ide> * _**Layer 6 - Presentation Layer:**_ The presentation layer formats the data to be presented to the application layer. <ide> <del>* _**Layer 7 - Application Layer:**_ The application layer serves as the window for users and application processes to access network services. <add>* _**Layer 7 - Application Layer:**_ The application layer serves as the window for users and application processes to access network services like DNS (Domain Name System), FTP (File Transfer Protocol), SMTP (Simple Mail Transfer Protocol), etc. <ide> <del>#### More Information: <add>## Additional Resources <ide> * https://www.mheducation.com/highered/product/data-communications-networking-forouzan/M0073376221.html
1
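The layer-7 wording added in the patch above names DNS among the application-layer services. As a small sketch of what that means in practice (the hostname is purely illustrative and the call needs network access), the Python standard library can exercise DNS resolution directly:

```python
import socket

# Name resolution is an application-layer (Layer 7) service built on DNS.
# "example.com" is only an illustrative hostname.
for family, socktype, proto, canonname, sockaddr in socket.getaddrinfo(
        "example.com", 80, type=socket.SOCK_STREAM):
    print(family, sockaddr)
```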
Javascript
Javascript
prioritize current dir for local lookups
d38503ab01c607ca55b4816e2c249411a3f10269
<ide><path>lib/module.js <ide> Module._resolveLookupPaths = function(request, parent) { <ide> if (!parent || !parent.id || !parent.filename) { <ide> // make require('./path/to/foo') work - normally the path is taken <ide> // from realpath(__filename) but with eval there is no filename <del> var mainPaths = ['.'].concat(modulePaths); <del> mainPaths = Module._nodeModulePaths('.').concat(mainPaths); <add> var mainPaths = ['.'].concat(Module._nodeModulePaths('.'), modulePaths); <ide> return [request, mainPaths]; <ide> } <ide> <ide><path>test/parallel/test-module-relative-lookup.js <add>'use strict'; <add> <add>require('../common'); <add>const assert = require('assert'); <add>const _module = require('module'); // avoid collision with global.module <add>const lookupResults = _module._resolveLookupPaths('./lodash'); <add>const paths = lookupResults[1]; <add> <add>assert.strictEqual(paths[0], '.', <add> 'Current directory is prioritized before node_modules for local modules');
2
Text
Text
remove empty line from top of article
26f08342881de309c6351547b4c810c37b1addec
<ide><path>guide/spanish/blockchain/features/index.md <del> <ide> --- <ide> title: Features of BlockTech <ide> localeTitle: Características de BlockTech
1
Python
Python
fix the test for numpy.ndindex()
aef2cf73aafc9a945d88cb9464f62135b177a2f4
<ide><path>numpy/lib/index_tricks.py <ide> class ndindex(object): <ide> <ide> """ <ide> # This is a hack to handle 0-d arrays correctly. <del> # Fixing nditer would be more work but should be done eventually. <add> # Fixing nditer would be more work but should be done eventually, <add> # and then this entire __new__ method can be removed. <ide> def __new__(cls, *shape): <del> if len(shape) == 0: <del> def zerodim_gen(): <del> yield () <del> return zerodim_gen() <add> if len(shape) == 0 or (len(shape) == 1 and len(shape[0]) == 0): <add> class zero_dim_iter(object): <add> def __init__(self): <add> self._N = 1 <add> def __iter__(self): <add> return self <add> def ndincr(self): <add> return self.next() <add> def next(self): <add> if self._N > 0: <add> self._N -= 1 <add> return () <add> raise StopIteration <add> return zero_dim_iter() <ide> else: <ide> return super(ndindex, cls).__new__(cls) <ide> <ide> def __init__(self, *shape): <add> if len(shape) == 1 and isinstance(shape[0], tuple): <add> shape = shape[0] <ide> x = as_strided(_nx.zeros(1), shape=shape, strides=_nx.zeros_like(shape)) <ide> self._it = _nx.nditer(x, flags=['multi_index'], order='C') <ide> <ide><path>numpy/lib/tests/test_index_tricks.py <ide> def test_ndindex(): <ide> expected = [ix for ix, e in np.ndenumerate(np.zeros((1, 2, 3)))] <ide> assert_array_equal(x, expected) <ide> <add> x = list(np.ndindex((1, 2, 3))) <add> assert_array_equal(x, expected) <add> <ide> # Make sure size argument is optional <ide> x = list(np.ndindex()) <ide> assert_equal(x, [()]) <ide> <add> x = list(np.ndindex(())) <add> assert_equal(x, [()]) <add> <ide> <ide> if __name__ == "__main__": <ide> run_module_suite()
2
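The tests added in the patch above pin down two call styles for np.ndindex: a shape given as a single tuple and a zero-dimensional (empty) shape. A short usage sketch of the behaviour being tested, assuming a NumPy build where both styles are supported:

```python
import numpy as np

# Shape passed as separate arguments or as one tuple yields the same indices.
print(list(np.ndindex(1, 2)))    # [(0, 0), (0, 1)]
print(list(np.ndindex((1, 2))))  # [(0, 0), (0, 1)]

# Zero-dimensional shapes produce exactly one empty index, as the tests assert.
print(list(np.ndindex()))        # [()]
print(list(np.ndindex(())))      # [()]
```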
Ruby
Ruby
correct error message for failed creation
8c110387f442aeaf19357f44fb20c2464a4f2754
<ide><path>activesupport/lib/active_support/testing/assertions.rb <ide> def assert_difference(expression, difference = 1, message = nil, &block) <ide> # <ide> # A error message can be specified. <ide> # <del> # assert_no_difference 'Article.count', "An Article should not be destroyed" do <add> # assert_no_difference 'Article.count', "An Article should not be created" do <ide> # post :create, :article => invalid_attributes <ide> # end <ide> def assert_no_difference(expression, message = nil, &block)
1
Ruby
Ruby
require mocha >= 0.9.0 for as tests
f927a60d0fa33f3e0fc3c0c891ae7657a227707f
<ide><path>activesupport/test/abstract_unit.rb <ide> require 'active_support' <ide> require 'active_support/test_case' <ide> <del>def uses_mocha(test_name, &block) <del> yield <del>end <del> <ide> def uses_memcached(test_name) <ide> require 'memcache' <ide> MemCache.new('localhost').stats <ide><path>activesupport/test/caching_test.rb <ide> def test_object_assigned_fragment_cache_store <ide> end <ide> end <ide> <del>uses_mocha 'high-level cache store tests' do <del> class CacheStoreTest < Test::Unit::TestCase <del> def setup <del> @cache = ActiveSupport::Cache.lookup_store(:memory_store) <del> end <add>class CacheStoreTest < Test::Unit::TestCase <add> def setup <add> @cache = ActiveSupport::Cache.lookup_store(:memory_store) <add> end <ide> <del> def test_fetch_without_cache_miss <del> @cache.stubs(:read).with('foo', {}).returns('bar') <del> @cache.expects(:write).never <del> assert_equal 'bar', @cache.fetch('foo') { 'baz' } <del> end <add> def test_fetch_without_cache_miss <add> @cache.stubs(:read).with('foo', {}).returns('bar') <add> @cache.expects(:write).never <add> assert_equal 'bar', @cache.fetch('foo') { 'baz' } <add> end <ide> <del> def test_fetch_with_cache_miss <del> @cache.stubs(:read).with('foo', {}).returns(nil) <del> @cache.expects(:write).with('foo', 'baz', {}) <del> assert_equal 'baz', @cache.fetch('foo') { 'baz' } <del> end <add> def test_fetch_with_cache_miss <add> @cache.stubs(:read).with('foo', {}).returns(nil) <add> @cache.expects(:write).with('foo', 'baz', {}) <add> assert_equal 'baz', @cache.fetch('foo') { 'baz' } <add> end <ide> <del> def test_fetch_with_forced_cache_miss <del> @cache.expects(:read).never <del> @cache.expects(:write).with('foo', 'bar', :force => true) <del> @cache.fetch('foo', :force => true) { 'bar' } <del> end <add> def test_fetch_with_forced_cache_miss <add> @cache.expects(:read).never <add> @cache.expects(:write).with('foo', 'bar', :force => true) <add> @cache.fetch('foo', :force => true) { 'bar' } <ide> end <ide> end <ide> <ide><path>activesupport/test/core_ext/array_ext_test.rb <ide> def test_extract_options <ide> end <ide> end <ide> <del>uses_mocha "ArrayExtRandomTests" do <del> class ArrayExtRandomTests < Test::Unit::TestCase <del> def test_random_element_from_array <del> assert_nil [].rand <add>class ArrayExtRandomTests < Test::Unit::TestCase <add> def test_random_element_from_array <add> assert_nil [].rand <ide> <del> Kernel.expects(:rand).with(1).returns(0) <del> assert_equal 'x', ['x'].rand <add> Kernel.expects(:rand).with(1).returns(0) <add> assert_equal 'x', ['x'].rand <ide> <del> Kernel.expects(:rand).with(3).returns(1) <del> assert_equal 2, [1, 2, 3].rand <del> end <add> Kernel.expects(:rand).with(3).returns(1) <add> assert_equal 2, [1, 2, 3].rand <ide> end <ide> end <ide><path>activesupport/test/core_ext/date_ext_test.rb <ide> def test_xmlschema <ide> end <ide> end <ide> <del> uses_mocha 'past?, today? and future?' do <del> def test_today <del> Date.stubs(:current).returns(Date.new(2000, 1, 1)) <del> assert_equal false, Date.new(1999, 12, 31).today? <del> assert_equal true, Date.new(2000,1,1).today? <del> assert_equal false, Date.new(2000,1,2).today? <del> end <del> <del> def test_past <del> Date.stubs(:current).returns(Date.new(2000, 1, 1)) <del> assert_equal true, Date.new(1999, 12, 31).past? <del> assert_equal false, Date.new(2000,1,1).past? <del> assert_equal false, Date.new(2000,1,2).past? 
<del> end <del> <del> def test_future <del> Date.stubs(:current).returns(Date.new(2000, 1, 1)) <del> assert_equal false, Date.new(1999, 12, 31).future? <del> assert_equal false, Date.new(2000,1,1).future? <del> assert_equal true, Date.new(2000,1,2).future? <add> def test_today <add> Date.stubs(:current).returns(Date.new(2000, 1, 1)) <add> assert_equal false, Date.new(1999, 12, 31).today? <add> assert_equal true, Date.new(2000,1,1).today? <add> assert_equal false, Date.new(2000,1,2).today? <add> end <add> <add> def test_past <add> Date.stubs(:current).returns(Date.new(2000, 1, 1)) <add> assert_equal true, Date.new(1999, 12, 31).past? <add> assert_equal false, Date.new(2000,1,1).past? <add> assert_equal false, Date.new(2000,1,2).past? <add> end <add> <add> def test_future <add> Date.stubs(:current).returns(Date.new(2000, 1, 1)) <add> assert_equal false, Date.new(1999, 12, 31).future? <add> assert_equal false, Date.new(2000,1,1).future? <add> assert_equal true, Date.new(2000,1,2).future? <add> end <add> <add> def test_current_returns_date_today_when_zone_default_not_set <add> with_env_tz 'US/Central' do <add> Time.stubs(:now).returns Time.local(1999, 12, 31, 23) <add> assert_equal Date.new(1999, 12, 31), Date.today <add> assert_equal Date.new(1999, 12, 31), Date.current <ide> end <ide> end <ide> <del> uses_mocha 'TestDateCurrent' do <del> def test_current_returns_date_today_when_zone_default_not_set <add> def test_current_returns_time_zone_today_when_zone_default_set <add> silence_warnings do # silence warnings raised by tzinfo gem <add> Time.zone_default = ActiveSupport::TimeZone['Eastern Time (US & Canada)'] <ide> with_env_tz 'US/Central' do <ide> Time.stubs(:now).returns Time.local(1999, 12, 31, 23) <ide> assert_equal Date.new(1999, 12, 31), Date.today <del> assert_equal Date.new(1999, 12, 31), Date.current <del> end <del> end <del> <del> def test_current_returns_time_zone_today_when_zone_default_set <del> silence_warnings do # silence warnings raised by tzinfo gem <del> Time.zone_default = ActiveSupport::TimeZone['Eastern Time (US & Canada)'] <del> with_env_tz 'US/Central' do <del> Time.stubs(:now).returns Time.local(1999, 12, 31, 23) <del> assert_equal Date.new(1999, 12, 31), Date.today <del> assert_equal Date.new(2000, 1, 1), Date.current <del> end <add> assert_equal Date.new(2000, 1, 1), Date.current <ide> end <del> ensure <del> Time.zone_default = nil <ide> end <add> ensure <add> Time.zone_default = nil <ide> end <ide> <ide> protected <ide><path>activesupport/test/core_ext/date_time_ext_test.rb <ide> def test_xmlschema <ide> assert_match(/^2080-02-28T15:15:10-06:?00$/, DateTime.civil(2080, 2, 28, 15, 15, 10, -0.25).xmlschema) <ide> end <ide> <del> uses_mocha 'Test DateTime past?, today? and future?' do <del> def test_today_with_offset <del> Date.stubs(:current).returns(Date.new(2000, 1, 1)) <del> assert_equal false, DateTime.civil(1999,12,31,23,59,59, Rational(-18000, 86400)).today? <del> assert_equal true, DateTime.civil(2000,1,1,0,0,0, Rational(-18000, 86400)).today? <del> assert_equal true, DateTime.civil(2000,1,1,23,59,59, Rational(-18000, 86400)).today? <del> assert_equal false, DateTime.civil(2000,1,2,0,0,0, Rational(-18000, 86400)).today? <del> end <del> <del> def test_today_without_offset <del> Date.stubs(:current).returns(Date.new(2000, 1, 1)) <del> assert_equal false, DateTime.civil(1999,12,31,23,59,59).today? <del> assert_equal true, DateTime.civil(2000,1,1,0).today? <del> assert_equal true, DateTime.civil(2000,1,1,23,59,59).today? 
<del> assert_equal false, DateTime.civil(2000,1,2,0).today? <del> end <del> <del> def test_past_with_offset <del> DateTime.stubs(:current).returns(DateTime.civil(2005,2,10,15,30,45, Rational(-18000, 86400))) <del> assert_equal true, DateTime.civil(2005,2,10,15,30,44, Rational(-18000, 86400)).past? <del> assert_equal false, DateTime.civil(2005,2,10,15,30,45, Rational(-18000, 86400)).past? <del> assert_equal false, DateTime.civil(2005,2,10,15,30,46, Rational(-18000, 86400)).past? <del> end <del> <del> def test_past_without_offset <del> DateTime.stubs(:current).returns(DateTime.civil(2005,2,10,15,30,45, Rational(-18000, 86400))) <del> assert_equal true, DateTime.civil(2005,2,10,20,30,44).past? <del> assert_equal false, DateTime.civil(2005,2,10,20,30,45).past? <del> assert_equal false, DateTime.civil(2005,2,10,20,30,46).past? <del> end <del> <del> def test_future_with_offset <del> DateTime.stubs(:current).returns(DateTime.civil(2005,2,10,15,30,45, Rational(-18000, 86400))) <del> assert_equal false, DateTime.civil(2005,2,10,15,30,44, Rational(-18000, 86400)).future? <del> assert_equal false, DateTime.civil(2005,2,10,15,30,45, Rational(-18000, 86400)).future? <del> assert_equal true, DateTime.civil(2005,2,10,15,30,46, Rational(-18000, 86400)).future? <del> end <del> <del> def test_future_without_offset <del> DateTime.stubs(:current).returns(DateTime.civil(2005,2,10,15,30,45, Rational(-18000, 86400))) <del> assert_equal false, DateTime.civil(2005,2,10,20,30,44).future? <del> assert_equal false, DateTime.civil(2005,2,10,20,30,45).future? <del> assert_equal true, DateTime.civil(2005,2,10,20,30,46).future? <del> end <add> def test_today_with_offset <add> Date.stubs(:current).returns(Date.new(2000, 1, 1)) <add> assert_equal false, DateTime.civil(1999,12,31,23,59,59, Rational(-18000, 86400)).today? <add> assert_equal true, DateTime.civil(2000,1,1,0,0,0, Rational(-18000, 86400)).today? <add> assert_equal true, DateTime.civil(2000,1,1,23,59,59, Rational(-18000, 86400)).today? <add> assert_equal false, DateTime.civil(2000,1,2,0,0,0, Rational(-18000, 86400)).today? <add> end <add> <add> def test_today_without_offset <add> Date.stubs(:current).returns(Date.new(2000, 1, 1)) <add> assert_equal false, DateTime.civil(1999,12,31,23,59,59).today? <add> assert_equal true, DateTime.civil(2000,1,1,0).today? <add> assert_equal true, DateTime.civil(2000,1,1,23,59,59).today? <add> assert_equal false, DateTime.civil(2000,1,2,0).today? <add> end <add> <add> def test_past_with_offset <add> DateTime.stubs(:current).returns(DateTime.civil(2005,2,10,15,30,45, Rational(-18000, 86400))) <add> assert_equal true, DateTime.civil(2005,2,10,15,30,44, Rational(-18000, 86400)).past? <add> assert_equal false, DateTime.civil(2005,2,10,15,30,45, Rational(-18000, 86400)).past? <add> assert_equal false, DateTime.civil(2005,2,10,15,30,46, Rational(-18000, 86400)).past? <ide> end <ide> <del> uses_mocha 'TestDateTimeCurrent' do <del> def test_current_returns_date_today_when_zone_default_not_set <del> with_env_tz 'US/Eastern' do <del> Time.stubs(:now).returns Time.local(1999, 12, 31, 23, 59, 59) <del> assert_equal DateTime.new(1999, 12, 31, 23, 59, 59, Rational(-18000, 86400)), DateTime.current <del> end <add> def test_past_without_offset <add> DateTime.stubs(:current).returns(DateTime.civil(2005,2,10,15,30,45, Rational(-18000, 86400))) <add> assert_equal true, DateTime.civil(2005,2,10,20,30,44).past? <add> assert_equal false, DateTime.civil(2005,2,10,20,30,45).past? <add> assert_equal false, DateTime.civil(2005,2,10,20,30,46).past? 
<add> end <add> <add> def test_future_with_offset <add> DateTime.stubs(:current).returns(DateTime.civil(2005,2,10,15,30,45, Rational(-18000, 86400))) <add> assert_equal false, DateTime.civil(2005,2,10,15,30,44, Rational(-18000, 86400)).future? <add> assert_equal false, DateTime.civil(2005,2,10,15,30,45, Rational(-18000, 86400)).future? <add> assert_equal true, DateTime.civil(2005,2,10,15,30,46, Rational(-18000, 86400)).future? <add> end <add> <add> def test_future_without_offset <add> DateTime.stubs(:current).returns(DateTime.civil(2005,2,10,15,30,45, Rational(-18000, 86400))) <add> assert_equal false, DateTime.civil(2005,2,10,20,30,44).future? <add> assert_equal false, DateTime.civil(2005,2,10,20,30,45).future? <add> assert_equal true, DateTime.civil(2005,2,10,20,30,46).future? <add> end <add> <add> def test_current_returns_date_today_when_zone_default_not_set <add> with_env_tz 'US/Eastern' do <add> Time.stubs(:now).returns Time.local(1999, 12, 31, 23, 59, 59) <add> assert_equal DateTime.new(1999, 12, 31, 23, 59, 59, Rational(-18000, 86400)), DateTime.current <ide> end <add> end <ide> <del> def test_current_returns_time_zone_today_when_zone_default_set <del> Time.zone_default = ActiveSupport::TimeZone['Eastern Time (US & Canada)'] <del> with_env_tz 'US/Eastern' do <del> Time.stubs(:now).returns Time.local(1999, 12, 31, 23, 59, 59) <del> assert_equal DateTime.new(1999, 12, 31, 23, 59, 59, Rational(-18000, 86400)), DateTime.current <del> end <del> ensure <del> Time.zone_default = nil <add> def test_current_returns_time_zone_today_when_zone_default_set <add> Time.zone_default = ActiveSupport::TimeZone['Eastern Time (US & Canada)'] <add> with_env_tz 'US/Eastern' do <add> Time.stubs(:now).returns Time.local(1999, 12, 31, 23, 59, 59) <add> assert_equal DateTime.new(1999, 12, 31, 23, 59, 59, Rational(-18000, 86400)), DateTime.current <ide> end <add> ensure <add> Time.zone_default = nil <ide> end <ide> <ide> def test_current_without_time_zone <ide><path>activesupport/test/core_ext/duration_test.rb <ide> def test_fractional_days <ide> assert_equal 86400 * 1.7, 1.7.days <ide> end <ide> <del> uses_mocha 'TestDurationSinceAndAgoWithCurrentTime' do <del> def test_since_and_ago_with_fractional_days <del> Time.stubs(:now).returns Time.local(2000) <del> # since <del> assert_equal 36.hours.since, 1.5.days.since <del> assert_equal((24 * 1.7).hours.since, 1.7.days.since) <del> # ago <del> assert_equal 36.hours.ago, 1.5.days.ago <del> assert_equal((24 * 1.7).hours.ago, 1.7.days.ago) <del> end <add> def test_since_and_ago_with_fractional_days <add> Time.stubs(:now).returns Time.local(2000) <add> # since <add> assert_equal 36.hours.since, 1.5.days.since <add> assert_equal((24 * 1.7).hours.since, 1.7.days.since) <add> # ago <add> assert_equal 36.hours.ago, 1.5.days.ago <add> assert_equal((24 * 1.7).hours.ago, 1.7.days.ago) <add> end <add> <add> def test_since_and_ago_with_fractional_weeks <add> Time.stubs(:now).returns Time.local(2000) <add> # since <add> assert_equal((7 * 36).hours.since, 1.5.weeks.since) <add> assert_equal((7 * 24 * 1.7).hours.since, 1.7.weeks.since) <add> # ago <add> assert_equal((7 * 36).hours.ago, 1.5.weeks.ago) <add> assert_equal((7 * 24 * 1.7).hours.ago, 1.7.weeks.ago) <add> end <add> <add> def test_deprecated_fractional_years <add> years_re = /Fractional years are not respected\. 
Convert value to integer before calling #years\./ <add> assert_deprecated(years_re){1.0.years} <add> assert_deprecated(years_re){1.5.years} <add> assert_not_deprecated{1.years} <add> assert_deprecated(years_re){1.0.year} <add> assert_deprecated(years_re){1.5.year} <add> assert_not_deprecated{1.year} <add> end <ide> <del> def test_since_and_ago_with_fractional_weeks <add> def test_deprecated_fractional_months <add> months_re = /Fractional months are not respected\. Convert value to integer before calling #months\./ <add> assert_deprecated(months_re){1.5.months} <add> assert_deprecated(months_re){1.0.months} <add> assert_not_deprecated{1.months} <add> assert_deprecated(months_re){1.5.month} <add> assert_deprecated(months_re){1.0.month} <add> assert_not_deprecated{1.month} <add> end <add> <add> def test_since_and_ago_anchored_to_time_now_when_time_zone_default_not_set <add> Time.zone_default = nil <add> with_env_tz 'US/Eastern' do <ide> Time.stubs(:now).returns Time.local(2000) <ide> # since <del> assert_equal((7 * 36).hours.since, 1.5.weeks.since) <del> assert_equal((7 * 24 * 1.7).hours.since, 1.7.weeks.since) <add> assert_equal false, 5.seconds.since.is_a?(ActiveSupport::TimeWithZone) <add> assert_equal Time.local(2000,1,1,0,0,5), 5.seconds.since <ide> # ago <del> assert_equal((7 * 36).hours.ago, 1.5.weeks.ago) <del> assert_equal((7 * 24 * 1.7).hours.ago, 1.7.weeks.ago) <del> end <del> <del> def test_deprecated_fractional_years <del> years_re = /Fractional years are not respected\. Convert value to integer before calling #years\./ <del> assert_deprecated(years_re){1.0.years} <del> assert_deprecated(years_re){1.5.years} <del> assert_not_deprecated{1.years} <del> assert_deprecated(years_re){1.0.year} <del> assert_deprecated(years_re){1.5.year} <del> assert_not_deprecated{1.year} <del> end <del> <del> def test_deprecated_fractional_months <del> months_re = /Fractional months are not respected\. 
Convert value to integer before calling #months\./ <del> assert_deprecated(months_re){1.5.months} <del> assert_deprecated(months_re){1.0.months} <del> assert_not_deprecated{1.months} <del> assert_deprecated(months_re){1.5.month} <del> assert_deprecated(months_re){1.0.month} <del> assert_not_deprecated{1.month} <add> assert_equal false, 5.seconds.ago.is_a?(ActiveSupport::TimeWithZone) <add> assert_equal Time.local(1999,12,31,23,59,55), 5.seconds.ago <ide> end <add> end <ide> <del> def test_since_and_ago_anchored_to_time_now_when_time_zone_default_not_set <del> Time.zone_default = nil <add> def test_since_and_ago_anchored_to_time_zone_now_when_time_zone_default_set <add> silence_warnings do # silence warnings raised by tzinfo gem <add> Time.zone_default = ActiveSupport::TimeZone['Eastern Time (US & Canada)'] <ide> with_env_tz 'US/Eastern' do <ide> Time.stubs(:now).returns Time.local(2000) <ide> # since <del> assert_equal false, 5.seconds.since.is_a?(ActiveSupport::TimeWithZone) <del> assert_equal Time.local(2000,1,1,0,0,5), 5.seconds.since <add> assert_equal true, 5.seconds.since.is_a?(ActiveSupport::TimeWithZone) <add> assert_equal Time.utc(2000,1,1,0,0,5), 5.seconds.since.time <add> assert_equal 'Eastern Time (US & Canada)', 5.seconds.since.time_zone.name <ide> # ago <del> assert_equal false, 5.seconds.ago.is_a?(ActiveSupport::TimeWithZone) <del> assert_equal Time.local(1999,12,31,23,59,55), 5.seconds.ago <add> assert_equal true, 5.seconds.ago.is_a?(ActiveSupport::TimeWithZone) <add> assert_equal Time.utc(1999,12,31,23,59,55), 5.seconds.ago.time <add> assert_equal 'Eastern Time (US & Canada)', 5.seconds.ago.time_zone.name <ide> end <ide> end <del> <del> def test_since_and_ago_anchored_to_time_zone_now_when_time_zone_default_set <del> silence_warnings do # silence warnings raised by tzinfo gem <del> Time.zone_default = ActiveSupport::TimeZone['Eastern Time (US & Canada)'] <del> with_env_tz 'US/Eastern' do <del> Time.stubs(:now).returns Time.local(2000) <del> # since <del> assert_equal true, 5.seconds.since.is_a?(ActiveSupport::TimeWithZone) <del> assert_equal Time.utc(2000,1,1,0,0,5), 5.seconds.since.time <del> assert_equal 'Eastern Time (US & Canada)', 5.seconds.since.time_zone.name <del> # ago <del> assert_equal true, 5.seconds.ago.is_a?(ActiveSupport::TimeWithZone) <del> assert_equal Time.utc(1999,12,31,23,59,55), 5.seconds.ago.time <del> assert_equal 'Eastern Time (US & Canada)', 5.seconds.ago.time_zone.name <del> end <del> end <del> ensure <del> Time.zone_default = nil <del> end <add> ensure <add> Time.zone_default = nil <ide> end <ide> <ide> protected <ide><path>activesupport/test/core_ext/hash_ext_test.rb <ide> def test_except_with_original_frozen <ide> assert_nothing_raised { original.except(:a) } <ide> end <ide> <del> uses_mocha 'except with expectation' do <del> def test_except_with_mocha_expectation_on_original <del> original = { :a => 'x', :b => 'y' } <del> original.expects(:delete).never <del> original.except(:a) <del> end <add> def test_except_with_mocha_expectation_on_original <add> original = { :a => 'x', :b => 'y' } <add> original.expects(:delete).never <add> original.except(:a) <ide> end <ide> end <ide> <ide><path>activesupport/test/core_ext/time_ext_test.rb <ide> def test_days_in_month_with_year <ide> assert_equal 31, Time.days_in_month(12, 2005) <ide> end <ide> <del> uses_mocha 'TestTimeDaysInMonthWithoutYearArg' do <del> def test_days_in_month_feb_in_common_year_without_year_arg <del> Time.stubs(:now).returns(Time.utc(2007)) <del> assert_equal 28, Time.days_in_month(2) 
<del> end <add> def test_days_in_month_feb_in_common_year_without_year_arg <add> Time.stubs(:now).returns(Time.utc(2007)) <add> assert_equal 28, Time.days_in_month(2) <add> end <ide> <del> def test_days_in_month_feb_in_leap_year_without_year_arg <del> Time.stubs(:now).returns(Time.utc(2008)) <del> assert_equal 29, Time.days_in_month(2) <del> end <add> def test_days_in_month_feb_in_leap_year_without_year_arg <add> Time.stubs(:now).returns(Time.utc(2008)) <add> assert_equal 29, Time.days_in_month(2) <ide> end <ide> <ide> def test_time_with_datetime_fallback <ide> def test_xmlschema_is_available <ide> assert_nothing_raised { Time.now.xmlschema } <ide> end <ide> <del> uses_mocha 'Test Time past?, today? and future?' do <del> def test_today_with_time_local <del> Date.stubs(:current).returns(Date.new(2000, 1, 1)) <del> assert_equal false, Time.local(1999,12,31,23,59,59).today? <del> assert_equal true, Time.local(2000,1,1,0).today? <del> assert_equal true, Time.local(2000,1,1,23,59,59).today? <del> assert_equal false, Time.local(2000,1,2,0).today? <del> end <del> <del> def test_today_with_time_utc <del> Date.stubs(:current).returns(Date.new(2000, 1, 1)) <del> assert_equal false, Time.utc(1999,12,31,23,59,59).today? <del> assert_equal true, Time.utc(2000,1,1,0).today? <del> assert_equal true, Time.utc(2000,1,1,23,59,59).today? <del> assert_equal false, Time.utc(2000,1,2,0).today? <del> end <del> <del> def test_past_with_time_current_as_time_local <del> with_env_tz 'US/Eastern' do <del> Time.stubs(:current).returns(Time.local(2005,2,10,15,30,45)) <del> assert_equal true, Time.local(2005,2,10,15,30,44).past? <del> assert_equal false, Time.local(2005,2,10,15,30,45).past? <del> assert_equal false, Time.local(2005,2,10,15,30,46).past? <del> assert_equal true, Time.utc(2005,2,10,20,30,44).past? <del> assert_equal false, Time.utc(2005,2,10,20,30,45).past? <del> assert_equal false, Time.utc(2005,2,10,20,30,46).past? <del> end <del> end <del> <del> def test_past_with_time_current_as_time_with_zone <del> with_env_tz 'US/Eastern' do <del> twz = Time.utc(2005,2,10,15,30,45).in_time_zone('Central Time (US & Canada)') <del> Time.stubs(:current).returns(twz) <del> assert_equal true, Time.local(2005,2,10,10,30,44).past? <del> assert_equal false, Time.local(2005,2,10,10,30,45).past? <del> assert_equal false, Time.local(2005,2,10,10,30,46).past? <del> assert_equal true, Time.utc(2005,2,10,15,30,44).past? <del> assert_equal false, Time.utc(2005,2,10,15,30,45).past? <del> assert_equal false, Time.utc(2005,2,10,15,30,46).past? <del> end <del> end <del> <del> def test_future_with_time_current_as_time_local <del> with_env_tz 'US/Eastern' do <del> Time.stubs(:current).returns(Time.local(2005,2,10,15,30,45)) <del> assert_equal false, Time.local(2005,2,10,15,30,44).future? <del> assert_equal false, Time.local(2005,2,10,15,30,45).future? <del> assert_equal true, Time.local(2005,2,10,15,30,46).future? <del> assert_equal false, Time.utc(2005,2,10,20,30,44).future? <del> assert_equal false, Time.utc(2005,2,10,20,30,45).future? <del> assert_equal true, Time.utc(2005,2,10,20,30,46).future? <del> end <del> end <del> <del> def test_future_with_time_current_as_time_with_zone <del> with_env_tz 'US/Eastern' do <del> twz = Time.utc(2005,2,10,15,30,45).in_time_zone('Central Time (US & Canada)') <del> Time.stubs(:current).returns(twz) <del> assert_equal false, Time.local(2005,2,10,10,30,44).future? <del> assert_equal false, Time.local(2005,2,10,10,30,45).future? <del> assert_equal true, Time.local(2005,2,10,10,30,46).future? 
<del> assert_equal false, Time.utc(2005,2,10,15,30,44).future? <del> assert_equal false, Time.utc(2005,2,10,15,30,45).future? <del> assert_equal true, Time.utc(2005,2,10,15,30,46).future? <del> end <add> def test_today_with_time_local <add> Date.stubs(:current).returns(Date.new(2000, 1, 1)) <add> assert_equal false, Time.local(1999,12,31,23,59,59).today? <add> assert_equal true, Time.local(2000,1,1,0).today? <add> assert_equal true, Time.local(2000,1,1,23,59,59).today? <add> assert_equal false, Time.local(2000,1,2,0).today? <add> end <add> <add> def test_today_with_time_utc <add> Date.stubs(:current).returns(Date.new(2000, 1, 1)) <add> assert_equal false, Time.utc(1999,12,31,23,59,59).today? <add> assert_equal true, Time.utc(2000,1,1,0).today? <add> assert_equal true, Time.utc(2000,1,1,23,59,59).today? <add> assert_equal false, Time.utc(2000,1,2,0).today? <add> end <add> <add> def test_past_with_time_current_as_time_local <add> with_env_tz 'US/Eastern' do <add> Time.stubs(:current).returns(Time.local(2005,2,10,15,30,45)) <add> assert_equal true, Time.local(2005,2,10,15,30,44).past? <add> assert_equal false, Time.local(2005,2,10,15,30,45).past? <add> assert_equal false, Time.local(2005,2,10,15,30,46).past? <add> assert_equal true, Time.utc(2005,2,10,20,30,44).past? <add> assert_equal false, Time.utc(2005,2,10,20,30,45).past? <add> assert_equal false, Time.utc(2005,2,10,20,30,46).past? <add> end <add> end <add> <add> def test_past_with_time_current_as_time_with_zone <add> with_env_tz 'US/Eastern' do <add> twz = Time.utc(2005,2,10,15,30,45).in_time_zone('Central Time (US & Canada)') <add> Time.stubs(:current).returns(twz) <add> assert_equal true, Time.local(2005,2,10,10,30,44).past? <add> assert_equal false, Time.local(2005,2,10,10,30,45).past? <add> assert_equal false, Time.local(2005,2,10,10,30,46).past? <add> assert_equal true, Time.utc(2005,2,10,15,30,44).past? <add> assert_equal false, Time.utc(2005,2,10,15,30,45).past? <add> assert_equal false, Time.utc(2005,2,10,15,30,46).past? <add> end <add> end <add> <add> def test_future_with_time_current_as_time_local <add> with_env_tz 'US/Eastern' do <add> Time.stubs(:current).returns(Time.local(2005,2,10,15,30,45)) <add> assert_equal false, Time.local(2005,2,10,15,30,44).future? <add> assert_equal false, Time.local(2005,2,10,15,30,45).future? <add> assert_equal true, Time.local(2005,2,10,15,30,46).future? <add> assert_equal false, Time.utc(2005,2,10,20,30,44).future? <add> assert_equal false, Time.utc(2005,2,10,20,30,45).future? <add> assert_equal true, Time.utc(2005,2,10,20,30,46).future? <add> end <add> end <add> <add> def test_future_with_time_current_as_time_with_zone <add> with_env_tz 'US/Eastern' do <add> twz = Time.utc(2005,2,10,15,30,45).in_time_zone('Central Time (US & Canada)') <add> Time.stubs(:current).returns(twz) <add> assert_equal false, Time.local(2005,2,10,10,30,44).future? <add> assert_equal false, Time.local(2005,2,10,10,30,45).future? <add> assert_equal true, Time.local(2005,2,10,10,30,46).future? <add> assert_equal false, Time.utc(2005,2,10,15,30,44).future? <add> assert_equal false, Time.utc(2005,2,10,15,30,45).future? <add> assert_equal true, Time.utc(2005,2,10,15,30,46).future? <ide> end <ide> end <ide> <ide><path>activesupport/test/core_ext/time_with_zone_test.rb <ide> def test_between? <ide> assert_equal false, @twz.between?(Time.utc(2000,1,1,0,0,1), Time.utc(2000,1,1,0,0,2)) <ide> end <ide> <del> uses_mocha 'TimeWithZone past?, today? and future?' 
do <del> def test_today <del> Date.stubs(:current).returns(Date.new(2000, 1, 1)) <del> assert_equal false, ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.utc(1999,12,31,23,59,59) ).today? <del> assert_equal true, ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.utc(2000,1,1,0) ).today? <del> assert_equal true, ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.utc(2000,1,1,23,59,59) ).today? <del> assert_equal false, ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.utc(2000,1,2,0) ).today? <del> end <del> <del> def test_past_with_time_current_as_time_local <del> with_env_tz 'US/Eastern' do <del> Time.stubs(:current).returns(Time.local(2005,2,10,15,30,45)) <del> assert_equal true, ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.local(2005,2,10,15,30,44)).past? <del> assert_equal false, ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.local(2005,2,10,15,30,45)).past? <del> assert_equal false, ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.local(2005,2,10,15,30,46)).past? <del> end <del> end <del> <del> def test_past_with_time_current_as_time_with_zone <del> twz = ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.local(2005,2,10,15,30,45) ) <del> Time.stubs(:current).returns(twz) <add> def test_today <add> Date.stubs(:current).returns(Date.new(2000, 1, 1)) <add> assert_equal false, ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.utc(1999,12,31,23,59,59) ).today? <add> assert_equal true, ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.utc(2000,1,1,0) ).today? <add> assert_equal true, ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.utc(2000,1,1,23,59,59) ).today? <add> assert_equal false, ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.utc(2000,1,2,0) ).today? <add> end <add> <add> def test_past_with_time_current_as_time_local <add> with_env_tz 'US/Eastern' do <add> Time.stubs(:current).returns(Time.local(2005,2,10,15,30,45)) <ide> assert_equal true, ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.local(2005,2,10,15,30,44)).past? <ide> assert_equal false, ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.local(2005,2,10,15,30,45)).past? <ide> assert_equal false, ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.local(2005,2,10,15,30,46)).past? <ide> end <del> <del> def test_future_with_time_current_as_time_local <del> with_env_tz 'US/Eastern' do <del> Time.stubs(:current).returns(Time.local(2005,2,10,15,30,45)) <del> assert_equal false, ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.local(2005,2,10,15,30,44)).future? <del> assert_equal false, ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.local(2005,2,10,15,30,45)).future? <del> assert_equal true, ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.local(2005,2,10,15,30,46)).future? <del> end <del> end <del> <del> def future_with_time_current_as_time_with_zone <del> twz = ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.local(2005,2,10,15,30,45) ) <del> Time.stubs(:current).returns(twz) <add> end <add> <add> def test_past_with_time_current_as_time_with_zone <add> twz = ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.local(2005,2,10,15,30,45) ) <add> Time.stubs(:current).returns(twz) <add> assert_equal true, ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.local(2005,2,10,15,30,44)).past? <add> assert_equal false, ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.local(2005,2,10,15,30,45)).past? <add> assert_equal false, ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.local(2005,2,10,15,30,46)).past? 
<add> end <add> <add> def test_future_with_time_current_as_time_local <add> with_env_tz 'US/Eastern' do <add> Time.stubs(:current).returns(Time.local(2005,2,10,15,30,45)) <ide> assert_equal false, ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.local(2005,2,10,15,30,44)).future? <ide> assert_equal false, ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.local(2005,2,10,15,30,45)).future? <ide> assert_equal true, ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.local(2005,2,10,15,30,46)).future? <ide> end <ide> end <ide> <add> def future_with_time_current_as_time_with_zone <add> twz = ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.local(2005,2,10,15,30,45) ) <add> Time.stubs(:current).returns(twz) <add> assert_equal false, ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.local(2005,2,10,15,30,44)).future? <add> assert_equal false, ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.local(2005,2,10,15,30,45)).future? <add> assert_equal true, ActiveSupport::TimeWithZone.new( nil, @time_zone, Time.local(2005,2,10,15,30,46)).future? <add> end <add> <ide> def test_eql? <ide> assert @twz.eql?(Time.utc(2000)) <ide> assert @twz.eql?( ActiveSupport::TimeWithZone.new(Time.utc(2000), ActiveSupport::TimeZone["Hawaii"]) ) <ide> def test_freeze_preloads_instance_variables <ide> end <ide> end <ide> <del> uses_mocha 'TestDatePartValueMethods' do <del> def test_method_missing_with_non_time_return_value <del> silence_warnings do # silence warnings raised by tzinfo gem <del> @twz.time.expects(:foo).returns('bar') <del> assert_equal 'bar', @twz.foo <del> end <add> def test_method_missing_with_non_time_return_value <add> silence_warnings do # silence warnings raised by tzinfo gem <add> @twz.time.expects(:foo).returns('bar') <add> assert_equal 'bar', @twz.foo <ide> end <add> end <ide> <del> def test_date_part_value_methods <del> silence_warnings do # silence warnings raised by tzinfo gem <del> twz = ActiveSupport::TimeWithZone.new(Time.utc(1999,12,31,19,18,17,500), @time_zone) <del> twz.expects(:method_missing).never <del> assert_equal 1999, twz.year <del> assert_equal 12, twz.month <del> assert_equal 31, twz.day <del> assert_equal 14, twz.hour <del> assert_equal 18, twz.min <del> assert_equal 17, twz.sec <del> assert_equal 500, twz.usec <del> assert_equal 5, twz.wday <del> assert_equal 365, twz.yday <del> end <add> def test_date_part_value_methods <add> silence_warnings do # silence warnings raised by tzinfo gem <add> twz = ActiveSupport::TimeWithZone.new(Time.utc(1999,12,31,19,18,17,500), @time_zone) <add> twz.expects(:method_missing).never <add> assert_equal 1999, twz.year <add> assert_equal 12, twz.month <add> assert_equal 31, twz.day <add> assert_equal 14, twz.hour <add> assert_equal 18, twz.min <add> assert_equal 17, twz.sec <add> assert_equal 500, twz.usec <add> assert_equal 5, twz.wday <add> assert_equal 365, twz.yday <ide> end <ide> end <ide> <ide> def test_time_zone_setter_with_non_identifying_argument_returns_nil <ide> assert_equal nil, Time.zone <ide> end <ide> <del> uses_mocha 'TestTimeCurrent' do <del> def test_current_returns_time_now_when_zone_default_not_set <del> with_env_tz 'US/Eastern' do <del> Time.stubs(:now).returns Time.local(2000) <del> assert_equal false, Time.current.is_a?(ActiveSupport::TimeWithZone) <del> assert_equal Time.local(2000), Time.current <del> end <add> def test_current_returns_time_now_when_zone_default_not_set <add> with_env_tz 'US/Eastern' do <add> Time.stubs(:now).returns Time.local(2000) <add> assert_equal false, 
Time.current.is_a?(ActiveSupport::TimeWithZone) <add> assert_equal Time.local(2000), Time.current <ide> end <add> end <ide> <del> def test_current_returns_time_zone_now_when_zone_default_set <del> silence_warnings do # silence warnings raised by tzinfo gem <del> Time.zone_default = ActiveSupport::TimeZone['Eastern Time (US & Canada)'] <del> with_env_tz 'US/Eastern' do <del> Time.stubs(:now).returns Time.local(2000) <del> assert_equal true, Time.current.is_a?(ActiveSupport::TimeWithZone) <del> assert_equal 'Eastern Time (US & Canada)', Time.current.time_zone.name <del> assert_equal Time.utc(2000), Time.current.time <del> end <add> def test_current_returns_time_zone_now_when_zone_default_set <add> silence_warnings do # silence warnings raised by tzinfo gem <add> Time.zone_default = ActiveSupport::TimeZone['Eastern Time (US & Canada)'] <add> with_env_tz 'US/Eastern' do <add> Time.stubs(:now).returns Time.local(2000) <add> assert_equal true, Time.current.is_a?(ActiveSupport::TimeWithZone) <add> assert_equal 'Eastern Time (US & Canada)', Time.current.time_zone.name <add> assert_equal Time.utc(2000), Time.current.time <ide> end <del> ensure <del> Time.zone_default = nil <ide> end <add> ensure <add> Time.zone_default = nil <ide> end <ide> <ide> protected <ide><path>activesupport/test/i18n_test.rb <ide> def setup <ide> @time = Time.utc(2008, 7, 2, 16, 47, 1) <ide> end <ide> <del> uses_mocha 'I18nTimeZoneTest' do <del> def test_time_zone_localization_with_default_format <del> Time.zone.stubs(:now).returns Time.local(2000) <del> assert_equal Time.zone.now.strftime("%a, %d %b %Y %H:%M:%S %z"), I18n.localize(Time.zone.now) <del> end <add> def test_time_zone_localization_with_default_format <add> Time.zone.stubs(:now).returns Time.local(2000) <add> assert_equal Time.zone.now.strftime("%a, %d %b %Y %H:%M:%S %z"), I18n.localize(Time.zone.now) <ide> end <ide> <ide> def test_date_localization_should_use_default_format <ide><path>activesupport/test/json/encoding_test.rb <ide> def with_env_tz(new_tz = 'US/Eastern') <ide> end <ide> end <ide> <del>uses_mocha 'JsonOptionsTests' do <del> class JsonOptionsTests < Test::Unit::TestCase <del> def test_enumerable_should_passthrough_options_to_elements <del> json_options = { :include => :posts } <del> ActiveSupport::JSON.expects(:encode).with(1, json_options) <del> ActiveSupport::JSON.expects(:encode).with(2, json_options) <del> ActiveSupport::JSON.expects(:encode).with('foo', json_options) <del> <del> [1, 2, 'foo'].to_json(json_options) <del> end <add>class JsonOptionsTests < Test::Unit::TestCase <add> def test_enumerable_should_passthrough_options_to_elements <add> json_options = { :include => :posts } <add> ActiveSupport::JSON.expects(:encode).with(1, json_options) <add> ActiveSupport::JSON.expects(:encode).with(2, json_options) <add> ActiveSupport::JSON.expects(:encode).with('foo', json_options) <add> <add> [1, 2, 'foo'].to_json(json_options) <ide> end <ide> end <ide><path>activesupport/test/memoizable_test.rb <ide> require 'abstract_unit' <ide> <del>uses_mocha 'Memoizable' do <del> class MemoizableTest < Test::Unit::TestCase <del> class Person <del> extend ActiveSupport::Memoizable <del> <del> attr_reader :name_calls, :age_calls <del> def initialize <del> @name_calls = 0 <del> @age_calls = 0 <del> end <add>class MemoizableTest < Test::Unit::TestCase <add> class Person <add> extend ActiveSupport::Memoizable <ide> <del> def name <del> @name_calls += 1 <del> "Josh" <del> end <add> attr_reader :name_calls, :age_calls <add> def initialize <add> @name_calls = 0 <add> 
@age_calls = 0 <add> end <ide> <del> def name? <del> true <del> end <del> memoize :name? <add> def name <add> @name_calls += 1 <add> "Josh" <add> end <ide> <del> def update(name) <del> "Joshua" <del> end <del> memoize :update <add> def name? <add> true <add> end <add> memoize :name? <ide> <del> def age <del> @age_calls += 1 <del> nil <del> end <add> def update(name) <add> "Joshua" <add> end <add> memoize :update <ide> <del> memoize :name, :age <add> def age <add> @age_calls += 1 <add> nil <ide> end <ide> <del> class Company <del> attr_reader :name_calls <del> def initialize <del> @name_calls = 0 <del> end <add> memoize :name, :age <add> end <ide> <del> def name <del> @name_calls += 1 <del> "37signals" <del> end <add> class Company <add> attr_reader :name_calls <add> def initialize <add> @name_calls = 0 <ide> end <ide> <del> module Rates <del> extend ActiveSupport::Memoizable <del> <del> attr_reader :sales_tax_calls <del> def sales_tax(price) <del> @sales_tax_calls ||= 0 <del> @sales_tax_calls += 1 <del> price * 0.1025 <del> end <del> memoize :sales_tax <add> def name <add> @name_calls += 1 <add> "37signals" <ide> end <add> end <ide> <del> class Calculator <del> extend ActiveSupport::Memoizable <del> include Rates <add> module Rates <add> extend ActiveSupport::Memoizable <ide> <del> attr_reader :fib_calls <del> def initialize <del> @fib_calls = 0 <del> end <add> attr_reader :sales_tax_calls <add> def sales_tax(price) <add> @sales_tax_calls ||= 0 <add> @sales_tax_calls += 1 <add> price * 0.1025 <add> end <add> memoize :sales_tax <add> end <ide> <del> def fib(n) <del> @fib_calls += 1 <add> class Calculator <add> extend ActiveSupport::Memoizable <add> include Rates <ide> <del> if n == 0 || n == 1 <del> n <del> else <del> fib(n - 1) + fib(n - 2) <del> end <del> end <del> memoize :fib <add> attr_reader :fib_calls <add> def initialize <add> @fib_calls = 0 <add> end <add> <add> def fib(n) <add> @fib_calls += 1 <ide> <del> def counter <del> @count ||= 0 <del> @count += 1 <add> if n == 0 || n == 1 <add> n <add> else <add> fib(n - 1) + fib(n - 2) <ide> end <del> memoize :counter <ide> end <add> memoize :fib <ide> <del> def setup <del> @person = Person.new <del> @calculator = Calculator.new <add> def counter <add> @count ||= 0 <add> @count += 1 <ide> end <add> memoize :counter <add> end <ide> <del> def test_memoization <del> assert_equal "Josh", @person.name <del> assert_equal 1, @person.name_calls <add> def setup <add> @person = Person.new <add> @calculator = Calculator.new <add> end <ide> <del> 3.times { assert_equal "Josh", @person.name } <del> assert_equal 1, @person.name_calls <del> end <add> def test_memoization <add> assert_equal "Josh", @person.name <add> assert_equal 1, @person.name_calls <ide> <del> def test_memoization_with_punctuation <del> assert_equal true, @person.name? <add> 3.times { assert_equal "Josh", @person.name } <add> assert_equal 1, @person.name_calls <add> end <ide> <del> assert_nothing_raised(NameError) do <del> @person.memoize_all <del> @person.unmemoize_all <del> end <add> def test_memoization_with_punctuation <add> assert_equal true, @person.name? 
<add> <add> assert_nothing_raised(NameError) do <add> @person.memoize_all <add> @person.unmemoize_all <ide> end <add> end <ide> <del> def test_memoization_with_nil_value <del> assert_equal nil, @person.age <del> assert_equal 1, @person.age_calls <add> def test_memoization_with_nil_value <add> assert_equal nil, @person.age <add> assert_equal 1, @person.age_calls <ide> <del> 3.times { assert_equal nil, @person.age } <del> assert_equal 1, @person.age_calls <del> end <add> 3.times { assert_equal nil, @person.age } <add> assert_equal 1, @person.age_calls <add> end <ide> <del> def test_memorized_results_are_immutable <del> assert_equal "Josh", @person.name <del> assert_raise(ActiveSupport::FrozenObjectError) { @person.name.gsub!("Josh", "Gosh") } <del> end <add> def test_memorized_results_are_immutable <add> assert_equal "Josh", @person.name <add> assert_raise(ActiveSupport::FrozenObjectError) { @person.name.gsub!("Josh", "Gosh") } <add> end <ide> <del> def test_reloadable <del> counter = @calculator.counter <del> assert_equal 1, @calculator.counter <del> assert_equal 2, @calculator.counter(:reload) <del> assert_equal 2, @calculator.counter <del> assert_equal 3, @calculator.counter(true) <del> assert_equal 3, @calculator.counter <del> end <add> def test_reloadable <add> counter = @calculator.counter <add> assert_equal 1, @calculator.counter <add> assert_equal 2, @calculator.counter(:reload) <add> assert_equal 2, @calculator.counter <add> assert_equal 3, @calculator.counter(true) <add> assert_equal 3, @calculator.counter <add> end <ide> <del> def test_unmemoize_all <del> assert_equal 1, @calculator.counter <add> def test_unmemoize_all <add> assert_equal 1, @calculator.counter <ide> <del> assert @calculator.instance_variable_get(:@_memoized_counter).any? <del> @calculator.unmemoize_all <del> assert @calculator.instance_variable_get(:@_memoized_counter).empty? <add> assert @calculator.instance_variable_get(:@_memoized_counter).any? <add> @calculator.unmemoize_all <add> assert @calculator.instance_variable_get(:@_memoized_counter).empty? 
<ide> <del> assert_equal 2, @calculator.counter <del> end <add> assert_equal 2, @calculator.counter <add> end <ide> <del> def test_memoize_all <del> @calculator.memoize_all <del> assert @calculator.instance_variable_defined?(:@_memoized_counter) <del> end <add> def test_memoize_all <add> @calculator.memoize_all <add> assert @calculator.instance_variable_defined?(:@_memoized_counter) <add> end <ide> <del> def test_memoization_cache_is_different_for_each_instance <del> assert_equal 1, @calculator.counter <del> assert_equal 2, @calculator.counter(:reload) <del> assert_equal 1, Calculator.new.counter <del> end <add> def test_memoization_cache_is_different_for_each_instance <add> assert_equal 1, @calculator.counter <add> assert_equal 2, @calculator.counter(:reload) <add> assert_equal 1, Calculator.new.counter <add> end <ide> <del> def test_memoized_is_not_affected_by_freeze <del> @person.freeze <del> assert_equal "Josh", @person.name <del> assert_equal "Joshua", @person.update("Joshua") <del> end <add> def test_memoized_is_not_affected_by_freeze <add> @person.freeze <add> assert_equal "Josh", @person.name <add> assert_equal "Joshua", @person.update("Joshua") <add> end <ide> <del> def test_memoization_with_args <del> assert_equal 55, @calculator.fib(10) <del> assert_equal 11, @calculator.fib_calls <del> end <add> def test_memoization_with_args <add> assert_equal 55, @calculator.fib(10) <add> assert_equal 11, @calculator.fib_calls <add> end <ide> <del> def test_reloadable_with_args <del> assert_equal 55, @calculator.fib(10) <del> assert_equal 11, @calculator.fib_calls <del> assert_equal 55, @calculator.fib(10, :reload) <del> assert_equal 12, @calculator.fib_calls <del> assert_equal 55, @calculator.fib(10, true) <del> assert_equal 13, @calculator.fib_calls <del> end <add> def test_reloadable_with_args <add> assert_equal 55, @calculator.fib(10) <add> assert_equal 11, @calculator.fib_calls <add> assert_equal 55, @calculator.fib(10, :reload) <add> assert_equal 12, @calculator.fib_calls <add> assert_equal 55, @calculator.fib(10, true) <add> assert_equal 13, @calculator.fib_calls <add> end <ide> <del> def test_object_memoization <del> [Company.new, Company.new, Company.new].each do |company| <del> company.extend ActiveSupport::Memoizable <del> company.memoize :name <add> def test_object_memoization <add> [Company.new, Company.new, Company.new].each do |company| <add> company.extend ActiveSupport::Memoizable <add> company.memoize :name <ide> <del> assert_equal "37signals", company.name <del> assert_equal 1, company.name_calls <del> assert_equal "37signals", company.name <del> assert_equal 1, company.name_calls <del> end <add> assert_equal "37signals", company.name <add> assert_equal 1, company.name_calls <add> assert_equal "37signals", company.name <add> assert_equal 1, company.name_calls <ide> end <add> end <ide> <del> def test_memoized_module_methods <del> assert_equal 1.025, @calculator.sales_tax(10) <del> assert_equal 1, @calculator.sales_tax_calls <del> assert_equal 1.025, @calculator.sales_tax(10) <del> assert_equal 1, @calculator.sales_tax_calls <del> assert_equal 2.5625, @calculator.sales_tax(25) <del> assert_equal 2, @calculator.sales_tax_calls <del> end <add> def test_memoized_module_methods <add> assert_equal 1.025, @calculator.sales_tax(10) <add> assert_equal 1, @calculator.sales_tax_calls <add> assert_equal 1.025, @calculator.sales_tax(10) <add> assert_equal 1, @calculator.sales_tax_calls <add> assert_equal 2.5625, @calculator.sales_tax(25) <add> assert_equal 2, @calculator.sales_tax_calls 
<add> end <ide> <del> def test_object_memoized_module_methods <del> company = Company.new <del> company.extend(Rates) <add> def test_object_memoized_module_methods <add> company = Company.new <add> company.extend(Rates) <ide> <del> assert_equal 1.025, company.sales_tax(10) <del> assert_equal 1, company.sales_tax_calls <del> assert_equal 1.025, company.sales_tax(10) <del> assert_equal 1, company.sales_tax_calls <del> assert_equal 2.5625, company.sales_tax(25) <del> assert_equal 2, company.sales_tax_calls <del> end <add> assert_equal 1.025, company.sales_tax(10) <add> assert_equal 1, company.sales_tax_calls <add> assert_equal 1.025, company.sales_tax(10) <add> assert_equal 1, company.sales_tax_calls <add> assert_equal 2.5625, company.sales_tax(25) <add> assert_equal 2, company.sales_tax_calls <add> end <ide> <del> def test_double_memoization <del> assert_raise(RuntimeError) { Person.memoize :name } <del> person = Person.new <del> person.extend ActiveSupport::Memoizable <del> assert_raise(RuntimeError) { person.memoize :name } <add> def test_double_memoization <add> assert_raise(RuntimeError) { Person.memoize :name } <add> person = Person.new <add> person.extend ActiveSupport::Memoizable <add> assert_raise(RuntimeError) { person.memoize :name } <ide> <del> company = Company.new <del> company.extend ActiveSupport::Memoizable <del> company.memoize :name <del> assert_raise(RuntimeError) { company.memoize :name } <del> end <add> company = Company.new <add> company.extend ActiveSupport::Memoizable <add> company.memoize :name <add> assert_raise(RuntimeError) { company.memoize :name } <ide> end <ide> end <ide><path>activesupport/test/multibyte_unicode_database_test.rb <ide> # encoding: utf-8 <del> <ide> require 'abstract_unit' <ide> <del>uses_mocha "MultibyteUnicodeDatabaseTest" do <del> <ide> class MultibyteUnicodeDatabaseTest < Test::Unit::TestCase <del> <ide> def setup <ide> @ucd = ActiveSupport::Multibyte::UnicodeDatabase.new <ide> end <ide> def test_load <ide> end <ide> end <ide> end <del> <del>end <ide>\ No newline at end of file <ide><path>activesupport/test/time_zone_test.rb <ide> def test_from_duration_to_map <ide> end <ide> end <ide> <del> uses_mocha 'TestTimeZoneNowAndToday' do <del> def test_now <del> with_env_tz 'US/Eastern' do <del> Time.stubs(:now).returns(Time.local(2000)) <del> zone = ActiveSupport::TimeZone['Eastern Time (US & Canada)'] <del> assert_instance_of ActiveSupport::TimeWithZone, zone.now <del> assert_equal Time.utc(2000,1,1,5), zone.now.utc <del> assert_equal Time.utc(2000), zone.now.time <del> assert_equal zone, zone.now.time_zone <del> end <add> def test_now <add> with_env_tz 'US/Eastern' do <add> Time.stubs(:now).returns(Time.local(2000)) <add> zone = ActiveSupport::TimeZone['Eastern Time (US & Canada)'] <add> assert_instance_of ActiveSupport::TimeWithZone, zone.now <add> assert_equal Time.utc(2000,1,1,5), zone.now.utc <add> assert_equal Time.utc(2000), zone.now.time <add> assert_equal zone, zone.now.time_zone <ide> end <add> end <ide> <del> def test_now_enforces_spring_dst_rules <del> with_env_tz 'US/Eastern' do <del> Time.stubs(:now).returns(Time.local(2006,4,2,2)) # 2AM springs forward to 3AM <del> zone = ActiveSupport::TimeZone['Eastern Time (US & Canada)'] <del> assert_equal Time.utc(2006,4,2,3), zone.now.time <del> assert_equal true, zone.now.dst? 
<del> end <add> def test_now_enforces_spring_dst_rules <add> with_env_tz 'US/Eastern' do <add> Time.stubs(:now).returns(Time.local(2006,4,2,2)) # 2AM springs forward to 3AM <add> zone = ActiveSupport::TimeZone['Eastern Time (US & Canada)'] <add> assert_equal Time.utc(2006,4,2,3), zone.now.time <add> assert_equal true, zone.now.dst? <ide> end <add> end <ide> <del> def test_now_enforces_fall_dst_rules <del> with_env_tz 'US/Eastern' do <del> Time.stubs(:now).returns(Time.at(1162098000)) # equivalent to 1AM DST <del> zone = ActiveSupport::TimeZone['Eastern Time (US & Canada)'] <del> assert_equal Time.utc(2006,10,29,1), zone.now.time <del> assert_equal true, zone.now.dst? <del> end <add> def test_now_enforces_fall_dst_rules <add> with_env_tz 'US/Eastern' do <add> Time.stubs(:now).returns(Time.at(1162098000)) # equivalent to 1AM DST <add> zone = ActiveSupport::TimeZone['Eastern Time (US & Canada)'] <add> assert_equal Time.utc(2006,10,29,1), zone.now.time <add> assert_equal true, zone.now.dst? <ide> end <add> end <ide> <del> def test_today <del> Time.stubs(:now).returns(Time.utc(2000, 1, 1, 4, 59, 59)) # 1 sec before midnight Jan 1 EST <del> assert_equal Date.new(1999, 12, 31), ActiveSupport::TimeZone['Eastern Time (US & Canada)'].today <del> Time.stubs(:now).returns(Time.utc(2000, 1, 1, 5)) # midnight Jan 1 EST <del> assert_equal Date.new(2000, 1, 1), ActiveSupport::TimeZone['Eastern Time (US & Canada)'].today <del> Time.stubs(:now).returns(Time.utc(2000, 1, 2, 4, 59, 59)) # 1 sec before midnight Jan 2 EST <del> assert_equal Date.new(2000, 1, 1), ActiveSupport::TimeZone['Eastern Time (US & Canada)'].today <del> Time.stubs(:now).returns(Time.utc(2000, 1, 2, 5)) # midnight Jan 2 EST <del> assert_equal Date.new(2000, 1, 2), ActiveSupport::TimeZone['Eastern Time (US & Canada)'].today <del> end <add> def test_today <add> Time.stubs(:now).returns(Time.utc(2000, 1, 1, 4, 59, 59)) # 1 sec before midnight Jan 1 EST <add> assert_equal Date.new(1999, 12, 31), ActiveSupport::TimeZone['Eastern Time (US & Canada)'].today <add> Time.stubs(:now).returns(Time.utc(2000, 1, 1, 5)) # midnight Jan 1 EST <add> assert_equal Date.new(2000, 1, 1), ActiveSupport::TimeZone['Eastern Time (US & Canada)'].today <add> Time.stubs(:now).returns(Time.utc(2000, 1, 2, 4, 59, 59)) # 1 sec before midnight Jan 2 EST <add> assert_equal Date.new(2000, 1, 1), ActiveSupport::TimeZone['Eastern Time (US & Canada)'].today <add> Time.stubs(:now).returns(Time.utc(2000, 1, 2, 5)) # midnight Jan 2 EST <add> assert_equal Date.new(2000, 1, 2), ActiveSupport::TimeZone['Eastern Time (US & Canada)'].today <ide> end <ide> <ide> def test_local <ide> def test_parse_returns_nil_when_string_without_date_information_is_passed_in <ide> end <ide> end <ide> <del> uses_mocha 'TestParseWithIncompleteDate' do <del> def test_parse_with_incomplete_date <del> zone = ActiveSupport::TimeZone['Eastern Time (US & Canada)'] <del> zone.stubs(:now).returns zone.local(1999,12,31) <del> twz = zone.parse('19:00:00') <del> assert_equal Time.utc(1999,12,31,19), twz.time <del> end <add> def test_parse_with_incomplete_date <add> zone = ActiveSupport::TimeZone['Eastern Time (US & Canada)'] <add> zone.stubs(:now).returns zone.local(1999,12,31) <add> twz = zone.parse('19:00:00') <add> assert_equal Time.utc(1999,12,31,19), twz.time <ide> end <ide> <ide> def test_utc_offset_lazy_loaded_from_tzinfo_when_not_passed_in_to_initialize
14
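The reindented tests above exercise ActiveSupport::Memoizable. A minimal Ruby sketch of the API those tests rely on (the WeatherStation class and its reading method are made up for illustration; the module itself was deprecated and removed in later Rails versions, so this reflects the era of the patch):

require 'active_support/memoizable'

class WeatherStation
  extend ActiveSupport::Memoizable

  def reading
    @calls = (@calls || 0) + 1   # stand-in for an expensive lookup
    "reading ##{@calls}"
  end
  memoize :reading
end

station = WeatherStation.new
station.reading           # => "reading #1"  (computed)
station.reading           # => "reading #1"  (served from the memoization cache)
station.reading(:reload)  # => "reading #2"  (cache bypassed and refreshed, as in the :reload tests above)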
Javascript
Javascript
replace flushdiscreteupdates with flushsync
32eefcb3c5131f4d77a2195ff11a00a2513cf62f
<ide><path>packages/react-dom/src/__tests__/ReactDOMFiber-test.js <ide> describe('ReactDOMFiber', () => { <ide> expect(ops).toEqual(['A']); <ide> <ide> if (__DEV__) { <del> const errorCalls = console.error.calls.count(); <add> expect(console.error.calls.count()).toBe(2); <ide> expect(console.error.calls.argsFor(0)[0]).toMatch( <ide> 'ReactDOM.render is no longer supported in React 18', <ide> ); <ide> expect(console.error.calls.argsFor(1)[0]).toMatch( <ide> 'ReactDOM.render is no longer supported in React 18', <ide> ); <del> // TODO: this warning shouldn't be firing in the first place if user didn't call it. <del> for (let i = 2; i < errorCalls; i++) { <del> expect(console.error.calls.argsFor(i)[0]).toMatch( <del> 'unstable_flushDiscreteUpdates: Cannot flush updates when React is already rendering.', <del> ); <del> } <ide> } <ide> }); <ide> <ide><path>packages/react-dom/src/client/ReactDOM.js <ide> import {createEventHandle} from './ReactDOMEventHandle'; <ide> import { <ide> batchedUpdates, <ide> discreteUpdates, <del> flushDiscreteUpdates, <ide> flushSync, <add> flushSyncWithoutWarningIfAlreadyRendering, <ide> flushControlled, <ide> injectIntoDevTools, <ide> attemptSynchronousHydration, <ide> setRestoreImplementation(restoreControlledState); <ide> setBatchingImplementation( <ide> batchedUpdates, <ide> discreteUpdates, <del> flushDiscreteUpdates, <add> flushSyncWithoutWarningIfAlreadyRendering, <ide> ); <ide> <ide> function createPortal( <ide><path>packages/react-dom/src/events/ReactDOMUpdateBatching.js <ide> let batchedUpdatesImpl = function(fn, bookkeeping) { <ide> let discreteUpdatesImpl = function(fn, a, b, c, d) { <ide> return fn(a, b, c, d); <ide> }; <del>let flushDiscreteUpdatesImpl = function() {}; <add>let flushSyncImpl = function() {}; <ide> <ide> let isInsideEventHandler = false; <ide> <ide> function finishEventHandler() { <ide> // bails out of the update without touching the DOM. <ide> // TODO: Restore state in the microtask, after the discrete updates flush, <ide> // instead of early flushing them here. 
<del> flushDiscreteUpdatesImpl(); <add> flushSyncImpl(); <ide> restoreStateIfNeeded(); <ide> } <ide> } <ide> export function discreteUpdates(fn, a, b, c, d) { <ide> export function setBatchingImplementation( <ide> _batchedUpdatesImpl, <ide> _discreteUpdatesImpl, <del> _flushDiscreteUpdatesImpl, <add> _flushSyncImpl, <ide> ) { <ide> batchedUpdatesImpl = _batchedUpdatesImpl; <ide> discreteUpdatesImpl = _discreteUpdatesImpl; <del> flushDiscreteUpdatesImpl = _flushDiscreteUpdatesImpl; <add> flushSyncImpl = _flushSyncImpl; <ide> } <ide><path>packages/react-noop-renderer/src/ReactNoop.js <ide> export const { <ide> unbatchedUpdates, <ide> discreteUpdates, <ide> idleUpdates, <del> flushDiscreteUpdates, <ide> flushSync, <ide> flushPassiveEffects, <ide> act, <ide><path>packages/react-noop-renderer/src/createReactNoop.js <ide> function createReactNoop(reconciler: Function, useMutation: boolean) { <ide> } <ide> }, <ide> <del> flushDiscreteUpdates: NoopRenderer.flushDiscreteUpdates, <del> <ide> flushSync(fn: () => mixed) { <ide> NoopRenderer.flushSync(fn); <ide> }, <ide><path>packages/react-reconciler/src/ReactFiberReconciler.js <ide> import { <ide> unbatchedUpdates as unbatchedUpdates_old, <ide> deferredUpdates as deferredUpdates_old, <ide> discreteUpdates as discreteUpdates_old, <del> flushDiscreteUpdates as flushDiscreteUpdates_old, <ide> flushControlled as flushControlled_old, <ide> flushSync as flushSync_old, <add> flushSyncWithoutWarningIfAlreadyRendering as flushSyncWithoutWarningIfAlreadyRendering_old, <ide> flushPassiveEffects as flushPassiveEffects_old, <ide> getPublicRootInstance as getPublicRootInstance_old, <ide> attemptSynchronousHydration as attemptSynchronousHydration_old, <ide> import { <ide> unbatchedUpdates as unbatchedUpdates_new, <ide> deferredUpdates as deferredUpdates_new, <ide> discreteUpdates as discreteUpdates_new, <del> flushDiscreteUpdates as flushDiscreteUpdates_new, <ide> flushControlled as flushControlled_new, <ide> flushSync as flushSync_new, <add> flushSyncWithoutWarningIfAlreadyRendering as flushSyncWithoutWarningIfAlreadyRendering_new, <ide> flushPassiveEffects as flushPassiveEffects_new, <ide> getPublicRootInstance as getPublicRootInstance_new, <ide> attemptSynchronousHydration as attemptSynchronousHydration_new, <ide> export const deferredUpdates = enableNewReconciler <ide> export const discreteUpdates = enableNewReconciler <ide> ? discreteUpdates_new <ide> : discreteUpdates_old; <del>export const flushDiscreteUpdates = enableNewReconciler <del> ? flushDiscreteUpdates_new <del> : flushDiscreteUpdates_old; <ide> export const flushControlled = enableNewReconciler <ide> ? flushControlled_new <ide> : flushControlled_old; <ide> export const flushSync = enableNewReconciler ? flushSync_new : flushSync_old; <add>export const flushSyncWithoutWarningIfAlreadyRendering = enableNewReconciler <add> ? flushSyncWithoutWarningIfAlreadyRendering_new <add> : flushSyncWithoutWarningIfAlreadyRendering_old; <ide> export const flushPassiveEffects = enableNewReconciler <ide> ? 
flushPassiveEffects_new <ide> : flushPassiveEffects_old; <ide><path>packages/react-reconciler/src/ReactFiberReconciler.new.js <ide> import { <ide> flushControlled, <ide> deferredUpdates, <ide> discreteUpdates, <del> flushDiscreteUpdates, <add> flushSyncWithoutWarningIfAlreadyRendering, <ide> flushPassiveEffects, <ide> } from './ReactFiberWorkLoop.new'; <ide> import { <ide> export { <ide> unbatchedUpdates, <ide> deferredUpdates, <ide> discreteUpdates, <del> flushDiscreteUpdates, <ide> flushControlled, <ide> flushSync, <add> flushSyncWithoutWarningIfAlreadyRendering, <ide> flushPassiveEffects, <ide> }; <ide> <ide><path>packages/react-reconciler/src/ReactFiberReconciler.old.js <ide> import { <ide> flushControlled, <ide> deferredUpdates, <ide> discreteUpdates, <del> flushDiscreteUpdates, <add> flushSyncWithoutWarningIfAlreadyRendering, <ide> flushPassiveEffects, <ide> } from './ReactFiberWorkLoop.old'; <ide> import { <ide> export { <ide> unbatchedUpdates, <ide> deferredUpdates, <ide> discreteUpdates, <del> flushDiscreteUpdates, <ide> flushControlled, <ide> flushSync, <add> flushSyncWithoutWarningIfAlreadyRendering, <ide> flushPassiveEffects, <ide> }; <ide> <ide><path>packages/react-reconciler/src/ReactFiberWorkLoop.new.js <ide> export function getExecutionContext(): ExecutionContext { <ide> return executionContext; <ide> } <ide> <del>export function flushDiscreteUpdates() { <del> // TODO: Should be able to flush inside batchedUpdates, but not inside `act`. <del> // However, `act` uses `batchedUpdates`, so there's no way to distinguish <del> // those two cases. Need to fix this before exposing flushDiscreteUpdates <del> // as a public API. <del> if ( <del> (executionContext & (BatchedContext | RenderContext | CommitContext)) !== <del> NoContext <del> ) { <del> if (__DEV__) { <del> if ((executionContext & RenderContext) !== NoContext) { <del> console.error( <del> 'unstable_flushDiscreteUpdates: Cannot flush updates when React is ' + <del> 'already rendering.', <del> ); <del> } <del> } <del> // We're already rendering, so we can't synchronously flush pending work. <del> // This is probably a nested event dispatch triggered by a lifecycle/effect, <del> // like `el.focus()`. Exit. <del> return; <del> } <del> flushSyncCallbacks(); <del> // If the discrete updates scheduled passive effects, flush them now so that <del> // they fire before the next serial event. <del> flushPassiveEffects(); <del>} <del> <ide> export function deferredUpdates<A>(fn: () => A): A { <ide> const previousPriority = getCurrentUpdatePriority(); <ide> const prevTransition = ReactCurrentBatchConfig.transition; <ide> export function unbatchedUpdates<A, R>(fn: (a: A) => R, a: A): R { <ide> } <ide> } <ide> <del>export function flushSync<A, R>(fn: A => R, a: A): R { <add>export function flushSyncWithoutWarningIfAlreadyRendering<A, R>( <add> fn: A => R, <add> a: A, <add>): R { <ide> const prevExecutionContext = executionContext; <ide> executionContext |= BatchedContext; <ide> <ide> export function flushSync<A, R>(fn: A => R, a: A): R { <ide> // the stack. <ide> if ((executionContext & (RenderContext | CommitContext)) === NoContext) { <ide> flushSyncCallbacks(); <del> } else { <del> if (__DEV__) { <del> console.error( <del> 'flushSync was called from inside a lifecycle method. React cannot ' + <del> 'flush when React is already rendering. 
Consider moving this call to ' + <del> 'a scheduler task or micro task.', <del> ); <del> } <ide> } <ide> } <ide> } <ide> <add>export function flushSync<A, R>(fn: A => R, a: A): R { <add> if (__DEV__) { <add> if ((executionContext & (RenderContext | CommitContext)) !== NoContext) { <add> console.error( <add> 'flushSync was called from inside a lifecycle method. React cannot ' + <add> 'flush when React is already rendering. Consider moving this call to ' + <add> 'a scheduler task or micro task.', <add> ); <add> } <add> } <add> return flushSyncWithoutWarningIfAlreadyRendering(fn, a); <add>} <add> <ide> export function flushControlled(fn: () => mixed): void { <ide> const prevExecutionContext = executionContext; <ide> executionContext |= BatchedContext; <ide><path>packages/react-reconciler/src/ReactFiberWorkLoop.old.js <ide> export function getExecutionContext(): ExecutionContext { <ide> return executionContext; <ide> } <ide> <del>export function flushDiscreteUpdates() { <del> // TODO: Should be able to flush inside batchedUpdates, but not inside `act`. <del> // However, `act` uses `batchedUpdates`, so there's no way to distinguish <del> // those two cases. Need to fix this before exposing flushDiscreteUpdates <del> // as a public API. <del> if ( <del> (executionContext & (BatchedContext | RenderContext | CommitContext)) !== <del> NoContext <del> ) { <del> if (__DEV__) { <del> if ((executionContext & RenderContext) !== NoContext) { <del> console.error( <del> 'unstable_flushDiscreteUpdates: Cannot flush updates when React is ' + <del> 'already rendering.', <del> ); <del> } <del> } <del> // We're already rendering, so we can't synchronously flush pending work. <del> // This is probably a nested event dispatch triggered by a lifecycle/effect, <del> // like `el.focus()`. Exit. <del> return; <del> } <del> flushSyncCallbacks(); <del> // If the discrete updates scheduled passive effects, flush them now so that <del> // they fire before the next serial event. <del> flushPassiveEffects(); <del>} <del> <ide> export function deferredUpdates<A>(fn: () => A): A { <ide> const previousPriority = getCurrentUpdatePriority(); <ide> const prevTransition = ReactCurrentBatchConfig.transition; <ide> export function unbatchedUpdates<A, R>(fn: (a: A) => R, a: A): R { <ide> } <ide> } <ide> <del>export function flushSync<A, R>(fn: A => R, a: A): R { <add>export function flushSyncWithoutWarningIfAlreadyRendering<A, R>( <add> fn: A => R, <add> a: A, <add>): R { <ide> const prevExecutionContext = executionContext; <ide> executionContext |= BatchedContext; <ide> <ide> export function flushSync<A, R>(fn: A => R, a: A): R { <ide> // the stack. <ide> if ((executionContext & (RenderContext | CommitContext)) === NoContext) { <ide> flushSyncCallbacks(); <del> } else { <del> if (__DEV__) { <del> console.error( <del> 'flushSync was called from inside a lifecycle method. React cannot ' + <del> 'flush when React is already rendering. Consider moving this call to ' + <del> 'a scheduler task or micro task.', <del> ); <del> } <ide> } <ide> } <ide> } <ide> <add>export function flushSync<A, R>(fn: A => R, a: A): R { <add> if (__DEV__) { <add> if ((executionContext & (RenderContext | CommitContext)) !== NoContext) { <add> console.error( <add> 'flushSync was called from inside a lifecycle method. React cannot ' + <add> 'flush when React is already rendering. 
Consider moving this call to ' + <add> 'a scheduler task or micro task.', <add> ); <add> } <add> } <add> return flushSyncWithoutWarningIfAlreadyRendering(fn, a); <add>} <add> <ide> export function flushControlled(fn: () => mixed): void { <ide> const prevExecutionContext = executionContext; <ide> executionContext |= BatchedContext; <ide><path>packages/react-reconciler/src/__tests__/ReactFlushSync-test.js <ide> describe('ReactFlushSync', () => { <ide> // Effect flushes after paint. <ide> expect(Scheduler).toHaveYielded(['Effect']); <ide> }); <add> <add> test('does not flush pending passive effects', async () => { <add> function App() { <add> useEffect(() => { <add> Scheduler.unstable_yieldValue('Effect'); <add> }, []); <add> return <Text text="Child" />; <add> } <add> <add> const root = ReactNoop.createRoot(); <add> await act(async () => { <add> root.render(<App />); <add> expect(Scheduler).toFlushUntilNextPaint(['Child']); <add> expect(root).toMatchRenderedOutput('Child'); <add> <add> // Passive effects are pending. Calling flushSync should not affect them. <add> ReactNoop.flushSync(); <add> // Effects still haven't fired. <add> expect(Scheduler).toHaveYielded([]); <add> }); <add> // Now the effects have fired. <add> expect(Scheduler).toHaveYielded(['Effect']); <add> }); <ide> });
11
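This commit folds the old flushDiscreteUpdates path into flushSync and moves the already-rendering warning into the public flushSync wrapper, while the event plumbing now calls flushSyncWithoutWarningIfAlreadyRendering so React's own internal flushes do not warn. For context, a small sketch of the public API a caller sees; the component, state, and ref names below are illustrative, not from the patch:

import * as React from 'react';
import { flushSync } from 'react-dom';

function TodoList() {
  const [items, setItems] = React.useState([]);
  const listRef = React.useRef(null);

  function handleAdd() {
    // Flush this update synchronously so the new <li> is in the DOM
    // before we try to scroll it into view on the next line.
    flushSync(() => {
      setItems(prev => [...prev, `item ${prev.length + 1}`]);
    });
    listRef.current.lastElementChild.scrollIntoView();
  }

  return (
    <div>
      <button onClick={handleAdd}>Add</button>
      <ul ref={listRef}>
        {items.map(text => <li key={text}>{text}</li>)}
      </ul>
    </div>
  );
}

Calling flushSync while React is already rendering or committing now triggers the DEV-only warning shown in the diff, instead of the removed unstable_flushDiscreteUpdates message.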
Text
Text
replace function with arrow function in vm.md
c9da77405133a2934fdb19c349cc3ed2a6bc1a33
<ide><path>doc/api/vm.md <ide> to the `http` module passed to it. For instance: <ide> const vm = require('vm'); <ide> <ide> const code = ` <del>(function(require) { <add>((require) => { <ide> const http = require('http'); <ide> <ide> http.createServer((request, response) => {
1
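The doc change above only touches the code string itself. A fuller sketch of how such a string is typically compiled and invoked, assuming the same pattern of passing require into the evaluated function; the original vm.md example builds an http server, and os.platform() is used here only to keep the sketch side-effect free:

const vm = require('vm');

const code = `
((require) => {
  const os = require('os');
  return os.platform();
})
`;

// runInThisContext evaluates the string and returns its completion value,
// here the arrow function, which we then call with our own require.
const fn = vm.runInThisContext(code);
console.log(fn(require)); // e.g. 'linux' or 'darwin'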
Ruby
Ruby
fix defaults for database configs
5b9e96d38f5f83097f0cc46a45f53d90e83d3a0b
<ide><path>activerecord/lib/active_record/database_configurations/hash_config.rb <ide> def database <ide> end <ide> <ide> def pool <del> configuration_hash.fetch(:pool, 5).to_i <add> (configuration_hash[:pool] || 5).to_i <ide> end <ide> <ide> def checkout_timeout <del> configuration_hash.fetch(:checkout_timeout, 5).to_f <add> (configuration_hash[:checkout_timeout] || 5).to_f <ide> end <ide> <ide> # +reaping_frequency+ is configurable mostly for historical reasons, but it could <ide> # also be useful if someone wants a very low +idle_timeout+. <ide> def reaping_frequency <del> configuration_hash.fetch(:reaping_frequency, 60).to_f <add> configuration_hash.fetch(:reaping_frequency, 60)&.to_f <ide> end <ide> <ide> def idle_timeout <ide><path>activerecord/test/cases/database_configurations/hash_config_test.rb <add># frozen_string_literal: true <add> <add>require "cases/helper" <add> <add>module ActiveRecord <add> class DatabaseConfigurations <add> class HashConfigTest < ActiveRecord::TestCase <add> def test_pool_default_when_nil <add> config = HashConfig.new("default_env", "primary", pool: nil) <add> assert_equal 5, config.pool <add> end <add> <add> def test_pool_overrides_with_value <add> config = HashConfig.new("default_env", "primary", pool: "0") <add> assert_equal 0, config.pool <add> end <add> <add> def test_when_no_pool_uses_default <add> config = HashConfig.new("default_env", "primary", {}) <add> assert_equal 5, config.pool <add> end <add> <add> def test_checkout_timeout_default_when_nil <add> config = HashConfig.new("default_env", "primary", checkout_timeout: nil) <add> assert_equal 5.0, config.checkout_timeout <add> end <add> <add> def test_checkout_timeout_overrides_with_value <add> config = HashConfig.new("default_env", "primary", checkout_timeout: "0") <add> assert_equal 0.0, config.checkout_timeout <add> end <add> <add> def test_when_no_checkout_timeout_uses_default <add> config = HashConfig.new("default_env", "primary", {}) <add> assert_equal 5.0, config.checkout_timeout <add> end <add> <add> def test_reaping_frequency_default_when_nil <add> config = HashConfig.new("default_env", "primary", reaping_frequency: nil) <add> assert_nil config.reaping_frequency <add> end <add> <add> def test_reaping_frequency_overrides_with_value <add> config = HashConfig.new("default_env", "primary", reaping_frequency: "0") <add> assert_equal 0.0, config.reaping_frequency <add> end <add> <add> def test_when_no_reaping_frequency_uses_default <add> config = HashConfig.new("default_env", "primary", {}) <add> assert_equal 60.0, config.reaping_frequency <add> end <add> <add> def test_idle_timeout_default_when_nil <add> config = HashConfig.new("default_env", "primary", idle_timeout: nil) <add> assert_nil config.idle_timeout <add> end <add> <add> def test_idle_timeout_overrides_with_value <add> config = HashConfig.new("default_env", "primary", idle_timeout: "1") <add> assert_equal 1.0, config.idle_timeout <add> end <add> <add> def test_when_no_idle_timeout_uses_default <add> config = HashConfig.new("default_env", "primary", {}) <add> assert_equal 300.0, config.idle_timeout <add> end <add> <add> def test_idle_timeout_nil_when_less_than_or_equal_to_zero <add> config = HashConfig.new("default_env", "primary", idle_timeout: "0") <add> assert_nil config.idle_timeout <add> end <add> end <add> end <add>end
2
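The swap from Hash#fetch to a nil-guard is the whole point of this fix: fetch only falls back to its default when the key is absent, so an explicitly blank value in database.yml (a bare "pool:" line, which YAML loads as nil) used to slip through and become 0. A plain-Ruby illustration:

config = { pool: nil }         # "pool:" left blank in database.yml

config.fetch(:pool, 5)         # => nil  (key exists, so the default is ignored)
config.fetch(:pool, 5).to_i    # => 0    (old behaviour: a zero-sized pool)

(config[:pool] || 5)           # => 5
(config[:pool] || 5).to_i      # => 5    (new behaviour: blank falls back to the default)

reaping_frequency is the one exception: it keeps &.to_f so that an explicit nil stays nil and reaping can be disabled outright, which the new HashConfigTest pins down.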
Javascript
Javascript
remove json check for jscs config
1514dca501fd383a9cdb6b42cce7c71fdad1ee38
<ide><path>Gruntfile.js <ide> module.exports = function( grunt ) { <ide> src: [ "package.json" ] <ide> }, <ide> <del> jscs: { <del> src: [ ".jscs.json" ] <del> }, <del> <ide> bower: { <ide> src: [ "bower.json" ] <ide> }
1
Python
Python
codebase improvements on fileuploadparser
e36e4f48ad481b4303e68ed524677add07b224f7
<ide><path>rest_framework/parsers.py <ide> class FileUploadParser(BaseParser): <ide> media_type = '*/*' <ide> <ide> def parse(self, stream, media_type=None, parser_context=None): <add> """ <add> Returns a DataAndFiles object. <add> <add> `.data` will be None (we expect request body to be a file content). <add> `.files` will be a `QueryDict` containing one 'file' elemnt - a parsed file. <add> """ <add> <ide> parser_context = parser_context or {} <ide> request = parser_context['request'] <ide> encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET) <ide> meta = request.META <del> <del> try: <del> disposition = parse_header(meta['HTTP_CONTENT_DISPOSITION']) <del> filename = disposition[1]['filename'] <del> except KeyError: <del> filename = None <add> upload_handlers = request.upload_handlers <add> filename = self.get_filename(stream, media_type, parser_context) <ide> <ide> content_type = meta.get('HTTP_CONTENT_TYPE', meta.get('CONTENT_TYPE', '')) <ide> try: <ide> def parse(self, stream, media_type=None, parser_context=None): <ide> content_length = None <ide> <ide> # See if the handler will want to take care of the parsing. <del> for handler in request.upload_handlers: <add> for handler in upload_handlers: <ide> result = handler.handle_raw_input(None, <ide> meta, <ide> content_length, <ide> None, <ide> encoding) <ide> if result is not None: <del> return DataAndFiles(result[0], {'file': result[1]}) <add> return DataAndFiles(None, {'file': result[1]}) <ide> <del> possible_sizes = [x.chunk_size for x in request.upload_handlers if x.chunk_size] <add> possible_sizes = [x.chunk_size for x in upload_handlers if x.chunk_size] <ide> chunk_size = min([2**31-4] + possible_sizes) <ide> chunks = ChunkIter(stream, chunk_size) <del> counters = [0] * len(request.upload_handlers) <add> counters = [0] * len(upload_handlers) <ide> <del> for handler in request.upload_handlers: <add> for handler in upload_handlers: <ide> try: <ide> handler.new_file(None, filename, content_type, content_length, encoding) <ide> except StopFutureHandlers: <ide> break <ide> <ide> for chunk in chunks: <del> for i, handler in enumerate(request.upload_handlers): <add> for i, handler in enumerate(upload_handlers): <ide> chunk_length = len(chunk) <ide> chunk = handler.receive_data_chunk(chunk, counters[i]) <ide> counters[i] += chunk_length <ide> if chunk is None: <ide> # If the chunk received by the handler is None, then don't continue. <ide> break <ide> <del> for i, handler in enumerate(request.upload_handlers): <add> for i, handler in enumerate(upload_handlers): <ide> file_obj = handler.file_complete(counters[i]) <ide> if file_obj: <ide> return DataAndFiles(None, {'file': file_obj}) <add> <add> def get_filename(self, stream, media_type, parser_context): <add> """ <add> Detects the uploaded file name. First searches a 'filename' url kwarg. <add> Then tries to parse Content-Disposition header. 
<add> """ <add> try: <add> return parser_context['kwargs']['filename'] <add> except KeyError: <add> pass <add> try: <add> meta = parser_context['request'].META <add> disposition = parse_header(meta['HTTP_CONTENT_DISPOSITION']) <add> return disposition[1]['filename'] <add> except (AttributeError, KeyError): <add> pass <ide><path>rest_framework/tests/parsers.py <ide> class MockRequest(object): <ide> 'HTTP_CONTENT_DISPOSITION': 'Content-Disposition: inline; filename=file.txt'.encode('utf-8'), <ide> 'HTTP_CONTENT_LENGTH': 14, <ide> } <del> self.parser_context = {'request': request} <add> self.parser_context = {'request': request, 'kwargs': {}} <ide> <ide> def test_parse(self): <ide> """ Make sure the `QueryDict` works OK """ <ide> parser = FileUploadParser() <del> data_and_files = parser.parse(self.stream, parser_context=self.parser_context) <add> self.stream.seek(0) <add> data_and_files = parser.parse(self.stream, None, self.parser_context) <ide> file_obj = data_and_files.files['file'] <ide> self.assertEqual(file_obj._size, 14) <add> <add> def test_get_filename(self): <add> parser = FileUploadParser() <add> filename = parser.get_filename(self.stream, None, self.parser_context) <add> self.assertEqual(filename, 'file.txt'.encode('utf-8'))
2
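get_filename now prefers a filename captured from the URL kwargs and only then parses the Content-Disposition header. A standalone sketch of that fallback order; the guess_filename helper is made up for illustration, and the standard library's cgi.parse_header (removed in very recent Python versions) stands in for the parse_header imported by the real module:

from cgi import parse_header


def guess_filename(url_kwargs, meta):
    """Prefer an explicit filename URL kwarg, then the Content-Disposition header."""
    if 'filename' in url_kwargs:
        return url_kwargs['filename']

    disposition = meta.get('HTTP_CONTENT_DISPOSITION')
    if disposition:
        _, params = parse_header(disposition)
        return params.get('filename')

    return None


print(guess_filename({'filename': 'report.csv'}, {}))
# -> report.csv

print(guess_filename({}, {'HTTP_CONTENT_DISPOSITION': 'inline; filename=file.txt'}))
# -> file.txt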
Text
Text
add a breakathon for testing
6fc83eefd9e8d78044a51250d2ad185513fddd27
<ide><path>hack/RELEASE-CHECKLIST.md <ide> docker run \ <ide> hack/release.sh <ide> ``` <ide> <del>### 9. Apply tag <add>### 9. Breakathon <add> <add>Spend several days along with the community explicitly investing time and <add>resources to try and break Docker in every possible way, documenting any <add>findings pertinent to the release. This time should be spent testing and <add>finding ways in which the release might have caused various features or upgrade <add>environments to have issues, not coding. During this time, the release is in <add>code freeze, and any additional code changes will be pushed out to the next <add>release. <add> <add>It should include various levels of breaking Docker, beyond just using Docker <add>by the book. <add> <add>Any issues found may still remain issues for this release, but they should be <add>documented and give appropriate warnings. <add> <add>### 10. Apply tag <ide> <ide> ```bash <ide> git tag -a $VERSION -m $VERSION bump_$VERSION <ide> git push origin $VERSION <ide> It's very important that we don't make the tag until after the official <ide> release is uploaded to get.docker.io! <ide> <del>### 10. Go to github to merge the `bump_$VERSION` branch into release <add>### 11. Go to github to merge the `bump_$VERSION` branch into release <ide> <ide> Don't forget to push that pretty blue button to delete the leftover <ide> branch afterwards! <ide> <del>### 11. Update the docs branch <add>### 12. Update the docs branch <ide> <ide> ```bash <ide> git checkout docs <ide> Updating the docs branch will automatically update the documentation on the <ide> after the merge. The docs will appear on http://docs.docker.io/. For more <ide> information about documentation releases, see `docs/README.md`. <ide> <del>### 12. Create a new pull request to merge release back into master <add>### 13. Create a new pull request to merge release back into master <ide> <ide> ```bash <ide> git checkout master <ide> echo "https://github.com/dotcloud/docker/compare/master...merge_release_$VERSION <ide> Again, get two maintainers to validate, then merge, then push that pretty <ide> blue button to delete your branch. <ide> <del>### 13. Rejoice and Evangelize! <add>### 14. Rejoice and Evangelize! <ide> <ide> Congratulations! You're done. <ide>
1
PHP
PHP
fix tmpdir path
1f47e127ab79bad66372a0f806af4dab3b1b203a
<ide><path>src/Log/Engine/FileLog.php <ide> public function __construct(array $config = []) <ide> { <ide> parent::__construct($config); <ide> <del> $this->_path = $this->getConfig('path', sys_get_temp_dir()); <add> $this->_path = $this->getConfig('path', sys_get_temp_dir() . DIRECTORY_SEPARATOR); <ide> if (Configure::read('debug') && !is_dir($this->_path)) { <ide> mkdir($this->_path, 0775, true); <ide> }
1
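The appended DIRECTORY_SEPARATOR matters because sys_get_temp_dir() normally returns the directory without a trailing separator (e.g. "/tmp"), so a log file name appended to the old default ended up glued onto the directory name. A minimal illustration, with output shown for a typical Linux box:

<?php
$path = sys_get_temp_dir();                                // e.g. "/tmp"

echo $path . 'error.log', PHP_EOL;                         // /tmperror.log  (old default)
echo $path . DIRECTORY_SEPARATOR . 'error.log', PHP_EOL;   // /tmp/error.log (patched default)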
Javascript
Javascript
get remainder of query_params_test.js passing
97c10391c8d3a98c5ffba3a940f2b3dcf73485e2
<ide><path>packages/ember-htmlbars/tests/system/make_view_helper_test.js <ide> import makeViewHelper from "ember-htmlbars/system/make-view-helper"; <ide> <ide> QUnit.module("ember-htmlbars: makeViewHelper"); <ide> <add>// note: fixing this probably means breaking link-to component, which accepts params <ide> QUnit.skip("makes helpful assertion when called with invalid arguments", function() { <ide> var viewClass = { toString() { return 'Some Random Class'; } }; <ide> <ide><path>packages/ember-routing-htmlbars/lib/helpers/link-to.js <ide> import 'ember-htmlbars'; <ide> @return {String} HTML string <ide> @see {Ember.LinkView} <ide> */ <add>// this has been replaced by link-to component <ide> function linkToHelper(params, hash) { <ide> // TODO: Implement more than just stub functionality here <ide> this.yieldIn(linkToTemplate, { href: "#", classes: hash.class }); <ide><path>packages/ember-routing-views/lib/views/link.js <ide> var LinkComponent = EmberComponent.extend({ <ide> } <ide> } <ide> <del> if (this.bubbles === false) { event.stopPropagation(); } <add> if (this.attrs.bubbles === false) { event.stopPropagation(); } <ide> <ide> if (get(this, '_isDisabled')) { return false; } <ide> <ide> var LinkComponent = EmberComponent.extend({ <ide> return false; <ide> } <ide> <del> get(this, '_routing').transitionTo(get(this, 'targetRouteName'), get(this, 'models'), get(this, 'queryParams'), get(this, 'attrs.replace')); <add> get(this, '_routing').transitionTo(get(this, 'targetRouteName'), get(this, 'models'), get(this, 'queryParams.values'), get(this, 'attrs.replace')); <ide> }, <ide> <ide> queryParams: null, <ide> var LinkComponent = EmberComponent.extend({ <ide> <ide> @property href <ide> **/ <del> href: computed('models', 'targetRouteName', function computeLinkViewHref() { <add> href: computed('models', 'targetRouteName', '_routing.currentState', function computeLinkViewHref() { <ide> if (get(this, 'tagName') !== 'a') { return; } <ide> <ide> var targetRouteName = get(this, 'targetRouteName'); <ide> var LinkComponent = EmberComponent.extend({ <ide> if (get(this, 'loading')) { return get(this, 'loadingHref'); } <ide> <ide> var routing = get(this, '_routing'); <del> return routing.generateURL(targetRouteName, models, get(this, 'queryParams')); <add> return routing.generateURL(targetRouteName, models, get(this, 'queryParams.values')); <ide> }), <ide> <ide> loading: computed('models', 'targetRouteName', function() { <ide><path>packages/ember/tests/helpers/link_to_test.js <ide> QUnit.test("The {{link-to}} helper defaults to bubbling", function() { <ide> equal(hidden, 1, "The link bubbles"); <ide> }); <ide> <del>QUnit.skip("The {{link-to}} helper supports bubbles=false", function() { <add>QUnit.test("The {{link-to}} helper supports bubbles=false", function() { <ide> Ember.TEMPLATES.about = compile("<div {{action 'hide'}}>{{#link-to 'about.contact' id='about-contact' bubbles=false}}About{{/link-to}}</div>{{outlet}}"); <ide> Ember.TEMPLATES['about/contact'] = compile("<h1 id='contact'>Contact</h1>"); <ide> <ide><path>packages/ember/tests/routing/query_params_test.js <ide> QUnit.test("model hooks receives query params", function() { <ide> equal(router.get('location.path'), ""); <ide> }); <ide> <del>QUnit.skip("controllers won't be eagerly instantiated by internal query params logic", function() { <add>QUnit.test("controllers won't be eagerly instantiated by internal query params logic", function() { <ide> expect(10); <ide> Router.map(function() { <ide> this.resource('cats', function() { <ide> QUnit.test("An 
explicit replace:false on a changed QP always wins and causes a p <ide> Ember.run(appController, 'setProperties', { alex: 'sriracha' }); <ide> }); <ide> <del>QUnit.skip("can opt into full transition by setting refreshModel in route queryParams when transitioning from child to parent", function() { <add>QUnit.test("can opt into full transition by setting refreshModel in route queryParams when transitioning from child to parent", function() { <ide> Ember.TEMPLATES.parent = compile('{{outlet}}'); <ide> Ember.TEMPLATES['parent/child'] = compile("{{link-to 'Parent' 'parent' (query-params foo='change') id='parent-link'}}"); <ide> <ide> QUnit.test("URL transitions that remove QPs still register as QP changes", funct <ide> equal(indexController.get('omg'), 'lol'); <ide> }); <ide> <del>QUnit.skip("Subresource naming style is supported", function() { <add>QUnit.test("Subresource naming style is supported", function() { <ide> <ide> Router.map(function() { <ide> this.resource('abc.def', { path: '/abcdef' }, function() { <ide> QUnit.test("A child of a resource route still defaults to parent route's model e <ide> bootApplication(); <ide> }); <ide> <del>QUnit.skip("opting into replace does not affect transitions between routes", function() { <add>QUnit.test("opting into replace does not affect transitions between routes", function() { <ide> expect(5); <ide> Ember.TEMPLATES.application = compile( <ide> "{{link-to 'Foo' 'foo' id='foo-link'}}" + <ide> "{{link-to 'Bar' 'bar' id='bar-no-qp-link'}}" + <del> "{{link-to 'Bar' 'bar' (query-params raytiley='isanerd') id='bar-link'}}" + <add> "{{link-to 'Bar' 'bar' (query-params raytiley='isthebest') id='bar-link'}}" + <ide> "{{outlet}}" <ide> ); <ide> App.Router.map(function() { <ide> QUnit.skip("opting into replace does not affect transitions between routes", fun <ide> <ide> App.BarController = Ember.Controller.extend({ <ide> queryParams: ['raytiley'], <del> raytiley: 'isadork' <add> raytiley: 'israd' <ide> }); <ide> <ide> App.BarRoute = Ember.Route.extend({ <ide> QUnit.skip("opting into replace does not affect transitions between routes", fun <ide> expectedPushURL = '/bar'; <ide> Ember.run(Ember.$('#bar-no-qp-link'), 'click'); <ide> <del> expectedReplaceURL = '/bar?raytiley=boo'; <del> setAndFlush(controller, 'raytiley', 'boo'); <add> expectedReplaceURL = '/bar?raytiley=woot'; <add> setAndFlush(controller, 'raytiley', 'woot'); <ide> <ide> expectedPushURL = '/foo'; <ide> Ember.run(Ember.$('#foo-link'), 'click'); <ide> <del> expectedPushURL = '/bar?raytiley=isanerd'; <add> expectedPushURL = '/bar?raytiley=isthebest'; <ide> Ember.run(Ember.$('#bar-link'), 'click'); <ide> }); <ide> <ide> QUnit.module("Model Dep Query Params", { <ide> } <ide> }); <ide> <del>QUnit.skip("query params have 'model' stickiness by default", function() { <add>QUnit.test("query params have 'model' stickiness by default", function() { <ide> this.boot(); <ide> <ide> Ember.run(this.$link1, 'click'); <ide> QUnit.skip("query params have 'model' stickiness by default", function() { <ide> equal(this.$link3.attr('href'), '/a/a-3'); <ide> }); <ide> <del>QUnit.skip("query params have 'model' stickiness by default (url changes)", function() { <add>QUnit.test("query params have 'model' stickiness by default (url changes)", function() { <ide> <ide> this.boot(); <ide> <ide> QUnit.skip("query params have 'model' stickiness by default (url changes)", func <ide> }); <ide> <ide> <del>QUnit.skip("query params have 'model' stickiness by default (params-based transitions)", function() { <add>QUnit.test("query 
params have 'model' stickiness by default (params-based transitions)", function() { <ide> Ember.TEMPLATES.application = compile("{{#each a in articles}} {{link-to 'Article' 'article' a.id id=a.id}} {{/each}}"); <ide> <ide> this.boot(); <ide> QUnit.skip("query params have 'model' stickiness by default (params-based transi <ide> equal(this.$link3.attr('href'), '/a/a-3?q=hay'); <ide> }); <ide> <del>QUnit.skip("'controller' stickiness shares QP state between models", function() { <add>QUnit.test("'controller' stickiness shares QP state between models", function() { <ide> App.ArticleController.reopen({ <ide> queryParams: { q: { scope: 'controller' } } <ide> });
5
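The re-enabled tests above lean on the query-params API: a controller declares which of its properties are query params, and the route can opt individual params into a full model refresh (refreshModel) or into history replacement (replace). A condensed sketch of the pieces those tests wire together; the App.Articles* names and properties are illustrative:

App.ArticlesController = Ember.Controller.extend({
  queryParams: ['q', 'page'],   // serialized into the URL as ?q=...&page=...
  q: '',
  page: 1
});

App.ArticlesRoute = Ember.Route.extend({
  queryParams: {
    q:    { refreshModel: true },  // changing q re-runs the model hooks
    page: { replace: true }        // changing page uses replaceState, adding no history entry
  },
  model: function(params) {
    // params contains q and page alongside any dynamic segment values
    return params;
  }
});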
Javascript
Javascript
add spec for alertmanager
122cc8ba8ab7c5bbcc042f5221f5c0c53ec99c19
<ide><path>Libraries/Alert/Alert.js <ide> <ide> 'use strict'; <ide> <del>import NativeModules from '../BatchedBridge/NativeModules'; <ide> import Platform from '../Utilities/Platform'; <del>import DialogManagerAndroid, { <add>import NativeDialogManagerAndroid, { <ide> type DialogOptions, <ide> } from '../NativeModules/specs/NativeDialogManagerAndroid'; <del> <del>const RCTAlertManager = NativeModules.AlertManager; <del> <del>export type Buttons = Array<{ <del> text?: string, <del> onPress?: ?Function, <del> style?: AlertButtonStyle, <del>}>; <del> <del>type Options = { <del> cancelable?: ?boolean, <del> onDismiss?: ?Function, <del>}; <del> <del>type AlertType = $Keys<{ <del> default: string, <del> 'plain-text': string, <del> 'secure-text': string, <del> 'login-password': string, <del>}>; <del> <del>export type AlertButtonStyle = $Keys<{ <del> default: string, <del> cancel: string, <del> destructive: string, <del>}>; <add>import RCTAlertManager from './RCTAlertManager'; <add>import {type Buttons, type Options, type AlertType} from './NativeAlertManager'; <ide> <ide> /** <ide> * Launches an alert dialog with the specified title and message. <ide> class Alert { <ide> if (Platform.OS === 'ios') { <ide> Alert.prompt(title, message, buttons, 'default'); <ide> } else if (Platform.OS === 'android') { <del> if (!DialogManagerAndroid) { <add> if (!NativeDialogManagerAndroid) { <ide> return; <ide> } <del> const constants = DialogManagerAndroid.getConstants(); <add> const constants = NativeDialogManagerAndroid.getConstants(); <ide> <ide> const config: DialogOptions = { <ide> title: title || '', <ide> class Alert { <ide> } <ide> }; <ide> const onError = errorMessage => console.warn(errorMessage); <del> DialogManagerAndroid.showAlert(config, onError, onAction); <add> NativeDialogManagerAndroid.showAlert(config, onError, onAction); <ide> } <ide> } <ide> <ide> class Alert { <ide> { <ide> title: title || '', <ide> type: 'plain-text', <del> defaultValue: message, <add> defaultValue: message || '', <ide> }, <ide> (id, value) => { <ide> callback(value); <ide> class Alert { <ide> }, <ide> (id, value) => { <ide> const cb = callbacks[id]; <add> // $FlowFixMe <ide> cb && cb(value); <ide> }, <ide> ); <ide><path>Libraries/Alert/NativeAlertManager.js <add>/** <add> * Copyright (c) Facebook, Inc. and its affiliates. <add> * <add> * This source code is licensed under the MIT license found in the <add> * LICENSE file in the root directory of this source tree. 
<add> * <add> * @flow <add> * @format <add> */ <add> <add>'use strict'; <add> <add>import type {TurboModule} from 'RCTExport'; <add>import * as TurboModuleRegistry from 'TurboModuleRegistry'; <add> <add>export type Buttons = Array<{ <add> text?: string, <add> onPress?: ?Function, <add> style?: AlertButtonStyle, <add>}>; <add> <add>export type Options = { <add> cancelable?: ?boolean, <add> onDismiss?: ?() => void, <add>}; <add> <add>/* 'default' | plain-text' | 'secure-text' | 'login-password' */ <add>export type AlertType = string; <add> <add>/* 'default' | 'cancel' | 'destructive' */ <add>export type AlertButtonStyle = string; <add> <add>export type Args = {| <add> title?: string, <add> message?: string, <add> buttons?: Buttons, <add> type?: string, <add> defaultValue?: string, <add> cancelButtonKey?: string, <add> destructiveButtonKey?: string, <add> keyboardType?: string, <add>|}; <add> <add>export interface Spec extends TurboModule { <add> +alertWithArgs: ( <add> args: Args, <add> callback: (id: number, value: string) => void, <add> ) => void; <add>} <add> <add>export default TurboModuleRegistry.get<Spec>('AlertManager'); <ide><path>Libraries/Alert/RCTAlertManager.ios.js <ide> <ide> 'use strict'; <ide> <del>const RCTAlertManager = require('../BatchedBridge/NativeModules').AlertManager; <add>import NativeAlertManager from './NativeAlertManager'; <add>import type {Args} from './NativeAlertManager'; <ide> <del>module.exports = RCTAlertManager; <add>module.exports = { <add> alertWithArgs( <add> args: Args, <add> callback: (id: number, value: string) => void, <add> ): void { <add> if (NativeAlertManager == null) { <add> return; <add> } <add> NativeAlertManager.alertWithArgs(args, callback); <add> }, <add>};
3
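NativeAlertManager is the TurboModule spec behind the existing public Alert module, so application call sites are unchanged. For context, the surface that on iOS ultimately funnels into alertWithArgs; the button labels and handlers below are illustrative:

import { Alert } from 'react-native';

Alert.alert(
  'Discard draft?',
  'Your changes will be lost.',
  [
    { text: 'Cancel', style: 'cancel' },
    { text: 'Discard', style: 'destructive', onPress: () => console.log('discarded') },
  ],
  { cancelable: true },
);

// iOS only: prompt goes through the same alertWithArgs call with type 'plain-text'.
Alert.prompt('Rename file', null, (value) => console.log('new name:', value));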
Javascript
Javascript
add a simple (failing) test for named block usage
19fb4a7eb8078c92c6c98b9c5ca98052722231e7
<ide><path>packages/@ember/-internals/glimmer/tests/integration/helpers/yield-test.js <ide> import { Component } from '../../utils/helpers'; <ide> moduleFor( <ide> 'Helpers test: {{yield}} helper', <ide> class extends RenderingTestCase { <del> ['@test can yield to block']() { <add> ['@test can yield to a default block']() { <ide> this.registerComponent('yield-comp', { <ide> template: '[In layout:] {{yield}}', <ide> }); <ide> moduleFor( <ide> this.assertText('[In layout:] [In Block:] Seattle'); <ide> } <ide> <add> ['@feature(EMBER_NAMED_BLOCKS) can yield to a named block']() { <add> // This test fails when the default Ember component backing class is used: <add> this.registerComponent('yield-comp', { <add> template: '[In layout:] {{yield to="block"}}', <add> <add> // It passes with no backing class: <add> // ComponentClass: null, <add> <add> // And it passes using `GlimmerishComponent`: <add> // ComponentClass: require('../../utils/glimmerish-component').default, <add> }); <add> <add> this.render('<YieldComp><:block>[In block:] {{object.title}}</:block></YieldComp>', { <add> object: { title: 'Seattle' }, <add> }); <add> <add> this.assertText('[In layout:] [In block:] Seattle'); <add> <add> this.assertStableRerender(); <add> <add> runTask(() => set(this.context, 'object.title', 'Vancouver')); <add> this.assertText('[In layout:] [In block:] Vancouver'); <add> <add> runTask(() => set(this.context, 'object', { title: 'Seattle' })); <add> this.assertText('[In layout:] [In block:] Seattle'); <add> } <add> <ide> ['@test templates should yield to block inside a nested component']() { <ide> this.registerComponent('outer-comp', { <ide> template: '<div>[In layout:] {{yield}}</div>',
1
Ruby
Ruby
accept ldflags as string or array(string)
97bc320836deb1195a1bf8f646a26c8c72ff845e
<ide><path>Library/Homebrew/formula.rb <ide> def std_cmake_args(install_prefix: prefix, install_libdir: "lib", find_framework <ide> end <ide> <ide> # Standard parameters for Go builds. <del> sig { params(output: T.any(String, Pathname), ldflags: T.nilable(String)).returns(T::Array[String]) } <add> sig { <add> params(output: T.any(String, Pathname), <add> ldflags: T.nilable(T.any(String, T::Array[String]))).returns(T::Array[String]) <add> } <ide> def std_go_args(output: bin/name, ldflags: nil) <ide> args = ["-trimpath", "-o=#{output}"] <del> args += ["-ldflags=#{ldflags}"] if ldflags <add> args += ["-ldflags=#{Array(ldflags).join(" ")}"] if ldflags <ide> args <ide> end <ide>
1
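The widened signature works because the body wraps the argument in Kernel#Array before joining, so a formula can pass the linker flags either as one string or as a list; a nil still short-circuits on the trailing "if ldflags" guard, so no -ldflags= argument is emitted at all. A short illustration, with the usual call site from a formula's install method (the flag values are just examples):

Array("-s -w")        # => ["-s -w"]
Array(["-s", "-w"])   # => ["-s", "-w"]
# Either way, join(" ") yields "-s -w", so both of these emit -ldflags=-s -w:
#
#   system "go", "build", *std_go_args(ldflags: "-s -w")
#   system "go", "build", *std_go_args(ldflags: ["-s", "-w"])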
Javascript
Javascript
remove unnecessary tag end from commitranked view
ed94600fc6bbe0d6ea34a581672190e369b3d805
<ide><path>packages/react-devtools-shared/src/devtools/views/Profiler/CommitRanked.js <ide> function CommitRanked({chartData, commitTree, height, width}: Props) { <ide> width={width}> <ide> {CommitRankedListItem} <ide> </FixedSizeList> <del> > <ide> </Tooltip> <ide> ); <ide> }
1
PHP
PHP
class.
f4a509a38edbbe6908685ee22e06b9d2159fb3f5
<ide><path>src/Illuminate/Console/GeneratorCommand.php <ide> protected function alreadyExists($rawName) <ide> */ <ide> protected function getPath($name) <ide> { <del> $name = str_replace_first($this->rootNamespace(), '', $name); <add> $name = Str::replaceFirst($this->rootNamespace(), '', $name); <ide> <ide> return $this->laravel['path'].'/'.str_replace('\\', '/', $name).'.php'; <ide> } <ide><path>src/Illuminate/Routing/ImplicitRouteBinding.php <ide> <ide> namespace Illuminate\Routing; <ide> <add>use Illuminate\Support\Str; <ide> use Illuminate\Database\Eloquent\Model; <ide> <ide> class ImplicitRouteBinding <ide> protected static function getParameterName($name, $parameters) <ide> return $name; <ide> } <ide> <del> $snakedName = snake_case($name); <del> <del> if (array_key_exists($snakedName, $parameters)) { <add> if (array_key_exists($snakedName = Str::snake($name), $parameters)) { <ide> return $snakedName; <ide> } <ide> } <ide><path>tests/Database/DatabaseMigratorIntegrationTest.php <ide> <ide> namespace Illuminate\Tests\Database; <ide> <add>use Illuminate\Support\Str; <ide> use PHPUnit\Framework\TestCase; <ide> use Illuminate\Filesystem\Filesystem; <ide> use Illuminate\Database\Migrations\Migrator; <ide> public function testBasicMigrationOfSingleFolder() <ide> $this->assertTrue($this->db->schema()->hasTable('users')); <ide> $this->assertTrue($this->db->schema()->hasTable('password_resets')); <ide> <del> $this->assertTrue(str_contains($ran[0], 'users')); <del> $this->assertTrue(str_contains($ran[1], 'password_resets')); <add> $this->assertTrue(Str::contains($ran[0], 'users')); <add> $this->assertTrue(Str::contains($ran[1], 'password_resets')); <ide> } <ide> <ide> public function testMigrationsCanBeRolledBack() <ide> public function testMigrationsCanBeRolledBack() <ide> $this->assertFalse($this->db->schema()->hasTable('users')); <ide> $this->assertFalse($this->db->schema()->hasTable('password_resets')); <ide> <del> $this->assertTrue(str_contains($rolledBack[0], 'password_resets')); <del> $this->assertTrue(str_contains($rolledBack[1], 'users')); <add> $this->assertTrue(Str::contains($rolledBack[0], 'password_resets')); <add> $this->assertTrue(Str::contains($rolledBack[1], 'users')); <ide> } <ide> <ide> public function testMigrationsCanBeReset() <ide> public function testMigrationsCanBeReset() <ide> $this->assertFalse($this->db->schema()->hasTable('users')); <ide> $this->assertFalse($this->db->schema()->hasTable('password_resets')); <ide> <del> $this->assertTrue(str_contains($rolledBack[0], 'password_resets')); <del> $this->assertTrue(str_contains($rolledBack[1], 'users')); <add> $this->assertTrue(Str::contains($rolledBack[0], 'password_resets')); <add> $this->assertTrue(Str::contains($rolledBack[1], 'users')); <ide> } <ide> <ide> public function testNoErrorIsThrownWhenNoOutstandingMigrationsExist() <ide><path>tests/Mail/MailSesTransportTest.php <ide> namespace Illuminate\Tests\Mail; <ide> <ide> use Aws\Ses\SesClient; <add>use Illuminate\Support\Str; <ide> use PHPUnit\Framework\TestCase; <ide> use Illuminate\Support\Collection; <ide> use Illuminate\Mail\TransportManager; <ide> public function testSend() <ide> <ide> // Generate a messageId for our mock to return to ensure that the post-sent message <ide> // has X-SES-Message-ID in its headers <del> $messageId = str_random(32); <add> $messageId = Str::random(32); <ide> $sendRawEmailMock = new sendRawEmailMock($messageId); <ide> $client->expects($this->once()) <ide> ->method('sendRawEmail') <ide><path>tests/Routing/RoutingRouteTest.php <ide> 
namespace Illuminate\Tests\Routing; <ide> <ide> use stdClass; <add>use Illuminate\Support\Str; <ide> use Illuminate\Http\Request; <ide> use Illuminate\Routing\Route; <ide> use UnexpectedValueException; <ide> public function testControllerCallActionMethodParameters() <ide> <ide> // Has one argument but receives two <ide> unset($_SERVER['__test.controller_callAction_parameters']); <del> $router->get(($str = str_random()).'/{one}/{two}', 'Illuminate\Tests\Routing\RouteTestAnotherControllerWithParameterStub@oneArgument'); <add> $router->get(($str = Str::random()).'/{one}/{two}', 'Illuminate\Tests\Routing\RouteTestAnotherControllerWithParameterStub@oneArgument'); <ide> $router->dispatch(Request::create($str.'/one/two', 'GET')); <ide> $this->assertEquals(['one' => 'one', 'two' => 'two'], $_SERVER['__test.controller_callAction_parameters']); <ide> <ide> // Has two arguments and receives two <ide> unset($_SERVER['__test.controller_callAction_parameters']); <del> $router->get(($str = str_random()).'/{one}/{two}', 'Illuminate\Tests\Routing\RouteTestAnotherControllerWithParameterStub@twoArguments'); <add> $router->get(($str = Str::random()).'/{one}/{two}', 'Illuminate\Tests\Routing\RouteTestAnotherControllerWithParameterStub@twoArguments'); <ide> $router->dispatch(Request::create($str.'/one/two', 'GET')); <ide> $this->assertEquals(['one' => 'one', 'two' => 'two'], $_SERVER['__test.controller_callAction_parameters']); <ide> <ide> // Has two arguments but with different names from the ones passed from the route <ide> unset($_SERVER['__test.controller_callAction_parameters']); <del> $router->get(($str = str_random()).'/{one}/{two}', 'Illuminate\Tests\Routing\RouteTestAnotherControllerWithParameterStub@differentArgumentNames'); <add> $router->get(($str = Str::random()).'/{one}/{two}', 'Illuminate\Tests\Routing\RouteTestAnotherControllerWithParameterStub@differentArgumentNames'); <ide> $router->dispatch(Request::create($str.'/one/two', 'GET')); <ide> $this->assertEquals(['one' => 'one', 'two' => 'two'], $_SERVER['__test.controller_callAction_parameters']); <ide> <ide> // Has two arguments with same name but argument order is reversed <ide> unset($_SERVER['__test.controller_callAction_parameters']); <del> $router->get(($str = str_random()).'/{one}/{two}', 'Illuminate\Tests\Routing\RouteTestAnotherControllerWithParameterStub@reversedArguments'); <add> $router->get(($str = Str::random()).'/{one}/{two}', 'Illuminate\Tests\Routing\RouteTestAnotherControllerWithParameterStub@reversedArguments'); <ide> $router->dispatch(Request::create($str.'/one/two', 'GET')); <ide> $this->assertEquals(['one' => 'one', 'two' => 'two'], $_SERVER['__test.controller_callAction_parameters']); <ide> <ide> // No route parameters while method has parameters <ide> unset($_SERVER['__test.controller_callAction_parameters']); <del> $router->get(($str = str_random()).'', 'Illuminate\Tests\Routing\RouteTestAnotherControllerWithParameterStub@oneArgument'); <add> $router->get(($str = Str::random()).'', 'Illuminate\Tests\Routing\RouteTestAnotherControllerWithParameterStub@oneArgument'); <ide> $router->dispatch(Request::create($str, 'GET')); <ide> $this->assertEquals([], $_SERVER['__test.controller_callAction_parameters']); <ide> <ide> // With model bindings <ide> unset($_SERVER['__test.controller_callAction_parameters']); <del> $router->get(($str = str_random()).'/{user}/{defaultNull?}/{team?}', [ <add> $router->get(($str = Str::random()).'/{user}/{defaultNull?}/{team?}', [ <ide> 'middleware' => SubstituteBindings::class, <ide> 'uses' => 
'Illuminate\Tests\Routing\RouteTestAnotherControllerWithParameterStub@withModels', <ide> ]); <ide><path>tests/Support/SupportHelpersTest.php <ide> public function testStrLimit() <ide> <ide> public function testCamelCase() <ide> { <del> $this->assertEquals('fooBar', camel_case('FooBar')); <del> $this->assertEquals('fooBar', camel_case('foo_bar')); <del> $this->assertEquals('fooBar', camel_case('foo_bar')); // test cache <del> $this->assertEquals('fooBarBaz', camel_case('Foo-barBaz')); <del> $this->assertEquals('fooBarBaz', camel_case('foo-bar_baz')); <add> $this->assertEquals('fooBar', Str::camel('FooBar')); <add> $this->assertEquals('fooBar', Str::camel('foo_bar')); <add> $this->assertEquals('fooBar', Str::camel('foo_bar')); // test cache <add> $this->assertEquals('fooBarBaz', Str::camel('Foo-barBaz')); <add> $this->assertEquals('fooBarBaz', Str::camel('foo-bar_baz')); <ide> } <ide> <ide> public function testStudlyCase()
6
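This sweep replaces the global string helpers with their Illuminate\Support\Str equivalents; behaviour is identical, only the call sites change. The pairs touched in this diff, shown with sample literal inputs (requires illuminate/support; expected values are given in the comments):

<?php

use Illuminate\Support\Str;

var_dump(Str::camel('foo_bar'));                                 // "fooBar"        (was camel_case)
var_dump(Str::snake('fooBar'));                                  // "foo_bar"       (was snake_case)
var_dump(Str::contains('create_users_table', 'users'));          // true            (was str_contains)
var_dump(Str::random(32));                                       // 32-character random string (was str_random)
var_dump(Str::replaceFirst('App\\', '', 'App\\Models\\User'));   // "Models\User"   (was str_replace_first)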
PHP
PHP
fix docblock typo
9c226485ba79eeaf483d8ed587341161c9202219
<ide><path>src/Routing/RouteCollection.php <ide> class RouteCollection { <ide> * Add a route to the collection. <ide> * <ide> * @param \Cake\Routing\Route\Route $route The route object to add. <del> * @param array $options Addtional options for the route. Primarily for the <add> * @param array $options Additional options for the route. Primarily for the <ide> * `_name` option, which enables named routes. <ide> * @return void <ide> */
1
Java
Java
add flux<part> serverrequest.parts()
92981ac9dee2f8145b1253478264643df709cda9
<ide><path>spring-test/src/main/java/org/springframework/mock/web/reactive/function/server/MockServerRequest.java <ide> /* <del> * Copyright 2002-2018 the original author or authors. <add> * Copyright 2002-2019 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. <ide> public Mono<MultiValueMap<String, Part>> multipartData() { <ide> return (Mono<MultiValueMap<String, Part>>) this.body; <ide> } <ide> <add> @Override <add> @SuppressWarnings("unchecked") <add> public Flux<Part> parts() { <add> Assert.state(this.body != null, "No body"); <add> return (Flux<Part>) this.body; <add> } <add> <ide> @Override <ide> public ServerWebExchange exchange() { <ide> Assert.state(this.exchange != null, "No exchange"); <ide><path>spring-webflux/src/main/java/org/springframework/web/reactive/function/server/DefaultServerRequest.java <ide> public Mono<MultiValueMap<String, Part>> multipartData() { <ide> return this.exchange.getMultipartData(); <ide> } <ide> <add> @Override <add> public Flux<Part> parts() { <add> return this.exchange.getParts(); <add> } <add> <ide> private ServerHttpRequest request() { <ide> return this.exchange.getRequest(); <ide> } <ide><path>spring-webflux/src/main/java/org/springframework/web/reactive/function/server/DefaultServerRequestBuilder.java <ide> /* <del> * Copyright 2002-2018 the original author or authors. <add> * Copyright 2002-2019 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. <ide> private static class DelegatingServerWebExchange implements ServerWebExchange { <ide> private static final ResolvableType FORM_DATA_TYPE = <ide> ResolvableType.forClassWithGenerics(MultiValueMap.class, String.class, String.class); <ide> <del> private static final ResolvableType MULTIPART_DATA_TYPE = ResolvableType.forClassWithGenerics( <del> MultiValueMap.class, String.class, Part.class); <add> private static final ResolvableType PARTS_DATA_TYPE = ResolvableType.forClass(Part.class); <ide> <ide> private static final Mono<MultiValueMap<String, String>> EMPTY_FORM_DATA = <ide> Mono.just(CollectionUtils.unmodifiableMultiValueMap(new LinkedMultiValueMap<String, String>(0))).cache(); <ide> private static class DelegatingServerWebExchange implements ServerWebExchange { <ide> <ide> private final Mono<MultiValueMap<String, Part>> multipartDataMono; <ide> <add> private final Flux<Part> parts; <add> <ide> public DelegatingServerWebExchange( <ide> ServerHttpRequest request, ServerWebExchange delegate, List<HttpMessageReader<?>> messageReaders) { <ide> <ide> this.request = request; <ide> this.delegate = delegate; <ide> this.formDataMono = initFormData(request, messageReaders); <del> this.multipartDataMono = initMultipartData(request, messageReaders); <add> this.parts = initParts(request, messageReaders); <add> this.multipartDataMono = initMultipartData(this.parts); <ide> } <ide> <ide> @SuppressWarnings("unchecked") <ide> private static Mono<MultiValueMap<String, String>> initFormData(ServerHttpReques <ide> } <ide> <ide> @SuppressWarnings("unchecked") <del> private static Mono<MultiValueMap<String, Part>> initMultipartData(ServerHttpRequest request, <del> List<HttpMessageReader<?>> readers) { <add> private static Flux<Part> initParts(ServerHttpRequest request, List<HttpMessageReader<?>> readers) { <ide> <ide> try { <ide> MediaType contentType = 
request.getHeaders().getContentType(); <ide> if (MediaType.MULTIPART_FORM_DATA.isCompatibleWith(contentType)) { <del> return ((HttpMessageReader<MultiValueMap<String, Part>>) readers.stream() <del> .filter(reader -> reader.canRead(MULTIPART_DATA_TYPE, MediaType.MULTIPART_FORM_DATA)) <add> return ((HttpMessageReader<Part>)readers.stream() <add> .filter(reader -> reader.canRead(PARTS_DATA_TYPE, MediaType.MULTIPART_FORM_DATA)) <ide> .findFirst() <ide> .orElseThrow(() -> new IllegalStateException("No multipart HttpMessageReader."))) <del> .readMono(MULTIPART_DATA_TYPE, request, Hints.none()) <del> .switchIfEmpty(EMPTY_MULTIPART_DATA) <del> .cache(); <add> .read(PARTS_DATA_TYPE, request, Hints.none()); <ide> } <ide> } <ide> catch (InvalidMediaTypeException ex) { <ide> // Ignore <ide> } <del> return EMPTY_MULTIPART_DATA; <add> return Flux.empty(); <add> } <add> <add> private static Mono<MultiValueMap<String, Part>> initMultipartData(Flux<Part> parts) { <add> return parts.collect( <add> () -> (MultiValueMap<String, Part>) new LinkedMultiValueMap<String, Part>(), <add> (map, part) -> map.add(part.name(), part)) <add> .switchIfEmpty(EMPTY_MULTIPART_DATA) <add> .cache(); <ide> } <add> <ide> @Override <ide> public ServerHttpRequest getRequest() { <ide> return this.request; <ide> public Mono<MultiValueMap<String, Part>> getMultipartData() { <ide> return this.multipartDataMono; <ide> } <ide> <add> @Override <add> public Flux<Part> getParts() { <add> return this.parts; <add> } <add> <ide> // Delegating methods <ide> <ide> @Override <ide><path>spring-webflux/src/main/java/org/springframework/web/reactive/function/server/RequestPredicates.java <ide> public Mono<MultiValueMap<String, Part>> multipartData() { <ide> return this.request.multipartData(); <ide> } <ide> <add> @Override <add> public Flux<Part> parts() { <add> return this.request.parts(); <add> } <add> <ide> @Override <ide> public ServerWebExchange exchange() { <ide> return this.request.exchange(); <ide><path>spring-webflux/src/main/java/org/springframework/web/reactive/function/server/ServerRequest.java <ide> /* <del> * Copyright 2002-2018 the original author or authors. <add> * Copyright 2002-2019 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. <ide> default String pathVariable(String name) { <ide> * <p><strong>Note:</strong> calling this method causes the request body to <ide> * be read and parsed in full, and the resulting {@code MultiValueMap} is <ide> * cached so that this method is safe to call more than once. <add> * <p><strong>Note:</strong>the {@linkplain Part#content() contents} of each <add> * part is not cached, and can only be read once. <ide> */ <ide> Mono<MultiValueMap<String, Part>> multipartData(); <ide> <add> /** <add> * Get the parts of a multipart request if the Content-Type is <add> * {@code "multipart/form-data"} or an empty flux otherwise. <add> * <p><strong>Note:</strong> calling this method causes the request body to <add> * be read and parsed in full and the resulting {@code Flux} is <add> * cached so that this method is safe to call more than once. <add> * <p><strong>Note:</strong>the {@linkplain Part#content() contents} of each <add> * part is not cached, and can only be read once. <add> * @since 5.2 <add> */ <add> Flux<Part> parts(); <add> <ide> /** <ide> * Get the web exchange that this request is based on. 
<ide> * <p>Note: Manipulating the exchange directly (instead of using the methods provided on <ide><path>spring-webflux/src/main/java/org/springframework/web/reactive/function/server/support/ServerRequestWrapper.java <ide> /* <del> * Copyright 2002-2018 the original author or authors. <add> * Copyright 2002-2019 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. <ide> public Mono<MultiValueMap<String, Part>> multipartData() { <ide> return this.delegate.multipartData(); <ide> } <ide> <add> @Override <add> public Flux<Part> parts() { <add> return this.delegate.parts(); <add> } <add> <ide> @Override <ide> public ServerWebExchange exchange() { <ide> return this.delegate.exchange(); <ide><path>spring-webflux/src/test/java/org/springframework/web/reactive/function/MultipartIntegrationTests.java <ide> <ide> package org.springframework.web.reactive.function; <ide> <add>import java.io.IOException; <add>import java.nio.file.Files; <add>import java.nio.file.Path; <add>import java.nio.file.Paths; <ide> import java.util.Map; <ide> <ide> import org.junit.Test; <ide> import org.springframework.http.codec.multipart.FilePart; <ide> import org.springframework.http.codec.multipart.FormFieldPart; <ide> import org.springframework.http.codec.multipart.Part; <add>import org.springframework.util.FileCopyUtils; <ide> import org.springframework.util.MultiValueMap; <ide> import org.springframework.web.reactive.function.client.ClientResponse; <ide> import org.springframework.web.reactive.function.client.WebClient; <ide> import org.springframework.web.reactive.function.server.ServerResponse; <ide> <ide> import static org.assertj.core.api.Assertions.assertThat; <del>import static org.springframework.web.reactive.function.server.RequestPredicates.POST; <add>import static org.assertj.core.api.Assertions.fail; <ide> import static org.springframework.web.reactive.function.server.RouterFunctions.route; <ide> <ide> /** <ide> public class MultipartIntegrationTests extends AbstractRouterFunctionIntegration <ide> <ide> private final WebClient webClient = WebClient.create(); <ide> <add> private ClassPathResource resource = new ClassPathResource("org/springframework/http/codec/multipart/foo.txt"); <add> <ide> <ide> @Test <ide> public void multipartData() { <ide> public void parts() { <ide> .verifyComplete(); <ide> } <ide> <add> @Test <add> public void transferTo() { <add> Mono<String> result = webClient <add> .post() <add> .uri("http://localhost:" + this.port + "/transferTo") <add> .syncBody(generateBody()) <add> .retrieve() <add> .bodyToMono(String.class); <add> <add> StepVerifier <add> .create(result) <add> .consumeNextWith(location -> { <add> try { <add> byte[] actualBytes = Files.readAllBytes(Paths.get(location)); <add> byte[] expectedBytes = FileCopyUtils.copyToByteArray(this.resource.getInputStream()); <add> assertThat(actualBytes).isEqualTo(expectedBytes); <add> } <add> catch (IOException ex) { <add> fail("IOException", ex); <add> } <add> }) <add> .verifyComplete(); <add> } <add> <ide> private MultiValueMap<String, HttpEntity<?>> generateBody() { <ide> MultipartBodyBuilder builder = new MultipartBodyBuilder(); <del> builder.part("fooPart", new ClassPathResource("org/springframework/http/codec/multipart/foo.txt")); <add> builder.part("fooPart", resource); <ide> builder.part("barPart", "bar"); <ide> return builder.build(); <ide> } <ide> <ide> @Override <ide> protected 
RouterFunction<ServerResponse> routerFunction() { <ide> MultipartHandler multipartHandler = new MultipartHandler(); <del> return route(POST("/multipartData"), multipartHandler::multipartData) <del> .andRoute(POST("/parts"), multipartHandler::parts); <add> return route() <add> .POST("/multipartData", multipartHandler::multipartData) <add> .POST("/parts", multipartHandler::parts) <add> .POST("/transferTo", multipartHandler::transferTo) <add> .build(); <ide> } <ide> <ide> <ide> private static class MultipartHandler { <ide> <ide> public Mono<ServerResponse> multipartData(ServerRequest request) { <del> return request <del> .body(BodyExtractors.toMultipartData()) <add> return request.multipartData() <ide> .flatMap(map -> { <ide> Map<String, Part> parts = map.toSingleValueMap(); <ide> try { <ide> assertThat(parts.size()).isEqualTo(2); <ide> assertThat(((FilePart) parts.get("fooPart")).filename()).isEqualTo("foo.txt"); <ide> assertThat(((FormFieldPart) parts.get("barPart")).value()).isEqualTo("bar"); <add> return ServerResponse.ok().build(); <ide> } <ide> catch(Exception e) { <ide> return Mono.error(e); <ide> } <del> return ServerResponse.ok().build(); <ide> }); <ide> } <ide> <ide> public Mono<ServerResponse> parts(ServerRequest request) { <del> return request.body(BodyExtractors.toParts()).collectList() <add> return request.parts().collectList() <ide> .flatMap(parts -> { <ide> try { <ide> assertThat(parts.size()).isEqualTo(2); <ide> assertThat(((FilePart) parts.get(0)).filename()).isEqualTo("foo.txt"); <ide> assertThat(((FormFieldPart) parts.get(1)).value()).isEqualTo("bar"); <add> return ServerResponse.ok().build(); <ide> } <ide> catch(Exception e) { <ide> return Mono.error(e); <ide> } <del> return ServerResponse.ok().build(); <ide> }); <ide> } <add> <add> public Mono<ServerResponse> transferTo(ServerRequest request) { <add> return request.parts() <add> .filter(part -> part instanceof FilePart) <add> .next() <add> .cast(FilePart.class) <add> .flatMap(part -> { <add> try { <add> Path tempFile = Files.createTempFile("MultipartIntegrationTests", null); <add> return part.transferTo(tempFile) <add> .then(ServerResponse.ok() <add> .syncBody(tempFile.toString())); <add> } <add> catch (Exception e) { <add> return Mono.error(e); <add> } <add> }); <add> } <add> <ide> } <ide> <ide> } <ide><path>spring-webflux/src/test/java/org/springframework/web/reactive/function/server/MockServerRequest.java <ide> /* <del> * Copyright 2002-2018 the original author or authors. <add> * Copyright 2002-2019 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. <ide> public Mono<MultiValueMap<String, Part>> multipartData() { <ide> return (Mono<MultiValueMap<String, Part>>) this.body; <ide> } <ide> <add> @Override <add> @SuppressWarnings("unchecked") <add> public Flux<Part> parts() { <add> Assert.state(this.body != null, "No body"); <add> return (Flux<Part>) this.body; <add> } <add> <ide> @Override <ide> public ServerWebExchange exchange() { <ide> Assert.state(this.exchange != null, "No exchange");
8
Go
Go
fix tests and windows service
6e7405ebd4df360bc84f651c977ece31283eb3ee
<ide><path>cli/cobra.go <ide> func FlagErrorFunc(cmd *cobra.Command, err error) error { <ide> if cmd.HasSubCommands() { <ide> usage = "\n\n" + cmd.UsageString() <ide> } <del> return fmt.Errorf("%s\nSee '%s --help'.%s", err, cmd.CommandPath(), usage) <add> return StatusError{ <add> Status: fmt.Sprintf("%s\nSee '%s --help'.%s", err, cmd.CommandPath(), usage), <add> StatusCode: 125, <add> } <ide> } <ide> <ide> var usageTemplate = `Usage: {{if not .HasSubCommands}}{{.UseLine}}{{end}}{{if .HasSubCommands}}{{ .CommandPath}} COMMAND{{end}} <ide><path>cmd/docker/daemon_none.go <ide> package main <ide> <ide> import ( <ide> "fmt" <del> "github.com/spf13/cobra" <ide> "runtime" <ide> "strings" <add> <add> "github.com/spf13/cobra" <ide> ) <ide> <ide> func newDaemonCommand() *cobra.Command { <ide><path>cmd/docker/docker.go <ide> import ( <ide> <ide> func newDockerCommand(dockerCli *client.DockerCli) *cobra.Command { <ide> opts := cliflags.NewClientOptions() <add> var flags *pflag.FlagSet <add> <ide> cmd := &cobra.Command{ <ide> Use: "docker [OPTIONS] COMMAND [arg...]", <ide> Short: "A self-sufficient runtime for containers.", <ide> SilenceUsage: true, <ide> SilenceErrors: true, <ide> TraverseChildren: true, <del> Args: cli.NoArgs, <add> Args: noArgs, <ide> RunE: func(cmd *cobra.Command, args []string) error { <ide> if opts.Version { <ide> showVersion() <ide> func newDockerCommand(dockerCli *client.DockerCli) *cobra.Command { <ide> return nil <ide> }, <ide> PersistentPreRunE: func(cmd *cobra.Command, args []string) error { <del> dockerPreRun(cmd.Flags(), opts) <add> // flags must be the top-level command flags, not cmd.Flags() <add> opts.Common.SetDefaultOptions(flags) <add> dockerPreRun(opts) <ide> return dockerCli.Initialize(opts) <ide> }, <ide> } <ide> cli.SetupRootCommand(cmd) <ide> <del> flags := cmd.Flags() <add> flags = cmd.Flags() <ide> flags.BoolVarP(&opts.Version, "version", "v", false, "Print version information and quit") <ide> flags.StringVar(&opts.ConfigDir, "config", cliconfig.ConfigDir(), "Location of client config files") <ide> opts.Common.InstallFlags(flags) <ide> func newDockerCommand(dockerCli *client.DockerCli) *cobra.Command { <ide> return cmd <ide> } <ide> <add>func noArgs(cmd *cobra.Command, args []string) error { <add> if len(args) == 0 { <add> return nil <add> } <add> return fmt.Errorf( <add> "docker: '%s' is not a docker command.\nSee 'docker --help'%s", args[0], ".") <add>} <add> <ide> func main() { <ide> // Set terminal emulation based on platform as required. <ide> stdin, stdout, stderr := term.StdStreams() <ide> func showVersion() { <ide> } <ide> } <ide> <del>func dockerPreRun(flags *pflag.FlagSet, opts *cliflags.ClientOptions) { <del> opts.Common.SetDefaultOptions(flags) <add>func dockerPreRun(opts *cliflags.ClientOptions) { <ide> cliflags.SetDaemonLogLevel(opts.Common.LogLevel) <ide> <ide> if opts.ConfigDir != "" { <ide><path>cmd/dockerd/docker.go <ide> package main <ide> <ide> import ( <ide> "fmt" <add> "os" <ide> <ide> "github.com/Sirupsen/logrus" <ide> "github.com/docker/docker/cli" <ide> func runDaemon(opts daemonOptions) error { <ide> return nil <ide> } <ide> <add> daemonCli := NewDaemonCli() <add> <ide> // On Windows, this may be launching as a service or with an option to <ide> // register the service. 
<del> stop, err := initService() <add> stop, err := initService(daemonCli) <ide> if err != nil { <ide> logrus.Fatal(err) <ide> } <ide> func runDaemon(opts daemonOptions) error { <ide> return nil <ide> } <ide> <del> err = NewDaemonCli().start(opts) <add> err = daemonCli.start(opts) <ide> notifyShutdown(err) <ide> return err <ide> } <ide> func main() { <ide> cmd := newDaemonCommand() <ide> cmd.SetOutput(stdout) <ide> if err := cmd.Execute(); err != nil { <del> logrus.Fatal(err) <add> fmt.Fprintf(stderr, "%s\n", err) <add> os.Exit(1) <ide> } <ide> } <ide><path>cmd/dockerd/service_unsupported.go <ide> import ( <ide> "github.com/spf13/pflag" <ide> ) <ide> <del>func initService() (bool, error) { <add>func initService(daemonCli *DaemonCli) (bool, error) { <ide> return false, nil <ide> } <ide> <ide><path>cmd/dockerd/service_windows.go <ide> package main <ide> import ( <ide> "bytes" <ide> "errors" <del> "flag" <ide> "fmt" <ide> "io/ioutil" <ide> "os" <ide> func installServiceFlags(flags *pflag.FlagSet) { <ide> } <ide> <ide> type handler struct { <del> tosvc chan bool <del> fromsvc chan error <add> tosvc chan bool <add> fromsvc chan error <add> daemonCli *DaemonCli <ide> } <ide> <ide> type etwHook struct { <ide> func unregisterService() error { <ide> return nil <ide> } <ide> <del>func initService() (bool, error) { <add>func initService(daemonCli *DaemonCli) (bool, error) { <ide> if *flUnregisterService { <ide> if *flRegisterService { <ide> return true, errors.New("--register-service and --unregister-service cannot be used together") <ide> func initService() (bool, error) { <ide> } <ide> <ide> h := &handler{ <del> tosvc: make(chan bool), <del> fromsvc: make(chan error), <add> tosvc: make(chan bool), <add> fromsvc: make(chan error), <add> daemonCli: daemonCli, <ide> } <ide> <ide> var log *eventlog.Log <ide> func initService() (bool, error) { <ide> <ide> func (h *handler) started() error { <ide> // This must be delayed until daemonCli initializes Config.Root <del> err := initPanicFile(filepath.Join(daemonCli.Config.Root, "panic.log")) <add> err := initPanicFile(filepath.Join(h.daemonCli.Config.Root, "panic.log")) <ide> if err != nil { <ide> return err <ide> } <ide> Loop: <ide> case c := <-r: <ide> switch c.Cmd { <ide> case svc.Cmd(windows.SERVICE_CONTROL_PARAMCHANGE): <del> daemonCli.reloadConfig() <add> h.daemonCli.reloadConfig() <ide> case svc.Interrogate: <ide> s <- c.CurrentStatus <ide> case svc.Stop, svc.Shutdown: <ide> s <- svc.Status{State: svc.StopPending, Accepts: 0} <del> daemonCli.stop() <add> h.daemonCli.stop() <ide> } <ide> } <ide> } <ide><path>daemon/config_unix.go <ide> func (config *Config) InstallFlags(flags *pflag.FlagSet) { <ide> <ide> // Then platform-specific install flags <ide> flags.BoolVar(&config.EnableSelinuxSupport, "selinux-enabled", false, "Enable selinux support") <add> flags.StringVarP(&config.SocketGroup, "group", "G", "docker", "Group for the unix socket") <ide> flags.Var(runconfigopts.NewUlimitOpt(&config.Ulimits), "default-ulimit", "Default ulimits for containers") <ide> flags.BoolVar(&config.bridgeConfig.EnableIPTables, "iptables", true, "Enable addition of iptables rules") <ide> flags.BoolVar(&config.bridgeConfig.EnableIPForward, "ip-forward", true, "Enable net.ipv4.ip_forward") <ide><path>integration-cli/docker_cli_build_test.go <ide> func (s *DockerSuite) TestBuildWithInaccessibleFilesInContext(c *check.C) { <ide> c.Fatalf("failed to chmod file to 700: %s", err) <ide> } <ide> <del> buildCmd := exec.Command("su", "unprivilegeduser", "-c", fmt.Sprintf("%s build -t 
%s .", dockerBinary, name)) <del> buildCmd.Dir = ctx.Dir <del> if out, _, err := runCommandWithOutput(buildCmd); err != nil { <del> c.Fatalf("build should have worked: %s %s", err, out) <del> } <del> <add> result := icmd.RunCmd(icmd.Cmd{ <add> Dir: ctx.Dir, <add> Command: []string{"su", "unprivilegeduser", "-c", <add> fmt.Sprintf("%s build -t %s .", dockerBinary, name)}, <add> }) <add> result.Assert(c, icmd.Expected{}) <ide> } <ide> } <ide> <ide><path>integration-cli/docker_cli_cp_test.go <ide> import ( <ide> "strings" <ide> <ide> "github.com/docker/docker/pkg/integration/checker" <add> icmd "github.com/docker/docker/pkg/integration/cmd" <ide> "github.com/go-check/check" <ide> ) <ide> <ide> func (s *DockerSuite) TestCpUnprivilegedUser(c *check.C) { <ide> <ide> c.Assert(os.Chmod(tmpdir, 0777), checker.IsNil) <ide> <del> path := cpTestName <del> <del> _, _, err = runCommandWithOutput(exec.Command("su", "unprivilegeduser", "-c", dockerBinary+" cp "+containerID+":"+path+" "+tmpdir)) <del> c.Assert(err, checker.IsNil, check.Commentf("couldn't copy with unprivileged user: %s:%s", containerID, path)) <add> result := icmd.RunCommand("su", "unprivilegeduser", "-c", <add> fmt.Sprintf("%s cp %s:%s %s", dockerBinary, containerID, cpTestName, tmpdir)) <add> result.Assert(c, icmd.Expected{}) <ide> } <ide> <ide> func (s *DockerSuite) TestCpSpecialFiles(c *check.C) { <ide><path>integration-cli/docker_cli_daemon_test.go <ide> import ( <ide> "github.com/docker/docker/pkg/integration/checker" <ide> icmd "github.com/docker/docker/pkg/integration/cmd" <ide> "github.com/docker/docker/pkg/mount" <add> "github.com/docker/docker/pkg/testutil/tempfile" <ide> "github.com/docker/go-units" <ide> "github.com/docker/libnetwork/iptables" <ide> "github.com/docker/libtrust" <ide> func (s *DockerDaemonSuite) TestDaemonIptablesCreate(c *check.C) { <ide> func (s *DockerSuite) TestDaemonIPv6Enabled(c *check.C) { <ide> testRequires(c, IPv6) <ide> <del> if err := setupV6(); err != nil { <del> c.Fatal("Could not set up host for IPv6 tests") <del> } <del> <add> setupV6(c) <add> defer teardownV6(c) <ide> d := NewDaemon(c) <ide> <ide> if err := d.StartWithBusybox("--ipv6"); err != nil { <ide> func (s *DockerSuite) TestDaemonIPv6Enabled(c *check.C) { <ide> if ip := net.ParseIP(out); ip != nil { <ide> c.Fatalf("Container should not have a global IPv6 address: %v", out) <ide> } <del> <del> if err := teardownV6(); err != nil { <del> c.Fatal("Could not perform teardown for IPv6 tests") <del> } <del> <ide> } <ide> <ide> // TestDaemonIPv6FixedCIDR checks that when the daemon is started with --ipv6=true and a fixed CIDR <ide> // that running containers are given a link-local and global IPv6 address <ide> func (s *DockerDaemonSuite) TestDaemonIPv6FixedCIDR(c *check.C) { <ide> // IPv6 setup is messing with local bridge address. 
<ide> testRequires(c, SameHostDaemon) <del> err := setupV6() <del> c.Assert(err, checker.IsNil, check.Commentf("Could not set up host for IPv6 tests")) <add> setupV6(c) <add> defer teardownV6(c) <ide> <del> err = s.d.StartWithBusybox("--ipv6", "--fixed-cidr-v6='2001:db8:2::/64'", "--default-gateway-v6='2001:db8:2::100'") <add> err := s.d.StartWithBusybox("--ipv6", "--fixed-cidr-v6=2001:db8:2::/64", "--default-gateway-v6=2001:db8:2::100") <ide> c.Assert(err, checker.IsNil, check.Commentf("Could not start daemon with busybox: %v", err)) <ide> <ide> out, err := s.d.Cmd("run", "-itd", "--name=ipv6test", "busybox:latest") <ide> c.Assert(err, checker.IsNil, check.Commentf("Could not run container: %s, %v", out, err)) <ide> <del> out, err = s.d.Cmd("inspect", "--format", "'{{.NetworkSettings.Networks.bridge.GlobalIPv6Address}}'", "ipv6test") <add> out, err = s.d.Cmd("inspect", "--format", "{{.NetworkSettings.Networks.bridge.GlobalIPv6Address}}", "ipv6test") <ide> out = strings.Trim(out, " \r\n'") <ide> <ide> c.Assert(err, checker.IsNil, check.Commentf(out)) <ide> <ide> ip := net.ParseIP(out) <ide> c.Assert(ip, checker.NotNil, check.Commentf("Container should have a global IPv6 address")) <ide> <del> out, err = s.d.Cmd("inspect", "--format", "'{{.NetworkSettings.Networks.bridge.IPv6Gateway}}'", "ipv6test") <add> out, err = s.d.Cmd("inspect", "--format", "{{.NetworkSettings.Networks.bridge.IPv6Gateway}}", "ipv6test") <ide> c.Assert(err, checker.IsNil, check.Commentf(out)) <ide> <ide> c.Assert(strings.Trim(out, " \r\n'"), checker.Equals, "2001:db8:2::100", check.Commentf("Container should have a global IPv6 gateway")) <del> <del> err = teardownV6() <del> c.Assert(err, checker.IsNil, check.Commentf("Could not perform teardown for IPv6 tests")) <ide> } <ide> <ide> // TestDaemonIPv6FixedCIDRAndMac checks that when the daemon is started with ipv6 fixed CIDR <ide> // the running containers are given an IPv6 address derived from the MAC address and the ipv6 fixed CIDR <ide> func (s *DockerDaemonSuite) TestDaemonIPv6FixedCIDRAndMac(c *check.C) { <ide> // IPv6 setup is messing with local bridge address. 
<ide> testRequires(c, SameHostDaemon) <del> err := setupV6() <del> c.Assert(err, checker.IsNil) <add> setupV6(c) <add> defer teardownV6(c) <ide> <del> err = s.d.StartWithBusybox("--ipv6", "--fixed-cidr-v6='2001:db8:1::/64'") <add> err := s.d.StartWithBusybox("--ipv6", "--fixed-cidr-v6=2001:db8:1::/64") <ide> c.Assert(err, checker.IsNil) <ide> <ide> out, err := s.d.Cmd("run", "-itd", "--name=ipv6test", "--mac-address", "AA:BB:CC:DD:EE:FF", "busybox") <ide> c.Assert(err, checker.IsNil) <ide> <del> out, err = s.d.Cmd("inspect", "--format", "'{{.NetworkSettings.Networks.bridge.GlobalIPv6Address}}'", "ipv6test") <add> out, err = s.d.Cmd("inspect", "--format", "{{.NetworkSettings.Networks.bridge.GlobalIPv6Address}}", "ipv6test") <ide> c.Assert(err, checker.IsNil) <ide> c.Assert(strings.Trim(out, " \r\n'"), checker.Equals, "2001:db8:1::aabb:ccdd:eeff") <del> <del> err = teardownV6() <del> c.Assert(err, checker.IsNil) <ide> } <ide> <ide> func (s *DockerDaemonSuite) TestDaemonLogLevelWrong(c *check.C) { <ide> func (s *DockerDaemonSuite) TestDaemonNoTlsCliTlsVerifyWithEnv(c *check.C) { <ide> <ide> } <ide> <del>func setupV6() error { <add>func setupV6(c *check.C) { <ide> // Hack to get the right IPv6 address on docker0, which has already been created <del> return exec.Command("ip", "addr", "add", "fe80::1/64", "dev", "docker0").Run() <add> result := icmd.RunCommand("ip", "addr", "add", "fe80::1/64", "dev", "docker0") <add> result.Assert(c, icmd.Expected{}) <ide> } <ide> <del>func teardownV6() error { <del> return exec.Command("ip", "addr", "del", "fe80::1/64", "dev", "docker0").Run() <add>func teardownV6(c *check.C) { <add> result := icmd.RunCommand("ip", "addr", "del", "fe80::1/64", "dev", "docker0") <add> result.Assert(c, icmd.Expected{}) <ide> } <ide> <ide> func (s *DockerDaemonSuite) TestDaemonRestartWithContainerWithRestartPolicyAlways(c *check.C) { <ide> func (s *DockerSuite) TestDaemonDiscoveryBackendConfigReload(c *check.C) { <ide> testRequires(c, SameHostDaemon, DaemonIsLinux) <ide> <ide> // daemon config file <del> daemonConfig := `{ "debug" : false }` <del> configFilePath := "test.json" <del> <del> configFile, err := os.Create(configFilePath) <del> c.Assert(err, checker.IsNil) <del> fmt.Fprintf(configFile, "%s", daemonConfig) <add> tmpfile := tempfile.NewTempFile(c, "config-test", `{ "debug" : false }`) <add> defer tmpfile.Remove() <ide> <ide> d := NewDaemon(c) <del> err = d.Start(fmt.Sprintf("--config-file=%s", configFilePath)) <add> // --log-level needs to be set so that d.Start() doesn't add --debug causing <add> // a conflict with the config <add> err := d.Start("--config-file", tmpfile.Name(), "--log-level=info") <ide> c.Assert(err, checker.IsNil) <ide> defer d.Stop() <ide> <ide> // daemon config file <del> daemonConfig = `{ <add> daemonConfig := `{ <ide> "cluster-store": "consul://consuladdr:consulport/some/path", <ide> "cluster-advertise": "192.168.56.100:0", <ide> "debug" : false <ide> }` <ide> <del> configFile.Close() <del> os.Remove(configFilePath) <del> <del> configFile, err = os.Create(configFilePath) <add> os.Remove(tmpfile.Name()) <add> configFile, err := os.Create(tmpfile.Name()) <ide> c.Assert(err, checker.IsNil) <del> defer os.Remove(configFilePath) <ide> fmt.Fprintf(configFile, "%s", daemonConfig) <ide> configFile.Close() <ide> <ide> func (s *DockerSuite) TestDaemonDiscoveryBackendConfigReload(c *check.C) { <ide> <ide> out, err := d.Cmd("info") <ide> c.Assert(err, checker.IsNil) <add> <ide> c.Assert(out, checker.Contains, fmt.Sprintf("Cluster Store: 
consul://consuladdr:consulport/some/path")) <ide> c.Assert(out, checker.Contains, fmt.Sprintf("Cluster Advertise: 192.168.56.100:0")) <ide> } <ide><path>integration-cli/docker_cli_help_test.go <ide> import ( <ide> <ide> "github.com/docker/docker/pkg/homedir" <ide> "github.com/docker/docker/pkg/integration/checker" <add> icmd "github.com/docker/docker/pkg/integration/cmd" <ide> "github.com/go-check/check" <ide> ) <ide> <ide> func (s *DockerSuite) TestHelpTextVerify(c *check.C) { <ide> out, _, err := runCommandWithOutput(helpCmd) <ide> c.Assert(err, checker.IsNil, check.Commentf(out)) <ide> lines := strings.Split(out, "\n") <del> foundTooLongLine := false <ide> for _, line := range lines { <del> if !foundTooLongLine && len(line) > 80 { <del> c.Logf("Line is too long:\n%s", line) <del> foundTooLongLine = true <del> } <ide> // All lines should not end with a space <ide> c.Assert(line, checker.Not(checker.HasSuffix), " ", check.Commentf("Line should not end with a space")) <ide> <ide> func testCommand(cmd string, newEnvs []string, scanForHome bool, home string) er <ide> // Check each line for lots of stuff <ide> lines := strings.Split(out, "\n") <ide> for _, line := range lines { <del> if len(line) > 107 { <del> return fmt.Errorf("Help for %q is too long:\n%s\n", cmd, line) <del> } <del> <del> if scanForHome && strings.Contains(line, `"`+home) { <del> return fmt.Errorf("Help for %q should use ~ instead of %q on:\n%s\n", <del> cmd, home, line) <del> } <ide> i := strings.Index(line, "~") <ide> if i >= 0 && i != len(line)-1 && line[i+1] != '/' { <ide> return fmt.Errorf("Help for %q should not have used ~:\n%s", cmd, line) <ide> func testCommand(cmd string, newEnvs []string, scanForHome bool, home string) er <ide> } <ide> <ide> if _, ok := noShortUsage[cmd]; !ok { <del> // For each command run it w/o any args. It will either return <del> // valid output or print a short-usage <del> var dCmd *exec.Cmd <del> <ide> // skipNoArgs are ones that we don't want to try w/o <ide> // any args. Either because it'll hang the test or <ide> // lead to incorrect test result (like false negative). <ide> func testCommand(cmd string, newEnvs []string, scanForHome bool, home string) er <ide> "load": {}, <ide> } <ide> <del> ec := 0 <add> var result *icmd.Result <ide> if _, ok := skipNoArgs[cmd]; !ok { <del> args = strings.Split(cmd, " ") <del> dCmd = exec.Command(dockerBinary, args...) <del> out, stderr, ec, err = runCommandWithStdoutStderr(dCmd) <add> result = dockerCmdWithResult(strings.Split(cmd, " ")...) <ide> } <ide> <ide> // If its ok w/o any args then try again with an arg <del> if ec == 0 { <del> args = strings.Split(cmd+" badArg", " ") <del> dCmd = exec.Command(dockerBinary, args...) <del> out, stderr, ec, err = runCommandWithStdoutStderr(dCmd) <add> if result == nil || result.ExitCode == 0 { <add> result = dockerCmdWithResult(strings.Split(cmd+" badArg", " ")...) 
<ide> } <ide> <del> if len(out) != 0 || len(stderr) == 0 || ec == 0 || err == nil { <del> return fmt.Errorf("Bad output from %q\nstdout:%q\nstderr:%q\nec:%d\nerr:%q\n", args, out, stderr, ec, err) <del> } <del> // Should have just short usage <del> if !strings.Contains(stderr, "\nUsage:") { <del> return fmt.Errorf("Missing short usage on %q\n:%#v", args, stderr) <add> if err := result.Compare(icmd.Expected{ <add> Out: icmd.None, <add> Err: "\nUsage:", <add> ExitCode: 1, <add> }); err != nil { <add> return err <ide> } <del> // But shouldn't have full usage <add> <add> stderr := result.Stderr() <add> // Shouldn't have full usage <ide> if strings.Contains(stderr, "--help=false") { <del> return fmt.Errorf("Should not have full usage on %q\n", args) <add> return fmt.Errorf("Should not have full usage on %q:%v", result.Cmd.Args, stderr) <ide> } <ide> if strings.HasSuffix(stderr, "\n\n") { <del> return fmt.Errorf("Should not have a blank line on %q\n%v", args, stderr) <add> return fmt.Errorf("Should not have a blank line on %q\n%v", result.Cmd.Args, stderr) <ide> } <ide> } <ide> <ide><path>integration-cli/docker_cli_run_test.go <ide> func (s *DockerSuite) TestUserDefinedNetworkAlias(c *check.C) { <ide> // Issue 9677. <ide> func (s *DockerSuite) TestRunWithDaemonFlags(c *check.C) { <ide> out, _, err := dockerCmdWithError("--exec-opt", "foo=bar", "run", "-i", "busybox", "true") <del> if err != nil { <del> if !strings.Contains(out, "flag provided but not defined: --exec-opt") { // no daemon (client-only) <del> c.Fatal(err, out) <del> } <del> } <add> c.Assert(err, checker.NotNil) <add> c.Assert(out, checker.Contains, "unknown flag: --exec-opt") <ide> } <ide> <ide> // Regression test for #4979 <ide> func (s *DockerSuite) TestRunTLSverify(c *check.C) { <ide> <ide> // Regardless of whether we specify true or false we need to <ide> // test to make sure tls is turned on if --tlsverify is specified at all <del> out, code, err := dockerCmdWithError("--tlsverify=false", "ps") <del> if err == nil || code == 0 || !strings.Contains(out, "trying to connect") { <del> c.Fatalf("Should have failed: \net:%v\nout:%v\nerr:%v", code, out, err) <del> } <add> result := dockerCmdWithResult("--tlsverify=false", "ps") <add> result.Assert(c, icmd.Expected{ExitCode: 1, Err: "trying to connect"}) <ide> <del> out, code, err = dockerCmdWithError("--tlsverify=true", "ps") <del> if err == nil || code == 0 || !strings.Contains(out, "cert") { <del> c.Fatalf("Should have failed: \net:%v\nout:%v\nerr:%v", code, out, err) <del> } <add> result = dockerCmdWithResult("--tlsverify=true", "ps") <add> result.Assert(c, icmd.Expected{ExitCode: 1, Err: "cert"}) <ide> } <ide> <ide> func (s *DockerSuite) TestRunPortFromDockerRangeInUse(c *check.C) {
12
Javascript
Javascript
update examples to use modules
74014f57edd45e1628db501ab7a44b8dcb741b1b
<ide><path>src/ng/filter/filters.js <ide> * <ide> * <ide> * @example <del> <example> <add> <example module="currencyExample"> <ide> <file name="index.html"> <ide> <script> <del> function Ctrl($scope) { <del> $scope.amount = 1234.56; <del> } <add> angular.module('currencyExample', []) <add> .controller('ExampleController', ['$scope', function($scope) { <add> $scope.amount = 1234.56; <add> }]); <ide> </script> <del> <div ng-controller="Ctrl"> <add> <div ng-controller="ExampleController"> <ide> <input type="number" ng-model="amount"> <br> <ide> default currency symbol ($): <span id="currency-default">{{amount | currency}}</span><br> <ide> custom currency identifier (USD$): <span>{{amount | currency:"USD$"}}</span> <ide> function currencyFilter($locale) { <ide> * @returns {string} Number rounded to decimalPlaces and places a “,” after each third digit. <ide> * <ide> * @example <del> <example> <add> <example module="numberFilterExample"> <ide> <file name="index.html"> <ide> <script> <del> function Ctrl($scope) { <del> $scope.val = 1234.56789; <del> } <add> angular.module('numberFilterExample', []) <add> .controller('ExampleController', ['$scope', function($scope) { <add> $scope.val = 1234.56789; <add> }]); <ide> </script> <del> <div ng-controller="Ctrl"> <add> <div ng-controller="ExampleController"> <ide> Enter number: <input ng-model='val'><br> <ide> Default formatting: <span id='number-default'>{{val | number}}</span><br> <ide> No fractions: <span>{{val | number:0}}</span><br>
1
PHP
PHP
update owasp synchronizer token pattern link
125bd6eb6920f0be07101e7b47500643da663c6e
<ide><path>src/Http/Middleware/SessionCsrfProtectionMiddleware.php <ide> * <ide> * If you use this middleware *do not* also use CsrfProtectionMiddleware. <ide> * <del> * @see https://cheatsheetseries.owasp.org/cheatsheets/Cross-Site_Request_Forgery_Prevention_Cheat_Sheet.html#sychronizer-token-pattern <add> * @see https://cheatsheetseries.owasp.org/cheatsheets/Cross-Site_Request_Forgery_Prevention_Cheat_Sheet.html#synchronizer-token-pattern <ide> */ <ide> class SessionCsrfProtectionMiddleware implements MiddlewareInterface <ide> {
1
Javascript
Javascript
add tests for matching 'otherwise' routes
65e57a7c3d53fad536797301fe59b5912ed4db16
<ide><path>test/ng/routeSpec.js <ide> describe('$route', function() { <ide> }); <ide> <ide> <del> it('should handle unknown routes with "otherwise" route definition', function() { <del> function NotFoundCtrl() {} <del> <add> it('should chain whens and otherwise', function() { <ide> module(function($routeProvider){ <del> $routeProvider.when('/foo', {templateUrl: 'foo.html'}); <del> $routeProvider.otherwise({templateUrl: '404.html', controller: NotFoundCtrl}); <add> $routeProvider.when('/foo', {templateUrl: 'foo.html'}). <add> otherwise({templateUrl: 'bar.html'}). <add> when('/baz', {templateUrl: 'baz.html'}); <ide> }); <ide> <ide> inject(function($route, $location, $rootScope) { <del> var onChangeSpy = jasmine.createSpy('onChange'); <del> <del> $rootScope.$on('$routeChangeStart', onChangeSpy); <del> expect($route.current).toBeUndefined(); <del> expect(onChangeSpy).not.toHaveBeenCalled(); <del> <del> $location.path('/unknownRoute'); <ide> $rootScope.$digest(); <add> expect($route.current.templateUrl).toBe('bar.html'); <ide> <del> expect($route.current.templateUrl).toBe('404.html'); <del> expect($route.current.controller).toBe(NotFoundCtrl); <del> expect(onChangeSpy).toHaveBeenCalled(); <del> <del> onChangeSpy.reset(); <del> $location.path('/foo'); <add> $location.url('/baz'); <ide> $rootScope.$digest(); <del> <del> expect($route.current.templateUrl).toEqual('foo.html'); <del> expect($route.current.controller).toBeUndefined(); <del> expect(onChangeSpy).toHaveBeenCalled(); <add> expect($route.current.templateUrl).toBe('baz.html'); <ide> }); <ide> }); <ide> <ide> <del> it('should chain whens and otherwise', function() { <del> module(function($routeProvider){ <del> $routeProvider.when('/foo', {templateUrl: 'foo.html'}). <del> otherwise({templateUrl: 'bar.html'}). 
<del> when('/baz', {templateUrl: 'baz.html'}); <add> describe('otherwise', function() { <add> <add> it('should handle unknown routes with "otherwise" route definition', function() { <add> function NotFoundCtrl() {} <add> <add> module(function($routeProvider){ <add> $routeProvider.when('/foo', {templateUrl: 'foo.html'}); <add> $routeProvider.otherwise({templateUrl: '404.html', controller: NotFoundCtrl}); <add> }); <add> <add> inject(function($route, $location, $rootScope) { <add> var onChangeSpy = jasmine.createSpy('onChange'); <add> <add> $rootScope.$on('$routeChangeStart', onChangeSpy); <add> expect($route.current).toBeUndefined(); <add> expect(onChangeSpy).not.toHaveBeenCalled(); <add> <add> $location.path('/unknownRoute'); <add> $rootScope.$digest(); <add> <add> expect($route.current.templateUrl).toBe('404.html'); <add> expect($route.current.controller).toBe(NotFoundCtrl); <add> expect(onChangeSpy).toHaveBeenCalled(); <add> <add> onChangeSpy.reset(); <add> $location.path('/foo'); <add> $rootScope.$digest(); <add> <add> expect($route.current.templateUrl).toEqual('foo.html'); <add> expect($route.current.controller).toBeUndefined(); <add> expect(onChangeSpy).toHaveBeenCalled(); <add> }); <ide> }); <ide> <del> inject(function($route, $location, $rootScope) { <del> $rootScope.$digest(); <del> expect($route.current.templateUrl).toBe('bar.html'); <ide> <del> $location.url('/baz'); <del> $rootScope.$digest(); <del> expect($route.current.templateUrl).toBe('baz.html'); <add> it('should update $route.current and $route.next when default route is matched', function() { <add> module(function($routeProvider){ <add> $routeProvider.when('/foo', {templateUrl: 'foo.html'}); <add> $routeProvider.otherwise({templateUrl: '404.html'}); <add> }); <add> <add> inject(function($route, $location, $rootScope) { <add> var currentRoute, nextRoute, <add> onChangeSpy = jasmine.createSpy('onChange').andCallFake(function(e, next) { <add> currentRoute = $route.current; <add> nextRoute = next; <add> }); <add> <add> <add> // init <add> $rootScope.$on('$routeChangeStart', onChangeSpy); <add> expect($route.current).toBeUndefined(); <add> expect(onChangeSpy).not.toHaveBeenCalled(); <add> <add> <add> // match otherwise route <add> $location.path('/unknownRoute'); <add> $rootScope.$digest(); <add> <add> expect(currentRoute).toBeUndefined(); <add> expect(nextRoute.templateUrl).toBe('404.html'); <add> expect($route.current.templateUrl).toBe('404.html'); <add> expect(onChangeSpy).toHaveBeenCalled(); <add> onChangeSpy.reset(); <add> <add> // match regular route <add> $location.path('/foo'); <add> $rootScope.$digest(); <add> <add> expect(currentRoute.templateUrl).toBe('404.html'); <add> expect(nextRoute.templateUrl).toBe('foo.html'); <add> expect($route.current.templateUrl).toEqual('foo.html'); <add> expect(onChangeSpy).toHaveBeenCalled(); <add> onChangeSpy.reset(); <add> <add> // match otherwise route again <add> $location.path('/anotherUnknownRoute'); <add> $rootScope.$digest(); <add> <add> expect(currentRoute.templateUrl).toBe('foo.html'); <add> expect(nextRoute.templateUrl).toBe('404.html'); <add> expect($route.current.templateUrl).toEqual('404.html'); <add> expect(onChangeSpy).toHaveBeenCalled(); <add> }); <ide> }); <ide> }); <ide>
1
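
For context, a minimal application-side sketch of the routing configuration these new specs exercise (not taken from the patch; module and template names are assumptions): the otherwise() definition is matched for unknown URLs and is reported through $routeChangeStart with current/next populated, just like a when() route.

angular.module('routeExample', [])
  .config(['$routeProvider', function($routeProvider) {
    // On newer AngularJS versions $routeProvider lives in the separate ngRoute
    // module, which would then need to be listed as a dependency above.
    $routeProvider
      .when('/foo', {templateUrl: 'foo.html'})
      .otherwise({templateUrl: '404.html'});
  }])
  .run(['$rootScope', function($rootScope) {
    $rootScope.$on('$routeChangeStart', function(event, next, current) {
      // For an unknown URL, next points at the otherwise route definition;
      // current is undefined on the very first route change.
      console.log('route change', current && current.templateUrl, '->', next.templateUrl);
    });
  }]);
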
PHP
PHP
fix getfailedloginmessage typo
b7d653e408dd8ef396b2a7552f81460d71db25da
<ide><path>src/Illuminate/Foundation/Auth/AuthenticatesAndRegistersUsers.php <ide> public function postLogin(Request $request) <ide> return redirect($this->loginPath()) <ide> ->withInput($request->only('email', 'remember')) <ide> ->withErrors([ <del> 'email' => $this->getFailedLoginMesssage(), <add> 'email' => $this->getFailedLoginMessage(), <ide> ]); <ide> } <ide> <ide> public function postLogin(Request $request) <ide> * <ide> * @return string <ide> */ <del> protected function getFailedLoginMesssage() <add> protected function getFailedLoginMessage() <ide> { <ide> return 'These credentials do not match our records.'; <ide> }
1
Javascript
Javascript
use default asset roots in `default.config.js`
47926abca9d60b0a6090c3e82f50fb75ec659c6c
<ide><path>local-cli/default.config.js <ide> var path = require('path'); <ide> */ <ide> var config = { <ide> getProjectRoots() { <del> if (__dirname.match(/node_modules[\/\\]react-native[\/\\]local-cli$/)) { <del> // packager is running from node_modules of another project <del> return [path.resolve(__dirname, '../../..')]; <del> } else if (__dirname.match(/Pods[\/\\]React[\/\\]packager$/)) { <del> // packager is running from node_modules of another project <del> return [path.resolve(__dirname, '../../..')]; <del> } else { <del> return [path.resolve(__dirname, '..')]; <del> } <add> return getRoots(); <ide> }, <ide> <ide> /** <ide> var config = { <ide> * `./<image.extension>` don't require any entry in here. <ide> */ <ide> getAssetRoots() { <del> return []; <add> return getRoots(); <ide> }, <ide> <ide> /** <ide> var config = { <ide> } <ide> }; <ide> <add>function getRoots() { <add> if (__dirname.match(/node_modules[\/\\]react-native[\/\\]local-cli$/)) { <add> // packager is running from node_modules of another project <add> return [path.resolve(__dirname, '../../..')]; <add> } else if (__dirname.match(/Pods[\/\\]React[\/\\]packager$/)) { <add> // packager is running from node_modules of another project <add> return [path.resolve(__dirname, '../../..')]; <add> } else { <add> return [path.resolve(__dirname, '..')]; <add> } <add>} <add> <ide> module.exports = config;
1
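
A hedged usage sketch (the file name and extra path are assumptions, not part of the patch): a project-level config can still override either hook; the change above only makes the built-in getAssetRoots() fall back to the same roots that getProjectRoots() resolves.

// rn-cli.config.js (hypothetical project-level override)
var path = require('path');

module.exports = {
  getProjectRoots() {
    return [__dirname];
  },
  getAssetRoots() {
    // Serve require('./img/logo.png')-style assets from an extra directory
    // in addition to the project root the default config would now resolve.
    return [__dirname, path.resolve(__dirname, 'shared-assets')];
  },
};
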
Javascript
Javascript
keep track of min/max attrs on-the-fly
4b653aeac1aca7ac551738870a2446b6810ca0df
<ide><path>src/ng/directive/input.js <ide> function numberInputType(scope, element, attr, ctrl, $sniffer, $browser) { <ide> }); <ide> <ide> if (attr.min) { <del> var min = parseFloat(attr.min); <ide> var minValidator = function(value) { <add> var min = parseFloat(attr.min); <ide> if (!ctrl.$isEmpty(value) && value < min) { <ide> ctrl.$setValidity('min', false); <ide> return undefined; <ide> function numberInputType(scope, element, attr, ctrl, $sniffer, $browser) { <ide> } <ide> <ide> if (attr.max) { <del> var max = parseFloat(attr.max); <ide> var maxValidator = function(value) { <add> var max = parseFloat(attr.max); <ide> if (!ctrl.$isEmpty(value) && value > max) { <ide> ctrl.$setValidity('max', false); <ide> return undefined; <ide><path>test/ng/directive/inputSpec.js <ide> describe('input', function() { <ide> expect(scope.value).toBe(100); <ide> expect(scope.form.alias.$error.min).toBeFalsy(); <ide> }); <add> <add> it('should validate even if min value changes on-the-fly', function(done) { <add> scope.min = 10; <add> compileInput('<input type="number" ng-model="value" name="alias" min="{{min}}" />'); <add> scope.$digest(); <add> <add> changeInputValueTo('5'); <add> expect(inputElm).toBeInvalid(); <add> <add> scope.min = 0; <add> scope.$digest(function () { <add> expect(inputElm).toBeValid(); <add> done(); <add> }); <add> }); <ide> }); <ide> <ide> <ide> describe('input', function() { <ide> expect(scope.value).toBe(0); <ide> expect(scope.form.alias.$error.max).toBeFalsy(); <ide> }); <add> <add> it('should validate even if max value changes on-the-fly', function(done) { <add> scope.max = 10; <add> compileInput('<input type="number" ng-model="value" name="alias" max="{{max}}" />'); <add> scope.$digest(); <add> <add> changeInputValueTo('5'); <add> expect(inputElm).toBeValid(); <add> <add> scope.max = 0; <add> scope.$digest(function () { <add> expect(inputElm).toBeInvalid(); <add> done(); <add> }); <add> }); <ide> }); <ide> <ide>
2
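
An illustrative sketch of the behaviour this change enables (controller, property and markup names are made up): because parseFloat(attr.min) now runs inside the validator, an interpolated bound such as min="{{minAge}}" is re-read when the scope value changes instead of being frozen at link time.

// Markup assumed: <input type="number" name="age" ng-model="value" min="{{minAge}}">
angular.module('minExample', [])
  .controller('MinCtrl', ['$scope', function($scope) {
    $scope.minAge = 18;
    $scope.relaxLimit = function() {
      // Lowering the bound re-validates the current value against 13 on the
      // next digest; previously the validator kept using the initial 18.
      $scope.minAge = 13;
    };
  }]);
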
Javascript
Javascript
fix initial aspect ratio when not responsive
16bcd6adc579cb3deae16ea915680bc219924cdc
<ide><path>src/core/core.controller.js <ide> module.exports = function(Chart) { <ide> <ide> var helpers = Chart.helpers; <add> <ide> // Create a dictionary of chart types, to allow for extension of existing types <ide> Chart.types = {}; <ide> <ide> module.exports = function(Chart) { <ide> // Controllers available for dataset visualization eg. bar, line, slice, etc. <ide> Chart.controllers = {}; <ide> <add> /** <add> * The "used" size is the final value of a dimension property after all calculations have <add> * been performed. This method uses the computed style of `element` but returns undefined <add> * if the computed style is not expressed in pixels. That can happen in some cases where <add> * `element` has a size relative to its parent and this last one is not yet displayed, <add> * for example because of `display: none` on a parent node. <add> * TODO(SB) Move this method in the upcoming core.platform class. <add> * @see https://developer.mozilla.org/en-US/docs/Web/CSS/used_value <add> * @returns {Number} Size in pixels or undefined if unknown. <add> */ <add> function readUsedSize(element, property) { <add> var value = helpers.getStyle(element, property); <add> var matches = value && value.match(/(\d+)px/); <add> return matches? Number(matches[1]) : undefined; <add> } <add> <add> /** <add> * Initializes the canvas style and render size without modifying the canvas display size, <add> * since responsiveness is handled by the controller.resize() method. The config is used <add> * to determine the aspect ratio to apply in case no explicit height has been specified. <add> * TODO(SB) Move this method in the upcoming core.platform class. <add> */ <add> function initCanvas(canvas, config) { <add> var style = canvas.style; <add> <add> // NOTE(SB) canvas.getAttribute('width') !== canvas.width: in the first case it <add> // returns null or '' if no explicit value has been set to the canvas attribute. <add> var renderHeight = canvas.getAttribute('height'); <add> var renderWidth = canvas.getAttribute('width'); <add> <add> // Chart.js modifies some canvas values that we want to restore on destroy <add> canvas._chartjs = { <add> initial: { <add> height: renderHeight, <add> width: renderWidth, <add> style: { <add> display: style.display, <add> height: style.height, <add> width: style.width <add> } <add> } <add> }; <add> <add> // Force canvas to display as block to avoid extra space caused by inline <add> // elements, which would interfere with the responsive resize process. <add> // https://github.com/chartjs/Chart.js/issues/2538 <add> style.display = style.display || 'block'; <add> <add> if (renderWidth === null || renderWidth === '') { <add> var displayWidth = readUsedSize(canvas, 'width'); <add> if (displayWidth !== undefined) { <add> canvas.width = displayWidth; <add> } <add> } <add> <add> if (renderHeight === null || renderHeight === '') { <add> if (canvas.style.height === '') { <add> // If no explicit render height and style height, let's apply the aspect ratio, <add> // which one can be specified by the user but also by charts as default option <add> // (i.e. options.aspectRatio). If not specified, use canvas aspect ratio of 2. 
<add> canvas.height = canvas.width / (config.options.aspectRatio || 2); <add> } else { <add> var displayHeight = readUsedSize(canvas, 'height'); <add> if (displayWidth !== undefined) { <add> canvas.height = displayHeight; <add> } <add> } <add> } <add> <add> return canvas; <add> } <add> <add> /** <add> * Restores the canvas initial state, such as render/display sizes and style. <add> * TODO(SB) Move this method in the upcoming core.platform class. <add> */ <add> function releaseCanvas(canvas) { <add> if (!canvas._chartjs) { <add> return; <add> } <add> <add> var initial = canvas._chartjs.initial; <add> ['height', 'width'].forEach(function(prop) { <add> var value = initial[prop]; <add> if (value === undefined || value === null) { <add> canvas.removeAttribute(prop); <add> } else { <add> canvas.setAttribute(prop, value); <add> } <add> }); <add> <add> helpers.each(initial.style || {}, function(value, key) { <add> canvas.style[key] = value; <add> }); <add> <add> delete canvas._chartjs; <add> } <add> <add> /** <add> * Initializes the given config with global and chart default values. <add> */ <add> function initConfig(config) { <add> config = config || {}; <add> return helpers.configMerge({ <add> options: helpers.configMerge( <add> Chart.defaults.global, <add> Chart.defaults[config.type], <add> config.options || {}), <add> data: { <add> datasets: [], <add> labels: [] <add> } <add> }, config); <add> } <add> <ide> /** <ide> * @class Chart.Controller <ide> * The main controller of a chart. <ide> */ <del> Chart.Controller = function(instance) { <add> Chart.Controller = function(context, config, instance) { <add> var me = this; <add> var canvas; <ide> <del> this.chart = instance; <del> this.config = instance.config; <del> this.options = this.config.options = helpers.configMerge(Chart.defaults.global, Chart.defaults[this.config.type], this.config.options || {}); <del> this.id = helpers.uid(); <add> config = initConfig(config); <add> canvas = initCanvas(context.canvas, config); <ide> <del> Object.defineProperty(this, 'data', { <add> instance.ctx = context; <add> instance.canvas = canvas; <add> instance.config = config; <add> instance.width = canvas.width; <add> instance.height = canvas.height; <add> instance.aspectRatio = canvas.width / canvas.height; <add> <add> helpers.retinaScale(instance); <add> <add> me.id = helpers.uid(); <add> me.chart = instance; <add> me.config = instance.config; <add> me.options = me.config.options; <add> <add> Object.defineProperty(me, 'data', { <ide> get: function() { <del> return this.config.data; <add> return me.config.data; <add> } <add> }); <add> <add> // Always bind this so that if the responsive state changes we still work <add> helpers.addResizeListener(canvas.parentNode, function() { <add> if (me.config.options.responsive) { <add> me.resize(); <ide> } <ide> }); <ide> <ide> // Add the chart instance to the global namespace <del> Chart.instances[this.id] = this; <add> Chart.instances[me.id] = me; <ide> <del> if (this.options.responsive) { <add> if (me.options.responsive) { <ide> // Silent resize before chart draws <del> this.resize(true); <add> me.resize(true); <ide> } <ide> <del> this.initialize(); <add> me.initialize(); <ide> <del> return this; <add> return me; <ide> }; <ide> <ide> helpers.extend(Chart.Controller.prototype, /** @lends Chart.Controller */ { <del> <ide> initialize: function() { <ide> var me = this; <add> <ide> // Before init plugin notification <ide> Chart.plugins.notify('beforeInit', [me]); <ide> <ide> module.exports = function(Chart) { <ide> resize: 
function(silent) { <ide> var me = this; <ide> var chart = me.chart; <add> var options = me.options; <ide> var canvas = chart.canvas; <del> var newWidth = helpers.getMaximumWidth(canvas); <del> var aspectRatio = chart.aspectRatio; <del> var newHeight = (me.options.maintainAspectRatio && isNaN(aspectRatio) === false && isFinite(aspectRatio) && aspectRatio !== 0) ? newWidth / aspectRatio : helpers.getMaximumHeight(canvas); <add> var aspectRatio = (options.maintainAspectRatio && chart.aspectRatio) || null; <ide> <del> var sizeChanged = chart.width !== newWidth || chart.height !== newHeight; <add> // the canvas render width and height will be casted to integers so make sure that <add> // the canvas display style uses the same integer values to avoid blurring effect. <add> var newWidth = Math.floor(helpers.getMaximumWidth(canvas)); <add> var newHeight = Math.floor(aspectRatio? newWidth / aspectRatio : helpers.getMaximumHeight(canvas)); <ide> <del> if (!sizeChanged) { <del> return me; <add> if (chart.width === newWidth && chart.height === newHeight) { <add> return; <ide> } <ide> <ide> canvas.width = chart.width = newWidth; <ide> canvas.height = chart.height = newHeight; <ide> <ide> helpers.retinaScale(chart); <ide> <add> canvas.style.width = newWidth + 'px'; <add> canvas.style.height = newHeight + 'px'; <add> <ide> // Notify any plugins about the resize <ide> var newSize = {width: newWidth, height: newHeight}; <ide> Chart.plugins.notify('resize', [me, newSize]); <ide> module.exports = function(Chart) { <ide> me.stop(); <ide> me.update(me.options.responsiveAnimationDuration); <ide> } <del> <del> return me; <ide> }, <ide> <ide> ensureScalesHaveIDs: function() { <ide> module.exports = function(Chart) { <ide> <ide> destroy: function() { <ide> var me = this; <add> var canvas = me.chart.canvas; <add> <ide> me.stop(); <ide> me.clear(); <add> <ide> helpers.unbindEvents(me, me.events); <del> helpers.removeResizeListener(me.chart.canvas.parentNode); <ide> <del> // Reset canvas height/width attributes <del> var canvas = me.chart.canvas; <del> canvas.width = me.chart.width; <del> canvas.height = me.chart.height; <add> if (canvas) { <add> helpers.removeResizeListener(canvas.parentNode); <add> releaseCanvas(canvas); <add> } <ide> <ide> // if we scaled the canvas in response to a devicePixelRatio !== 1, we need to undo that transform here <ide> if (me.chart.originalDevicePixelRatio !== undefined) { <ide> me.chart.ctx.scale(1 / me.chart.originalDevicePixelRatio, 1 / me.chart.originalDevicePixelRatio); <ide> } <ide> <del> // Reset to the old style since it may have been changed by the device pixel ratio changes <del> canvas.style.width = me.chart.originalCanvasStyleWidth; <del> canvas.style.height = me.chart.originalCanvasStyleHeight; <del> <ide> Chart.plugins.notify('destroy', [me]); <ide> <ide> delete Chart.instances[me.id]; <ide><path>src/core/core.helpers.js <ide> module.exports = function(Chart) { <ide> // when destroy is called <ide> chart.originalDevicePixelRatio = chart.originalDevicePixelRatio || pixelRatio; <ide> } <del> <del> canvas.style.width = width + 'px'; <del> canvas.style.height = height + 'px'; <ide> }; <ide> // -- Canvas methods <ide> helpers.clear = function(chart) { <ide><path>src/core/core.js <ide> module.exports = function() { <ide> // Occupy the global variable of Chart, and create a simple base class <ide> var Chart = function(context, config) { <ide> var me = this; <del> var helpers = Chart.helpers; <del> me.config = config || { <del> data: { <del> datasets: [] <del> } <del> }; <ide> 
<ide> // Support a jQuery'd canvas element <ide> if (context.length && context[0].getContext) { <ide> module.exports = function() { <ide> context = context.getContext('2d'); <ide> } <ide> <del> me.ctx = context; <del> me.canvas = context.canvas; <del> <del> context.canvas.style.display = context.canvas.style.display || 'block'; <del> <del> // Figure out what the size of the chart will be. <del> // If the canvas has a specified width and height, we use those else <del> // we look to see if the canvas node has a CSS width and height. <del> // If there is still no height, fill the parent container <del> me.width = context.canvas.width || parseInt(helpers.getStyle(context.canvas, 'width'), 10) || helpers.getMaximumWidth(context.canvas); <del> me.height = context.canvas.height || parseInt(helpers.getStyle(context.canvas, 'height'), 10) || helpers.getMaximumHeight(context.canvas); <del> <del> me.aspectRatio = me.width / me.height; <del> <del> if (isNaN(me.aspectRatio) || isFinite(me.aspectRatio) === false) { <del> // If the canvas has no size, try and figure out what the aspect ratio will be. <del> // Some charts prefer square canvases (pie, radar, etc). If that is specified, use that <del> // else use the canvas default ratio of 2 <del> me.aspectRatio = config.aspectRatio !== undefined ? config.aspectRatio : 2; <del> } <del> <del> // Store the original style of the element so we can set it back <del> me.originalCanvasStyleWidth = context.canvas.style.width; <del> me.originalCanvasStyleHeight = context.canvas.style.height; <del> <del> // High pixel density displays - multiply the size of the canvas height/width by the device pixel ratio, then scale. <del> helpers.retinaScale(me); <del> me.controller = new Chart.Controller(me); <del> <del> // Always bind this so that if the responsive state changes we still work <del> helpers.addResizeListener(context.canvas.parentNode, function() { <del> if (me.controller && me.controller.config.options.responsive) { <del> me.controller.resize(); <del> } <del> }); <del> <del> return me.controller ? me.controller : me; <add> me.controller = new Chart.Controller(context, config, me); <ide> <add> return me.controller; <ide> }; <ide> <ide> // Globally expose the defaults to allow for user updating/changing <ide> module.exports = function() { <ide> Chart.Chart = Chart; <ide> <ide> return Chart; <del> <ide> };
3
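
A minimal sketch of the case the fix targets (element id and data are invented): a non-responsive chart whose canvas declares a width but no height now derives its render height from options.aspectRatio (falling back to 2) instead of collapsing.

// Markup assumed: <canvas id="chart" width="600"></canvas>
var ctx = document.getElementById('chart').getContext('2d');
new Chart(ctx, {
  type: 'line',
  data: {
    labels: ['Jan', 'Feb', 'Mar'],
    datasets: [{label: 'visits', data: [10, 20, 15]}]
  },
  options: {
    responsive: false,
    aspectRatio: 2  // render height becomes width / aspectRatio when the canvas has none
  }
});
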
Python
Python
add option to log only once in multinode training
f086652b16e59bece9571fb9a266557ad3181b2a
<ide><path>examples/pytorch/language-modeling/run_clm.py <ide> set_seed, <ide> ) <ide> from transformers.testing_utils import CaptureLogger <del>from transformers.trainer_utils import get_last_checkpoint, is_main_process <add>from transformers.trainer_utils import get_last_checkpoint <ide> from transformers.utils import check_min_version <ide> <ide> <ide> def main(): <ide> datefmt="%m/%d/%Y %H:%M:%S", <ide> handlers=[logging.StreamHandler(sys.stdout)], <ide> ) <del> logger.setLevel(logging.INFO if is_main_process(training_args.local_rank) else logging.WARN) <add> logger.setLevel(logging.INFO if training_args.should_log else logging.WARN) <ide> <ide> # Log on each process the small summary: <ide> logger.warning( <ide> f"Process rank: {training_args.local_rank}, device: {training_args.device}, n_gpu: {training_args.n_gpu}" <ide> + f"distributed training: {bool(training_args.local_rank != -1)}, 16-bits training: {training_args.fp16}" <ide> ) <ide> # Set the verbosity to info of the Transformers logger (on main process only): <del> if is_main_process(training_args.local_rank): <add> if training_args.should_log: <ide> transformers.utils.logging.set_verbosity_info() <ide> transformers.utils.logging.enable_default_handler() <ide> transformers.utils.logging.enable_explicit_format() <ide><path>examples/pytorch/language-modeling/run_mlm.py <ide> TrainingArguments, <ide> set_seed, <ide> ) <del>from transformers.trainer_utils import get_last_checkpoint, is_main_process <add>from transformers.trainer_utils import get_last_checkpoint <ide> from transformers.utils import check_min_version <ide> <ide> <ide> def main(): <ide> datefmt="%m/%d/%Y %H:%M:%S", <ide> handlers=[logging.StreamHandler(sys.stdout)], <ide> ) <del> logger.setLevel(logging.INFO if is_main_process(training_args.local_rank) else logging.WARN) <add> logger.setLevel(logging.INFO if training_args.should_log else logging.WARN) <ide> <ide> # Log on each process the small summary: <ide> logger.warning( <ide> f"Process rank: {training_args.local_rank}, device: {training_args.device}, n_gpu: {training_args.n_gpu}" <ide> + f"distributed training: {bool(training_args.local_rank != -1)}, 16-bits training: {training_args.fp16}" <ide> ) <ide> # Set the verbosity to info of the Transformers logger (on main process only): <del> if is_main_process(training_args.local_rank): <add> if training_args.should_log: <ide> transformers.utils.logging.set_verbosity_info() <ide> transformers.utils.logging.enable_default_handler() <ide> transformers.utils.logging.enable_explicit_format() <ide><path>examples/pytorch/language-modeling/run_plm.py <ide> XLNetLMHeadModel, <ide> set_seed, <ide> ) <del>from transformers.trainer_utils import get_last_checkpoint, is_main_process <add>from transformers.trainer_utils import get_last_checkpoint <ide> from transformers.utils import check_min_version <ide> <ide> <ide> def main(): <ide> datefmt="%m/%d/%Y %H:%M:%S", <ide> handlers=[logging.StreamHandler(sys.stdout)], <ide> ) <del> logger.setLevel(logging.INFO if is_main_process(training_args.local_rank) else logging.WARN) <add> logger.setLevel(logging.INFO if training_args.should_log else logging.WARN) <ide> <ide> # Log on each process the small summary: <ide> logger.warning( <ide> f"Process rank: {training_args.local_rank}, device: {training_args.device}, n_gpu: {training_args.n_gpu}" <ide> + f"distributed training: {bool(training_args.local_rank != -1)}, 16-bits training: {training_args.fp16}" <ide> ) <ide> # Set the verbosity to info of the Transformers logger (on main process 
only): <del> if is_main_process(training_args.local_rank): <add> if training_args.should_log: <ide> transformers.utils.logging.set_verbosity_info() <ide> transformers.utils.logging.enable_default_handler() <ide> transformers.utils.logging.enable_explicit_format() <ide><path>examples/pytorch/multiple-choice/run_swag.py <ide> ) <ide> from transformers.file_utils import PaddingStrategy <ide> from transformers.tokenization_utils_base import PreTrainedTokenizerBase <del>from transformers.trainer_utils import get_last_checkpoint, is_main_process <add>from transformers.trainer_utils import get_last_checkpoint <ide> from transformers.utils import check_min_version <ide> <ide> <ide> def main(): <ide> datefmt="%m/%d/%Y %H:%M:%S", <ide> handlers=[logging.StreamHandler(sys.stdout)], <ide> ) <del> logger.setLevel(logging.INFO if is_main_process(training_args.local_rank) else logging.WARN) <add> logger.setLevel(logging.INFO if training_args.should_log else logging.WARN) <ide> <ide> # Log on each process the small summary: <ide> logger.warning( <ide> f"Process rank: {training_args.local_rank}, device: {training_args.device}, n_gpu: {training_args.n_gpu}" <ide> + f"distributed training: {bool(training_args.local_rank != -1)}, 16-bits training: {training_args.fp16}" <ide> ) <ide> # Set the verbosity to info of the Transformers logger (on main process only): <del> if is_main_process(training_args.local_rank): <add> if training_args.should_log: <ide> transformers.utils.logging.set_verbosity_info() <ide> transformers.utils.logging.enable_default_handler() <ide> transformers.utils.logging.enable_explicit_format() <ide><path>examples/pytorch/question-answering/run_qa.py <ide> default_data_collator, <ide> set_seed, <ide> ) <del>from transformers.trainer_utils import get_last_checkpoint, is_main_process <add>from transformers.trainer_utils import get_last_checkpoint <ide> from transformers.utils import check_min_version <ide> from utils_qa import postprocess_qa_predictions <ide> <ide> def main(): <ide> datefmt="%m/%d/%Y %H:%M:%S", <ide> handlers=[logging.StreamHandler(sys.stdout)], <ide> ) <del> logger.setLevel(logging.INFO if is_main_process(training_args.local_rank) else logging.WARN) <add> logger.setLevel(logging.INFO if training_args.should_log else logging.WARN) <ide> <ide> # Log on each process the small summary: <ide> logger.warning( <ide> f"Process rank: {training_args.local_rank}, device: {training_args.device}, n_gpu: {training_args.n_gpu}" <ide> + f"distributed training: {bool(training_args.local_rank != -1)}, 16-bits training: {training_args.fp16}" <ide> ) <ide> # Set the verbosity to info of the Transformers logger (on main process only): <del> if is_main_process(training_args.local_rank): <add> if training_args.should_log: <ide> transformers.utils.logging.set_verbosity_info() <ide> transformers.utils.logging.enable_default_handler() <ide> transformers.utils.logging.enable_explicit_format() <ide><path>examples/pytorch/question-answering/run_qa_beam_search.py <ide> default_data_collator, <ide> set_seed, <ide> ) <del>from transformers.trainer_utils import get_last_checkpoint, is_main_process <add>from transformers.trainer_utils import get_last_checkpoint <ide> from transformers.utils import check_min_version <ide> from utils_qa import postprocess_qa_predictions_with_beam_search <ide> <ide> def main(): <ide> datefmt="%m/%d/%Y %H:%M:%S", <ide> handlers=[logging.StreamHandler(sys.stdout)], <ide> ) <del> logger.setLevel(logging.INFO if is_main_process(training_args.local_rank) else logging.WARN) <add> 
logger.setLevel(logging.INFO if training_args.should_log else logging.WARN) <ide> <ide> # Log on each process the small summary: <ide> logger.warning( <ide> f"Process rank: {training_args.local_rank}, device: {training_args.device}, n_gpu: {training_args.n_gpu}" <ide> + f"distributed training: {bool(training_args.local_rank != -1)}, 16-bits training: {training_args.fp16}" <ide> ) <ide> # Set the verbosity to info of the Transformers logger (on main process only): <del> if is_main_process(training_args.local_rank): <add> if training_args.should_log: <ide> transformers.utils.logging.set_verbosity_info() <ide> transformers.utils.logging.enable_default_handler() <ide> transformers.utils.logging.enable_explicit_format() <ide><path>examples/pytorch/summarization/run_summarization.py <ide> set_seed, <ide> ) <ide> from transformers.file_utils import is_offline_mode <del>from transformers.trainer_utils import get_last_checkpoint, is_main_process <add>from transformers.trainer_utils import get_last_checkpoint <ide> from transformers.utils import check_min_version <ide> <ide> <ide> def main(): <ide> datefmt="%m/%d/%Y %H:%M:%S", <ide> handlers=[logging.StreamHandler(sys.stdout)], <ide> ) <del> logger.setLevel(logging.INFO if is_main_process(training_args.local_rank) else logging.WARN) <add> logger.setLevel(logging.INFO if training_args.should_log else logging.WARN) <ide> <ide> # Log on each process the small summary: <ide> logger.warning( <ide> f"Process rank: {training_args.local_rank}, device: {training_args.device}, n_gpu: {training_args.n_gpu}" <ide> + f"distributed training: {bool(training_args.local_rank != -1)}, 16-bits training: {training_args.fp16}" <ide> ) <ide> # Set the verbosity to info of the Transformers logger (on main process only): <del> if is_main_process(training_args.local_rank): <add> if training_args.should_log: <ide> transformers.utils.logging.set_verbosity_info() <ide> logger.info(f"Training/evaluation parameters {training_args}") <ide> <ide><path>examples/pytorch/text-classification/run_glue.py <ide> default_data_collator, <ide> set_seed, <ide> ) <del>from transformers.trainer_utils import get_last_checkpoint, is_main_process <add>from transformers.trainer_utils import get_last_checkpoint <ide> from transformers.utils import check_min_version <ide> <ide> <ide> def main(): <ide> datefmt="%m/%d/%Y %H:%M:%S", <ide> handlers=[logging.StreamHandler(sys.stdout)], <ide> ) <del> logger.setLevel(logging.INFO if is_main_process(training_args.local_rank) else logging.WARN) <add> logger.setLevel(logging.INFO if training_args.should_log else logging.WARN) <ide> <ide> # Log on each process the small summary: <ide> logger.warning( <ide> f"Process rank: {training_args.local_rank}, device: {training_args.device}, n_gpu: {training_args.n_gpu}" <ide> + f"distributed training: {bool(training_args.local_rank != -1)}, 16-bits training: {training_args.fp16}" <ide> ) <ide> # Set the verbosity to info of the Transformers logger (on main process only): <del> if is_main_process(training_args.local_rank): <add> if training_args.should_log: <ide> transformers.utils.logging.set_verbosity_info() <ide> transformers.utils.logging.enable_default_handler() <ide> transformers.utils.logging.enable_explicit_format() <ide><path>examples/pytorch/text-classification/run_xnli.py <ide> default_data_collator, <ide> set_seed, <ide> ) <del>from transformers.trainer_utils import get_last_checkpoint, is_main_process <add>from transformers.trainer_utils import get_last_checkpoint <ide> from transformers.utils import 
check_min_version <ide> <ide> <ide> def main(): <ide> datefmt="%m/%d/%Y %H:%M:%S", <ide> handlers=[logging.StreamHandler(sys.stdout)], <ide> ) <del> logger.setLevel(logging.INFO if is_main_process(training_args.local_rank) else logging.WARN) <add> logger.setLevel(logging.INFO if training_args.should_log else logging.WARN) <ide> <ide> # Log on each process the small summary: <ide> logger.warning( <ide> def main(): <ide> ) <ide> <ide> # Set the verbosity to info of the Transformers logger (on main process only): <del> if is_main_process(training_args.local_rank): <add> if training_args.should_log: <ide> transformers.utils.logging.set_verbosity_info() <ide> transformers.utils.logging.enable_default_handler() <ide> transformers.utils.logging.enable_explicit_format() <ide><path>examples/pytorch/token-classification/run_ner.py <ide> TrainingArguments, <ide> set_seed, <ide> ) <del>from transformers.trainer_utils import get_last_checkpoint, is_main_process <add>from transformers.trainer_utils import get_last_checkpoint <ide> from transformers.utils import check_min_version <ide> <ide> <ide> def main(): <ide> datefmt="%m/%d/%Y %H:%M:%S", <ide> handlers=[logging.StreamHandler(sys.stdout)], <ide> ) <del> logger.setLevel(logging.INFO if is_main_process(training_args.local_rank) else logging.WARN) <add> logger.setLevel(logging.INFO if training_args.should_log else logging.WARN) <ide> <ide> # Log on each process the small summary: <ide> logger.warning( <ide> f"Process rank: {training_args.local_rank}, device: {training_args.device}, n_gpu: {training_args.n_gpu}" <ide> + f"distributed training: {bool(training_args.local_rank != -1)}, 16-bits training: {training_args.fp16}" <ide> ) <ide> # Set the verbosity to info of the Transformers logger (on main process only): <del> if is_main_process(training_args.local_rank): <add> if training_args.should_log: <ide> transformers.utils.logging.set_verbosity_info() <ide> transformers.utils.logging.enable_default_handler() <ide> transformers.utils.logging.enable_explicit_format() <ide><path>examples/pytorch/translation/run_translation.py <ide> default_data_collator, <ide> set_seed, <ide> ) <del>from transformers.trainer_utils import get_last_checkpoint, is_main_process <add>from transformers.trainer_utils import get_last_checkpoint <ide> from transformers.utils import check_min_version <ide> <ide> <ide> def main(): <ide> datefmt="%m/%d/%Y %H:%M:%S", <ide> handlers=[logging.StreamHandler(sys.stdout)], <ide> ) <del> logger.setLevel(logging.INFO if is_main_process(training_args.local_rank) else logging.WARN) <add> logger.setLevel(logging.INFO if training_args.should_log else logging.WARN) <ide> <ide> # Log on each process the small summary: <ide> logger.warning( <ide> f"Process rank: {training_args.local_rank}, device: {training_args.device}, n_gpu: {training_args.n_gpu}" <ide> + f"distributed training: {bool(training_args.local_rank != -1)}, 16-bits training: {training_args.fp16}" <ide> ) <ide> # Set the verbosity to info of the Transformers logger (on main process only): <del> if is_main_process(training_args.local_rank): <add> if training_args.should_log: <ide> transformers.utils.logging.set_verbosity_info() <ide> logger.info(f"Training/evaluation parameters {training_args}") <ide> <ide><path>src/transformers/trainer.py <ide> def is_local_process_zero(self) -> bool: <ide> Whether or not this process is the local (e.g., on one machine if training in a distributed fashion on several <ide> machines) main process. 
<ide> """ <del> if is_torch_tpu_available(): <del> return xm.is_master_ordinal(local=True) <del> elif is_sagemaker_mp_enabled(): <del> return smp.local_rank() == 0 <del> else: <del> return self.args.local_rank in [-1, 0] <add> return self.args.local_process_index == 0 <ide> <ide> def is_world_process_zero(self) -> bool: <ide> """ <ide> Whether or not this process is the global main process (when training in a distributed fashion on several <ide> machines, this is only going to be :obj:`True` for one process). <ide> """ <del> if is_torch_tpu_available(): <del> return xm.is_master_ordinal(local=False) <del> elif is_sagemaker_mp_enabled(): <add> # Special case for SageMaker ModelParallel since there process_index is dp_process_index, not the global <add> # process index. <add> if is_sagemaker_mp_enabled(): <ide> return smp.rank() == 0 <ide> else: <ide> return self.args.process_index == 0 <ide><path>src/transformers/training_args.py <ide> class TrainingArguments: <ide> :class:`~transformers.Trainer`, it's intended to be used by your training/evaluation scripts instead. See <ide> the `example scripts <https://github.com/huggingface/transformers/tree/master/examples>`__ for more <ide> details. <add> log_on_each_node (:obj:`bool`, `optional`, defaults to :obj:`True`): <add> In multinode distributed training, whether to log once per node, or only on the main node. <ide> """ <ide> <ide> output_dir: str = field( <ide> class TrainingArguments: <ide> default=None, <ide> metadata={"help": "The path to a folder with a valid checkpoint for your model."}, <ide> ) <add> log_on_each_node: bool = field( <add> default=True, <add> metadata={ <add> "help": "When doing a multinode distributed training, whether to log once per node or just once on the main node." <add> }, <add> ) <ide> _n_gpu: int = field(init=False, repr=False, default=-1) <ide> mp_parameters: str = field( <ide> default="", <ide> def world_size(self): <ide> @torch_required <ide> def process_index(self): <ide> """ <del> The number of processes used in parallel. <add> The index of the current process used. <ide> """ <ide> if is_torch_tpu_available(): <ide> return xm.get_ordinal() <ide> def process_index(self): <ide> return torch.distributed.get_rank() <ide> return 0 <ide> <add> @property <add> @torch_required <add> def local_process_index(self): <add> """ <add> The index of the local process used. <add> """ <add> if is_torch_tpu_available(): <add> return xm.get_ordinal(local=True) <add> elif is_sagemaker_mp_enabled(): <add> return smp.local_rank() <add> elif is_sagemaker_dp_enabled(): <add> return sm_dist.get_rank() <add> elif self.local_rank != -1: <add> return self.local_rank <add> return 0 <add> <add> @property <add> def should_log(self): <add> """ <add> Whether or not the current process should produce log. 
<add> """ <add> if self.log_on_each_node: <add> return self.local_process_index == 0 <add> else: <add> if is_sagemaker_mp_enabled(): <add> return smp.rank() == 0 <add> else: <add> return self.process_index == 0 <add> <ide> @property <ide> def place_model_on_device(self): <ide> """ <ide><path>templates/adding_a_new_example_script/{{cookiecutter.directory_name}}/run_{{cookiecutter.example_shortcut}}.py <ide> default_data_collator, <ide> set_seed, <ide> ) <del>from transformers.trainer_utils import get_last_checkpoint, is_main_process <add>from transformers.trainer_utils import get_last_checkpoint <ide> <ide> <ide> logger = logging.getLogger(__name__) <ide> def main(): <ide> datefmt="%m/%d/%Y %H:%M:%S", <ide> handlers=[logging.StreamHandler(sys.stdout)], <ide> ) <del> logger.setLevel(logging.INFO if is_main_process(training_args.local_rank) else logging.WARN) <add> logger.setLevel(logging.INFO if training_args.should_log else logging.WARN) <ide> <ide> # Log on each process the small summary: <ide> logger.warning( <ide> f"Process rank: {training_args.local_rank}, device: {training_args.device}, n_gpu: {training_args.n_gpu}" <ide> + f"distributed training: {bool(training_args.local_rank != -1)}, 16-bits training: {training_args.fp16}" <ide> ) <ide> # Set the verbosity to info of the Transformers logger (on main process only): <del> if is_main_process(training_args.local_rank): <add> if training_args.should_log: <ide> transformers.utils.logging.set_verbosity_info() <ide> logger.info(f"Training/evaluation parameters {training_args}") <ide> <ide><path>tests/sagemaker/scripts/pytorch/run_glue_model_parallelism.py <ide> # Will import SageMaker Model parallelism specific Trainer <ide> from transformers.sagemaker import SageMakerTrainer as Trainer <ide> from transformers.sagemaker import SageMakerTrainingArguments as TrainingArguments <del>from transformers.trainer_utils import get_last_checkpoint, is_main_process <add>from transformers.trainer_utils import get_last_checkpoint <ide> from transformers.utils import check_min_version <ide> <ide> <ide> def main(): <ide> datefmt="%m/%d/%Y %H:%M:%S", <ide> handlers=[logging.StreamHandler(sys.stdout)], <ide> ) <del> logger.setLevel(logging.INFO if is_main_process(training_args.local_rank) else logging.WARN) <add> logger.setLevel(logging.INFO if training_args.should_log else logging.WARN) <ide> <ide> # Log on each process the small summary: <ide> logger.warning( <ide> f"Process rank: {training_args.local_rank}, device: {training_args.device}, n_gpu: {training_args.n_gpu}" <ide> + f"distributed training: {bool(training_args.local_rank != -1)}, 16-bits training: {training_args.fp16}" <ide> ) <ide> # Set the verbosity to info of the Transformers logger (on main process only): <del> if is_main_process(training_args.local_rank): <add> if training_args.should_log: <ide> transformers.utils.logging.set_verbosity_info() <ide> transformers.utils.logging.enable_default_handler() <ide> transformers.utils.logging.enable_explicit_format()
15
Go
Go
improve the error printing of image inspect
cc9ed0a31b0656c58ad7953f2c54d46258380443
<ide><path>daemon/image_inspect.go <ide> package daemon <ide> <ide> import ( <del> "fmt" <ide> "time" <ide> <ide> "github.com/docker/docker/api/types" <ide> "github.com/docker/docker/layer" <ide> "github.com/docker/docker/reference" <add> "github.com/pkg/errors" <ide> ) <ide> <ide> // LookupImage looks up an image by name and returns it as an ImageInspect <ide> // structure. <ide> func (daemon *Daemon) LookupImage(name string) (*types.ImageInspect, error) { <ide> img, err := daemon.GetImage(name) <ide> if err != nil { <del> return nil, fmt.Errorf("No such image: %s", name) <add> return nil, errors.Wrapf(err, "no such image: %s", name) <ide> } <ide> <ide> refs := daemon.referenceStore.References(img.ID().Digest()) <ide><path>integration-cli/daemon/daemon.go <ide> func WaitInspectWithArgs(dockerBinary, name, expr, expected string, timeout time <ide> for { <ide> result := icmd.RunCommand(dockerBinary, args...) <ide> if result.Error != nil { <del> if !strings.Contains(result.Stderr(), "No such") { <add> if !strings.Contains(strings.ToLower(result.Stderr()), "no such") { <ide> return errors.Errorf("error executing docker inspect: %v\n%s", <ide> result.Stderr(), result.Stdout()) <ide> }
2
Ruby
Ruby
add go@1.14 to binary urls allowlist
8142bf2797e7411584e219255d0f849b3e9f0a90
<ide><path>Library/Homebrew/rubocops/urls.rb <ide> class Urls < FormulaCop <ide> go@1.11 <ide> go@1.12 <ide> go@1.13 <add> go@1.14 <ide> haskell-stack <ide> ldc <ide> mlton
1
Javascript
Javascript
avoid page double render with emotion vanilla
789a665a036ede0fc8006ff27d29ef38650f6efc
<ide><path>examples/with-emotion-vanilla/pages/_document.js <ide> import * as React from 'react' <ide> import { renderStatic } from '../shared/renderer' <ide> export default class AppDocument extends Document { <ide> static async getInitialProps(ctx) { <del> const page = await ctx.renderPage() <del> const { css, ids } = await renderStatic(page.html) <ide> const initialProps = await Document.getInitialProps(ctx) <add> const { css, ids } = await renderStatic(initialProps.html) <ide> return { <ide> ...initialProps, <ide> styles: (
1
Ruby
Ruby
ignore changed dependents
424ded8fdb9e3e71851a5f8678bfab5a2c3d76c7
<ide><path>Library/Homebrew/cmd/test-bot.rb <ide> def formula formula_name <ide> changed_dependences = dependencies - unchanged_dependencies <ide> <ide> dependents = `brew uses #{formula_name}`.split("\n") <add> dependents -= @formulae <ide> dependents = dependents.map {|d| Formulary.factory(d)} <ide> testable_dependents = dependents.select {|d| d.test_defined? && d.stable.bottled? } <ide> uninstalled_testable_dependents = testable_dependents.reject {|d| d.installed? }
1
Ruby
Ruby
drop unnecessary parens in tests
eed8af9b80979fcdd55dc44c963392dc84e63d4f
<ide><path>Library/Homebrew/test/test_build_environment.rb <ide> def setup <ide> end <ide> <ide> def test_shovel_returns_self <del> assert_same @env, (@env << :foo) <add> assert_same @env, @env << :foo <ide> end <ide> <ide> def test_merge_returns_self <ide><path>Library/Homebrew/test/test_compiler_queue.rb <ide> def setup <ide> end <ide> <ide> def test_shovel_returns_self <del> assert_same @q, (@q << Object.new) <add> assert_same @q, @q << Object.new <ide> end <ide> <ide> def test_empty <ide><path>Library/Homebrew/test/test_dependencies.rb <ide> def setup <ide> end <ide> <ide> def test_shovel_returns_self <del> assert_same @deps, (@deps << Dependency.new("foo")) <add> assert_same @deps, @deps << Dependency.new("foo") <ide> end <ide> <ide> def test_no_duplicate_deps <ide> def setup <ide> end <ide> <ide> def test_shovel_returns_self <del> assert_same @reqs, (@reqs << Object.new) <add> assert_same @reqs, @reqs << Object.new <ide> end <ide> <ide> def test_merging_multiple_dependencies <ide><path>Library/Homebrew/test/test_options.rb <ide> def test_include <ide> end <ide> <ide> def test_union_returns_options <del> assert_instance_of Options, (@options + Options.new) <add> assert_instance_of Options, @options + Options.new <ide> end <ide> <ide> def test_difference_returns_options <del> assert_instance_of Options, (@options - Options.new) <add> assert_instance_of Options, @options - Options.new <ide> end <ide> <ide> def test_shovel_returns_self <del> assert_same @options, (@options << Option.new("foo")) <add> assert_same @options, @options << Option.new("foo") <ide> end <ide> <ide> def test_as_flags
4
Python
Python
fix typo in retribert docstring
9586e222affa86870fef603512ee5b9c6025a42e
<ide><path>src/transformers/models/retribert/tokenization_retribert.py <ide> class RetriBertTokenizer(BertTokenizer): <ide> r""" <ide> Constructs a RetriBERT tokenizer. <ide> <del> [`RetroBertTokenizer`] is identical to [`BertTokenizer`] and runs end-to-end tokenization: punctuation splitting <add> [`RetriBertTokenizer`] is identical to [`BertTokenizer`] and runs end-to-end tokenization: punctuation splitting <ide> and wordpiece. <ide> <ide> Refer to superclass [`BertTokenizer`] for usage examples and documentation concerning parameters.
1
Ruby
Ruby
add tests for env.{append,prepend}_path
865b68de103ccd4a0386dbd127db1827105aae4d
<ide><path>Library/Homebrew/test/test_ENV.rb <ide> def test_with_build_environment_does_not_mutate_interface <ide> @env.with_build_environment { assert_equal expected, @env.methods } <ide> assert_equal expected, @env.methods <ide> end <add> <add> def test_append_path <add> @env.append_path 'FOO', '/usr/bin' <add> assert_equal '/usr/bin', @env['FOO'] <add> @env.append_path 'FOO', '/bin' <add> assert_equal "/usr/bin#{File::PATH_SEPARATOR}/bin", @env['FOO'] <add> end <add> <add> def test_prepend_path <add> @env.prepend_path 'FOO', '/usr/bin' <add> assert_equal '/usr/bin', @env['FOO'] <add> @env.prepend_path 'FOO', '/bin' <add> assert_equal "/bin#{File::PATH_SEPARATOR}/usr/bin", @env['FOO'] <add> end <ide> end
1
PHP
PHP
fix failing tests
d95ef5d5e861de0bf34009a7ed9888732b23a40c
<ide><path>lib/Cake/Test/Case/Network/CakeSocketTest.php <ide> public function testGetContext() { <ide> 'host' => 'smtp.gmail.com', <ide> 'port' => 465, <ide> 'timeout' => 5, <del> 'request' => array( <del> 'context' => array( <del> 'ssl' => array('capture_peer' => true) <del> ) <add> 'context' => array( <add> 'ssl' => array('capture_peer' => true) <ide> ) <ide> ); <ide> $this->Socket = new CakeSocket($config); <ide> $this->Socket->connect(); <ide> $result = $this->Socket->context(); <del> $this->assertEquals($config['request']['context'], $result); <add> $this->assertEquals($config['context'], $result); <ide> } <ide> <ide> } <ide><path>lib/Cake/Test/Case/Utility/FolderTest.php <ide> public function testFind() { <ide> $this->assertSame(array_diff($expected, $result), array()); <ide> <ide> $result = $Folder->find('.*', true); <del> $expected = array('config.php', 'routes.php'); <add> $expected = array('cacert.pem', 'config.php', 'routes.php'); <ide> $this->assertSame($expected, $result); <ide> <ide> $result = $Folder->find('.*\.php');
2
Ruby
Ruby
require turn only for minitest
edf7c9a6a3331bfc0beabc9dc9c8beac22677e53
<ide><path>activesupport/lib/active_support/test_case.rb <ide> end <ide> <ide> # Added by Turn to support natural case names in the output formatting <del>if defined?(MiniTest) && MiniTest::Unit.respond_to?(:use_natural_language_case_names=) <del> MiniTest::Unit.use_natural_language_case_names = true <add>if defined?(MiniTest) <add> require 'turn' <add> <add> if MiniTest::Unit.respond_to?(:use_natural_language_case_names=) <add> MiniTest::Unit.use_natural_language_case_names = true <add> end <ide> end <ide> <ide> module ActiveSupport
1
Go
Go
run btrfs rescan only if userdiskquota is enabled
b36e613d9f311e69387ccec2be16f8618fa1f558
<ide><path>daemon/graphdriver/btrfs/btrfs.go <ide> func init() { <ide> graphdriver.Register("btrfs", Init) <ide> } <ide> <del>var ( <del> quotaEnabled = false <del> userDiskQuota = false <del>) <del> <ide> type btrfsOptions struct { <ide> minSpace uint64 <ide> size uint64 <ide> func Init(home string, options []string, uidMaps, gidMaps []idtools.IDMap) (grap <ide> return nil, err <ide> } <ide> <del> opt, err := parseOptions(options) <add> opt, userDiskQuota, err := parseOptions(options) <ide> if err != nil { <ide> return nil, err <ide> } <ide> <del> if userDiskQuota { <del> if err := subvolEnableQuota(home); err != nil { <del> return nil, err <del> } <del> quotaEnabled = true <del> } <del> <ide> driver := &Driver{ <ide> home: home, <ide> uidMaps: uidMaps, <ide> gidMaps: gidMaps, <ide> options: opt, <ide> } <ide> <add> if userDiskQuota { <add> if err := driver.subvolEnableQuota(); err != nil { <add> return nil, err <add> } <add> } <add> <ide> return graphdriver.NewNaiveDiffDriver(driver, uidMaps, gidMaps), nil <ide> } <ide> <del>func parseOptions(opt []string) (btrfsOptions, error) { <add>func parseOptions(opt []string) (btrfsOptions, bool, error) { <ide> var options btrfsOptions <add> userDiskQuota := false <ide> for _, option := range opt { <ide> key, val, err := parsers.ParseKeyValueOpt(option) <ide> if err != nil { <del> return options, err <add> return options, userDiskQuota, err <ide> } <ide> key = strings.ToLower(key) <ide> switch key { <ide> case "btrfs.min_space": <ide> minSpace, err := units.RAMInBytes(val) <ide> if err != nil { <del> return options, err <add> return options, userDiskQuota, err <ide> } <ide> userDiskQuota = true <ide> options.minSpace = uint64(minSpace) <ide> default: <del> return options, fmt.Errorf("Unknown option %s", key) <add> return options, userDiskQuota, fmt.Errorf("Unknown option %s", key) <ide> } <ide> } <del> return options, nil <add> return options, userDiskQuota, nil <ide> } <ide> <ide> // Driver contains information about the filesystem mounted. <ide> type Driver struct { <ide> //root of the file system <del> home string <del> uidMaps []idtools.IDMap <del> gidMaps []idtools.IDMap <del> options btrfsOptions <add> home string <add> uidMaps []idtools.IDMap <add> gidMaps []idtools.IDMap <add> options btrfsOptions <add> quotaEnabled bool <ide> } <ide> <ide> // String prints the name of the driver (btrfs). <ide> func (d *Driver) GetMetadata(id string) (map[string]string, error) { <ide> <ide> // Cleanup unmounts the home directory. 
<ide> func (d *Driver) Cleanup() error { <del> if quotaEnabled { <del> if err := subvolDisableQuota(d.home); err != nil { <del> return err <del> } <add> if err := d.subvolDisableQuota(); err != nil { <add> return err <ide> } <ide> <ide> return mount.Unmount(d.home) <ide> func subvolDelete(dirpath, name string) error { <ide> return nil <ide> } <ide> <del>func subvolEnableQuota(path string) error { <del> dir, err := openDir(path) <add>func (d *Driver) subvolEnableQuota() error { <add> if d.quotaEnabled { <add> return nil <add> } <add> // In case quotaEnabled is not set, check qgroup and update quotaEnabled as needed <add> if _, err := subvolLookupQgroup(d.home); err == nil { <add> d.quotaEnabled = true <add> return nil <add> } <add> <add> dir, err := openDir(d.home) <ide> if err != nil { <ide> return err <ide> } <ide> func subvolEnableQuota(path string) error { <ide> return fmt.Errorf("Failed to enable btrfs quota for %s: %v", dir, errno.Error()) <ide> } <ide> <add> d.quotaEnabled = true <add> <ide> return nil <ide> } <ide> <del>func subvolDisableQuota(path string) error { <del> dir, err := openDir(path) <add>func (d *Driver) subvolDisableQuota() error { <add> if !d.quotaEnabled { <add> // In case quotaEnabled is not set, check qgroup and update quotaEnabled as needed <add> if _, err := subvolLookupQgroup(d.home); err != nil { <add> // quota is still not enabled <add> return nil <add> } <add> d.quotaEnabled = true <add> } <add> <add> dir, err := openDir(d.home) <ide> if err != nil { <ide> return err <ide> } <ide> func subvolDisableQuota(path string) error { <ide> return fmt.Errorf("Failed to disable btrfs quota for %s: %v", dir, errno.Error()) <ide> } <ide> <add> d.quotaEnabled = false <add> <ide> return nil <ide> } <ide> <del>func subvolRescanQuota(path string) error { <del> dir, err := openDir(path) <add>func (d *Driver) subvolRescanQuota() error { <add> if !d.quotaEnabled { <add> // In case quotaEnabled is not set, check qgroup and update quotaEnabled as needed <add> if _, err := subvolLookupQgroup(d.home); err != nil { <add> // quota is still not enabled <add> return nil <add> } <add> d.quotaEnabled = true <add> } <add> <add> dir, err := openDir(d.home) <ide> if err != nil { <ide> return err <ide> } <ide> func subvolLimitQgroup(path string, size uint64) error { <ide> return nil <ide> } <ide> <add>func subvolLookupQgroup(path string) (uint64, error) { <add> dir, err := openDir(path) <add> if err != nil { <add> return 0, err <add> } <add> defer closeDir(dir) <add> <add> var args C.struct_btrfs_ioctl_ino_lookup_args <add> args.objectid = C.BTRFS_FIRST_FREE_OBJECTID <add> <add> _, _, errno := syscall.Syscall(syscall.SYS_IOCTL, getDirFd(dir), C.BTRFS_IOC_INO_LOOKUP, <add> uintptr(unsafe.Pointer(&args))) <add> if errno != 0 { <add> return 0, fmt.Errorf("Failed to lookup qgroup for %s: %v", dir, errno.Error()) <add> } <add> if args.treeid == 0 { <add> return 0, fmt.Errorf("Invalid qgroup id for %s: 0", dir) <add> } <add> <add> return uint64(args.treeid), nil <add>} <add> <ide> func (d *Driver) subvolumesDir() string { <ide> return path.Join(d.home, "subvolumes") <ide> } <ide> func (d *Driver) setStorageSize(dir string, driver *Driver) error { <ide> return fmt.Errorf("btrfs: storage size cannot be less than %s", units.HumanSize(float64(d.options.minSpace))) <ide> } <ide> <del> if !quotaEnabled { <del> if err := subvolEnableQuota(d.home); err != nil { <del> return err <del> } <del> quotaEnabled = true <add> if err := d.subvolEnableQuota(); err != nil { <add> return err <ide> } <ide> <ide> if err 
:= subvolLimitQgroup(dir, driver.options.size); err != nil { <ide> func (d *Driver) Remove(id string) error { <ide> if err := os.RemoveAll(dir); err != nil && !os.IsNotExist(err) { <ide> return err <ide> } <del> if err := subvolRescanQuota(d.home); err != nil { <add> if err := d.subvolRescanQuota(); err != nil { <ide> return err <ide> } <ide> return nil
1
Go
Go
parse runtime name
1a96cf95ca23b62ba71f9bd8e8a4fb176bcf243b
<ide><path>daemon/info_unix.go <ide> func (daemon *Daemon) fillPlatformInfo(v *types.Info, sysInfo *sysinfo.SysInfo) <ide> <ide> defaultRuntimeBinary := daemon.configStore.GetRuntime(v.DefaultRuntime).Path <ide> if rv, err := exec.Command(defaultRuntimeBinary, "--version").Output(); err == nil { <del> if _, commit, err := parseRuncVersion(string(rv)); err != nil { <add> if _, _, commit, err := parseRuntimeVersion(string(rv)); err != nil { <ide> logrus.Warnf("failed to parse %s version: %v", defaultRuntimeBinary, err) <ide> v.RuncCommit.ID = "N/A" <ide> } else { <ide> func (daemon *Daemon) fillPlatformVersion(v *types.Version) { <ide> defaultRuntime := daemon.configStore.GetDefaultRuntimeName() <ide> defaultRuntimeBinary := daemon.configStore.GetRuntime(defaultRuntime).Path <ide> if rv, err := exec.Command(defaultRuntimeBinary, "--version").Output(); err == nil { <del> if ver, commit, err := parseRuncVersion(string(rv)); err != nil { <add> if _, ver, commit, err := parseRuntimeVersion(string(rv)); err != nil { <ide> logrus.Warnf("failed to parse %s version: %v", defaultRuntimeBinary, err) <ide> } else { <ide> v.Components = append(v.Components, types.ComponentVersion{ <ide> func parseInitVersion(v string) (version string, commit string, err error) { <ide> return version, commit, err <ide> } <ide> <del>// parseRuncVersion parses the output of `runc --version` and extracts the <del>// "version" and "git commit" from the output. <add>// parseRuntimeVersion parses the output of `[runtime] --version` and extracts the <add>// "name", "version" and "git commit" from the output. <ide> // <ide> // Output example from `runc --version`: <ide> // <ide> // runc version 1.0.0-rc5+dev <ide> // commit: 69663f0bd4b60df09991c08812a60108003fa340 <ide> // spec: 1.0.0 <del>func parseRuncVersion(v string) (version string, commit string, err error) { <add>func parseRuntimeVersion(v string) (runtime string, version string, commit string, err error) { <ide> lines := strings.Split(strings.TrimSpace(v), "\n") <ide> for _, line := range lines { <ide> if strings.Contains(line, "version") { <ide> s := strings.Split(line, "version") <add> runtime = strings.TrimSpace(s[0]) <ide> version = strings.TrimSpace(s[len(s)-1]) <ide> continue <ide> } <ide> func parseRuncVersion(v string) (version string, commit string, err error) { <ide> if version == "" && commit == "" { <ide> err = errors.Errorf("unknown output format: %s", v) <ide> } <del> return version, commit, err <add> return runtime, version, commit, err <ide> } <ide> <ide> func (daemon *Daemon) cgroupNamespacesEnabled(sysInfo *sysinfo.SysInfo) bool { <ide><path>daemon/info_unix_test.go <ide> func TestParseInitVersion(t *testing.T) { <ide> } <ide> } <ide> <del>func TestParseRuncVersion(t *testing.T) { <add>func parseRuncVersion(t *testing.T) { <ide> tests := []struct { <ide> output string <add> runtime string <ide> version string <ide> commit string <ide> invalid bool <ide> runc version 1.0.0-rc5+dev <ide> commit: 69663f0bd4b60df09991c08812a60108003fa340 <ide> spec: 1.0.0 <ide> `, <add> runtime: "runc", <ide> version: "1.0.0-rc5+dev", <ide> commit: "69663f0bd4b60df09991c08812a60108003fa340", <ide> }, <ide> spec: 1.0.0 <ide> runc version 1.0.0-rc5+dev <ide> spec: 1.0.0 <ide> `, <add> runtime: "runc", <ide> version: "1.0.0-rc5+dev", <ide> }, <ide> { <ide> crun version 0.7 <ide> spec: 1.0.0 <ide> +SYSTEMD +SELINUX +CAP +SECCOMP +EBPF +YAJL <ide> `, <add> runtime: "crun", <ide> version: "0.7", <ide> }, <ide> { <ide> spec: 1.0.0 <ide> } <ide> <ide> for _, test := range tests { 
<del> version, commit, err := parseRuncVersion(string(test.output)) <add> runtime, version, commit, err := parseRuntimeVersion(string(test.output)) <ide> if test.invalid { <ide> assert.Check(t, is.ErrorContains(err, "")) <ide> } else { <ide> assert.Check(t, err) <ide> } <add> assert.Equal(t, test.runtime, runtime) <ide> assert.Equal(t, test.version, version) <ide> assert.Equal(t, test.commit, commit) <ide> }
2
Ruby
Ruby
clarify caveats usage
10970a5c54c0723126bef3582f9e9a63d25e1b54
<ide><path>Library/Homebrew/formula.rb <ide> def run_post_install <ide> @prefix_returns_versioned_prefix = false <ide> end <ide> <del> # Tell the user about any caveats regarding this package. <add> # Tell the user about any Homebrew-specific caveats or locations regarding <add> # this package. These should not contain setup instructions that would apply <add> # to installation through a different package manager on a different OS. <ide> # @return [String] <ide> # <pre>def caveats <ide> # <<-EOS.undent
1
Javascript
Javascript
improve semver range detection
a9993bbabf96299741d6a50e94da3b00e945490f
<ide><path>lib/sharing/utils.js <ide> const { join, dirname, readJson } = require("../util/fs"); <ide> */ <ide> exports.isRequiredVersion = str => { <ide> if (str === "*") return true; <del> return /^[\d^=<>~]/.test(str) || /\|\|/.test(str); <add> return /^[\d^=v<>~]/.test(str); <ide> }; <ide> <ide> /**
1
Text
Text
update my name in kickstarter-supporters
32c69ed789044cb898146eb0591f809ec09d0c83
<ide><path>docs/Kickstarter-Supporters.md <ide> These wonderful people supported our Kickstarter by giving us £10 or more: <ide> * [Andrew Brown](http://pvalu.es) <ide> * [Bethany Sumner](http://www.bethanysumner.com/) <ide> * [Orta](http://orta.io) <del>* [Michał Gołębiowski](https://github.com/mgol) <add>* [Michał Gołębiowski-Owczarek](https://github.com/mgol) <ide> * [Adam C. Foltzer](http://www.acfoltzer.net/) <ide> * [Steve Hiemstra](https://www.speg.com) <ide> * [Anton Sipos](http://www.softwarefuturism.com)
1
Python
Python
censor possibly secret settings. concerns
71649be305eb84fb1d073a9dcd27d29bd78ce1cc
<ide><path>celery/app/utils.py <ide> <ide> import os <ide> import platform as _platform <add>import re <ide> import types <ide> <ide> try: <ide> {human_settings} <ide> """ <ide> <add>HIDDEN_SETTINGS = re.compile( <add> 'API|TOKEN|KEY|SECRET|PASS|PROFANITIES_LIST|SIGNATURE|DATABASE', <add> re.IGNORECASE, <add>) <ide> <ide> class Settings(ConfigurationView): <ide> """Celery settings object.""" <ide> def _prepare_pickleable_changes(self): <ide> d = object.__getattribute__(d, 'obj') <ide> if isinstance(d, types.ModuleType): <ide> d = dict((k, v) for k, v in items(vars(d)) <del> if not k.startswith('__') and k.isupper()) <add> if not k.startswith('_') and k.isupper()) <ide> R.update(d) <ide> return R <ide> <ide> def humanize(self): <ide> configuration.""" <ide> return '\n'.join( <ide> '{0}: {1}'.format(key, pretty(value, width=50)) <del> for key, value in items(self.without_defaults())) <add> for key, value in items(filter_hidden_settings(dict( <add> (k, v) for k, v in items(self.without_defaults()) <add> if k.isupper() and not k.startswith('_'))))) <ide> <ide> <ide> class AppPickler(object): <ide> def _unpickle_app_v2(cls, kwargs): <ide> return cls(**kwargs) <ide> <ide> <add>def filter_hidden_settings(conf): <add> <add> def maybe_censor(key, value): <add> return '********' if HIDDEN_SETTINGS.search(key) else value <add> <add> return dict((k, maybe_censor(k, v)) for k, v in items(conf)) <add> <add> <ide> def bugreport(app): <ide> """Returns a string containing information useful in bug reports.""" <ide> import billiard
1
Ruby
Ruby
remove ruby 1.8 compatible code
e423617d771b934da4c82ab30683d0a05aa20b45
<ide><path>Library/Homebrew/brew.rb <ide> std_trap = trap("INT") { exit! 130 } # no backtrace thanks <ide> <add># check ruby version before requiring any modules. <add>RUBY_TWO = RUBY_VERSION.split(".").first.to_i >= 2 <add>raise "Homebrew must be run under Ruby 2!" unless RUBY_TWO <add> <ide> require "pathname" <ide> HOMEBREW_LIBRARY_PATH = Pathname.new(__FILE__).realpath.parent <ide> $:.unshift(HOMEBREW_LIBRARY_PATH.to_s) <ide><path>Library/Homebrew/dev-cmd/test-bot.rb <ide> module Homebrew <ide> <ide> HOMEBREW_TAP_REGEX = %r{^([\w-]+)/homebrew-([\w-]+)$} <ide> <del> if ruby_has_encoding? <del> def fix_encoding!(str) <del> # Assume we are starting from a "mostly" UTF-8 string <del> str.force_encoding(Encoding::UTF_8) <del> return str if str.valid_encoding? <del> str.encode!(Encoding::UTF_16, :invalid => :replace) <del> str.encode!(Encoding::UTF_8) <del> end <del> elsif require "iconv" <del> def fix_encoding!(str) <del> Iconv.conv("UTF-8//IGNORE", "UTF-8", str) <del> end <del> else <del> def fix_encoding!(str) <del> str <del> end <add> def fix_encoding!(str) <add> # Assume we are starting from a "mostly" UTF-8 string <add> str.force_encoding(Encoding::UTF_8) <add> return str if str.valid_encoding? <add> str.encode!(Encoding::UTF_16, :invalid => :replace) <add> str.encode!(Encoding::UTF_8) <ide> end <ide> <ide> def resolve_test_tap <ide> def run <ide> verbose = ARGV.verbose? <ide> # Step may produce arbitrary output and we read it bytewise, so must <ide> # buffer it as binary and convert to UTF-8 once complete <del> output = ruby_has_encoding? ? "".encode!("BINARY") : "" <add> output = "".encode!("BINARY") <ide> working_dir = Pathname.new(@command.first == "git" ? @repository : Dir.pwd) <ide> read, write = IO.pipe <ide> <ide> def test_bot <ide> def sanitize_output_for_xml(output) <ide> unless output.empty? <ide> # Remove invalid XML CData characters from step output. <del> if ruby_has_encoding? <del> # This is the regex for valid XML chars, but only works in Ruby 2.0+ <del> # /[\x09\x0A\x0D\x20-\uD7FF\uE000-\uFFFD\u{10000}-\u{10FFFF}]/ <del> # For 1.9 compatibility, use the inverse of that, which stays under \u10000 <del> # invalid_xml_pat = /[\x00-\x08\x0B\x0C\x0E-\x1F\uD800-\uDFFF\uFFFE\uFFFF]/ <del> # But Ruby won't allow you to reference surrogates, so we have: <del> invalid_xml_pat = /[\x00-\x08\x0B\x0C\x0E-\x1F\uFFFE\uFFFF]/ <del> output = output.gsub(invalid_xml_pat, "\uFFFD") <del> else <del> # Invalid XML chars, as far as single-byte chars go <del> output = output.delete("\x00\x01\x02\x03\x04\x05\x06\x07\x08\x0b\x0c\x0e\x0f" \ <del> "\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f") <del> end <add> invalid_xml_pat = /[^\x09\x0A\x0D\x20-\uD7FF\uE000-\uFFFD\u{10000}-\u{10FFFF}]/ <add> output = output.gsub(invalid_xml_pat, "\uFFFD") <ide> <ide> # Truncate to 1MB to avoid hitting CI limits <ide> if output.bytesize > MAX_STEP_OUTPUT_SIZE <ide><path>Library/Homebrew/extend/fileutils.rb <ide> def run <ide> Process.gid <ide> end <ide> begin <del> # group_id.to_s makes OS X 10.6.7 (ruby-1.8.7-p174) and earlier happy. 
<del> chown(nil, group_id.to_s, tmpdir) <add> chown(nil, group_id, tmpdir) <ide> rescue Errno::EPERM <ide> opoo "Failed setting group \"#{Etc.getgrgid(group_id).name}\" on #{tmpdir}" <ide> end <ide><path>Library/Homebrew/extend/pathname.rb <ide> def install_metafiles(from = Pathname.pwd) <ide> end <ide> end <ide> <del> # We redefine these private methods in order to add the /o modifier to <del> # the Regexp literals, which forces string interpolation to happen only <del> # once instead of each time the method is called. This is fixed in 1.9+. <del> if RUBY_VERSION <= "1.8.7" <del> # @private <del> alias_method :old_chop_basename, :chop_basename <del> <del> def chop_basename(path) <del> base = File.basename(path) <del> if /\A#{Pathname::SEPARATOR_PAT}?\z/o =~ base <del> return nil <del> else <del> return path[0, path.rindex(base)], base <del> end <del> end <del> private :chop_basename <del> <del> # @private <del> alias_method :old_prepend_prefix, :prepend_prefix <del> <del> def prepend_prefix(prefix, relpath) <del> if relpath.empty? <del> File.dirname(prefix) <del> elsif /#{SEPARATOR_PAT}/o =~ prefix <del> prefix = File.dirname(prefix) <del> prefix = File.join(prefix, "") if File.basename(prefix + "a") != "a" <del> prefix + relpath <del> else <del> prefix + relpath <del> end <del> end <del> private :prepend_prefix <del> elsif RUBY_VERSION == "2.0.0" <add> if RUBY_VERSION == "2.0.0" <ide> # https://bugs.ruby-lang.org/issues/9915 <ide> prepend Module.new { <ide> def inspect <ide><path>Library/Homebrew/global.rb <ide> <ide> HOMEBREW_REPOSITORY.extend(GitRepositoryExtension) <ide> <del>if RbConfig.respond_to?(:ruby) <del> RUBY_PATH = Pathname.new(RbConfig.ruby) <del>else <del> RUBY_PATH = Pathname.new(RbConfig::CONFIG["bindir"]).join( <del> RbConfig::CONFIG["ruby_install_name"] + RbConfig::CONFIG["EXEEXT"] <del> ) <del>end <add>RUBY_PATH = Pathname.new(RbConfig.ruby) <ide> RUBY_BIN = RUBY_PATH.dirname <del>RUBY_TWO = RUBY_VERSION.split(".").first.to_i >= 2 <del>raise "Homebrew must be run under Ruby 2!" unless RUBY_TWO <ide> <ide> HOMEBREW_USER_AGENT_CURL = ENV["HOMEBREW_USER_AGENT_CURL"] <ide> HOMEBREW_USER_AGENT_RUBY = "#{ENV["HOMEBREW_USER_AGENT"]} ruby/#{RUBY_VERSION}-p#{RUBY_PATCHLEVEL}" <ide><path>Library/Homebrew/utils.rb <ide> def number_readable(number) <ide> numstr <ide> end <ide> <del># True if this version of Ruby supports text encodings in its strings <del>def ruby_has_encoding? <del> String.method_defined?(:force_encoding) <del>end <del> <ide> # Truncates a text string to fit within a byte size constraint, <ide> # preserving character encoding validity. The returned string will <ide> # be not much longer than the specified max_bytes, though the exact <ide> def truncate_text_to_approximate_size(s, max_bytes, options = {}) <ide> <ide> glue = "\n[...snip...]\n" <ide> max_bytes_in = [max_bytes - glue.bytesize, 1].max <del> if ruby_has_encoding? <del> bytes = s.dup.force_encoding("BINARY") <del> glue_bytes = glue.encode("BINARY") <del> else <del> bytes = s <del> glue_bytes = glue <del> end <add> bytes = s.dup.force_encoding("BINARY") <add> glue_bytes = glue.encode("BINARY") <ide> n_front_bytes = (max_bytes_in * front_weight).floor <ide> n_back_bytes = max_bytes_in - n_front_bytes <ide> if n_front_bytes == 0 <ide> def truncate_text_to_approximate_size(s, max_bytes, options = {}) <ide> back = bytes[-n_back_bytes..-1] <ide> end <ide> out = front + glue_bytes + back <del> if ruby_has_encoding? 
<del> out.force_encoding("UTF-8") <del> out.encode!("UTF-16", :invalid => :replace) <del> out.encode!("UTF-8") <del> end <add> out.force_encoding("UTF-8") <add> out.encode!("UTF-16", :invalid => :replace) <add> out.encode!("UTF-8") <ide> out <ide> end
6
Ruby
Ruby
remove unnecessary constant
e9e11072d32707f903b66936efef8ecc9133dd8a
<ide><path>activesupport/lib/active_support/core_ext/big_decimal/conversions.rb <ide> <ide> module ActiveSupport <ide> module BigDecimalWithDefaultFormat #:nodoc: <del> DEFAULT_STRING_FORMAT = 'F' <del> <del> def to_s(format = nil) <del> super(format || DEFAULT_STRING_FORMAT) <add> def to_s(format = 'F') <add> super(format) <ide> end <ide> end <ide> end
1
Ruby
Ruby
fix bottling non-testing jobs
793a75f3f3e5b46639683df610cc819d796cbba8
<ide><path>Library/Homebrew/dev-cmd/test-bot.rb <ide> def test_ci_upload(tap) <ide> safe_system "brew", "pull", "--clean", pull_pr <ide> end <ide> <del> if ENV["UPSTREAM_BOTTLE_KEEP_OLD"] || ENV["BOT_PARAMS"].include?("--keep-old") <add> if ENV["UPSTREAM_BOTTLE_KEEP_OLD"] || ENV["BOT_PARAMS"].to_s.include?("--keep-old") <ide> system "brew", "bottle", "--merge", "--write", "--keep-old", *json_files <ide> else <ide> system "brew", "bottle", "--merge", "--write", *json_files
1
Text
Text
fix the language in engines guide
dd898fe6a015ab47405ac3851ba50ffd5bc1c22a
<ide><path>guides/source/engines.md <ide> module Blorgh <ide> end <ide> ``` <ide> <del>NOTE: The `ApplicationController` class being inherited from here is the <add>NOTE: The `ArticlesController` class inherits from <ide> `Blorgh::ApplicationController`, not an application's `ApplicationController`. <ide> <ide> The helper inside `app/helpers/blorgh/articles_helper.rb` is also namespaced:
1
PHP
PHP
fix typeerror in csrfprotectionmiddleware
4044e676b4ddfe898f109910021707caebc09d0d
<ide><path>src/Http/Middleware/CsrfProtectionMiddleware.php <ide> public function process(ServerRequestInterface $request, RequestHandlerInterface <ide> $cookieData = Hash::get($cookies, $this->_config['cookieName']); <ide> <ide> if (is_string($cookieData) && strlen($cookieData) > 0) { <del> $request = $request->withAttribute('csrfToken', $this->saltToken($cookieData)); <add> try { <add> $request = $request->withAttribute('csrfToken', $this->saltToken($cookieData)); <add> } catch (RuntimeException $e) { <add> $cookieData = null; <add> } <ide> } <ide> <ide> if ($method === 'GET' && $cookieData === null) { <ide> public function saltToken(string $token): string <ide> return $token; <ide> } <ide> $decoded = base64_decode($token, true); <add> if ($decoded === false) { <add> throw new RuntimeException('Invalid token data.'); <add> } <add> <ide> $length = strlen($decoded); <ide> $salt = Security::randomBytes($length); <ide> $salted = ''; <ide><path>tests/TestCase/Http/Middleware/CsrfProtectionMiddlewareTest.php <ide> public function testSafeMethodNoCsrfRequired($method) <ide> $this->assertInstanceOf(Response::class, $response); <ide> } <ide> <add> /** <add> * Test that the CSRF tokens are regenerated when token is not valid <add> * <add> * @return void <add> */ <add> public function testRegenerateTokenOnGetWithInvalidData() <add> { <add> $request = new ServerRequest([ <add> 'environment' => [ <add> 'REQUEST_METHOD' => 'GET', <add> ], <add> 'cookies' => ['csrfToken' => "\x20\x26"], <add> ]); <add> <add> $middleware = new CsrfProtectionMiddleware(); <add> /** @var \Cake\Http\Response $response */ <add> $response = $middleware->process($request, $this->_getRequestHandler()); <add> $this->assertInstanceOf(Response::class, $response); <add> $this->assertGreaterThan(32, strlen($response->getCookie('csrfToken')['value'])); <add> } <add> <ide> /** <ide> * Test that the CSRF tokens are set for redirect responses <ide> * <ide> public function testValidTokenRequestDataSalted($method) <ide> $middleware->process($request, $handler); <ide> } <ide> <add> /** <add> * Test that invalid string cookies are rejected. <add> * <add> * @return void <add> */ <add> public function testInvalidTokenStringCookies() <add> { <add> $this->expectException(InvalidCsrfTokenException::class); <add> $request = new ServerRequest([ <add> 'environment' => [ <add> 'REQUEST_METHOD' => 'POST', <add> ], <add> 'post' => ['_csrfToken' => ["\x20\x26"]], <add> 'cookies' => ['csrfToken' => ["\x20\x26"]], <add> ]); <add> $middleware = new CsrfProtectionMiddleware(); <add> $middleware->process($request, $this->_getRequestHandler()); <add> } <add> <ide> /** <ide> * Test that request non string cookies are ignored. <ide> * <ide> * @return void <ide> */ <ide> public function testInvalidTokenNonStringCookies() <ide> { <del> $this->expectException(\Cake\Http\Exception\InvalidCsrfTokenException::class); <add> $this->expectException(InvalidCsrfTokenException::class); <ide> $request = new ServerRequest([ <ide> 'environment' => [ <ide> 'REQUEST_METHOD' => 'POST',
2
Text
Text
remove oauth2 from docs
957700ecfb36322a8ea40ea473dc43ff1e92592f
<ide><path>docs/api-guide/authentication.md <ide> Unauthenticated responses that are denied permission will result in an `HTTP 401 <ide> <ide> **Note:** If you use `TokenAuthentication` in production you must ensure that your API is only available over `https` only. <ide> <del>## OAuth2Authentication <add><!--## OAuth2Authentication <ide> <ide> This authentication scheme uses the [OAuth 2.0][oauth] protocol to authenticate requests. OAuth is appropriate for server-server setups, such as when you want to allow a third-party service to access your API on a user's behalf. <ide> <ide> If successfully authenticated, `OAuth2Authentication` provides the following cre <ide> **TODO**: Note type of response (401 vs 403) <ide> <ide> **TODO**: Implement OAuth2Authentication, using django-oauth2-provider. <add>--> <ide> <ide> ## SessionAuthentication <ide>
1
PHP
PHP
apply fixes from styleci
09bb0ab08626df5cbb5b2dc01db8677bf988debd
<ide><path>src/Illuminate/Database/Eloquent/Relations/BelongsToMany.php <ide> public function touch() <ide> $key = $this->getRelated()->getKeyName(); <ide> <ide> $columns = [ <del> $this->related->getUpdatedAtColumn() => <del> $this->related->freshTimestampString() <add> $this->related->getUpdatedAtColumn() => $this->related->freshTimestampString(), <ide> ]; <ide> <ide> // If we actually have IDs for the relation, we will run the query to update all
1
Ruby
Ruby
exclude hardlinks from mach_o_files
d3ef56425a6b4c190317c2527137b97c0ff5daf8
<ide><path>Library/Homebrew/keg_relocate.rb <ide> def find_dylib(bad_name) <ide> end <ide> <ide> def mach_o_files <add> hardlinks = Set.new <ide> mach_o_files = [] <ide> path.find do |pn| <ide> next if pn.symlink? || pn.directory? <del> mach_o_files << pn if pn.dylib? || pn.mach_o_bundle? || pn.mach_o_executable? <add> next unless pn.dylib? || pn.mach_o_bundle? || pn.mach_o_executable? <add> # if we've already processed a file, ignore its hardlinks (which have the same dev ID and inode) <add> # this prevents relocations from being performed on a binary more than once <add> next unless hardlinks.add? [pn.stat.dev, pn.stat.ino] <add> mach_o_files << pn <ide> end <ide> <ide> mach_o_files
1
Javascript
Javascript
fix crash in xmlhttprequest example on android
16e4971121435879e4f621214948379ab4ea2734
<ide><path>RNTester/js/XHRExampleFormData.js <ide> class XHRExampleFormData extends React.Component<Object, Object> { <ide> _fetchRandomPhoto = () => { <ide> CameraRoll.getPhotos({ <ide> first: PAGE_SIZE, <del> groupTypes: 'All', <add> groupTypes: Platform.OS === 'ios' ? 'All' : undefined, <ide> assetType: 'All', <ide> }).then( <ide> data => {
1
Javascript
Javascript
improve `worker_threads` coverage
ba5b5acaf10799206229793f71a4f6542235439f
<ide><path>test/parallel/test-worker-environmentdata.js <ide> 'use strict'; <add>// Flags: --expose-internals <ide> <ide> require('../common'); <ide> const { <ide> const { <ide> threadId, <ide> } = require('worker_threads'); <ide> <add>const { assignEnvironmentData } = require('internal/worker'); <add> <ide> const { <ide> deepStrictEqual, <ide> strictEqual, <ide> if (!process.env.HAS_STARTED_WORKER) { <ide> strictEqual(getEnvironmentData('foo'), 'bar'); <ide> deepStrictEqual(getEnvironmentData('hello'), { value: 'world' }); <ide> strictEqual(getEnvironmentData(1), undefined); <add> assignEnvironmentData(undefined); // It won't setup any key. <add> strictEqual(getEnvironmentData(undefined), undefined); <ide> <ide> // Recurse to make sure the environment data is inherited <ide> if (threadId <= 2) <ide><path>test/parallel/test-worker-heap-snapshot.js <add>'use strict'; <add> <add>const common = require('../common'); <add>const assert = require('assert'); <add>const { Worker } = require('worker_threads'); <add>const { once } = require('events'); <add> <add>// Ensure that worker.getHeapSnapshot() returns a valid JSON <add>(async () => { <add> const worker = new Worker('setInterval(() => {}, 1000);', { eval: true }); <add> await once(worker, 'online'); <add> const stream = await worker.getHeapSnapshot(); <add> assert.ok(JSON.parse(stream.read())); <add> <add> await worker.terminate(); <add>})().then(common.mustCall());
2
Ruby
Ruby
stop printing message
12c454822aff7320cee503c0708d30a99f4a0e0c
<ide><path>Library/Homebrew/cmd/postinstall.rb <ide> def run_post_install(formula) <ide> args << "--devel" <ide> end <ide> <del> Sandbox.print_sandbox_message if Sandbox.formula?(formula) <del> <ide> Utils.safe_fork do <ide> if Sandbox.formula?(formula) <ide> sandbox = Sandbox.new <ide><path>Library/Homebrew/dev-cmd/test.rb <ide> def test <ide> args << "--devel" <ide> end <ide> <del> Sandbox.print_sandbox_message if Sandbox.test? <del> <ide> Utils.safe_fork do <ide> if Sandbox.test? <ide> sandbox = Sandbox.new <ide><path>Library/Homebrew/formula_installer.rb <ide> def build <ide> #{formula.specified_path} <ide> ].concat(build_argv) <ide> <del> Sandbox.print_sandbox_message if Sandbox.formula?(formula) <del> <ide> Utils.safe_fork do <ide> # Invalidate the current sudo timestamp in case a build script calls sudo. <ide> # Travis CI's Linux sudoless workers have a weird sudo that fails here. <ide><path>Library/Homebrew/sandbox.rb <ide> def self.test? <ide> !ARGV.no_sandbox? <ide> end <ide> <del> def self.print_sandbox_message <del> return if @printed_sandbox_message <del> ohai "Using the sandbox" <del> @printed_sandbox_message = true <del> end <del> <ide> def initialize <ide> @profile = SandboxProfile.new <ide> end
4
Python
Python
add test for internal ip creation for gcp
7a5abfa9de3c9442a7db189bbd1f662b80c680cf
<ide><path>libcloud/test/compute/test_gce.py <ide> def test_ex_create_address(self): <ide> self.assertTrue(isinstance(address, GCEAddress)) <ide> self.assertEqual(address.name, address_name) <ide> <add> def test_ex_create_address_internal(self): <add> address_name = 'lcaddressinternal' <add> address = self.driver.ex_create_address(address_name, <add> region='us-central1', <add> address='10.128.0.12', <add> address_type='INTERNAL', <add> subnetwork='subnet-1') <add> print address <add> self.assertTrue(isinstance(address, GCEAddress)) <add> self.assertEqual(address.name, address_name) <add> self.assertEqual(address.address, '10.128.0.12') <add> self.assertEqual(address.addressType, 'INTERNAL') <add> <ide> def test_ex_create_backend(self): <ide> # Note: this is an internal object, no API call is made <ide> # and no fixture is needed specifically for GCEBackend, however <ide> def _regions_us_central1_addresses_testaddress(self, method, url, body, <ide> body = self.fixtures.load('regions_us-central1_addresses_testaddress.json') <ide> return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK]) <ide> <add> def _regions_us_central1_subnetworks_subnet_1(self, method, url, body, <add> headers): <add> body = self.fixtures.load('regions_us-central1_subnetworks_subnet_1.json') <add> return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK]) <add> <add> def _regions_us_central1_addresses_lcaddressinternal(self, method, url, body, <add> headers): <add> body = self.fixtures.load('regions_us-central1_addresses_lcaddressinternal.json') <add> return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK]) <add> <ide> def _regions_us_central1_forwardingRules(self, method, url, body, headers): <ide> if method == 'POST': <ide> body = self.fixtures.load(
1
PHP
PHP
add typehint for consistency
2f01353e7f72e1ac910f260af5a85e58e7946833
<ide><path>src/Illuminate/Support/helpers.php <ide> function array_has($array, $key) <ide> * @param mixed $default <ide> * @return mixed <ide> */ <del> function array_last($array, $callback = null, $default = null) <add> function array_last($array, callable $callback = null, $default = null) <ide> { <ide> return Arr::last($array, $callback, $default); <ide> }
1
PHP
PHP
catch the `stopexception` when executing a command
649c0a44df6677f38fe342782d5047066630009b
<ide><path>src/Console/CommandRunner.php <ide> public function run(array $argv, ConsoleIo $io = null) <ide> $result = $this->runShell($shell, $argv); <ide> } <ide> if ($shell instanceof Command) { <del> $result = $shell->run($argv, $io); <add> $result = $this->runCommand($shell, $argv, $io); <ide> } <ide> <ide> if ($result === null || $result === true) { <ide> protected function resolveName($commands, $io, $name) <ide> return $name; <ide> } <ide> <add> /** <add> * Execute a Command class. <add> * <add> * @param \Cake\Console\Command $command The command to run. <add> * @param array $argv The CLI arguments to invoke. <add> * @param \Cake\Console\ConsoleIo $io The console io <add> * @return int Exit code <add> */ <add> protected function runCommand(Command $command, array $argv, ConsoleIo $io) <add> { <add> try { <add> return $command->run($argv, $io); <add> } catch (StopException $e) { <add> return $e->getCode(); <add> } <add> } <add> <ide> /** <ide> * Execute a Shell class. <ide> *
1
Javascript
Javascript
reduce duplication in drawgrid
99596b04345ab79fc2bca9016779b2671db87bd7
<ide><path>src/core/core.scale.js <ide> export default class Scale extends Element { <ide> const items = me._gridLineItems || (me._gridLineItems = me._computeGridLineItems(chartArea)); <ide> let i, ilen; <ide> <add> const drawLine = (p1, p2, style) => { <add> if (!style.width || !style.color) { <add> return; <add> } <add> ctx.save(); <add> ctx.lineWidth = style.width; <add> ctx.strokeStyle = style.color; <add> ctx.setLineDash(style.borderDash || []); <add> ctx.lineDashOffset = style.borderDashOffset; <add> <add> ctx.beginPath(); <add> ctx.moveTo(p1.x, p1.y); <add> ctx.lineTo(p2.x, p2.y); <add> ctx.stroke(); <add> ctx.restore(); <add> }; <add> <ide> if (grid.display) { <ide> for (i = 0, ilen = items.length; i < ilen; ++i) { <ide> const item = items[i]; <del> const {color, tickColor, tickWidth, width} = item; <del> <del> if (width && color && grid.drawOnChartArea) { <del> ctx.save(); <del> ctx.lineWidth = width; <del> ctx.strokeStyle = color; <del> if (ctx.setLineDash) { <del> ctx.setLineDash(item.borderDash); <del> ctx.lineDashOffset = item.borderDashOffset; <del> } <ide> <del> ctx.beginPath(); <del> ctx.moveTo(item.x1, item.y1); <del> ctx.lineTo(item.x2, item.y2); <del> ctx.stroke(); <del> ctx.restore(); <add> if (grid.drawOnChartArea) { <add> drawLine( <add> {x: item.x1, y: item.y1}, <add> {x: item.x2, y: item.y2}, <add> item <add> ); <ide> } <ide> <del> if (tickWidth && tickColor && grid.drawTicks) { <del> ctx.save(); <del> ctx.lineWidth = tickWidth; <del> ctx.strokeStyle = tickColor; <del> if (ctx.setLineDash) { <del> ctx.setLineDash(item.tickBorderDash); <del> ctx.lineDashOffset = item.tickBorderDashOffset; <del> } <del> <del> ctx.beginPath(); <del> ctx.moveTo(item.tx1, item.ty1); <del> ctx.lineTo(item.tx2, item.ty2); <del> ctx.stroke(); <del> ctx.restore(); <add> if (grid.drawTicks) { <add> drawLine( <add> {x: item.tx1, y: item.ty1}, <add> {x: item.tx2, y: item.ty2}, <add> { <add> color: item.tickColor, <add> width: item.tickWidth, <add> borderDash: item.tickBorderDash, <add> borderDashOffset: item.tickBorderDashOffset <add> } <add> ); <ide> } <ide> } <ide> } <ide> export default class Scale extends Element { <ide> y2 = _alignPixel(chart, me.bottom, lastLineWidth) + lastLineWidth / 2; <ide> x1 = x2 = borderValue; <ide> } <del> <del> ctx.lineWidth = axisWidth; <del> ctx.strokeStyle = edgeOpts.borderColor; <del> ctx.beginPath(); <del> ctx.moveTo(x1, y1); <del> ctx.lineTo(x2, y2); <del> ctx.stroke(); <add> drawLine( <add> {x: x1, y: y1}, <add> {x: x2, y: y2}, <add> {width: axisWidth, color: edgeOpts.borderColor}); <ide> } <ide> } <ide>
1
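For the Chart.js grid refactor above, every styled line (chart-area grid lines, tick marks, and the axis border) now flows through the same drawLine(p1, p2, style) helper. The sketch below shows a scale configuration that exercises those paths; the option names mirror the properties the diff reads (drawOnChartArea, drawTicks, color, borderDash, tickColor, tickWidth, borderColor), but the overall config shape is illustrative rather than taken from the commit.

```javascript
// Hypothetical chart options; each of the three line styles below ends up
// in the shared drawLine helper with a different style object.
const config = {
  type: 'line',
  data: {labels: ['Jan', 'Feb', 'Mar'], datasets: [{data: [3, 1, 2]}]},
  options: {
    scales: {
      y: {
        grid: {
          drawOnChartArea: true,           // grid lines across the chart area (item.x1/y1 -> x2/y2)
          drawTicks: true,                 // short tick marks next to the labels (item.tx1/ty1 -> tx2/ty2)
          color: 'rgba(0, 0, 0, 0.1)',
          borderDash: [4, 4],
          tickColor: 'rgba(0, 0, 0, 0.4)',
          tickWidth: 1,
          borderColor: '#666'              // axis border (edgeOpts.borderColor in the diff)
        }
      }
    }
  }
};
// new Chart(ctx, config); // requires a canvas 2d context
```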
Javascript
Javascript
update error message keywords
340161b9ff949f1b2be271548b9cab1581cb1e1e
<ide><path>test/parallel/test-module-loading-error.js <ide> const errorMessagesByPlatform = { <ide> win32: ['is not a valid Win32 application'], <ide> linux: ['file too short', 'Exec format error'], <ide> sunos: ['unknown file type', 'not an ELF file'], <del> darwin: ['file too short'], <add> darwin: ['file too short', 'not a mach-o file'], <ide> aix: ['Cannot load module', <ide> 'Cannot run a file that does not have a valid format.', <ide> 'Exec format error'],
1
Javascript
Javascript
use async/await in test-debugger-preserve-breaks
0098af1e371d3f1b161bad312571ce55a776c987
<ide><path>test/sequential/test-debugger-preserve-breaks.js <ide> const startCLI = require('../common/debugger'); <ide> const assert = require('assert'); <ide> const path = require('path'); <ide> <add>const scriptFullPath = fixtures.path('debugger', 'three-lines.js'); <add>const script = path.relative(process.cwd(), scriptFullPath); <add> <ide> // Run after quit. <del>{ <del> const scriptFullPath = fixtures.path('debugger', 'three-lines.js'); <del> const script = path.relative(process.cwd(), scriptFullPath); <add>const runTest = async () => { <ide> const cli = startCLI([script]); <del> <del> function onFatal(error) { <del> cli.quit(); <del> throw error; <add> try { <add> await cli.waitForInitialBreak(); <add> await cli.waitForPrompt(); <add> await cli.command('breakpoints'); <add> assert.match(cli.output, /No breakpoints yet/); <add> await cli.command('sb(2)'); <add> await cli.command('sb(3)'); <add> await cli.command('breakpoints'); <add> assert.ok(cli.output.includes(`#0 ${script}:2`)); <add> assert.ok(cli.output.includes(`#1 ${script}:3`)); <add> await cli.stepCommand('c'); // hit line 2 <add> await cli.stepCommand('c'); // hit line 3 <add> assert.deepStrictEqual(cli.breakInfo, { filename: script, line: 3 }); <add> await cli.command('restart'); <add> await cli.waitForInitialBreak(); <add> assert.deepStrictEqual(cli.breakInfo, { filename: script, line: 1 }); <add> await cli.stepCommand('c'); <add> assert.deepStrictEqual(cli.breakInfo, { filename: script, line: 2 }); <add> await cli.stepCommand('c'); <add> assert.deepStrictEqual(cli.breakInfo, { filename: script, line: 3 }); <add> await cli.command('breakpoints'); <add> const msg = `SCRIPT: ${script}, OUTPUT: ${cli.output}`; <add> assert.ok(cli.output.includes(`#0 ${script}:2`), msg); <add> assert.ok(cli.output.includes(`#1 ${script}:3`), msg); <add> } finally { <add> await cli.quit(); <ide> } <add>}; <ide> <del> return cli.waitForInitialBreak() <del> .then(() => cli.waitForPrompt()) <del> .then(() => cli.command('breakpoints')) <del> .then(() => { <del> assert.match(cli.output, /No breakpoints yet/); <del> }) <del> .then(() => cli.command('sb(2)')) <del> .then(() => cli.command('sb(3)')) <del> .then(() => cli.command('breakpoints')) <del> .then(() => { <del> assert.ok(cli.output.includes(`#0 ${script}:2`)); <del> assert.ok(cli.output.includes(`#1 ${script}:3`)); <del> }) <del> .then(() => cli.stepCommand('c')) // hit line 2 <del> .then(() => cli.stepCommand('c')) // hit line 3 <del> .then(() => { <del> assert.deepStrictEqual(cli.breakInfo, { filename: script, line: 3 }); <del> }) <del> .then(() => cli.command('restart')) <del> .then(() => cli.waitForInitialBreak()) <del> .then(() => { <del> assert.deepStrictEqual(cli.breakInfo, { filename: script, line: 1 }); <del> }) <del> .then(() => cli.stepCommand('c')) <del> .then(() => { <del> assert.deepStrictEqual(cli.breakInfo, { filename: script, line: 2 }); <del> }) <del> .then(() => cli.stepCommand('c')) <del> .then(() => { <del> assert.deepStrictEqual(cli.breakInfo, { filename: script, line: 3 }); <del> }) <del> .then(() => cli.command('breakpoints')) <del> .then(() => { <del> const msg = `SCRIPT: ${script}, OUTPUT: ${cli.output}`; <del> assert.ok(cli.output.includes(`#0 ${script}:2`), msg); <del> assert.ok(cli.output.includes(`#1 ${script}:3`), msg); <del> }) <del> .then(() => cli.quit()) <del> .then(null, onFatal); <del>} <add>runTest();
1
Javascript
Javascript
add date comparison to ember.compare
b3f2c9c3ff07658fc5b31a152ef17952a785a160
<ide><path>packages/ember-runtime/lib/core.js <ide> Ember.compare = function compare(v, w) { <ide> } <ide> return 0; <ide> <add> case 'date': <add> var vNum = v.getTime(); <add> var wNum = w.getTime(); <add> if (vNum < wNum) { return -1; } <add> if (vNum > wNum) { return 1; } <add> return 0; <add> <ide> default: <ide> return 0; <ide> } <ide> Ember.ORDER_DEFINITION = Ember.ENV.ORDER_DEFINITION || [ <ide> 'object', <ide> 'instance', <ide> 'function', <del> 'class' <add> 'class', <add> 'date' <ide> ]; <ide> <ide> /** <ide><path>packages/ember-runtime/tests/core/compare_test.js <ide> module("Ember.compare()", { <ide> v[11] = {a: 'hash'}; <ide> v[12] = Ember.Object.create(); <ide> v[13] = function (a) {return a;}; <add> v[14] = new Date('2012/01/01'); <add> v[15] = new Date('2012/06/06'); <ide> } <ide> }); <ide>
2
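With the Ember change above, Ember.compare orders Date values by their millisecond timestamps (via getTime). A small usage sketch, reusing the dates from the test in the diff:

```javascript
var earlier = new Date('2012/01/01');
var later   = new Date('2012/06/06');

Ember.compare(earlier, later);                 // -1, the earlier date sorts first
Ember.compare(later, earlier);                 //  1
Ember.compare(later, new Date('2012/06/06'));  //  0, equal timestamps
```

This is what lets date values participate in Ember's default ordering alongside the other types listed in Ember.ORDER_DEFINITION.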
PHP
PHP
fix callback return for durationlimiter
d50929802e98a140018d42f2115c8d4ce8db6ced
<ide><path>src/Illuminate/Redis/Limiters/DurationLimiter.php <ide> public function block($timeout, $callback = null) <ide> } <ide> <ide> if (is_callable($callback)) { <del> $callback(); <add> return $callback(); <ide> } <ide> <ide> return true; <ide><path>tests/Redis/DurationLimiterTest.php <ide> public function testItFailsImmediatelyOrRetriesForAWhileBasedOnAGivenTimeout() <ide> $this->assertEquals([1, 3], $store); <ide> } <ide> <add> public function testItReturnsTheCallbackResult() <add> { <add> $limiter = new DurationLimiter($this->redis(), 'key', 1, 1); <add> <add> $result = $limiter->block(1, function () { <add> return 'foo'; <add> }); <add> <add> $this->assertEquals('foo', $result); <add> } <add> <ide> private function redis() <ide> { <ide> return $this->redis['phpredis']->connection();
2
Javascript
Javascript
add debug stuff
64f12f8ee103365a8585e958a22f2bd5d3375917
<ide><path>config/passport.js <ide> passport.deserializeUser(function(id, done) { <ide> }); <ide> <ide> function sendWelcomeEmail(user) { <del> var transporter = nodemailer.createTransport({ <del> service: 'Mandrill', <del> auth: { <del> user: secrets.mandrill.user, <del> pass: secrets.mandrill.password <del> } <del> }); <del> var mailOptions = { <del> to: user.email, <del> from: 'Team@freecodecamp.com', <del> subject: 'Welcome to Free Code Camp ' + user.name + '!', <del> text: 'Hello,\n\n' + <del> 'Welcome to Free Code Camp!' <del> }; <add> console.log('=================================================================================CALL==============================='); <add> //var transporter = nodemailer.createTransport({ <add> // service: 'Mandrill', <add> // auth: { <add> // user: secrets.mandrill.user, <add> // pass: secrets.mandrill.password <add> // } <add> //}); <add> //var mailOptions = { <add> // to: user.email, <add> // from: 'Team@freecodecamp.com', <add> // subject: 'Welcome to Free Code Camp ' + user.name + '!', <add> // text: 'Hello,\n\n' + <add> // 'Welcome to Free Code Camp!' <add> //}; <ide> } <ide> <ide> /** <ide> passport.use(new GoogleStrategy(secrets.google, function(req, accessToken, refre <ide> User.findOne({ google: profile.id }, function(err, existingUser) { <ide> if (existingUser) return done(null, existingUser); <ide> User.findOne({ email: profile._json.email }, function(err, existingEmailUser) { <add> console.log('debug'); <add> console.log(existingEmailUser); <add> console.log('debug'); <ide> var user = existingEmailUser || new User; <ide> user.email = user.email || profile._json.email; <ide> user.google = profile.id;
1
Javascript
Javascript
update error message
30f103f0044d881d0c00166e58c80b770514d224
<ide><path>src/geometries/ParametricGeometry.js <ide> function ParametricBufferGeometry( func, slices, stacks ) { <ide> <ide> if ( func.length < 3 ) { <ide> <del> console.error( 'THREE.ParametricGeometry: Function must now modify a Vector3 as third parameter.' ); <add> console.error( 'THREE.ParametricGeometry: "func" now has a third mandatory parameter, the result vector of the surface equation.' ); <ide> <ide> } <ide>
1
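The reworded three.js error above points at the newer generator-function contract: the function receives (u, v, target) and must write the surface point into the provided target vector rather than returning a new one. A minimal sketch of a conforming function (the plane size and segment counts here are arbitrary):

```javascript
// Generator with the now-mandatory third parameter: fill in `target`
// with the surface point for the parameters (u, v) in [0, 1].
function plane( u, v, target ) {

	target.set( u * 10 - 5, 0, v * 10 - 5 );

}

// A two-argument function (func.length < 3) would log the error from the diff.
var geometry = new THREE.ParametricBufferGeometry( plane, 8, 8 );
```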
Text
Text
add commit format
f56101a26ad435dcf87fec6363fc18b0c7938e58
<ide><path>share/doc/homebrew/Migrating-A-Formula-To-A-Tap.md <ide> There are times when we may wish to migrate a formula from Homebrew's core (the main repository) into a tap (another repository). To do this: <ide> <ide> 1. Create a pull request to the new tap adding the formula file as-is from the main Homebrew repository. Fix any test failures that may occur due to the stricter requirements for new formulae than existing formula (e.g. `brew audit --strict` must pass for that formula). <del>2. Create a pull request to the main repository deleting the formula file and add it to `Library/Homebrew/tap_migrations.rb`. <add>2. Create a pull request to the main repository deleting the formula file and add it to `Library/Homebrew/tap_migrations.rb` with a commit message like `gv: migrating to homebrew/x11`. <ide> 3. Put a link for each pull request in the other pull request so the maintainers can merge them both at once. <ide> <ide> Congratulations, you've moved a formula to a tap!
1
Java
Java
add mention of shortcut methods in bodyinserters
ce895d7a84a851426a11548de3b942458540a413
<ide><path>spring-webflux/src/main/java/org/springframework/web/reactive/function/BodyInserters.java <ide> public static <T> BodyInserter<T, ReactiveHttpOutputMessage> empty() { <ide> <ide> /** <ide> * Return a {@code BodyInserter} that writes the given single object. <add> * <p>Note also that <add> * {@link org.springframework.web.reactive.function.client.WebClient WebClient} and <add> * {@link org.springframework.web.reactive.function.server.ServerResponse ServerResponse} <add> * each offer a {@code syncBody(Object)} shortcut for providing an Object <add> * as the body. <ide> * @param body the body of the response <ide> * @return a {@code BodyInserter} that writes a single object <ide> */ <ide> public static <T> BodyInserter<T, ReactiveHttpOutputMessage> fromObject(T body) <ide> <ide> /** <ide> * Return a {@code BodyInserter} that writes the given {@link Publisher}. <add> * <p>Note also that <add> * {@link org.springframework.web.reactive.function.client.WebClient WebClient} and <add> * {@link org.springframework.web.reactive.function.server.ServerResponse ServerResponse} <add> * each offer {@code body} shortcut methods for providing a Publisher as the body. <ide> * @param publisher the publisher to stream to the response body <ide> * @param elementClass the class of elements contained in the publisher <ide> * @param <T> the type of the elements contained in the publisher <ide> public static <T, P extends Publisher<T>> BodyInserter<P, ReactiveHttpOutputMess <ide> <ide> /** <ide> * Return a {@code BodyInserter} that writes the given {@link Publisher}. <add> * <p>Note also that <add> * {@link org.springframework.web.reactive.function.client.WebClient WebClient} and <add> * {@link org.springframework.web.reactive.function.server.ServerResponse ServerResponse} <add> * each offer {@code body} shortcut methods for providing a Publisher as the body. <ide> * @param publisher the publisher to stream to the response body <ide> * @param typeReference the type of elements contained in the publisher <ide> * @param <T> the type of the elements contained in the publisher <ide> public static <T, S extends Publisher<ServerSentEvent<T>>> BodyInserter<S, Serve <ide> } <ide> <ide> /** <del> * Return a {@link FormInserter} that writes the given {@code MultiValueMap} as URL-encoded <del> * form data. Note that the returned inserter allows for additional entries to be added via <del> * {@link FormInserter#with(String, Object)}. <add> * Return a {@link FormInserter} that writes the given {@code MultiValueMap} <add> * as URL-encoded form data. The returned inserter allows for additional <add> * entries to be added via {@link FormInserter#with(String, Object)}. <ide> * <del> * <p><strong>Note:</strong> you can also use the {@code syncBody(Object)} <del> * method in the request builders of both the {@code WebClient} and <del> * {@code WebTestClient}. In that case setting the content type is not <del> * required. Just make sure the map contains String values only. <add> * <p>Note that you can also use the {@code syncBody(Object)} method in the <add> * request builders of both the {@code WebClient} and {@code WebTestClient}. <add> * In that case the setting of the content type is also not required, just <add> * be sure the map contains String values only or otherwise it would be <add> * interpreted as a multipart request. 
<ide> * <ide> * @param formData the form data to write to the output message <ide> * @return a {@code FormInserter} that writes form data <ide> public static FormInserter<String> fromFormData(MultiValueMap<String, String> fo <ide> } <ide> <ide> /** <del> * Return a {@link FormInserter} that writes the given key-value pair as URL-encoded <del> * form data. Note that the returned inserter allows for additional entries to be added via <del> * {@link FormInserter#with(String, Object)}. <add> * Return a {@link FormInserter} that writes the given key-value pair as <add> * URL-encoded form data. The returned inserter allows for additional <add> * entries to be added via {@link FormInserter#with(String, Object)}. <ide> * @param key the key to add to the form <ide> * @param value the value to add to the form <ide> * @return a {@code FormInserter} that writes form data <ide> public static FormInserter<String> fromFormData(String key, String value) { <ide> * MultipartBodyBuilder}. Also the returned inserter allows for additional <ide> * entries to be added via {@link FormInserter#with(String, Object)}. <ide> * <del> * <p><strong>Note:</strong> you can also use the {@code syncBody(Object)} <del> * method in the request builders of both the {@code WebClient} and <del> * {@code WebTestClient}. In that case setting the content type is optional. <del> * Just make sure the {@code MultiValueMap} contains at least one non-String <del> * value or otherwise it would be interpreted as plan form data. <add> * <p>Note that you can also use the {@code syncBody(Object)} method in the <add> * request builders of both the {@code WebClient} and {@code WebTestClient}. <add> * In that case the setting of the content type is also not required, just <add> * be sure the map contains at least one non-String value or otherwise, <add> * without a content-type header as a hint, it would be interpreted as a <add> * plain form data request. <ide> * <ide> * @param multipartData the form data to write to the output message <ide> * @return a {@code BodyInserter} that writes multipart data <ide> public static <T> FormInserter<T> fromMultipartData(String key, T value) { <ide> } <ide> <ide> /** <del> * Return a {@code BodyInserter} that writes the given {@code Publisher<DataBuffer>} to the body. <add> * Return a {@code BodyInserter} that writes the given <add> * {@code Publisher<DataBuffer>} to the body. <ide> * @param publisher the data buffer publisher to write <ide> * @param <T> the type of the publisher <ide> * @return a {@code BodyInserter} that writes directly to the body
1
Ruby
Ruby
use bind parameters for ranges in where clauses
6efb39456a8ee3569f3787cc2f26c041365a0e27
<ide><path>activerecord/lib/active_record/relation/predicate_builder.rb <ide> def initialize(table) <ide> register_handler(Class, ClassHandler.new(self)) <ide> register_handler(Base, BaseHandler.new(self)) <ide> register_handler(Range, RangeHandler.new(self)) <add> register_handler(RangeHandler::RangeWithBinds, RangeHandler.new(self)) <ide> register_handler(Relation, RelationHandler.new) <ide> register_handler(Array, ArrayHandler.new(self)) <ide> register_handler(AssociationQueryValue, AssociationQueryHandler.new(self)) <ide> def create_binds_for_hash(attributes) <ide> binds += bvs <ide> when Relation <ide> binds += value.bound_attributes <add> when Range <add> first = value.begin <add> last = value.end <add> unless first.respond_to?(:infinite?) && first.infinite? <add> binds << build_bind_param(column_name, first) <add> first = Arel::Nodes::BindParam.new <add> end <add> unless last.respond_to?(:infinite?) && last.infinite? <add> binds << build_bind_param(column_name, last) <add> last = Arel::Nodes::BindParam.new <add> end <add> <add> result[column_name] = RangeHandler::RangeWithBinds.new(first, last, value.exclude_end?) <ide> else <ide> if can_be_bound?(column_name, value) <ide> result[column_name] = Arel::Nodes::BindParam.new <del> binds << Relation::QueryAttribute.new(column_name.to_s, value, table.type(column_name)) <add> binds << build_bind_param(column_name, value) <ide> end <ide> end <ide> end <ide> def can_be_bound?(column_name, value) <ide> handler_for(value).is_a?(BasicObjectHandler) && <ide> !table.associated_with?(column_name) <ide> end <add> <add> def build_bind_param(column_name, value) <add> Relation::QueryAttribute.new(column_name.to_s, value, table.type(column_name)) <add> end <ide> end <ide> end <ide><path>activerecord/lib/active_record/relation/predicate_builder/range_handler.rb <ide> module ActiveRecord <ide> class PredicateBuilder <ide> class RangeHandler # :nodoc: <add> RangeWithBinds = Struct.new(:begin, :end, :exclude_end?) <add> <ide> def initialize(predicate_builder) <ide> @predicate_builder = predicate_builder <ide> end <ide> <ide> def call(attribute, value) <del> attribute.between(value) <add> if value.begin.respond_to?(:infinite?) && value.begin.infinite? <add> if value.end.respond_to?(:infinite?) && value.end.infinite? <add> attribute.not_in([]) <add> elsif value.exclude_end? <add> attribute.lt(value.end) <add> else <add> attribute.lteq(value.end) <add> end <add> elsif value.end.respond_to?(:infinite?) && value.end.infinite? <add> attribute.gteq(value.begin) <add> elsif value.exclude_end? <add> attribute.gteq(value.begin).and(attribute.lt(value.end)) <add> else <add> attribute.between(value) <add> end <ide> end <ide> <ide> protected
2