| column | dtype | range / distinct values |
|---|---|---|
| author | int64 | 4.98k to 943k |
| date | string (date) | 2017-04-15 16:45:02 to 2022-02-25 15:32:15 |
| timezone | int64 | -46,800 to 39.6k |
| hash | string | length 40 |
| message | string | length 8 to 468 |
| mods | list | 1 to 16 entries |
| language | string | 9 distinct values |
| license | string | 2 distinct values |
| repo | string | 119 distinct values |
| original_message | string | length 12 to 491 |
| is_CCS | int64 | 1 to 1 |
| commit_type | string | 129 distinct values |
| commit_scope | string, nullable (⌀) | length 1 to 44 |
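
Each row in the preview below is a single commit record with these columns; the `mods` cell is a JSON array of per-file changes. As an orientation aid only, here is a minimal TypeScript sketch of one record's shape, assuming the column types above; the field names come from the schema, while the interface names and comments are ours.

```typescript
// Sketch of one preview row, assuming the schema above (interface names are illustrative).
interface FileModification {
  change_type: "ADD" | "MODIFY" | "DELETE"; // values observed in the rows below
  diff: string;                             // unified-diff hunk text
  new_path: string | null;                  // null when the file is deleted
  old_path: string | null;                  // null when the file is added
}

interface CommitRecord {
  author: number;              // anonymized author id (int64)
  date: string;                // e.g. "16.02.2018 12:47:08"
  timezone: number;            // offset value, e.g. 28800 (presumably seconds from UTC)
  hash: string;                // 40-character commit SHA
  message: string;             // processed commit message
  mods: FileModification[];    // 1 to 16 file changes per commit
  language: string;            // e.g. "JavaScript"
  license: string;             // e.g. "MIT License"
  repo: string;                // e.g. "lerna/lerna"
  original_message: string;    // identical to message in the rows below
  is_CCS: number;              // conventional-commit flag; only 1 appears in this preview
  commit_type: string;         // e.g. "chore", "refactor", "test"
  commit_scope: string | null; // e.g. "Command", "lerna-add", or null
}
```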

author: 807,849 | date: 16.02.2018 12:47:08 | timezone: 28,800
hash: 37d4800dc36e420a57c4692ecb8b14442258cc19
message: chore: don't destructure lodash require
mods:
[
{
"change_type": "MODIFY",
"diff": "\"use strict\";\n-const { escapeRegExp } = require(\"lodash\");\n+const _ = require(\"lodash\");\nconst hostedGitInfo = require(\"hosted-git-info\");\nclass GitVersionParser {\nconstructor(versionPrefix = \"v\") {\n- this._gitUrlPattern = new RegExp(`(.+?#${escapeRegExp(versionPrefix)})(.+)$`);\n+ this._gitUrlPattern = new RegExp(`(.+?#${_.escapeRegExp(versionPrefix)})(.+)$`);\n}\nparseVersion(version) {\n",
"new_path": "src/GitVersionParser.js",
"old_path": "src/GitVersionParser.js"
}
]
language: JavaScript | license: MIT License | repo: lerna/lerna
original_message: chore: don't destructure lodash require
is_CCS: 1 | commit_type: chore | commit_scope: null

author: 807,849 | date: 16.02.2018 12:49:39 | timezone: 28,800
hash: e49e6fc9a154e5aba72d115996dcd2392041f149
message: chore: fixtures do not need version interpolation
mods:
[
{
"change_type": "MODIFY",
"diff": "{\n- \"lerna\": \"__TEST_VERSION__\",\n\"version\": \"1.0.0\"\n}\n",
"new_path": "test/fixtures/BootstrapCommand/ignored-scripts/lerna.json",
"old_path": "test/fixtures/BootstrapCommand/ignored-scripts/lerna.json"
},
{
"change_type": "MODIFY",
"diff": "\"package-4\": \"^1.0.0\"\n},\n\"devDependencies\": {\n- \"lerna\": \"__TEST_VERSION__\",\n\"@scoped/package-5\": \"^1.0.0\"\n}\n}\n",
"new_path": "test/fixtures/PackageUtilities/explicit-node-modules/package.json",
"old_path": "test/fixtures/PackageUtilities/explicit-node-modules/package.json"
},
{
"change_type": "MODIFY",
"diff": "\"package-4\": \"^1.0.0\"\n},\n\"devDependencies\": {\n- \"lerna\": \"__TEST_VERSION__\",\n\"package-5\": \"^1.0.0\"\n}\n}\n",
"new_path": "test/fixtures/PackageUtilities/globstar/package.json",
"old_path": "test/fixtures/PackageUtilities/globstar/package.json"
},
{
"change_type": "MODIFY",
"diff": "{\n\"name\": \"mixed-globstar-monorepo\",\n- \"private\": true,\n- \"devDependencies\": {\n- \"lerna\": \"__TEST_VERSION__\"\n- }\n+ \"private\": true\n}\n",
"new_path": "test/fixtures/PackageUtilities/mixed-globstar/package.json",
"old_path": "test/fixtures/PackageUtilities/mixed-globstar/package.json"
},
{
"change_type": "MODIFY",
"diff": "{\n- \"lerna\": \"__TEST_VERSION__\",\n\"version\": \"1.0.0\"\n}\n",
"new_path": "test/fixtures/PublishCommand/snake-graph/lerna.json",
"old_path": "test/fixtures/PublishCommand/snake-graph/lerna.json"
},
{
"change_type": "MODIFY",
"diff": "@@ -16,17 +16,14 @@ async function copyFixture(targetDir, fixturePath) {\n}\n/**\n- * During fixture copy, replace \"__TEST_VERSION__\" with the current version\n- * and \"__TEST_PKG_URL__\" with the generated file-url.\n+ * During fixture copy, replace \"__TEST_PKG_URL__\" with the generated file-url.\n* This is primarily for integration tests, but doesn't hurt unit tests.\n*\n* @param {String} fileName source path of file being copied\n*/\nasync function transform(fileName) {\nconst original = await fs.readFile(fileName, \"utf8\");\n- const filtered = original\n- .replace(constants.__TEST_VERSION__, constants.LERNA_VERSION)\n- .replace(constants.__TEST_PKG_URL__, constants.LERNA_PKG_URL);\n+ const filtered = original.replace(constants.__TEST_PKG_URL__, constants.LERNA_PKG_URL);\nif (original !== filtered) {\nawait fs.writeFile(fileName, filtered, \"utf8\");\n",
"new_path": "test/helpers/copyFixture.js",
"old_path": "test/helpers/copyFixture.js"
}
]
language: JavaScript | license: MIT License | repo: lerna/lerna
original_message: chore: fixtures do not need version interpolation
is_CCS: 1 | commit_type: chore | commit_scope: null

author: 807,849 | date: 16.02.2018 12:54:56 | timezone: 28,800
hash: 98e755d39d9b7e7747ace59394de7c9091727c02
message: refactor(Command): don't bother filtering logged argv
mods:
[
{
"change_type": "MODIFY",
"diff": "@@ -90,7 +90,7 @@ class Command {\nlog.heading = \"lerna\";\nthis._argv = argv;\n- log.silly(\"argv\", filterFlags(argv));\n+ log.silly(\"argv\", argv);\nthis.lernaVersion = LERNA_VERSION;\nlog.info(\"version\", this.lernaVersion);\n@@ -447,14 +447,5 @@ function cleanStack(err, className) {\nreturn lines.slice(0, relevantIndex).join(\"\\n\");\n}\n-/**\n- * Passed argv from yargs, return an object that contains _only_\n- * what was passed on the command line, omitting undefined values\n- * and yargs spam.\n- */\n-function filterFlags(argv) {\n- return _.omit(_.omitBy(argv, _.isNil), [\"h\", \"help\", \"v\", \"version\", \"$0\"]);\n-}\n-\nmodule.exports = Command;\nmodule.exports.builder = builder;\n",
"new_path": "src/Command.js",
"old_path": "src/Command.js"
}
]
language: JavaScript | license: MIT License | repo: lerna/lerna
original_message: refactor(Command): don't bother filtering logged argv
is_CCS: 1 | commit_type: refactor | commit_scope: Command

author: 807,849 | date: 16.02.2018 12:57:54 | timezone: 28,800
hash: 725d482fcf22694e4559de4409e8ed808da76443
message: refactor: use cached filter predicates instead of repeated _.find
mods:
[
{
"change_type": "MODIFY",
"diff": "\"use strict\";\n-const _ = require(\"lodash\");\nconst minimatch = require(\"minimatch\");\nconst path = require(\"path\");\nconst semver = require(\"semver\");\n@@ -35,6 +34,10 @@ function getForcedPackages(forcePublish) {\n}\nfunction makeDiffSince(rootPath, execOpts, ignorePatterns) {\n+ const ignoreFilters = new Set(\n+ Array.from(ignorePatterns || []).map(p => minimatch.filter(`!${p}`, { matchBase: true }))\n+ );\n+\nreturn function hasDiffSinceThatIsntIgnored(pkg, commits) {\nconst folder = path.relative(rootPath, pkg.location);\nconst diff = GitUtilities.diffSinceIn(commits, pkg.location, execOpts);\n@@ -45,10 +48,10 @@ function makeDiffSince(rootPath, execOpts, ignorePatterns) {\nlet changedFiles = diff.split(\"\\n\").map(file => file.replace(folder + path.sep, \"\"));\n- if (ignorePatterns) {\n- changedFiles = changedFiles.filter(\n- file => !_.find(ignorePatterns, pattern => minimatch(file, pattern, { matchBase: true }))\n- );\n+ if (ignoreFilters.size) {\n+ for (const ignored of ignoreFilters) {\n+ changedFiles = changedFiles.filter(ignored);\n+ }\n}\nreturn !!changedFiles.length;\n",
"new_path": "src/UpdatedPackagesCollector.js",
"old_path": "src/UpdatedPackagesCollector.js"
}
]
language: JavaScript | license: MIT License | repo: lerna/lerna
original_message: refactor: use cached filter predicates instead of repeated _.find
is_CCS: 1 | commit_type: refactor | commit_scope: null

author: 807,849 | date: 16.02.2018 12:58:57 | timezone: 28,800
hash: 16bad6a58dac17f069ba726a3a92aee0d39a994c
message: refactor: use packageGraph to locate packages instead of _.find
mods:
[
{
"change_type": "MODIFY",
"diff": "\"use strict\";\n-const _ = require(\"lodash\");\nconst async = require(\"async\");\nconst dedent = require(\"dedent\");\nconst getPort = require(\"get-port\");\n@@ -302,10 +301,13 @@ class BootstrapCommand extends Command {\n*/\ngetDependenciesToInstall(tracker) {\n// find package by name\n- const findPackage = (name, version) =>\n- _.find(this.packages, pkg => pkg.name === name && (!version || semver.satisfies(pkg.version, version)));\n+ const findPackage = (name, version) => {\n+ const node = this.packageGraph.get(name);\n- const hasPackage = (name, version) => Boolean(findPackage(name, version));\n+ if (node && semver.satisfies(node.version, version)) {\n+ return node.pkg;\n+ }\n+ };\n// Configuration for what packages to hoist may be in lerna.json or it may\n// come in as command line options.\n@@ -370,7 +372,7 @@ class BootstrapCommand extends Command {\n)\n// match external and version mismatched local packages\n- .filter(dep => !hasPackage(dep.name, dep.version) || !hasMatchingDependency(pkg, dep))\n+ .filter(dep => !findPackage(dep.name, dep.version) || !hasMatchingDependency(pkg, dep))\n.forEach(({ name, version }) => {\n// Get the object for this package, auto-vivifying.\n@@ -434,19 +436,19 @@ class BootstrapCommand extends Command {\nreturn;\n}\n- dependents[version].forEach(pkg => {\n+ dependents[version].forEach(pkgName => {\nif (rootVersion) {\ntracker.warn(\n\"EHOIST_PKG_VERSION\",\n- `\"${pkg}\" package depends on ${name}@${version}, ` +\n+ `\"${pkgName}\" package depends on ${name}@${version}, ` +\n`which differs from the hoisted ${name}@${rootVersion}.`\n);\n}\n// only install dependency if it's not already installed\n- (leaves[pkg] || (leaves[pkg] = [])).push({\n+ (leaves[pkgName] || (leaves[pkgName] = [])).push({\ndependency: `${name}@${version}`,\n- isSatisfied: hasDependencyInstalled(findPackage(pkg), name),\n+ isSatisfied: hasDependencyInstalled(this.packageGraph.get(pkgName).pkg, name),\n});\n});\n});\n",
"new_path": "src/commands/BootstrapCommand.js",
"old_path": "src/commands/BootstrapCommand.js"
}
]
language: JavaScript | license: MIT License | repo: lerna/lerna
original_message: refactor: use packageGraph to locate packages instead of _.find
is_CCS: 1 | commit_type: refactor | commit_scope: null

author: 807,849 | date: 16.02.2018 13:00:19 | timezone: 28,800
hash: 2244354f9c247e5d586b0282a981fa7ce3a73bae
message: refactor: replace _.set with inline defaulting
mods:
[
{
"change_type": "MODIFY",
"diff": "\"use strict\";\n-const _ = require(\"lodash\");\nconst writeJsonFile = require(\"write-json-file\");\nconst writePkg = require(\"write-pkg\");\n@@ -118,8 +117,10 @@ class InitCommand extends Command {\nif (this.exact) {\n// ensure --exact is preserved for future init commands\n- const configKey = lernaJson.commands ? \"commands\" : \"command\";\n- _.set(lernaJson, `${configKey}.init.exact`, true);\n+ const commandConfig = lernaJson.commands || lernaJson.command || (lernaJson.command = {});\n+ const initConfig = commandConfig.init || (commandConfig.init = {});\n+\n+ initConfig.exact = true;\n}\nwriteJsonFile.sync(this.repository.lernaJsonLocation, lernaJson, { indent: 2 });\n",
"new_path": "src/commands/InitCommand.js",
"old_path": "src/commands/InitCommand.js"
}
]
language: JavaScript | license: MIT License | repo: lerna/lerna
original_message: refactor: replace _.set with inline defaulting
is_CCS: 1 | commit_type: refactor | commit_scope: null

author: 807,849 | date: 16.02.2018 13:00:50 | timezone: 28,800
hash: e6609af95819b46459f49765bb6a35f03b669f4c
message: refactor: replace _.entries with Object.keys()
mods:
[
{
"change_type": "MODIFY",
"diff": "\"use strict\";\n-const _ = require(\"lodash\");\nconst async = require(\"async\");\nconst path = require(\"path\");\nconst readPkg = require(\"read-pkg\");\n@@ -21,9 +20,9 @@ function symlinkBinary(srcPackageRef, destPackageRef, callback) {\nconst srcPackage = resolvePackageRef(srcPackageRef);\nconst destPackage = resolvePackageRef(destPackageRef);\n- const actions = _.entries(srcPackage.bin)\n- .map(([name, file]) => ({\n- src: path.join(srcPackage.location, file),\n+ const actions = Object.keys(srcPackage.bin)\n+ .map(name => ({\n+ src: path.join(srcPackage.location, srcPackage.bin[name]),\ndst: path.join(destPackage.binLocation, name),\n}))\n.filter(({ src }) => FileSystemUtilities.existsSync(src))\n",
"new_path": "src/utils/symlink-binary.js",
"old_path": "src/utils/symlink-binary.js"
}
]
language: JavaScript | license: MIT License | repo: lerna/lerna
original_message: refactor: replace _.entries with Object.keys()
is_CCS: 1 | commit_type: refactor | commit_scope: null

author: 807,849 | date: 16.02.2018 13:40:45 | timezone: 28,800
hash: cd0d1b5462fac2b582e86a5da1579d9967e1c837
message: refactor(lerna-add): switch to cli-runner
mods:
[
{
"change_type": "MODIFY",
"diff": "\"use strict\";\n-const _ = require(\"lodash\");\nconst path = require(\"path\");\n-const execa = require(\"execa\");\nconst globby = require(\"globby\");\nconst loadJson = require(\"load-json-file\");\n+const pMap = require(\"p-map\");\n-const { LERNA_BIN } = require(\"../helpers/constants\");\n+const runner = require(\"../helpers/cliRunner\");\nconst initFixture = require(\"../helpers/initFixture\");\nconst pkgMatchers = require(\"../helpers/pkgMatchers\");\n-const cli = (args, options) => execa(LERNA_BIN, [\"add\", ...args], options);\n-\n-const loadFrom = cwd => filePath => loadJson(path.join(cwd, filePath));\n-\nexpect.extend(pkgMatchers);\n-const getPkgs = async cwd => {\n- const pkgs = await globby([\"packages/**/package.json\"], { cwd });\n- const load = loadFrom(cwd);\n- const manifests = await Promise.all(pkgs.map(pkg => load(pkg)));\n-\n- return _.keyBy(manifests, \"name\");\n-};\n-\ndescribe(\"lerna add\", () => {\ntest(\"add to all packages\", async () => {\nconst cwd = await initFixture(\"AddCommand/basic\");\n- await cli([\"@test/package-1\"], { cwd });\n- const pkgs = await getPkgs(cwd);\n- expect(pkgs[\"@test/package-1\"]).not.toDependOn(\"@test/package-1\");\n- expect(pkgs[\"@test/package-2\"]).toDependOn(\"@test/package-1\");\n- expect(pkgs[\"package-3\"]).toDependOn(\"@test/package-1\");\n- expect(pkgs[\"package-4\"]).toDependOn(\"@test/package-1\");\n+\n+ await runner(cwd)(\"add\", \"@test/package-1\");\n+\n+ const filePaths = await globby(\"packages/*/package.json\", { cwd });\n+ const [pkg1, pkg2, pkg3, pkg4] = await pMap(filePaths, fp => loadJson(path.join(cwd, fp)));\n+\n+ expect(pkg1).not.toDependOn(\"@test/package-1\");\n+ expect(pkg2).toDependOn(\"@test/package-1\");\n+ expect(pkg3).toDependOn(\"@test/package-1\");\n+ expect(pkg4).toDependOn(\"@test/package-1\");\n});\n});\n",
"new_path": "test/integration/lerna-add.test.js",
"old_path": "test/integration/lerna-add.test.js"
}
]
language: JavaScript | license: MIT License | repo: lerna/lerna
original_message: refactor(lerna-add): switch to cli-runner
is_CCS: 1 | commit_type: refactor | commit_scope: lerna-add

author: 807,849 | date: 16.02.2018 13:43:08 | timezone: 28,800
hash: 9ed7f0b5f697f0ad2bedacc5b0ad4afeb286867f
message: refactor: replace _.escapeRegExp with escape-string-regexp
mods:
[
{
"change_type": "MODIFY",
"diff": "\"conventional-changelog-core\": \"^2.0.3\",\n\"conventional-recommended-bump\": \"^2.0.4\",\n\"dedent\": \"^0.7.0\",\n+ \"escape-string-regexp\": \"^1.0.5\",\n\"execa\": \"^0.9.0\",\n\"find-up\": \"^2.1.0\",\n\"fs-extra\": \"^5.0.0\",\n",
"new_path": "package.json",
"old_path": "package.json"
},
{
"change_type": "MODIFY",
"diff": "\"use strict\";\n-const _ = require(\"lodash\");\n+const escapeStringRegexp = require(\"escape-string-regexp\");\nconst hostedGitInfo = require(\"hosted-git-info\");\nclass GitVersionParser {\nconstructor(versionPrefix = \"v\") {\n- this._gitUrlPattern = new RegExp(`(.+?#${_.escapeRegExp(versionPrefix)})(.+)$`);\n+ this._gitUrlPattern = new RegExp(`(.+?#${escapeStringRegexp(versionPrefix)})(.+)$`);\n}\nparseVersion(version) {\n",
"new_path": "src/GitVersionParser.js",
"old_path": "src/GitVersionParser.js"
}
]
language: JavaScript | license: MIT License | repo: lerna/lerna
original_message: refactor: replace _.escapeRegExp with escape-string-regexp
is_CCS: 1 | commit_type: refactor | commit_scope: null

author: 807,849 | date: 16.02.2018 15:41:23 | timezone: 28,800
hash: 82498a8e2949b49f3368076c093a141e84509684
message: chore: remove unused integration-lifecycle fixture
mods:
[
{
"change_type": "DELETE",
"diff": "-{\n- \"packages\": [\n- \"package-*\"\n- ],\n- \"version\": \"1.0.0\"\n-}\n",
"new_path": null,
"old_path": "test/fixtures/BootstrapCommand/integration-lifecycle/lerna.json"
},
{
"change_type": "DELETE",
"diff": "-index.js\n",
"new_path": null,
"old_path": "test/fixtures/BootstrapCommand/integration-lifecycle/package-1/.gitignore"
},
{
"change_type": "DELETE",
"diff": "-\"use strict\";\n-\n-let msg;\n-\n-try {\n- require(\"pify\");\n- msg = \"OK\";\n-} catch (ex) {\n- console.error(ex);\n- msg = \"FAIL\";\n-}\n-\n-module.exports = msg;\n",
"new_path": null,
"old_path": "test/fixtures/BootstrapCommand/integration-lifecycle/package-1/index.src.js"
},
{
"change_type": "DELETE",
"diff": "-{\n- \"name\": \"@integration/package-1\",\n- \"version\": \"1.0.0\",\n- \"private\": true,\n- \"scripts\": {\n- \"test\": \"echo package-1\",\n- \"prepublish\": \"cp index.src.js index.js\"\n- },\n- \"dependencies\": {\n- \"pify\": \"^2.0.0\"\n- },\n- \"devDependencies\": {\n- \"cash-cp\": \"^0.2.0\"\n- }\n-}\n",
"new_path": null,
"old_path": "test/fixtures/BootstrapCommand/integration-lifecycle/package-1/package.json"
},
{
"change_type": "DELETE",
"diff": "-#!/usr/bin/env node\n-\n-const msg = require(\"./\");\n-\n-console.log(\"cli\", msg);\n",
"new_path": null,
"old_path": "test/fixtures/BootstrapCommand/integration-lifecycle/package-2/cli.js"
},
{
"change_type": "DELETE",
"diff": "-\"use strict\";\n-\n-const msg = require(\"@integration/package-1\");\n-\n-module.exports = `package-2 ${msg}`;\n",
"new_path": null,
"old_path": "test/fixtures/BootstrapCommand/integration-lifecycle/package-2/index.js"
},
{
"change_type": "DELETE",
"diff": "-{\n- \"name\": \"@integration/package-2\",\n- \"version\": \"1.0.0\",\n- \"private\": true,\n- \"bin\": \"cli.js\",\n- \"scripts\": {\n- \"test\": \"echo package-2\"\n- },\n- \"dependencies\": {\n- \"@integration/package-1\": \"^1.0.0\",\n- \"pify\": \"^2.0.0\"\n- }\n-}\n",
"new_path": null,
"old_path": "test/fixtures/BootstrapCommand/integration-lifecycle/package-2/package.json"
},
{
"change_type": "DELETE",
"diff": "-#!/usr/bin/env node\n-\n-const msg = require(\"@integration/package-1\");\n-\n-console.log(\"package-3\", \"cli1\", msg);\n",
"new_path": null,
"old_path": "test/fixtures/BootstrapCommand/integration-lifecycle/package-3/cli1.js"
},
{
"change_type": "DELETE",
"diff": "-#!/usr/bin/env node\n-\n-const msg = require(\"@integration/package-2\");\n-\n-console.log(\"package-3\", \"cli2\", msg);\n",
"new_path": null,
"old_path": "test/fixtures/BootstrapCommand/integration-lifecycle/package-3/cli2.js"
},
{
"change_type": "DELETE",
"diff": "-{\n- \"name\": \"@integration/package-3\",\n- \"version\": \"1.0.0\",\n- \"private\": true,\n- \"bin\": {\n- \"package3-cli1\": \"cli1.js\",\n- \"package3-cli2\": \"cli2.js\"\n- },\n- \"scripts\": {\n- \"test\": \"package-2\"\n- },\n- \"dependencies\": {\n- \"@integration/package-1\": \"^1.0.0\",\n- \"@integration/package-2\": \"^1.0.0\"\n- },\n- \"devDependencies\": {\n- \"pify\": \"^1.0.0\"\n- }\n-}\n",
"new_path": null,
"old_path": "test/fixtures/BootstrapCommand/integration-lifecycle/package-3/package.json"
},
{
"change_type": "DELETE",
"diff": "-{\n- \"name\": \"package-4\",\n- \"version\": \"1.0.0\",\n- \"scripts\": {\n- \"test\": \"package3-cli1\",\n- \"posttest\": \"package3-cli2\"\n- },\n- \"devDependencies\": {\n- \"@integration/package-3\": \"^1.0.0\"\n- }\n-}\n",
"new_path": null,
"old_path": "test/fixtures/BootstrapCommand/integration-lifecycle/package-4/package.json"
},
{
"change_type": "DELETE",
"diff": "-{\n- \"name\": \"integration-lifecycle\",\n- \"version\": \"0.0.0\",\n- \"private\": true,\n- \"scripts\": {\n- \"postinstall\": \"lerna bootstrap\",\n- \"test\": \"lerna run test\"\n- },\n- \"devDependencies\": {\n- \"lerna\": \"__TEST_PKG_URL__\"\n- }\n-}\n",
"new_path": null,
"old_path": "test/fixtures/BootstrapCommand/integration-lifecycle/package.json"
}
]
language: JavaScript | license: MIT License | repo: lerna/lerna
original_message: chore: remove unused integration-lifecycle fixture
is_CCS: 1 | commit_type: chore | commit_scope: null

author: 807,849 | date: 16.02.2018 16:37:25 | timezone: 28,800
hash: f0c0ac96324e2496069ba8c01a6497e2c0d1a392
message: chore: test bootstrap git repo check in unit test
mods:
[
{
"change_type": "MODIFY",
"diff": "@@ -4,6 +4,9 @@ jest.mock(\"../src/utils/npm-install\");\njest.mock(\"../src/utils/npm-run-script\");\njest.mock(\"../src/utils/create-symlink\");\n+const fs = require(\"fs-extra\");\n+const path = require(\"path\");\n+\n// mocked or stubbed modules\nconst FileSystemUtilities = require(\"../src/FileSystemUtilities\");\nconst npmInstall = require(\"../src/utils/npm-install\");\n@@ -335,16 +338,6 @@ describe(\"BootstrapCommand\", () => {\n});\n});\n- describe(\"zero packages\", () => {\n- it(\"should succeed in repositories with zero packages\", async () => {\n- const testDir = await initFixture(\"BootstrapCommand/zero-pkgs\");\n-\n- const { exitCode } = await lernaBootstrap(testDir)();\n-\n- expect(exitCode).toBe(0);\n- });\n- });\n-\ndescribe(\"with registry config\", () => {\nit(\"should install packages from registry\", async () => {\nconst testDir = await initFixture(\"BootstrapCommand/registries\");\n@@ -449,4 +442,22 @@ describe(\"BootstrapCommand\", () => {\n}\n});\n});\n+\n+ it(\"succeeds in repositories with zero packages\", async () => {\n+ const testDir = await initFixture(\"BootstrapCommand/zero-pkgs\");\n+\n+ const { exitCode } = await lernaBootstrap(testDir)();\n+\n+ expect(exitCode).toBe(0);\n+ });\n+\n+ it(\"does not require an initialized git repo\", async () => {\n+ const testDir = await initFixture(\"BootstrapCommand/zero-pkgs\");\n+\n+ fs.remove(path.join(testDir, \".git\"));\n+\n+ const { exitCode } = await lernaBootstrap(testDir)();\n+\n+ expect(exitCode).toBe(0);\n+ });\n});\n",
"new_path": "test/BootstrapCommand.js",
"old_path": "test/BootstrapCommand.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -48,18 +48,6 @@ package-3 cli1 OK\npackage-3 cli2 package-2 OK\n`;\n-exports[`lerna bootstrap from CLI git repo check is ignored by default: stderr 1`] = `\n-lerna info version __TEST_VERSION__\n-lerna info Bootstrapping 4 packages\n-lerna info lifecycle preinstall\n-lerna info Installing external dependencies\n-lerna info Symlinking packages and binaries\n-lerna info lifecycle postinstall\n-lerna info lifecycle prepublish\n-lerna info lifecycle prepare\n-lerna success Bootstrapped 4 packages\n-`;\n-\nexports[`lerna bootstrap from CLI passes remaining arguments + npmClientArgs to npm client 1`] = `install,--production,--no-optional`;\nexports[`lerna bootstrap from CLI passes remaining arguments to npm client 1`] = `install,--no-optional`;\n",
"new_path": "test/integration/__snapshots__/lerna-bootstrap.test.js.snap",
"old_path": "test/integration/__snapshots__/lerna-bootstrap.test.js.snap"
},
{
"change_type": "MODIFY",
"diff": "@@ -5,11 +5,9 @@ const fs = require(\"fs-extra\");\nconst globby = require(\"globby\");\nconst normalizePath = require(\"normalize-path\");\nconst path = require(\"path\");\n-const tempy = require(\"tempy\");\nconst { LERNA_BIN } = require(\"../helpers/constants\");\nconst initFixture = require(\"../helpers/initFixture\");\n-const copyFixture = require(\"../helpers/copyFixture\");\ndescribe(\"lerna bootstrap\", () => {\nconst npmTest = cwd =>\n@@ -46,15 +44,6 @@ describe(\"lerna bootstrap\", () => {\nexpect(stderr).toMatchSnapshot(\"stderr\");\n});\n- test(\"git repo check is ignored by default\", async () => {\n- const cwd = tempy.directory();\n- await copyFixture(cwd, \"BootstrapCommand/integration\");\n- const args = [\"bootstrap\"];\n-\n- const stderr = await execa.stderr(LERNA_BIN, args, { cwd });\n- expect(stderr).toMatchSnapshot(\"stderr\");\n- });\n-\ntest(\"--npm-client yarn\", async () => {\nconst cwd = await initFixture(\"BootstrapCommand/integration\");\nconst args = [\"bootstrap\", \"--npm-client\", \"yarn\"];\n",
"new_path": "test/integration/lerna-bootstrap.test.js",
"old_path": "test/integration/lerna-bootstrap.test.js"
}
]
language: JavaScript | license: MIT License | repo: lerna/lerna
original_message: chore: test bootstrap git repo check in unit test
is_CCS: 1 | commit_type: chore | commit_scope: null

author: 807,849 | date: 16.02.2018 16:45:39 | timezone: 28,800
hash: 199c7524d6939072dc91c76651c164df1ee6ece3
message: chore: remove integration test already well-covered in units
mods:
[
{
"change_type": "MODIFY",
"diff": "@@ -51,16 +51,3 @@ package-3 cli2 package-2 OK\nexports[`lerna bootstrap from CLI passes remaining arguments + npmClientArgs to npm client 1`] = `install,--production,--no-optional`;\nexports[`lerna bootstrap from CLI passes remaining arguments to npm client 1`] = `install,--no-optional`;\n-\n-exports[`lerna bootstrap from CLI respects ignore flag: stderr 1`] = `\n-lerna info version __TEST_VERSION__\n-lerna info ignore @integration/package-1\n-lerna info Bootstrapping 3 packages\n-lerna info lifecycle preinstall\n-lerna info Installing external dependencies\n-lerna info Symlinking packages and binaries\n-lerna info lifecycle postinstall\n-lerna info lifecycle prepublish\n-lerna info lifecycle prepare\n-lerna success Bootstrapped 3 packages\n-`;\n",
"new_path": "test/integration/__snapshots__/lerna-bootstrap.test.js.snap",
"old_path": "test/integration/__snapshots__/lerna-bootstrap.test.js.snap"
},
{
"change_type": "MODIFY",
"diff": "@@ -36,14 +36,6 @@ describe(\"lerna bootstrap\", () => {\nexpect(stdout).toMatchSnapshot(\"stdout\");\n});\n- test(\"respects ignore flag\", async () => {\n- const cwd = await initFixture(\"BootstrapCommand/integration\");\n- const args = [\"bootstrap\", \"--ignore\", \"@integration/package-1\"];\n-\n- const stderr = await execa.stderr(LERNA_BIN, args, { cwd });\n- expect(stderr).toMatchSnapshot(\"stderr\");\n- });\n-\ntest(\"--npm-client yarn\", async () => {\nconst cwd = await initFixture(\"BootstrapCommand/integration\");\nconst args = [\"bootstrap\", \"--npm-client\", \"yarn\"];\n",
"new_path": "test/integration/lerna-bootstrap.test.js",
"old_path": "test/integration/lerna-bootstrap.test.js"
}
]
language: JavaScript | license: MIT License | repo: lerna/lerna
original_message: chore: remove integration test already well-covered in units
is_CCS: 1 | commit_type: chore | commit_scope: null

author: 807,849 | date: 16.02.2018 16:48:05 | timezone: 28,800
hash: d0d378c2957e0d9497b175a96ab055b33362d873
message: chore: reduce nesting of lerna-bootstrap test cases
mods:
[
{
"change_type": "MODIFY",
"diff": "// Jest Snapshot v1, https://goo.gl/fbAQLP\n-exports[`lerna bootstrap from CLI --npm-client yarn: lockfiles 1`] = `\n+exports[`lerna bootstrap --npm-client yarn: lockfiles 1`] = `\nArray [\npackage-1/yarn.lock,\npackage-2/yarn.lock,\n@@ -8,7 +8,7 @@ Array [\n]\n`;\n-exports[`lerna bootstrap from CLI --npm-client yarn: stderr 1`] = `\n+exports[`lerna bootstrap --npm-client yarn: stderr 1`] = `\nlerna info version __TEST_VERSION__\nlerna info Bootstrapping 4 packages\nlerna info lifecycle preinstall\n@@ -20,7 +20,7 @@ lerna info lifecycle prepare\nlerna success Bootstrapped 4 packages\n`;\n-exports[`lerna bootstrap from CLI --npm-client yarn: stdout 1`] = `\n+exports[`lerna bootstrap --npm-client yarn: stdout 1`] = `\npackage-1\npackage-2\ncli package-2 OK\n@@ -28,7 +28,7 @@ package-3 cli1 OK\npackage-3 cli2 package-2 OK\n`;\n-exports[`lerna bootstrap from CLI bootstraps all packages: stderr 1`] = `\n+exports[`lerna bootstrap bootstraps all packages: stderr 1`] = `\nlerna info version __TEST_VERSION__\nlerna info Bootstrapping 4 packages\nlerna info lifecycle preinstall\n@@ -40,7 +40,7 @@ lerna info lifecycle prepare\nlerna success Bootstrapped 4 packages\n`;\n-exports[`lerna bootstrap from CLI bootstraps all packages: stdout 1`] = `\n+exports[`lerna bootstrap bootstraps all packages: stdout 1`] = `\npackage-1\npackage-2\ncli package-2 OK\n@@ -48,6 +48,6 @@ package-3 cli1 OK\npackage-3 cli2 package-2 OK\n`;\n-exports[`lerna bootstrap from CLI passes remaining arguments + npmClientArgs to npm client 1`] = `install,--production,--no-optional`;\n+exports[`lerna bootstrap passes remaining arguments + npmClientArgs to npm client 1`] = `install,--production,--no-optional`;\n-exports[`lerna bootstrap from CLI passes remaining arguments to npm client 1`] = `install,--no-optional`;\n+exports[`lerna bootstrap passes remaining arguments to npm client 1`] = `install,--no-optional`;\n",
"new_path": "test/integration/__snapshots__/lerna-bootstrap.test.js.snap",
"old_path": "test/integration/__snapshots__/lerna-bootstrap.test.js.snap"
},
{
"change_type": "MODIFY",
"diff": "@@ -24,7 +24,6 @@ describe(\"lerna bootstrap\", () => {\n{ cwd }\n);\n- describe(\"from CLI\", () => {\ntest(\"bootstraps all packages\", async () => {\nconst cwd = await initFixture(\"BootstrapCommand/integration\");\nconst args = [\"bootstrap\"];\n@@ -72,4 +71,3 @@ describe(\"lerna bootstrap\", () => {\nexpect(npmDebugLog).toMatchSnapshot();\n});\n});\n-});\n",
"new_path": "test/integration/lerna-bootstrap.test.js",
"old_path": "test/integration/lerna-bootstrap.test.js"
}
]
language: JavaScript | license: MIT License | repo: lerna/lerna
original_message: chore: reduce nesting of lerna-bootstrap test cases
is_CCS: 1 | commit_type: chore | commit_scope: null

author: 807,849 | date: 16.02.2018 17:04:14 | timezone: 28,800
hash: 3bb0d7ecdd5d165a550ce8155a7dd7a6e8423fb7
message: chore: integration tests should only snapshot verbose things
mods:
[
{
"change_type": "MODIFY",
"diff": "// Jest Snapshot v1, https://goo.gl/fbAQLP\n-exports[`lerna bootstrap --npm-client yarn: lockfiles 1`] = `\n-Array [\n- package-1/yarn.lock,\n- package-2/yarn.lock,\n- package-3/yarn.lock,\n-]\n-`;\n-\nexports[`lerna bootstrap --npm-client yarn: stderr 1`] = `\nlerna info version __TEST_VERSION__\nlerna info Bootstrapping 4 packages\n@@ -47,7 +39,3 @@ cli package-2 OK\npackage-3 cli1 OK\npackage-3 cli2 package-2 OK\n`;\n-\n-exports[`lerna bootstrap passes remaining arguments + npmClientArgs to npm client 1`] = `install,--production,--no-optional`;\n-\n-exports[`lerna bootstrap passes remaining arguments to npm client 1`] = `install,--no-optional`;\n",
"new_path": "test/integration/__snapshots__/lerna-bootstrap.test.js.snap",
"old_path": "test/integration/__snapshots__/lerna-bootstrap.test.js.snap"
},
{
"change_type": "MODIFY",
"diff": "@@ -10,19 +10,8 @@ const { LERNA_BIN } = require(\"../helpers/constants\");\nconst initFixture = require(\"../helpers/initFixture\");\ndescribe(\"lerna bootstrap\", () => {\n- const npmTest = cwd =>\n- execa(\n- LERNA_BIN,\n- [\n- \"run\",\n- \"test\",\n- \"--\",\n- // arguments to npm test\n- \"--silent\",\n- \"--onload-script=false\",\n- ],\n- { cwd }\n- );\n+ // the \"--silent\" flag is passed to `npm run`\n+ const npmTest = cwd => execa.stdout(LERNA_BIN, [\"run\", \"test\", \"--\", \"--silent\"], { cwd });\ntest(\"bootstraps all packages\", async () => {\nconst cwd = await initFixture(\"BootstrapCommand/integration\");\n@@ -31,7 +20,7 @@ describe(\"lerna bootstrap\", () => {\nconst stderr = await execa.stderr(LERNA_BIN, args, { cwd });\nexpect(stderr).toMatchSnapshot(\"stderr\");\n- const { stdout } = await npmTest(cwd);\n+ const stdout = await npmTest(cwd);\nexpect(stdout).toMatchSnapshot(\"stdout\");\n});\n@@ -42,32 +31,34 @@ describe(\"lerna bootstrap\", () => {\nconst stderr = await execa.stderr(LERNA_BIN, args, { cwd });\nexpect(stderr).toMatchSnapshot(\"stderr\");\n- const lockfiles = await globby([\"package-*/yarn.lock\"], { cwd }).then(globbed =>\n- globbed.map(fp => normalizePath(fp))\n- );\n- expect(lockfiles).toMatchSnapshot(\"lockfiles\");\n+ const lockfiles = await globby([\"package-*/yarn.lock\"], { cwd });\n+ expect(lockfiles.map(fp => normalizePath(fp))).toEqual([\n+ \"package-1/yarn.lock\",\n+ \"package-2/yarn.lock\",\n+ \"package-3/yarn.lock\",\n+ ]);\n- const { stdout } = await npmTest(cwd);\n+ const stdout = await npmTest(cwd);\nexpect(stdout).toMatchSnapshot(\"stdout\");\n});\n- test(\"passes remaining arguments to npm client\", async () => {\n+ test(\"--npm-client npm -- --no-optional\", async () => {\nconst cwd = await initFixture(\"BootstrapCommand/npm-client-args-1\");\nconst args = [\"bootstrap\", \"--npm-client\", path.resolve(cwd, \"npm\"), \"--\", \"--no-optional\"];\nawait execa(LERNA_BIN, args, { cwd });\n- const npmDebugLog = fs.readFileSync(path.resolve(cwd, \"npm-debug.log\")).toString();\n- expect(npmDebugLog).toMatchSnapshot();\n+ const npmDebugLog = await fs.readFile(path.resolve(cwd, \"npm-debug.log\"), \"utf8\");\n+ expect(npmDebugLog.split(\",\")).toEqual([\"install\", \"--no-optional\"]);\n});\n- test(\"passes remaining arguments + npmClientArgs to npm client\", async () => {\n+ test(\"--npm-client npm -- --no-optional extends durable npmClientArgs\", async () => {\nconst cwd = await initFixture(\"BootstrapCommand/npm-client-args-2\");\nconst args = [\"bootstrap\", \"--npm-client\", path.resolve(cwd, \"npm\"), \"--\", \"--no-optional\"];\nawait execa(LERNA_BIN, args, { cwd });\n- const npmDebugLog = fs.readFileSync(path.resolve(cwd, \"npm-debug.log\")).toString();\n- expect(npmDebugLog).toMatchSnapshot();\n+ const npmDebugLog = await fs.readFile(path.resolve(cwd, \"npm-debug.log\"), \"utf8\");\n+ expect(npmDebugLog.split(\",\")).toEqual([\"install\", \"--production\", \"--no-optional\"]);\n});\n});\n",
"new_path": "test/integration/lerna-bootstrap.test.js",
"old_path": "test/integration/lerna-bootstrap.test.js"
}
]
language: JavaScript | license: MIT License | repo: lerna/lerna
original_message: chore: integration tests should only snapshot verbose things
is_CCS: 1 | commit_type: chore | commit_scope: null

author: 807,849 | date: 16.02.2018 17:14:23 | timezone: 28,800
hash: 8847e27a2aa15eb44d0538141a8749fe25d04989
message: integration: remove --onload-script=false
mods:
[
{
"change_type": "MODIFY",
"diff": "exports[`lerna run my-script --parallel: stderr 1`] = `\nlerna info version __TEST_VERSION__\n-lerna info run in 2 package(s): npm run my-script --silent --onload-script=false\n+lerna info run in 2 package(s): npm run my-script --silent\nlerna success run Ran npm script 'my-script' in packages:\nlerna success - package-1\nlerna success - package-3\n@@ -24,7 +24,7 @@ lerna success - package-4\nexports[`lerna run test --parallel: stderr 1`] = `\nlerna info version __TEST_VERSION__\n-lerna info run in 4 package(s): npm run test --silent --onload-script=false\n+lerna info run in 4 package(s): npm run test --silent\nlerna success run Ran npm script 'test' in packages:\nlerna success - package-1\nlerna success - package-2\n",
"new_path": "test/integration/__snapshots__/lerna-run.test.js.snap",
"old_path": "test/integration/__snapshots__/lerna-run.test.js.snap"
},
{
"change_type": "MODIFY",
"diff": "@@ -20,7 +20,6 @@ describe(\"lerna run\", () => {\n// args below tell npm to be quiet\n\"--\",\n\"--silent\",\n- \"--onload-script=false\",\n];\nconst { stdout, stderr } = await execa(LERNA_BIN, args, { cwd });\nexpect(stdout).toBe(\"package-1\");\n@@ -38,7 +37,6 @@ describe(\"lerna run\", () => {\n// args below tell npm to be quiet\n\"--\",\n\"--silent\",\n- \"--onload-script=false\",\n];\nconst { stdout, stderr } = await execa(LERNA_BIN, args, { cwd });\nexpect(stdout).toBe(\"package-4\");\n@@ -55,7 +53,6 @@ describe(\"lerna run\", () => {\n// args below tell npm to be quiet\n\"--\",\n\"--silent\",\n- \"--onload-script=false\",\n];\nconst { stdout, stderr } = await execa(LERNA_BIN, args, { cwd });\nexpect(stdout).toMatchSnapshot(\"stdout\");\n@@ -71,7 +68,6 @@ describe(\"lerna run\", () => {\n// args below tell npm to be quiet\n\"--\",\n\"--silent\",\n- \"--onload-script=false\",\n];\nconst { stdout, stderr } = await execa(LERNA_BIN, args, { cwd });\nexpect(stderr).toMatchSnapshot(\"stderr\");\n@@ -92,7 +88,6 @@ describe(\"lerna run\", () => {\n// args below tell npm to be quiet\n\"--\",\n\"--silent\",\n- \"--onload-script=false\",\n];\nconst { stdout, stderr } = await execa(LERNA_BIN, args, { cwd });\nexpect(stderr).toMatchSnapshot(\"stderr\");\n",
"new_path": "test/integration/lerna-run.test.js",
"old_path": "test/integration/lerna-run.test.js"
}
]
language: JavaScript | license: MIT License | repo: lerna/lerna
original_message: integration: remove --onload-script=false
is_CCS: 1 | commit_type: integration | commit_scope: null

author: 807,849 | date: 16.02.2018 17:15:08 | timezone: 28,800
hash: 3945eda906718a7e9fcd1b514c6b95455b8a32fa
message: chore: shuffle around integration test assertion helpers
mods:
[
{
"change_type": "MODIFY",
"diff": "@@ -8,7 +8,9 @@ const path = require(\"path\");\nconst { LERNA_BIN } = require(\"../helpers/constants\");\nconst initFixture = require(\"../helpers/initFixture\");\n-const serializeTestRoot = (match, testDir, subPath) => normalizePath(path.join(\"__TEST_ROOTDIR__\", subPath));\n+describe(\"lerna clean\", () => {\n+ const serializeTestRoot = (match, testDir, subPath) =>\n+ normalizePath(path.join(\"__TEST_ROOTDIR__\", subPath));\nconst normalizeLog = cwd => {\n// lol windows paths often look like escaped slashes, so re-re-escape them :P\n@@ -17,13 +19,11 @@ const normalizeLog = cwd => {\nreturn stderr => stderr.replace(dirPath, serializeTestRoot);\n};\n-describe(\"lerna clean\", () => {\ntest(\"global\", async () => {\nconst cwd = await initFixture(\"CleanCommand/basic\");\nconst args = [\"clean\", \"--yes\", \"--concurrency=1\"];\n- const { stderr } = await execa(LERNA_BIN, args, { cwd });\n-\n+ const stderr = await execa.stderr(LERNA_BIN, args, { cwd });\nexpect(normalizeLog(cwd)(stderr)).toMatchSnapshot(\"stderr\");\nconst found = await globby([\"package-*/node_modules\"], { cwd });\n",
"new_path": "test/integration/lerna-clean.test.js",
"old_path": "test/integration/lerna-clean.test.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -8,17 +8,15 @@ const tempy = require(\"tempy\");\nconst { LERNA_BIN } = require(\"../helpers/constants\");\nconst initFixture = require(\"../helpers/initFixture\");\n+describe(\"lerna init\", () => {\nconst parsePackageJson = cwd => loadJsonFile(path.join(cwd, \"package.json\"));\n-\nconst parseLernaJson = cwd => loadJsonFile(path.join(cwd, \"lerna.json\"));\n-\nconst loadMetaData = cwd => Promise.all([parsePackageJson(cwd), parseLernaJson(cwd)]);\n-describe(\"lerna init\", () => {\ntest(\"initializes empty directory\", async () => {\nconst cwd = tempy.directory();\n- const { stderr } = await execa(LERNA_BIN, [\"init\"], { cwd });\n+ const stderr = await execa.stderr(LERNA_BIN, [\"init\"], { cwd });\nexpect(stderr).toMatchSnapshot(\"stderr\");\nconst [packageJson, lernaJson] = await loadMetaData(cwd);\n@@ -29,7 +27,7 @@ describe(\"lerna init\", () => {\ntest(\"updates existing metadata\", async () => {\nconst cwd = await initFixture(\"InitCommand/updates\");\n- const { stderr } = await execa(LERNA_BIN, [\"init\", \"--exact\"], { cwd });\n+ const stderr = await execa.stderr(LERNA_BIN, [\"init\", \"--exact\"], { cwd });\nexpect(stderr).toMatchSnapshot(\"stderr\");\nconst [packageJson, lernaJson] = await loadMetaData(cwd);\n",
"new_path": "test/integration/lerna-init.test.js",
"old_path": "test/integration/lerna-init.test.js"
}
]
language: JavaScript | license: MIT License | repo: lerna/lerna
original_message: chore: shuffle around integration test assertion helpers
is_CCS: 1 | commit_type: chore | commit_scope: null

author: 807,849 | date: 16.02.2018 17:28:25 | timezone: 28,800
hash: 57e9ac2d45991524ef6b7bf609d7cdc2584889d4
message: chore: remove irrelevant npm scripts in integration fixtures
mods:
[
{
"change_type": "MODIFY",
"diff": "{\n\"name\": \"integration\",\n- \"private\": true,\n- \"scripts\": {\n- \"clean\": \"lerna clean --yes\"\n- },\n- \"devDependencies\": {\n- \"lerna\": \"__TEST_PKG_URL__\"\n- }\n+ \"private\": true\n}\n",
"new_path": "test/fixtures/CleanCommand/integration/package.json",
"old_path": "test/fixtures/CleanCommand/integration/package.json"
},
{
"change_type": "MODIFY",
"diff": "{\n\"name\": \"integration\",\n\"version\": \"0.0.0\",\n- \"private\": true,\n- \"scripts\": {\n- \"lp\": \"lerna publish --skip-npm --cd-version=major --yes\"\n- },\n- \"devDependencies\": {\n- \"lerna\": \"__TEST_PKG_URL__\"\n- }\n+ \"private\": true\n}\n",
"new_path": "test/fixtures/PublishCommand/integration/package.json",
"old_path": "test/fixtures/PublishCommand/integration/package.json"
},
{
"change_type": "MODIFY",
"diff": "{\n\"name\": \"integration-lifecycle\",\n\"version\": \"1.0.0\",\n- \"private\": true,\n- \"scripts\": {\n- \"test\": \"lerna run test --scope=package-1\"\n- },\n- \"devDependencies\": {\n- \"lerna\": \"__TEST_PKG_URL__\"\n- }\n+ \"private\": true\n}\n",
"new_path": "test/fixtures/RunCommand/integration-lifecycle/package.json",
"old_path": "test/fixtures/RunCommand/integration-lifecycle/package.json"
}
]
language: JavaScript | license: MIT License | repo: lerna/lerna
original_message: chore: remove irrelevant npm scripts in integration fixtures
is_CCS: 1 | commit_type: chore | commit_scope: null

author: 807,849 | date: 16.02.2018 17:29:23 | timezone: 28,800
hash: c13ec55094bdda10b81761e80c03ea5644b87bf6
message: chore: stop interpolating unused file url to packed tarball
mods:
[
{
"change_type": "MODIFY",
"diff": "@@ -15,7 +15,7 @@ const callsBack = require(\"./helpers/callsBack\");\nconst initFixture = require(\"./helpers/initFixture\");\nconst loggingOutput = require(\"./helpers/loggingOutput\");\nconst updateLernaConfig = require(\"./helpers/updateLernaConfig\");\n-const LERNA_VERSION = require(\"../package.json\").version;\n+const { LERNA_VERSION } = require(\"./helpers/constants\");\n// file under test\nconst Command = require(\"../src/Command\");\n",
"new_path": "test/Command.js",
"old_path": "test/Command.js"
},
{
"change_type": "MODIFY",
"diff": "\"use strict\";\nconst path = require(\"path\");\n-const fileUrl = require(\"file-url\");\nconst pkg = require(\"../../package.json\");\n-const LERNA_ROOTDIR = path.resolve(__dirname, \"../..\");\n-const LERNA_PKG_TGZ = path.resolve(LERNA_ROOTDIR, `lerna-${pkg.version}.tgz`);\n-\n/**\n* Shared constants for tests\n*/\nexports.LERNA_VERSION = pkg.version;\n-exports.LERNA_PKG_URL = fileUrl(LERNA_PKG_TGZ);\n-exports.LERNA_BIN = path.resolve(LERNA_ROOTDIR, pkg.bin.lerna);\n+exports.LERNA_BIN = path.resolve(__dirname, \"../..\", pkg.bin.lerna);\n-// placeholders used in fixture JSON files, replaced during tests\n+// placeholders for serializing snapshots\nexports.__TEST_VERSION__ = \"__TEST_VERSION__\";\n-exports.__TEST_PKG_URL__ = \"__TEST_PKG_URL__\";\n",
"new_path": "test/helpers/constants.js",
"old_path": "test/helpers/constants.js"
},
{
"change_type": "MODIFY",
"diff": "const fs = require(\"fs-extra\");\nconst path = require(\"path\");\n-const globby = require(\"globby\");\n-const constants = require(\"./constants\");\nmodule.exports = copyFixture;\nasync function copyFixture(targetDir, fixturePath) {\nconst fixtureDir = path.resolve(__dirname, `../fixtures/${fixturePath}`);\nawait fs.copy(fixtureDir, targetDir);\n-\n- const jsonFiles = await globby([\"./package.json\", \"**/lerna.json\"], { cwd: targetDir, absolute: true });\n- await Promise.all(jsonFiles.map(fileName => transform(fileName)));\n-}\n-\n-/**\n- * During fixture copy, replace \"__TEST_PKG_URL__\" with the generated file-url.\n- * This is primarily for integration tests, but doesn't hurt unit tests.\n- *\n- * @param {String} fileName source path of file being copied\n- */\n-async function transform(fileName) {\n- const original = await fs.readFile(fileName, \"utf8\");\n- const filtered = original.replace(constants.__TEST_PKG_URL__, constants.LERNA_PKG_URL);\n-\n- if (original !== filtered) {\n- await fs.writeFile(fileName, filtered, \"utf8\");\n- }\n}\n",
"new_path": "test/helpers/copyFixture.js",
"old_path": "test/helpers/copyFixture.js"
}
]
language: JavaScript | license: MIT License | repo: lerna/lerna
original_message: chore: stop interpolating unused file url to packed tarball
is_CCS: 1 | commit_type: chore | commit_scope: null

author: 807,849 | date: 16.02.2018 17:29:53 | timezone: 28,800
hash: 2c8b950084f4662c530e016ca4b3fc75a522218a
message: test: remove preintegration lifecycle script
mods:
[
{
"change_type": "MODIFY",
"diff": "\"scripts\": {\n\"ci\": \"npm test -- --coverage --verbose && npm run integration\",\n\"fix\": \"npm run lint -- --fix\",\n- \"preintegration\": \"npm pack\",\n\"integration\": \"cross-env LC_ALL=en-US NODE_ENV=test jest --config jest.integration.js\",\n\"lint\": \"eslint . --ignore-path .gitignore --cache\",\n\"pretest\": \"npm run lint\",\n",
"new_path": "package.json",
"old_path": "package.json"
}
]
language: JavaScript | license: MIT License | repo: lerna/lerna
original_message: test: remove preintegration lifecycle script
is_CCS: 1 | commit_type: test | commit_scope: null

author: 807,849 | date: 16.02.2018 17:30:56 | timezone: 28,800
hash: 62eb5e7f8b445984d37fd2afbf385cc65e0366d7
message: chore: remove irrelevant eslint overrides
mods:
[
{
"change_type": "MODIFY",
"diff": "-/* eslint node/no-unsupported-features: [\"error\", { version: 4 }] */\n-// this file is not transpiled by Jest when required in serializePlaceholders.js\n-\n\"use strict\";\nconst path = require(\"path\");\n",
"new_path": "test/helpers/constants.js",
"old_path": "test/helpers/constants.js"
},
{
"change_type": "MODIFY",
"diff": "-/* eslint node/no-unsupported-features: [\"error\", { version: 4 }] */\n-// this file is not transpiled by Jest when configured in \"snapshotSerializers\"\n-\n\"use strict\";\nconst normalizeNewline = require(\"normalize-newline\");\n",
"new_path": "test/helpers/serializePlaceholders.js",
"old_path": "test/helpers/serializePlaceholders.js"
}
]
language: JavaScript | license: MIT License | repo: lerna/lerna
original_message: chore: remove irrelevant eslint overrides
is_CCS: 1 | commit_type: chore | commit_scope: null

author: 807,849 | date: 16.02.2018 17:45:04 | timezone: 28,800
hash: f0a2a6b474dad43533225a706e28fa3ca973a4e0
message: chore: don't use mocks in integration tests
mods:
[
{
"change_type": "MODIFY",
"diff": "// Jest Snapshot v1, https://goo.gl/fbAQLP\nexports[`lerna publish fixed mode --conventional-commits recommends versions for each publish 1`] = `\n+Array [\nChanges:\n- package-1: 1.0.0 => 1.0.1\n- package-2: 1.0.0 => 1.0.1\n-\n+,\nChanges:\n- package-1: 1.0.1 => 1.1.0\n-\n+,\nChanges:\n- package-2: 1.1.0 => 1.2.0\n-\n+,\nChanges:\n- package-1: 1.2.0 => 2.0.0\n- package-2: 1.2.0 => 2.0.0\n-\n+,\n+]\n`;\nexports[`lerna publish independent mode --conventional-commits changelog 1`] = `\n",
"new_path": "test/integration/__snapshots__/lerna-publish.test.js.snap",
"old_path": "test/integration/__snapshots__/lerna-publish.test.js.snap"
},
{
"change_type": "MODIFY",
"diff": "@@ -10,8 +10,7 @@ const tempWrite = require(\"temp-write\");\nconst path = require(\"path\");\nconst os = require(\"os\");\n-const runner = require(\"../helpers/cliRunner\");\n-const consoleOutput = require(\"../helpers/consoleOutput\");\n+const { LERNA_BIN } = require(\"../helpers/constants\");\nconst initFixture = require(\"../helpers/initFixture\");\nconst loadPkgManifests = require(\"../helpers/loadPkgManifests\");\n@@ -27,6 +26,8 @@ expect.addSnapshotSerializer({\n},\n});\n+const runner = cwd => (...args) => execa(LERNA_BIN, args, { cwd });\n+\nconst lastCommitMessage = cwd =>\nexeca.stdout(\"git\", [\"log\", \"-1\", \"--format=%B\"], { cwd }).then(normalizeNewline);\n@@ -59,9 +60,6 @@ describe(\"lerna publish\", () => {\nif (process.cwd() !== currentDirectory) {\nprocess.chdir(currentDirectory);\n}\n-\n- // consoleOutput creates a mock\n- jest.clearAllMocks();\n});\ntest(\"exit 0 when no updates\", async () => {\n@@ -70,18 +68,18 @@ describe(\"lerna publish\", () => {\nawait execa(\"git\", [\"tag\", \"-a\", \"v1.0.0\", \"-m\", \"v1.0.0\"], { cwd });\n- const { exitCode } = await runner(cwd)(...args);\n+ const { code, stdout } = await runner(cwd)(...args);\n- expect(exitCode).toBe(0);\n- expect(consoleOutput()).toBe(\"\");\n+ expect(code).toBe(0);\n+ expect(stdout).toBe(\"\");\n});\ntest(\"updates fixed versions\", async () => {\nconst cwd = await initFixture(\"PublishCommand/normal\");\nconst args = [\"publish\", \"--skip-npm\", \"--cd-version=patch\", \"--yes\"];\n- await runner(cwd)(...args);\n- expect(consoleOutput()).toMatchSnapshot(\"stdout\");\n+ const { stdout } = await runner(cwd)(...args);\n+ expect(stdout).toMatchSnapshot(\"stdout\");\nconst [allPackageJsons, commitMessage] = await Promise.all([\nloadPkgManifests(cwd),\n@@ -108,24 +106,24 @@ describe(\"lerna publish\", () => {\nconst cwd = await initFixture(\"PublishCommand/normal\");\nconst args = [\"publish\", \"--canary\", \"--skip-npm\", \"--yes\"];\n- await runner(cwd)(...args);\n- expect(consoleOutput()).toMatchSnapshot(\"stdout\");\n+ const { stdout } = await runner(cwd)(...args);\n+ expect(stdout).toMatchSnapshot(\"stdout\");\n});\ntest(\"uses meta suffix from canary flag\", async () => {\nconst cwd = await initFixture(\"PublishCommand/normal\");\nconst args = [\"publish\", \"--canary=beta\", \"--skip-npm\", \"--yes\"];\n- await runner(cwd)(...args);\n- expect(consoleOutput()).toMatchSnapshot(\"stdout\");\n+ const { stdout } = await runner(cwd)(...args);\n+ expect(stdout).toMatchSnapshot(\"stdout\");\n});\ntest(\"updates independent versions\", async () => {\nconst cwd = await initFixture(\"PublishCommand/independent\");\nconst args = [\"publish\", \"--skip-npm\", \"--cd-version=major\", \"--yes\"];\n- await runner(cwd)(...args);\n- expect(consoleOutput()).toMatchSnapshot(\"stdout\");\n+ const { stdout } = await runner(cwd)(...args);\n+ expect(stdout).toMatchSnapshot(\"stdout\");\nconst [allPackageJsons, commitMessage] = await Promise.all([\nloadPkgManifests(cwd),\n@@ -152,26 +150,26 @@ describe(\"lerna publish\", () => {\n];\n// publish patch (all)\n- await lerna(...args);\n+ const { stdout: out1 } = await lerna(...args);\nawait commitChangeToPackage(cwd, \"package-1\", \"feat: foo\", { foo: true });\n// publish minor (package-1)\n- await lerna(...args);\n+ const { stdout: out2 } = await lerna(...args);\nawait commitChangeToPackage(cwd, \"package-2\", \"feat: bar\", { bar: true });\n// publish minor (package-2)\n- await lerna(...args);\n+ const { stdout: out3 } = await lerna(...args);\nawait 
commitChangeToPackage(cwd, \"package-2\", `fix: flip${os.EOL}${os.EOL}BREAKING CHANGE: yup`, {\nbar: false,\n});\n// publish major (force all)\n- await lerna(...args, \"--force-publish\");\n+ const { stdout: out4 } = await lerna(...args, \"--force-publish\");\n- expect(consoleOutput()).toMatchSnapshot();\n+ expect([out1, out2, out3, out4]).toMatchSnapshot();\n});\n[\"normal\", \"independent\"].forEach(flavor =>\n@@ -192,8 +190,8 @@ describe(\"lerna publish\", () => {\n// conventional-recommended-bump is incapable of accepting cwd config :P\nprocess.chdir(cwd);\n- await runner(cwd)(...args);\n- expect(consoleOutput()).toMatchSnapshot();\n+ const { stdout } = await runner(cwd)(...args);\n+ expect(stdout).toMatchSnapshot();\nconst changelogFilePaths = await globby([\"CHANGELOG.md\"], { cwd, absolute: true, matchBase: true });\nconst changelogContents = await Promise.all(changelogFilePaths.map(fp => fs.readFile(fp, \"utf8\")));\n",
"new_path": "test/integration/lerna-publish.test.js",
"old_path": "test/integration/lerna-publish.test.js"
}
]
language: JavaScript | license: MIT License | repo: lerna/lerna
original_message: chore: don't use mocks in integration tests
is_CCS: 1 | commit_type: chore | commit_scope: null

author: 807,849 | date: 16.02.2018 17:46:28 | timezone: 28,800
hash: 9dbb9d66a7bee47eafaf632b7e0633ea351b3643
message: chore: add --ci to jest executions in ci lifecycle
mods:
[
{
"change_type": "MODIFY",
"diff": "\"src\"\n],\n\"scripts\": {\n- \"ci\": \"npm test -- --coverage --verbose && npm run integration\",\n+ \"ci\": \"npm test -- --ci --coverage --verbose && npm run integration -- --ci\",\n\"fix\": \"npm run lint -- --fix\",\n\"integration\": \"cross-env LC_ALL=en-US NODE_ENV=test jest --config jest.integration.js\",\n\"lint\": \"eslint . --ignore-path .gitignore --cache\",\n",
"new_path": "package.json",
"old_path": "package.json"
}
]
language: JavaScript | license: MIT License | repo: lerna/lerna
original_message: chore: add --ci to jest executions in ci lifecycle
is_CCS: 1 | commit_type: chore | commit_scope: null

author: 807,849 | date: 16.02.2018 17:54:37 | timezone: 28,800
hash: f0ca38f075a5b9d4061019df9dcfcc3df6d4373a
message: test: don't re-test conventional-recommended-bump for them
mods:
[
{
"change_type": "MODIFY",
"diff": "// Jest Snapshot v1, https://goo.gl/fbAQLP\n-exports[`lerna publish fixed mode --conventional-commits recommends versions for each publish 1`] = `\n-Array [\n-\n-Changes:\n- - package-1: 1.0.0 => 1.0.1\n- - package-2: 1.0.0 => 1.0.1\n-,\n-\n-Changes:\n- - package-1: 1.0.1 => 1.1.0\n-,\n-\n-Changes:\n- - package-2: 1.1.0 => 1.2.0\n-,\n-\n-Changes:\n- - package-1: 1.2.0 => 2.0.0\n- - package-2: 1.2.0 => 2.0.0\n-,\n-]\n-`;\n-\nexports[`lerna publish independent mode --conventional-commits changelog 1`] = `\nChanges:\n",
"new_path": "test/integration/__snapshots__/lerna-publish.test.js.snap",
"old_path": "test/integration/__snapshots__/lerna-publish.test.js.snap"
},
{
"change_type": "MODIFY",
"diff": "@@ -134,44 +134,6 @@ describe(\"lerna publish\", () => {\nexpect(commitMessage).toMatchSnapshot(\"commit\");\n});\n- test(\"fixed mode --conventional-commits recommends versions for each publish\", async () => {\n- const cwd = await initFixture(\"PublishCommand/normal-no-inter-dependencies\", \"chore: Init repo\");\n-\n- // conventional-recommended-bump is incapable of accepting cwd config :P\n- process.chdir(cwd);\n-\n- const lerna = runner(cwd);\n- const args = [\n- \"publish\",\n- \"--conventional-commits\",\n- // \"--skip-git\", Note: git is not skipped to ensure creating tags for each publish execution works\n- \"--skip-npm\",\n- \"--yes\",\n- ];\n-\n- // publish patch (all)\n- const { stdout: out1 } = await lerna(...args);\n-\n- await commitChangeToPackage(cwd, \"package-1\", \"feat: foo\", { foo: true });\n-\n- // publish minor (package-1)\n- const { stdout: out2 } = await lerna(...args);\n-\n- await commitChangeToPackage(cwd, \"package-2\", \"feat: bar\", { bar: true });\n-\n- // publish minor (package-2)\n- const { stdout: out3 } = await lerna(...args);\n-\n- await commitChangeToPackage(cwd, \"package-2\", `fix: flip${os.EOL}${os.EOL}BREAKING CHANGE: yup`, {\n- bar: false,\n- });\n-\n- // publish major (force all)\n- const { stdout: out4 } = await lerna(...args, \"--force-publish\");\n-\n- expect([out1, out2, out3, out4]).toMatchSnapshot();\n- });\n-\n[\"normal\", \"independent\"].forEach(flavor =>\ntest(`${flavor} mode --conventional-commits changelog`, async () => {\nconst cwd = await initFixture(`PublishCommand/${flavor}`, \"feat: init repo\");\n",
"new_path": "test/integration/lerna-publish.test.js",
"old_path": "test/integration/lerna-publish.test.js"
}
]
language: JavaScript | license: MIT License | repo: lerna/lerna
original_message: test: don't re-test conventional-recommended-bump for them
is_CCS: 1 | commit_type: test | commit_scope: null

author: 807,849 | date: 16.02.2018 17:56:56 | timezone: 28,800
hash: 7f4702d2deae8972b715d2df31d277f8269666dc
message: chore: don't test yet another integration well-covered by units
mods:
[
{
"change_type": "MODIFY",
"diff": "@@ -470,14 +470,3 @@ Changes:\n- package-5: 1.0.0 => 1.1.0-alpha.SHA (private)\n`;\n-\n-exports[`lerna publish uses meta suffix from canary flag: stdout 1`] = `\n-\n-Changes:\n- - package-1: 1.0.0 => 1.1.0-beta.SHA\n- - package-2: 1.0.0 => 1.1.0-beta.SHA\n- - package-3: 1.0.0 => 1.1.0-beta.SHA\n- - package-4: 1.0.0 => 1.1.0-beta.SHA\n- - package-5: 1.0.0 => 1.1.0-beta.SHA (private)\n-\n-`;\n",
"new_path": "test/integration/__snapshots__/lerna-publish.test.js.snap",
"old_path": "test/integration/__snapshots__/lerna-publish.test.js.snap"
},
{
"change_type": "MODIFY",
"diff": "@@ -110,14 +110,6 @@ describe(\"lerna publish\", () => {\nexpect(stdout).toMatchSnapshot(\"stdout\");\n});\n- test(\"uses meta suffix from canary flag\", async () => {\n- const cwd = await initFixture(\"PublishCommand/normal\");\n- const args = [\"publish\", \"--canary=beta\", \"--skip-npm\", \"--yes\"];\n-\n- const { stdout } = await runner(cwd)(...args);\n- expect(stdout).toMatchSnapshot(\"stdout\");\n- });\n-\ntest(\"updates independent versions\", async () => {\nconst cwd = await initFixture(\"PublishCommand/independent\");\nconst args = [\"publish\", \"--skip-npm\", \"--cd-version=major\", \"--yes\"];\n",
"new_path": "test/integration/lerna-publish.test.js",
"old_path": "test/integration/lerna-publish.test.js"
}
]
language: JavaScript | license: MIT License | repo: lerna/lerna
original_message: chore: don't test yet another integration well-covered by units
is_CCS: 1 | commit_type: chore | commit_scope: null

author: 807,849 | date: 16.02.2018 18:19:28 | timezone: 28,800
hash: ba794660e66242bff7bbd04397c5932468d6769a
message: chore: DRY up integration tests with cli-runner
mods:
[
{
"change_type": "ADD",
"diff": "+\"use strict\";\n+\n+const execa = require(\"execa\");\n+const { LERNA_BIN } = require(\"./constants\");\n+\n+module.exports = runner;\n+\n+function runner(cwd, env) {\n+ const opts = {\n+ cwd,\n+ env: Object.assign({ CI: true }, env),\n+ };\n+\n+ return (...args) => execa(LERNA_BIN, args, opts);\n+}\n",
"new_path": "test/helpers/cli-runner.js",
"old_path": null
},
{
"change_type": "DELETE",
"diff": "-\"use strict\";\n-\n-jest.mock(\"is-ci\", () => true);\n-\n-const log = require(\"npmlog\");\n-const cli = require(\"../../src/cli\");\n-\n-// silence logs\n-log.level = \"silent\";\n-\n-module.exports = runner;\n-\n-function runner(cwd) {\n- // create a _new_ yargs instance every time cwd changes to avoid singleton pollution\n- const instance = cli([], cwd)\n- .exitProcess(false)\n- .detectLocale(false)\n- .showHelpOnFail(false)\n- .wrap(null);\n-\n- return (...argv) =>\n- new Promise((resolve, reject) => {\n- const yargsMeta = {};\n-\n- const context = {\n- cwd,\n- onResolved: result => {\n- Object.assign(result, yargsMeta);\n- resolve(result);\n- },\n- onRejected: result => {\n- Object.assign(result, yargsMeta);\n- // tests expect errors thrown to indicate failure,\n- // _not_ just non-zero exitCode\n- reject(result);\n- },\n- };\n-\n- const parseFn = (yargsError, parsedArgv, yargsOutput) => {\n- // this is synchronous, before the async handlers resolve\n- Object.assign(yargsMeta, { parsedArgv, yargsOutput });\n- };\n-\n- // workaround wonky yargs-parser configuration not being read during tests\n- // hackDoubleDash(args, context);\n-\n- instance\n- .fail((msg, err) => {\n- // since yargs 10.1.0, this is the only way to catch handler rejection\n- // _and_ yargs validation exceptions when using async command handlers\n- const actual = err || new Error(msg);\n- // backfill exitCode for test convenience\n- yargsMeta.exitCode = \"exitCode\" in actual ? actual.exitCode : 1;\n- context.onRejected(actual);\n- })\n- .parse(argv, context, parseFn);\n- });\n-}\n",
"new_path": null,
"old_path": "test/helpers/cliRunner.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -5,7 +5,7 @@ const globby = require(\"globby\");\nconst loadJson = require(\"load-json-file\");\nconst pMap = require(\"p-map\");\n-const runner = require(\"../helpers/cliRunner\");\n+const cliRunner = require(\"../helpers/cli-runner\");\nconst initFixture = require(\"../helpers/initFixture\");\nconst pkgMatchers = require(\"../helpers/pkgMatchers\");\n@@ -15,7 +15,7 @@ describe(\"lerna add\", () => {\ntest(\"add to all packages\", async () => {\nconst cwd = await initFixture(\"AddCommand/basic\");\n- await runner(cwd)(\"add\", \"@test/package-1\");\n+ await cliRunner(cwd)(\"add\", \"@test/package-1\");\nconst filePaths = await globby(\"packages/*/package.json\", { cwd });\nconst [pkg1, pkg2, pkg3, pkg4] = await pMap(filePaths, fp => loadJson(path.join(cwd, fp)));\n",
"new_path": "test/integration/lerna-add.test.js",
"old_path": "test/integration/lerna-add.test.js"
},
{
"change_type": "MODIFY",
"diff": "\"use strict\";\n-const execa = require(\"execa\");\nconst fs = require(\"fs-extra\");\nconst globby = require(\"globby\");\nconst normalizePath = require(\"normalize-path\");\nconst path = require(\"path\");\n-const { LERNA_BIN } = require(\"../helpers/constants\");\n+const cliRunner = require(\"../helpers/cli-runner\");\nconst initFixture = require(\"../helpers/initFixture\");\ndescribe(\"lerna bootstrap\", () => {\n- // the \"--silent\" flag is passed to `npm run`\n- const npmTest = cwd => execa.stdout(LERNA_BIN, [\"run\", \"test\", \"--\", \"--silent\"], { cwd });\n-\ntest(\"bootstraps all packages\", async () => {\nconst cwd = await initFixture(\"BootstrapCommand/integration\");\n- const args = [\"bootstrap\"];\n+ const lerna = cliRunner(cwd);\n- const stderr = await execa.stderr(LERNA_BIN, args, { cwd });\n+ const { stderr } = await lerna(\"bootstrap\");\nexpect(stderr).toMatchSnapshot(\"stderr\");\n- const stdout = await npmTest(cwd);\n+ // the \"--silent\" flag is passed to `npm run`\n+ const { stdout } = await lerna(\"run\", \"test\", \"--\", \"--silent\");\nexpect(stdout).toMatchSnapshot(\"stdout\");\n});\ntest(\"--npm-client yarn\", async () => {\nconst cwd = await initFixture(\"BootstrapCommand/integration\");\n- const args = [\"bootstrap\", \"--npm-client\", \"yarn\"];\n+ const lerna = cliRunner(cwd);\n- const stderr = await execa.stderr(LERNA_BIN, args, { cwd });\n+ const { stderr } = await lerna(\"bootstrap\", \"--npm-client\", \"yarn\");\nexpect(stderr).toMatchSnapshot(\"stderr\");\nconst lockfiles = await globby([\"package-*/yarn.lock\"], { cwd });\n@@ -38,7 +35,8 @@ describe(\"lerna bootstrap\", () => {\n\"package-3/yarn.lock\",\n]);\n- const stdout = await npmTest(cwd);\n+ // the \"--silent\" flag is passed to `npm run`\n+ const { stdout } = await lerna(\"run\", \"test\", \"--\", \"--silent\");\nexpect(stdout).toMatchSnapshot(\"stdout\");\n});\n@@ -46,7 +44,7 @@ describe(\"lerna bootstrap\", () => {\nconst cwd = await initFixture(\"BootstrapCommand/npm-client-args-1\");\nconst args = [\"bootstrap\", \"--npm-client\", path.resolve(cwd, \"npm\"), \"--\", \"--no-optional\"];\n- await execa(LERNA_BIN, args, { cwd });\n+ await cliRunner(cwd)(...args);\nconst npmDebugLog = await fs.readFile(path.resolve(cwd, \"npm-debug.log\"), \"utf8\");\nexpect(npmDebugLog.split(\",\")).toEqual([\"install\", \"--no-optional\"]);\n@@ -56,7 +54,7 @@ describe(\"lerna bootstrap\", () => {\nconst cwd = await initFixture(\"BootstrapCommand/npm-client-args-2\");\nconst args = [\"bootstrap\", \"--npm-client\", path.resolve(cwd, \"npm\"), \"--\", \"--no-optional\"];\n- await execa(LERNA_BIN, args, { cwd });\n+ await cliRunner(cwd)(...args);\nconst npmDebugLog = await fs.readFile(path.resolve(cwd, \"npm-debug.log\"), \"utf8\");\nexpect(npmDebugLog.split(\",\")).toEqual([\"install\", \"--production\", \"--no-optional\"]);\n",
"new_path": "test/integration/lerna-bootstrap.test.js",
"old_path": "test/integration/lerna-bootstrap.test.js"
},
{
"change_type": "MODIFY",
"diff": "\"use strict\";\n-const execa = require(\"execa\");\nconst globby = require(\"globby\");\nconst normalizePath = require(\"normalize-path\");\nconst path = require(\"path\");\n-const { LERNA_BIN } = require(\"../helpers/constants\");\n+const cliRunner = require(\"../helpers/cli-runner\");\nconst initFixture = require(\"../helpers/initFixture\");\ndescribe(\"lerna clean\", () => {\n@@ -23,7 +22,7 @@ describe(\"lerna clean\", () => {\nconst cwd = await initFixture(\"CleanCommand/basic\");\nconst args = [\"clean\", \"--yes\", \"--concurrency=1\"];\n- const stderr = await execa.stderr(LERNA_BIN, args, { cwd });\n+ const { stderr } = await cliRunner(cwd)(...args);\nexpect(normalizeLog(cwd)(stderr)).toMatchSnapshot(\"stderr\");\nconst found = await globby([\"package-*/node_modules\"], { cwd });\n",
"new_path": "test/integration/lerna-clean.test.js",
"old_path": "test/integration/lerna-clean.test.js"
},
{
"change_type": "MODIFY",
"diff": "\"use strict\";\n-const execa = require(\"execa\");\n-\n-const { LERNA_BIN } = require(\"../helpers/constants\");\n+const cliRunner = require(\"../helpers/cli-runner\");\nconst initFixture = require(\"../helpers/initFixture\");\nconst initExecTest = require(\"../helpers/initExecTest\");\n@@ -22,7 +20,7 @@ describe(\"lerna exec\", () => {\n\"-1\",\n];\n- const { stdout } = await execa(LERNA_BIN, args, { cwd, env });\n+ const { stdout } = await cliRunner(cwd, env)(...args);\nexpect(stdout).toMatchSnapshot();\n});\n@@ -36,7 +34,7 @@ describe(\"lerna exec\", () => {\n// no args to exec-test\n];\n- const { stdout } = await execa(LERNA_BIN, args, { cwd, env });\n+ const { stdout } = await cliRunner(cwd, env)(...args);\nexpect(stdout).toMatchSnapshot();\n});\n@@ -49,7 +47,7 @@ describe(\"lerna exec\", () => {\nprocess.platform === \"win32\" ? \"%LERNA_PACKAGE_NAME%\" : \"$LERNA_PACKAGE_NAME\",\n];\n- const { stdout } = await execa(LERNA_BIN, args, { cwd });\n+ const { stdout } = await cliRunner(cwd)(...args);\nexpect(stdout).toMatchSnapshot();\n});\n@@ -63,7 +61,7 @@ describe(\"lerna exec\", () => {\n\"-C\",\n];\n- const { stdout, stderr } = await execa(LERNA_BIN, args, { cwd, env });\n+ const { stdout, stderr } = await cliRunner(cwd, env)(...args);\nexpect(stderr).toMatch(EXEC_TEST_COMMAND);\n// order is non-deterministic, so assert individually\n@@ -83,7 +81,7 @@ describe(\"lerna exec\", () => {\n\"-C\",\n];\n- const { stdout, stderr } = await execa(LERNA_BIN, args, { cwd, env });\n+ const { stdout, stderr } = await cliRunner(cwd, env)(...args);\nexpect(stderr).toMatch(EXEC_TEST_COMMAND);\n// order is non-deterministic, so assert individually\n@@ -97,7 +95,7 @@ describe(\"lerna exec\", () => {\nconst cwd = await initFixture(\"ExecCommand/basic\");\nconst args = [\"exec\", EXEC_TEST_COMMAND, \"--stream\", \"-C\"];\n- const { stdout } = await execa(LERNA_BIN, args, { cwd, env });\n+ const { stdout } = await cliRunner(cwd, env)(...args);\n// order is non-deterministic, so assert individually\nexpect(stdout).toMatch(\"package-1: file-1.js\");\n@@ -110,7 +108,7 @@ describe(\"lerna exec\", () => {\nconst cwd = await initFixture(\"ExecCommand/basic\");\nconst args = [\"exec\", \"--stream\", EXEC_TEST_COMMAND, \"-C\"];\n- const { stdout } = await execa(LERNA_BIN, args, { cwd, env });\n+ const { stdout } = await cliRunner(cwd, env)(...args);\n// order is non-deterministic, so assert individually\nexpect(stdout).toMatch(\"package-1: file-1.js\");\n@@ -123,7 +121,7 @@ describe(\"lerna exec\", () => {\nconst cwd = await initFixture(\"ExecCommand/basic\");\nconst args = [\"exec\", \"--bail=false\", \"--concurrency=1\", \"--\", \"npm\", \"run\", \"fail-or-succeed\"];\n- const { stdout, stderr } = await execa(LERNA_BIN, args, { cwd });\n+ const { stdout, stderr } = await cliRunner(cwd)(...args);\nexpect(stderr).toMatch(\"Failed at the package-1@1.0.0 fail-or-succeed script\");\nexpect(stdout).toMatch(\"failure!\");\nexpect(stdout).toMatch(\"success!\");\n@@ -133,7 +131,7 @@ describe(\"lerna exec\", () => {\nconst cwd = await initFixture(\"ExecCommand/basic\");\nconst args = [\"exec\", \"--no-bail\", \"--concurrency=1\", \"--\", \"npm\", \"run\", \"fail-or-succeed\"];\n- const { stdout, stderr } = await execa(LERNA_BIN, args, { cwd });\n+ const { stdout, stderr } = await cliRunner(cwd)(...args);\nexpect(stderr).toMatch(\"Failed at the package-1@1.0.0 fail-or-succeed script\");\nexpect(stdout).toMatch(\"failure!\");\nexpect(stdout).toMatch(\"success!\");\n",
"new_path": "test/integration/lerna-exec.test.js",
"old_path": "test/integration/lerna-exec.test.js"
},
{
"change_type": "MODIFY",
"diff": "\"use strict\";\n-const execa = require(\"execa\");\n-\n-const { LERNA_BIN } = require(\"../helpers/constants\");\n+const cliRunner = require(\"../helpers/cli-runner\");\nconst initFixture = require(\"../helpers/initFixture\");\nconst loadPkgManifests = require(\"../helpers/loadPkgManifests\");\n@@ -13,9 +11,7 @@ describe(\"lerna import\", () => {\ninitFixture(\"ImportCommand/basic\"),\n]);\n- const args = [\"import\", externalPath, \"--yes\"];\n-\n- await execa(LERNA_BIN, args, { cwd });\n+ await cliRunner(cwd)(\"import\", externalPath, \"--yes\");\nconst allPackageJsons = await loadPkgManifests(cwd);\nexpect(allPackageJsons).toMatchSnapshot();\n",
"new_path": "test/integration/lerna-import.test.js",
"old_path": "test/integration/lerna-import.test.js"
},
{
"change_type": "MODIFY",
"diff": "\"use strict\";\n-const execa = require(\"execa\");\nconst loadJsonFile = require(\"load-json-file\");\nconst path = require(\"path\");\nconst tempy = require(\"tempy\");\n-const { LERNA_BIN } = require(\"../helpers/constants\");\n+const cliRunner = require(\"../helpers/cli-runner\");\nconst initFixture = require(\"../helpers/initFixture\");\ndescribe(\"lerna init\", () => {\n@@ -16,7 +15,7 @@ describe(\"lerna init\", () => {\ntest(\"initializes empty directory\", async () => {\nconst cwd = tempy.directory();\n- const stderr = await execa.stderr(LERNA_BIN, [\"init\"], { cwd });\n+ const { stderr } = await cliRunner(cwd)(\"init\");\nexpect(stderr).toMatchSnapshot(\"stderr\");\nconst [packageJson, lernaJson] = await loadMetaData(cwd);\n@@ -27,7 +26,7 @@ describe(\"lerna init\", () => {\ntest(\"updates existing metadata\", async () => {\nconst cwd = await initFixture(\"InitCommand/updates\");\n- const stderr = await execa.stderr(LERNA_BIN, [\"init\", \"--exact\"], { cwd });\n+ const { stderr } = await cliRunner(cwd)(\"init\", \"--exact\");\nexpect(stderr).toMatchSnapshot(\"stderr\");\nconst [packageJson, lernaJson] = await loadMetaData(cwd);\n",
"new_path": "test/integration/lerna-init.test.js",
"old_path": "test/integration/lerna-init.test.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -10,7 +10,7 @@ const tempWrite = require(\"temp-write\");\nconst path = require(\"path\");\nconst os = require(\"os\");\n-const { LERNA_BIN } = require(\"../helpers/constants\");\n+const cliRunner = require(\"../helpers/cli-runner\");\nconst initFixture = require(\"../helpers/initFixture\");\nconst loadPkgManifests = require(\"../helpers/loadPkgManifests\");\n@@ -26,8 +26,6 @@ expect.addSnapshotSerializer({\n},\n});\n-const runner = cwd => (...args) => execa(LERNA_BIN, args, { cwd });\n-\nconst lastCommitMessage = cwd =>\nexeca.stdout(\"git\", [\"log\", \"-1\", \"--format=%B\"], { cwd }).then(normalizeNewline);\n@@ -68,7 +66,7 @@ describe(\"lerna publish\", () => {\nawait execa(\"git\", [\"tag\", \"-a\", \"v1.0.0\", \"-m\", \"v1.0.0\"], { cwd });\n- const { code, stdout } = await runner(cwd)(...args);\n+ const { code, stdout } = await cliRunner(cwd)(...args);\nexpect(code).toBe(0);\nexpect(stdout).toBe(\"\");\n@@ -78,7 +76,7 @@ describe(\"lerna publish\", () => {\nconst cwd = await initFixture(\"PublishCommand/normal\");\nconst args = [\"publish\", \"--skip-npm\", \"--cd-version=patch\", \"--yes\"];\n- const { stdout } = await runner(cwd)(...args);\n+ const { stdout } = await cliRunner(cwd)(...args);\nexpect(stdout).toMatchSnapshot(\"stdout\");\nconst [allPackageJsons, commitMessage] = await Promise.all([\n@@ -97,7 +95,7 @@ describe(\"lerna publish\", () => {\nawait execa(\"git\", [\"tag\", \"v1.0.0\", \"-m\", \"v1.0.0\"], { cwd });\nawait commitChangeToPackage(cwd, \"package-1\", \"change\", { change: true });\n- await runner(cwd)(...args);\n+ await cliRunner(cwd)(...args);\nexpect(await loadPkgManifests(cwd)).toMatchSnapshot();\n});\n@@ -106,7 +104,7 @@ describe(\"lerna publish\", () => {\nconst cwd = await initFixture(\"PublishCommand/normal\");\nconst args = [\"publish\", \"--canary\", \"--skip-npm\", \"--yes\"];\n- const { stdout } = await runner(cwd)(...args);\n+ const { stdout } = await cliRunner(cwd)(...args);\nexpect(stdout).toMatchSnapshot(\"stdout\");\n});\n@@ -114,7 +112,7 @@ describe(\"lerna publish\", () => {\nconst cwd = await initFixture(\"PublishCommand/independent\");\nconst args = [\"publish\", \"--skip-npm\", \"--cd-version=major\", \"--yes\"];\n- const { stdout } = await runner(cwd)(...args);\n+ const { stdout } = await cliRunner(cwd)(...args);\nexpect(stdout).toMatchSnapshot(\"stdout\");\nconst [allPackageJsons, commitMessage] = await Promise.all([\n@@ -144,7 +142,7 @@ describe(\"lerna publish\", () => {\n// conventional-recommended-bump is incapable of accepting cwd config :P\nprocess.chdir(cwd);\n- const { stdout } = await runner(cwd)(...args);\n+ const { stdout } = await cliRunner(cwd)(...args);\nexpect(stdout).toMatchSnapshot();\nconst changelogFilePaths = await globby([\"CHANGELOG.md\"], { cwd, absolute: true, matchBase: true });\n@@ -160,7 +158,7 @@ describe(\"lerna publish\", () => {\nawait execa(\"git\", [\"tag\", \"v1.0.0\", \"-m\", \"v1.0.0\"], { cwd });\nawait commitChangeToPackage(cwd, \"package-1\", \"feat(package-1): Add foo\", { foo: true });\n- await runner(cwd)(\"publish\", \"--cd-version=major\", \"--skip-npm\", \"--yes\");\n+ await cliRunner(cwd)(\"publish\", \"--cd-version=major\", \"--skip-npm\", \"--yes\");\nexpect(\nawait execa.stdout(\"git\", [\"show\", \"--unified=0\", \"--ignore-space-at-eol\", \"--format=%s\"], { cwd })\n",
"new_path": "test/integration/lerna-publish.test.js",
"old_path": "test/integration/lerna-publish.test.js"
},
{
"change_type": "MODIFY",
"diff": "\"use strict\";\n-const execa = require(\"execa\");\n-\n-const { LERNA_BIN } = require(\"../helpers/constants\");\n+const cliRunner = require(\"../helpers/cli-runner\");\nconst initFixture = require(\"../helpers/initFixture\");\n/**\n@@ -21,7 +19,7 @@ describe(\"lerna run\", () => {\n\"--\",\n\"--silent\",\n];\n- const { stdout, stderr } = await execa(LERNA_BIN, args, { cwd });\n+ const { stdout, stderr } = await cliRunner(cwd)(...args);\nexpect(stdout).toBe(\"package-1\");\nexpect(stderr).toMatchSnapshot(\"stderr\");\n});\n@@ -38,7 +36,7 @@ describe(\"lerna run\", () => {\n\"--\",\n\"--silent\",\n];\n- const { stdout, stderr } = await execa(LERNA_BIN, args, { cwd });\n+ const { stdout, stderr } = await cliRunner(cwd)(...args);\nexpect(stdout).toBe(\"package-4\");\nexpect(stderr).toMatchSnapshot(\"stderr\");\n});\n@@ -54,7 +52,7 @@ describe(\"lerna run\", () => {\n\"--\",\n\"--silent\",\n];\n- const { stdout, stderr } = await execa(LERNA_BIN, args, { cwd });\n+ const { stdout, stderr } = await cliRunner(cwd)(...args);\nexpect(stdout).toMatchSnapshot(\"stdout\");\nexpect(stderr).toMatchSnapshot(\"stderr\");\n});\n@@ -69,7 +67,7 @@ describe(\"lerna run\", () => {\n\"--\",\n\"--silent\",\n];\n- const { stdout, stderr } = await execa(LERNA_BIN, args, { cwd });\n+ const { stdout, stderr } = await cliRunner(cwd)(...args);\nexpect(stderr).toMatchSnapshot(\"stderr\");\n// order is non-deterministic, so assert each item seperately\n@@ -89,7 +87,7 @@ describe(\"lerna run\", () => {\n\"--\",\n\"--silent\",\n];\n- const { stdout, stderr } = await execa(LERNA_BIN, args, { cwd });\n+ const { stdout, stderr } = await cliRunner(cwd)(...args);\nexpect(stderr).toMatchSnapshot(\"stderr\");\n// order is non-deterministic, so assert each item seperately\n",
"new_path": "test/integration/lerna-run.test.js",
"old_path": "test/integration/lerna-run.test.js"
}
] |
JavaScript
|
MIT License
|
lerna/lerna
|
chore: DRY up integration tests with cli-runner
| 1
|
chore
| null |
815,745
|
16.02.2018 21:05:18
| -7,200
|
f94f4e6a0225b4a076cf38a5caffde19c1a5c7c3
|
fix: IE styling issues
|
[
{
"change_type": "MODIFY",
"diff": "@@ -62,6 +62,7 @@ $color-selected: #f5faff;\nleft: 0;\npadding-left: 10px;\npadding-right: 50px;\n+ top: 5px;\n}\n}\n}\n@@ -112,6 +113,7 @@ $color-selected: #f5faff;\npadding-left: 3px;\n}\n.ng-placeholder {\n+ top: 5px;\npadding-bottom: 5px;\npadding-left: 3px;\n}\n",
"new_path": "src/themes/default.theme.scss",
"old_path": "src/themes/default.theme.scss"
}
] |
TypeScript
|
MIT License
|
ng-select/ng-select
|
fix: IE styling issues
| 1
|
fix
| null |
679,913
|
17.02.2018 01:12:42
| 0
|
6f6e7e5be3cc7879285e0ca9703005a642f296be
|
feat(atom): add getIn/setIn/updateIn
|
[
{
"change_type": "MODIFY",
"diff": "import { isArray } from \"@thi.ng/checks/is-array\";\nimport { isString } from \"@thi.ng/checks/is-string\";\n+import { SwapFn } from \"./api\";\n+\nfunction compS(k, f) {\nreturn (s, v) => ({ ...s, [k]: f((s || {})[k], v) });\n}\n@@ -91,3 +93,52 @@ export function setter(path: PropertyKey | PropertyKey[]) {\n}\nreturn f;\n}\n+\n+/**\n+ * Immediate use getter, i.e. same as: `getter(path)(state)`.\n+ *\n+ * ```\n+ * getIn({a: {b: {c: 23}}}, \"a.b.c\");\n+ * // 23\n+ * ```\n+ *\n+ * @param state\n+ * @param path\n+ */\n+export function getIn(state: any, path: PropertyKey | PropertyKey[]) {\n+ return getter(path)(state);\n+}\n+\n+/**\n+ * Immediate use setter, i.e. same as: `setter(path)(state, val)`.\n+ *\n+ * ```\n+ * setIn({}, \"a.b.c\", 23);\n+ * // {a: {b: {c: 23}}}\n+ * ```\n+ *\n+ * @param state\n+ * @param path\n+ */\n+export function setIn(state: any, path: PropertyKey | PropertyKey[], val: any) {\n+ return setter(path)(state, val);\n+}\n+\n+/**\n+ * Similar to `setIn()`, but applies given function to current path\n+ * value (incl. any additional/optional arguments passed to `updateIn`)\n+ * and uses result as new value. Does not modify original state (unless\n+ * given function does so itself).\n+ *\n+ * ```\n+ * add = (x, y) => x + y;\n+ * updateIn({a: {b: {c: 23}}}, \"a.b.c\", add, 10);\n+ * // {a: {b: {c: 33}}}\n+ * ```\n+ *\n+ * @param state\n+ * @param path\n+ */\n+export function updateIn(state: any, path: PropertyKey | PropertyKey[], fn: SwapFn<any>, ...args: any[]) {\n+ return setter(path)(state, fn.apply(null, [getter(path)(state), ...args]));\n+}\n",
"new_path": "packages/atom/src/path.ts",
"old_path": "packages/atom/src/path.ts"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
feat(atom): add getIn/setIn/updateIn
| 1
|
feat
|
atom
|
679,913
|
17.02.2018 14:28:12
| 0
|
398c32af1cac98db5db7623ba6af10767ef697b9
|
refactor(atom): don't mutate getter/setter path args, update updateIn
|
[
{
"change_type": "MODIFY",
"diff": "@@ -34,9 +34,9 @@ function compG(k, f) {\n*/\nexport function getter(path: PropertyKey | PropertyKey[]) {\nconst ks = isArray(path) ? path : isString(path) ? path.split(\".\") : [path];\n- const kl = ks.pop();\n+ const kl = ks[ks.length - 1];\nlet f = (s) => s ? s[kl] : undefined;\n- for (let i = ks.length - 1; i >= 0; i--) {\n+ for (let i = ks.length - 2; i >= 0; i--) {\nf = compG(ks[i], f);\n}\nreturn f;\n@@ -86,9 +86,9 @@ export function getter(path: PropertyKey | PropertyKey[]) {\n*/\nexport function setter(path: PropertyKey | PropertyKey[]) {\nconst ks = isArray(path) ? path : isString(path) ? path.split(\".\") : [path];\n- const kl = ks.pop();\n+ const kl = ks[ks.length - 1];\nlet f = (s, v) => ({ ...(s || {}), [kl]: v });\n- for (let i = ks.length - 1; i >= 0; i--) {\n+ for (let i = ks.length - 2; i >= 0; i--) {\nf = compS(ks[i], f);\n}\nreturn f;\n@@ -140,5 +140,6 @@ export function setIn(state: any, path: PropertyKey | PropertyKey[], val: any) {\n* @param path\n*/\nexport function updateIn(state: any, path: PropertyKey | PropertyKey[], fn: SwapFn<any>, ...args: any[]) {\n- return setter(path)(state, fn.apply(null, [getter(path)(state), ...args]));\n+ args.unshift(getIn(state, path));\n+ return setter(path)(state, fn.apply(null, args));\n}\n",
"new_path": "packages/atom/src/path.ts",
"old_path": "packages/atom/src/path.ts"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
refactor(atom): don't mutate getter/setter path args, update updateIn
| 1
|
refactor
|
atom
|
679,913
|
17.02.2018 14:30:00
| 0
|
b5ee8e45e76c760cb2d746888fe8b248c8175350
|
refactor(atom): use as default History predicate
|
[
{
"change_type": "MODIFY",
"diff": "import { Predicate2, Watch } from \"@thi.ng/api/api\";\n+import { equiv } from \"@thi.ng/api/equiv\";\nimport { IAtom, SwapFn } from \"./api\";\n@@ -22,12 +23,12 @@ export class History<T> implements\n/**\n* @param state parent state\n* @param maxLen max size of undo stack\n- * @param changed predicate to determine changed values (default `!==`)\n+ * @param changed predicate to determine changed values (default `!equiv(a,b)`)\n*/\nconstructor(state: IAtom<T>, maxLen = 100, changed?: Predicate2<T>) {\nthis.state = state;\nthis.maxLen = maxLen;\n- this.changed = changed || ((a, b) => a !== b);\n+ this.changed = changed || ((a, b) => !equiv(a, b));\nthis.clear();\n}\n",
"new_path": "packages/atom/src/history.ts",
"old_path": "packages/atom/src/history.ts"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
refactor(atom): use @thi.ng/api/equiv as default History predicate
| 1
|
refactor
|
atom
|
679,913
|
17.02.2018 14:31:14
| 0
|
7d50c1ca27402c883d4a376e456f07a3c42d2cb0
|
refactor(examples): simplify todo atom handling
|
[
{
"change_type": "MODIFY",
"diff": "*/\nimport { IObjectOf } from \"@thi.ng/api/api\";\n-import { equiv } from \"@thi.ng/api/equiv\";\nimport * as atom from \"@thi.ng/atom\";\nimport { start } from \"@thi.ng/hiccup-dom/start\";\nimport { iterator } from \"@thi.ng/transducers/iterator\";\n@@ -25,16 +24,16 @@ interface Task {\n// central app state (immutable)\nconst db = new atom.Atom({ tasks: {}, nextID: 0 });\n// attach undo/redo history for `tasks` branch (arbitrary undo limit of 100 steps)\n-const tasks = new atom.History<IObjectOf<Task>>(new atom.Cursor(db, \"tasks\"), 100, (a, b) => !equiv(a, b));\n+const tasks = new atom.History<IObjectOf<Task>>(new atom.Cursor(db, \"tasks\"), 100);\n// cursor for direct access to `nextID`\nconst nextID = new atom.Cursor<number>(db, \"nextID\");\n// state updaters\n// each applies it's updates via the history atom wrapper\n// the `atom.setter` calls produce an immutable update function for given paths\n-const addNewTask = () => tasks.swap((s) => atom.setter(nextID.swap((id) => id + 1))(s, { body: \"\", done: false }));\n-const toggleTask = (id) => tasks.swap((tasks) => atom.setter([id, \"done\"])(tasks, !tasks[id].done));\n-const updateTask = (id, body) => tasks.swap((tasks) => atom.setter([id, \"body\"])(tasks, body));\n+const addNewTask = () => tasks.swap((tasks) => atom.setIn(tasks, nextID.swap((id) => id + 1), { body: \"\", done: false }));\n+const toggleTask = (id) => tasks.swap((tasks) => atom.updateIn(tasks, [id, \"done\"], done => !done));\n+const updateTask = (id, body) => tasks.swap((tasks) => atom.setIn(tasks, [id, \"body\"], body));\n// single task component\n// the text field uses lifecycle hooks to set keyboard focus for new tasks\n",
"new_path": "examples/todo-list/src/index.ts",
"old_path": "examples/todo-list/src/index.ts"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
refactor(examples): simplify todo atom handling
| 1
|
refactor
|
examples
|
679,913
|
17.02.2018 22:43:18
| 0
|
fbc819edb027fa01512e19be8d54187cd69e6f14
|
fix(atom): empty path handling getter/setter
|
[
{
"change_type": "MODIFY",
"diff": "@@ -11,6 +11,10 @@ function compG(k, f) {\nreturn (s) => s ? f(s[k]) : undefined;\n}\n+function toPath(path: PropertyKey | PropertyKey[]) {\n+ return isArray(path) ? path : isString(path) ? path.split(\".\") : [path];\n+}\n+\n/**\n* Composes a getter function for given nested lookup path.\n* If `path` is given as string, it will be split using `.`.\n@@ -20,6 +24,11 @@ function compG(k, f) {\n* If any intermediate key is not present in the given obj,\n* descent stops and the function returns `undefined`.\n*\n+ * If `path` is an empty array, the returned getter will simply\n+ * return the given state arg (identity function).\n+ *\n+ * Also see: `getIn()`\n+ *\n* ```\n* g = getter(\"a.b.c\");\n* // or\n@@ -33,7 +42,8 @@ function compG(k, f) {\n* @param path\n*/\nexport function getter(path: PropertyKey | PropertyKey[]) {\n- const ks = isArray(path) ? path : isString(path) ? path.split(\".\") : [path];\n+ const ks = toPath(path);\n+ if (ks.length > 0) {\nconst kl = ks[ks.length - 1];\nlet f = (s) => s ? s[kl] : undefined;\nfor (let i = ks.length - 2; i >= 0; i--) {\n@@ -41,6 +51,8 @@ export function getter(path: PropertyKey | PropertyKey[]) {\n}\nreturn f;\n}\n+ return (s) => s;\n+}\n/**\n* Composes a setter function for given nested lookup path.\n@@ -52,6 +64,11 @@ export function getter(path: PropertyKey | PropertyKey[]) {\n* If any intermediate key is not present in the given obj,\n* creates a plain empty object for that key and descends further.\n*\n+ * If `path` is an empty array, the returned setter will simply\n+ * return the new value.\n+ *\n+ * Also see: `setIn()`, `updateIn()`, `deleteIn()`\n+ *\n* ```\n* s = setter(\"a.b.c\");\n* // or\n@@ -85,7 +102,8 @@ export function getter(path: PropertyKey | PropertyKey[]) {\n* @param path\n*/\nexport function setter(path: PropertyKey | PropertyKey[]) {\n- const ks = isArray(path) ? path : isString(path) ? path.split(\".\") : [path];\n+ const ks = toPath(path);\n+ if (ks.length > 0) {\nconst kl = ks[ks.length - 1];\nlet f = (s, v) => ({ ...(s || {}), [kl]: v });\nfor (let i = ks.length - 2; i >= 0; i--) {\n@@ -93,6 +111,8 @@ export function setter(path: PropertyKey | PropertyKey[]) {\n}\nreturn f;\n}\n+ return (_, v) => v;\n+}\n/**\n* Immediate use getter, i.e. same as: `getter(path)(state)`.\n@@ -143,3 +163,4 @@ export function updateIn(state: any, path: PropertyKey | PropertyKey[], fn: Swap\nargs.unshift(getIn(state, path));\nreturn setter(path)(state, fn.apply(null, args));\n}\n+\n",
"new_path": "packages/atom/src/path.ts",
"old_path": "packages/atom/src/path.ts"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
fix(atom): empty path handling getter/setter
| 1
|
fix
|
atom
|
679,913
|
17.02.2018 22:43:53
| 0
|
b593a9b903f71095c49d76dc18e8bf2e4a5371b5
|
feat(atom): add deleteIn()
|
[
{
"change_type": "MODIFY",
"diff": "@@ -164,3 +164,24 @@ export function updateIn(state: any, path: PropertyKey | PropertyKey[], fn: Swap\nreturn setter(path)(state, fn.apply(null, args));\n}\n+/**\n+ * Uses `updateIn()` and returns updated state with key for given path removed.\n+ * Does not modify original state.\n+ *\n+ * Returns `undefined` if `path` is an empty array.\n+ *\n+ * ```\n+ * deleteIn({a:{b:{c: 23}}}, \"a.b.c\");\n+ * // {a: {b: {}}}\n+ * ```\n+ *\n+ * @param state\n+ * @param path\n+ */\n+export function deleteIn(state: any, path: PropertyKey | PropertyKey[]) {\n+ const ks = [...toPath(path)];\n+ if (ks.length > 0) {\n+ const k = ks.pop();\n+ return updateIn(state, ks, (x) => { x = { ...x }; delete x[k]; return x; });\n+ }\n+}\n",
"new_path": "packages/atom/src/path.ts",
"old_path": "packages/atom/src/path.ts"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
feat(atom): add deleteIn()
| 1
|
feat
|
atom
|
679,913
|
17.02.2018 23:02:36
| 0
|
2e34f389ac455be5f42a1cd616c3e5e872b431a7
|
refactor(atom): update history reset/swap, record before applying val
|
[
{
"change_type": "MODIFY",
"diff": "@@ -85,8 +85,8 @@ export class History<T> implements\n*/\nreset(val: T) {\nconst prev = this.state.deref();\n- this.state.reset(val);\nthis.changed(prev, val) && this.record(prev);\n+ this.state.reset(val);\nreturn val;\n}\n@@ -98,10 +98,7 @@ export class History<T> implements\n* @param val\n*/\nswap(fn: SwapFn<T>, ...args: any[]): T {\n- const prev = this.state.deref();\n- const curr = this.state.swap.apply(this.state, [fn, ...args]);\n- this.changed(prev, curr) && this.record(prev);\n- return curr;\n+ return this.reset(fn.apply(null, [this.state.deref(), ...args]));\n}\n/**\n@@ -119,6 +116,8 @@ export class History<T> implements\nif (this.history.length >= this.maxLen) {\nthis.history.shift();\n}\n+ // check for arg given and not if `state == null`\n+ // we want to allow null/undefined as possible values\nthis.history.push(arguments.length > 0 ? state : this.state.deref());\nthis.future.length = 0;\n}\n",
"new_path": "packages/atom/src/history.ts",
"old_path": "packages/atom/src/history.ts"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
refactor(atom): update history reset/swap, record before applying val
| 1
|
refactor
|
atom
|
679,913
|
18.02.2018 14:23:30
| 0
|
63a4953027381266009040f69496a344430a5c2c
|
refactor(transducers): add/update range2d/range3d arity handling
|
[
{
"change_type": "MODIFY",
"diff": "import { range } from \"./range\";\n+export function range2d(toX: number, toY: number): IterableIterator<[number, number]>;\nexport function range2d(fromX: number, toX: number, fromY: number, toY: number): IterableIterator<[number, number]>;\nexport function range2d(fromX: number, toX: number, fromY: number, toY: number, stepX: number, stepY: number): IterableIterator<[number, number]>;\nexport function* range2d(...args: number[]) {\n- let [fromX, toX, fromY, toY] = args;\n- let stepX, stepY;\n+ let fromX, toX, fromY, toY, stepX, stepY;\nswitch (args.length) {\n- case 4:\n- break;\ncase 6:\nstepX = args[4];\nstepY = args[5];\n+ case 4:\n+ [fromX, toX, fromY, toY] = args;\n+ break;\n+ case 2:\n+ [toX, toY] = args;\n+ fromX = fromY = 0;\nbreak;\ndefault:\nthrow new Error(`invalid arity: ${args.length}`);\n",
"new_path": "packages/transducers/src/iter/range2d.ts",
"old_path": "packages/transducers/src/iter/range2d.ts"
},
{
"change_type": "MODIFY",
"diff": "import { range } from \"./range\";\n+export function range3d(toX: number, toY: number, toZ: number): IterableIterator<[number, number, number]>;\nexport function range3d(fromX: number, toX: number, fromY: number, toY: number, fromZ: number, toZ: number): IterableIterator<[number, number, number]>;\nexport function range3d(fromX: number, toX: number, fromY: number, toY: number, fromZ: number, toZ: number, stepX: number, stepY: number, stepZ: number): IterableIterator<[number, number, number]>;\nexport function* range3d(...args: number[]) {\n- let [fromX, toX, fromY, toY, fromZ, toZ] = args;\n- let stepX, stepY, stepZ;\n+ let fromX, toX, fromY, toY, fromZ, toZ, stepX, stepY, stepZ;\nswitch (args.length) {\n- case 6:\n- break;\ncase 9:\nstepX = args[6];\nstepY = args[7];\nstepZ = args[8];\n+ case 6:\n+ [fromX, toX, fromY, toY, fromZ, toZ] = args;\n+ break;\n+ case 3:\n+ [toX, toY, toZ] = args;\n+ fromX = fromY = fromZ = 0;\nbreak;\ndefault:\nthrow new Error(`invalid arity: ${args.length}`);\n",
"new_path": "packages/transducers/src/iter/range3d.ts",
"old_path": "packages/transducers/src/iter/range3d.ts"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
refactor(transducers): add/update range2d/range3d arity handling
| 1
|
refactor
|
transducers
|
807,849
|
18.02.2018 18:04:15
| 28,800
|
e93970458d1bc75c4b14de5ec45b73a1fd985406
|
refactor: splitVersion -> npm-package-arg
|
[
{
"change_type": "MODIFY",
"diff": "const path = require(\"path\");\nconst dedent = require(\"dedent\");\n+const npa = require(\"npm-package-arg\");\nconst packageJson = require(\"package-json\");\nconst readPkg = require(\"read-pkg\");\nconst semver = require(\"semver\");\n@@ -9,7 +10,6 @@ const writePkg = require(\"write-pkg\");\nconst BootstrapCommand = require(\"./BootstrapCommand\");\nconst Command = require(\"../Command\");\n-const splitVersion = require(\"../utils/splitVersion\");\nconst ValidationError = require(\"../utils/ValidationError\");\nexports.command = \"add [pkgNames..]\";\n@@ -39,11 +39,10 @@ class AddCommand extends Command {\n}\ninitialize(callback) {\n- const pkgs = this.options.pkgNames\n- .filter(input => typeof input === \"string\" && input.trim() !== \"\")\n- .map(input => splitVersion(input) || [input, \"latest\"])\n- .filter(split => Array.isArray(split))\n- .map(([name, versionRange = \"latest\"]) => ({ name, versionRange }));\n+ const pkgs = this.options.pkgNames.map(input => {\n+ const { name, fetchSpec: versionRange } = npa(input);\n+ return { name, versionRange };\n+ });\nif (pkgs.length === 0) {\nconst err = new ValidationError(\"EINPUT\", \"Missing list of packages to add to your project.\");\n",
"new_path": "src/commands/AddCommand.js",
"old_path": "src/commands/AddCommand.js"
},
{
"change_type": "MODIFY",
"diff": "\"use strict\";\nconst log = require(\"npmlog\");\n+const npa = require(\"npm-package-arg\");\nconst onExit = require(\"signal-exit\");\nconst path = require(\"path\");\nconst writePkg = require(\"write-pkg\");\nconst ChildProcessUtilities = require(\"../ChildProcessUtilities\");\nconst FileSystemUtilities = require(\"../FileSystemUtilities\");\n-const splitVersion = require(\"./splitVersion\");\nconst getExecOpts = require(\"./get-npm-exec-opts\");\nmodule.exports = npmInstall;\n@@ -78,7 +78,7 @@ function installInDir(directory, dependencies, config, callback) {\n// Construct a basic fake package.json with just the deps we need to install.\nconst tempJson = {\ndependencies: dependencies.reduce((obj, dep) => {\n- const [pkg, version] = splitVersion(dep);\n+ const { name: pkg, rawSpec: version } = npa(dep);\nobj[pkg] = version || \"*\";\nreturn obj;\n}, {}),\n",
"new_path": "src/utils/npm-install.js",
"old_path": "src/utils/npm-install.js"
},
{
"change_type": "DELETE",
"diff": "-\"use strict\";\n-\n-module.exports = splitVersion;\n-\n-// Take a dep like \"foo@^1.0.0\".\n-// Return a tuple like [\"foo\", \"^1.0.0\"].\n-// Handles scoped packages.\n-// Returns undefined for version if none specified.\n-function splitVersion(dep) {\n- return dep.match(/^(@?[^@]+)(?:@(.+))?/).slice(1, 3);\n-}\n",
"new_path": null,
"old_path": "src/utils/splitVersion.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -331,7 +331,7 @@ describe(\"npm-install\", () => {\nit(\"cleans up synchronously after writeFile error\", done => {\nconst directory = path.normalize(\"/test/npm-install-deps/writeError\");\n- const dependencies = [\"I'm just here so we don't exit early\"];\n+ const dependencies = [\"just-here-so-we-dont-exit-early\"];\nconst config = {};\nwritePkg.mockRejectedValueOnce(new Error(\"Unable to write file\"));\n@@ -354,7 +354,7 @@ describe(\"npm-install\", () => {\nit(\"cleans up synchronously after client install error\", done => {\nconst directory = path.normalize(\"/test/npm-install-deps/clientError\");\n- const dependencies = [\"I'm just here so we don't exit early\"];\n+ const dependencies = [\"just-here-so-we-dont-exit-early\"];\nconst config = {};\nChildProcessUtilities.exec.mockRejectedValueOnce(new Error(\"Unable to install dependency\"));\n",
"new_path": "test/utils-npm-install.test.js",
"old_path": "test/utils-npm-install.test.js"
}
] |
JavaScript
|
MIT License
|
lerna/lerna
|
refactor: splitVersion -> npm-package-arg
| 1
|
refactor
| null |
807,849
|
18.02.2018 18:18:37
| 28,800
|
d2f6d1b579202c97903cafc85043bdc1ec68ad36
|
chore: rename src/utils camelCase -> kebab-case
|
[
{
"change_type": "MODIFY",
"diff": "@@ -9,12 +9,12 @@ const GitUtilities = require(\"./GitUtilities\");\nconst GitVersionParser = require(\"./GitVersionParser\");\nconst PackageGraph = require(\"./PackageGraph\");\nconst Repository = require(\"./Repository\");\n-const writeLogFile = require(\"./utils/writeLogFile\");\n+const writeLogFile = require(\"./utils/write-log-file\");\nconst UpdatedPackagesCollector = require(\"./UpdatedPackagesCollector\");\nconst VersionSerializer = require(\"./VersionSerializer\");\nconst collectPackages = require(\"./utils/collect-packages\");\nconst filterPackages = require(\"./utils/filter-packages\");\n-const ValidationError = require(\"./utils/ValidationError\");\n+const ValidationError = require(\"./utils/validation-error\");\n// handle log.success()\nlog.addLevel(\"success\", 3001, { fg: \"green\", bold: true });\n",
"new_path": "src/Command.js",
"old_path": "src/Command.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -10,7 +10,7 @@ const path = require(\"path\");\nconst semver = require(\"semver\");\nconst FileSystemUtilities = require(\"./FileSystemUtilities\");\n-const ValidationError = require(\"./utils/ValidationError\");\n+const ValidationError = require(\"./utils/validation-error\");\nconst cfgCache = new Map();\n",
"new_path": "src/ConventionalCommitUtilities.js",
"old_path": "src/ConventionalCommitUtilities.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -7,7 +7,7 @@ const loadJsonFile = require(\"load-json-file\");\nconst log = require(\"npmlog\");\nconst path = require(\"path\");\n-const ValidationError = require(\"./utils/ValidationError\");\n+const ValidationError = require(\"./utils/validation-error\");\nconst Package = require(\"./Package\");\nconst DEFAULT_PACKAGE_GLOB = \"packages/*\";\n",
"new_path": "src/Repository.js",
"old_path": "src/Repository.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -10,7 +10,7 @@ const writePkg = require(\"write-pkg\");\nconst BootstrapCommand = require(\"./BootstrapCommand\");\nconst Command = require(\"../Command\");\n-const ValidationError = require(\"../utils/ValidationError\");\n+const ValidationError = require(\"../utils/validation-error\");\nexports.command = \"add [pkgNames..]\";\n",
"new_path": "src/commands/AddCommand.js",
"old_path": "src/commands/AddCommand.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -17,7 +17,7 @@ const hasMatchingDependency = require(\"../utils/has-matching-dependency\");\nconst runParallelBatches = require(\"../utils/run-parallel-batches\");\nconst symlinkBinary = require(\"../utils/symlink-binary\");\nconst symlinkDependencies = require(\"../utils/symlink-dependencies\");\n-const ValidationError = require(\"../utils/ValidationError\");\n+const ValidationError = require(\"../utils/validation-error\");\nexports.handler = function handler(argv) {\n// eslint-disable-next-line no-use-before-define\n",
"new_path": "src/commands/BootstrapCommand.js",
"old_path": "src/commands/BootstrapCommand.js"
},
{
"change_type": "MODIFY",
"diff": "const ChildProcessUtilities = require(\"../ChildProcessUtilities\");\nconst Command = require(\"../Command\");\nconst GitUtilities = require(\"../GitUtilities\");\n-const ValidationError = require(\"../utils/ValidationError\");\n+const ValidationError = require(\"../utils/validation-error\");\nexports.handler = function handler(argv) {\n// eslint-disable-next-line no-use-before-define\n",
"new_path": "src/commands/DiffCommand.js",
"old_path": "src/commands/DiffCommand.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -6,7 +6,7 @@ const ChildProcessUtilities = require(\"../ChildProcessUtilities\");\nconst Command = require(\"../Command\");\nconst batchPackages = require(\"../utils/batch-packages\");\nconst runParallelBatches = require(\"../utils/run-parallel-batches\");\n-const ValidationError = require(\"../utils/ValidationError\");\n+const ValidationError = require(\"../utils/validation-error\");\nexports.handler = function handler(argv) {\n// eslint-disable-next-line no-use-before-define\n",
"new_path": "src/commands/ExecCommand.js",
"old_path": "src/commands/ExecCommand.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -24,7 +24,7 @@ const npmDistTag = require(\"../utils/npm-dist-tag\");\nconst npmPublish = require(\"../utils/npm-publish\");\nconst npmRunScript = require(\"../utils/npm-run-script\");\nconst batchPackages = require(\"../utils/batch-packages\");\n-const ValidationError = require(\"../utils/ValidationError\");\n+const ValidationError = require(\"../utils/validation-error\");\nexports.handler = function handler(argv) {\n// eslint-disable-next-line no-use-before-define\n",
"new_path": "src/commands/PublishCommand.js",
"old_path": "src/commands/PublishCommand.js"
},
{
"change_type": "MODIFY",
"diff": "const log = require(\"npmlog\");\nconst PackageGraph = require(\"../PackageGraph\");\n-const ValidationError = require(\"./ValidationError\");\n+const ValidationError = require(\"./validation-error\");\nmodule.exports = batchPackages;\n",
"new_path": "src/utils/batch-packages.js",
"old_path": "src/utils/batch-packages.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -5,7 +5,7 @@ const loadJsonFile = require(\"load-json-file\");\nconst path = require(\"path\");\nconst Package = require(\"../Package\");\n-const ValidationError = require(\"./ValidationError\");\n+const ValidationError = require(\"./validation-error\");\nmodule.exports = collectPackages;\n",
"new_path": "src/utils/collect-packages.js",
"old_path": "src/utils/collect-packages.js"
},
{
"change_type": "MODIFY",
"diff": "\"use strict\";\nconst matchPackageName = require(\"./match-package-name\");\n-const ValidationError = require(\"./ValidationError\");\n+const ValidationError = require(\"./validation-error\");\nmodule.exports = filterPackages;\n",
"new_path": "src/utils/filter-packages.js",
"old_path": "src/utils/filter-packages.js"
},
{
"change_type": "RENAME",
"diff": "",
"new_path": "src/utils/validation-error.js",
"old_path": "src/utils/ValidationError.js"
},
{
"change_type": "RENAME",
"diff": "@@ -26,6 +26,7 @@ function writeLogFile(cwd) {\n});\n});\n+ // this must be synchronous because it is called before process exit\nwriteFileAtomic.sync(path.join(cwd, \"lerna-debug.log\"), logOutput);\n// truncate log after writing\n",
"new_path": "src/utils/write-log-file.js",
"old_path": "src/utils/writeLogFile.js"
}
] |
JavaScript
|
MIT License
|
lerna/lerna
|
chore: rename src/utils camelCase -> kebab-case
| 1
|
chore
| null |
679,913
|
18.02.2018 19:53:12
| 0
|
48f8bb857b29f19edf046756b1c73c882a7ef270
|
fix(transducers): update imports `step()`
|
[
{
"change_type": "MODIFY",
"diff": "import { Transducer } from \"./api\";\nimport { push } from \"./rfn/push\";\n-import { isReduced } from \"./index\";\n+import { isReduced } from \"./reduced\";\n/**\n* Single-step transducer execution wrapper.\n",
"new_path": "packages/transducers/src/step.ts",
"old_path": "packages/transducers/src/step.ts"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
fix(transducers): update imports `step()`
| 1
|
fix
|
transducers
|
679,913
|
18.02.2018 19:53:48
| 0
|
d7b1d0dfaae2af7d4c0d93eecde79cdc5f008a92
|
feat(transducers): add movingMedian() xform
|
[
{
"change_type": "ADD",
"diff": "+import { Comparator } from \"@thi.ng/api/api\";\n+import { compare } from \"@thi.ng/api/compare\";\n+\n+import { Transducer } from \"../api\";\n+import { comp } from \"../func/comp\";\n+import { identity } from \"../func/identity\";\n+\n+import { map } from \"./map\";\n+import { partition } from \"./partition\";\n+\n+export function movingMedian<A, B>(n: number, key: ((x: A) => B) = <any>identity, cmp: Comparator<B> = compare): Transducer<A, A> {\n+ const m = n >> 1;\n+ return comp(partition(n, 1, true), map((window) => [...window].sort((a, b) => cmp(key(a), key(b)))[m]));\n+}\n",
"new_path": "packages/transducers/src/xform/moving-median.ts",
"old_path": null
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
feat(transducers): add movingMedian() xform
| 1
|
feat
|
transducers
|
679,913
|
18.02.2018 19:55:04
| 0
|
ab8a85583cadba5e7e524ad586247004044ca042
|
feat(transducers): add convolve2d xform & types
|
[
{
"change_type": "MODIFY",
"diff": "@@ -16,4 +16,8 @@ export interface StructField extends Array<any> {\n[2]?: Fn<any[], any>;\n}\n+export type ConvolutionKernel1D = [number, number][];\n+export type ConvolutionKernel2D = [number, [number, number]][];\n+export type ConvolutionKernel3D = [number, [number, number, number]][];\n+\nexport const SEMAPHORE = Symbol(\"SEMAPHORE\");\n",
"new_path": "packages/transducers/src/api.ts",
"old_path": "packages/transducers/src/api.ts"
},
{
"change_type": "ADD",
"diff": "+import { ConvolutionKernel2D, Transducer } from \"../api\";\n+import { transduce } from \"../transduce\";\n+import { range2d } from \"../iter/range2d\";\n+import { tuples } from \"../iter/tuples\";\n+\n+import { add } from \"../rfn/add\";\n+\n+import { map } from \"./map\";\n+\n+export function buildKernel2d(weights: Iterable<number>, w: number, h: number): ConvolutionKernel2D {\n+ const w2 = w >> 1;\n+ const h2 = h >> 1;\n+ return [...tuples(weights, range2d(-w2, w2 + 1, -h2, h2 + 1))];\n+}\n+\n+function kernelLookup2d(src, x, y, width, height, wrap) {\n+ return wrap ?\n+ ([w, [ox, oy]]) => {\n+ const xx = x < -ox ? width + ox : x >= width - ox ? ox - 1 : x + ox;\n+ const yy = y < -oy ? height + oy : y >= height - oy ? oy - 1 : y + oy;\n+ return w * src[yy * width + xx];\n+ } :\n+ ([w, [ox, oy]]) => {\n+ return (x < -ox || y < -oy || x >= width - ox || y >= height - oy) ? 0 : w * src[(y + oy) * width + x + ox];\n+ }\n+}\n+\n+export function convolve2d(src: number[], width: number, height: number, kernel: ConvolutionKernel2D, wrap?: boolean): Transducer<number[], number>;\n+export function convolve2d(src: number[], width: number, height: number, weights: number[], kwidth: number, kheight: number, wrap?: boolean): Transducer<number[], number>;\n+export function convolve2d(src: number[], width: number, height: number, ...args: any[]): Transducer<number[], number> {\n+ let kernel;\n+ let wrap = false;\n+ switch (args.length) {\n+ case 1:\n+ case 2:\n+ [kernel, wrap] = args;\n+ break;\n+ case 4:\n+ wrap = args[3];\n+ case 3:\n+ kernel = buildKernel2d.apply(null, args);\n+ break;\n+ default:\n+ throw new Error(`illegal arity: ${args.length + 3}`);\n+ }\n+ return map(\n+ ([x, y]) =>\n+ transduce(\n+ map(kernelLookup2d(src, x, y, width, height, wrap)),\n+ add(),\n+ kernel\n+ )\n+ );\n+}\n",
"new_path": "packages/transducers/src/xform/convolve.ts",
"old_path": null
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
feat(transducers): add convolve2d xform & types
| 1
|
feat
|
transducers
|
679,913
|
18.02.2018 19:58:48
| 0
|
d023bd5fde8c13d50f6a21dd15656dfaf1ae95df
|
chore(transducers): update re-exports, readme
|
[
{
"change_type": "MODIFY",
"diff": "@@ -18,7 +18,7 @@ Lightweight transducer implementations for ES6 / TypeScript (~24KB minified, ful\n## About\n-This library provides altogether ~85 transducers, reducers and sequence\n+This library provides altogether ~90 transducers, reducers and sequence\ngenerators (iterators) for composing data transformation pipelines.\nThe overall concept and many of the core functions offered here are directly\n@@ -568,6 +568,8 @@ reducer and optional initial accumulator/result.\n#### `cat<T>(): Transducer<T[], T>`\n+#### `convolve2d(src: number[], width: number, height: number, weights: number[], kwidth: number, kheight: number, wrap?: boolean): Transducer<number[], number>`\n+\n#### `dedupe<T>(equiv?: (a: T, b: T) => boolean): Transducer<T, T>`\n#### `delayed<T>(t: number): Transducer<T, Promise<T>>`\n@@ -614,6 +616,8 @@ reducer and optional initial accumulator/result.\n#### `movingAverage(n: number): Transducer<number, number>`\n+#### `movingMedian<A, B>(n: number, key?: ((x: A) => B), cmp?: Comparator<B>): Transducer<A, A>`\n+\n#### `multiplex<T, A, B>(a: Transducer<T, A>, b: Transducer<T, B>...): Transducer<T, [A, B...]>`\n#### `multiplexObj<A, B>(xforms: IObjectOf<Transducer<A, any>>, rfn?: Reducer<B, [PropertyKey, any]>): Transducer<A, B>`\n@@ -722,6 +726,10 @@ reducer and optional initial accumulator/result.\n#### `range(from?: number, to?: number, step?: number): IterableIterator<number>`\n+#### `range2d(x1: number, x2: number, y1: number, y2: number, stepx?: number, stepy?: number): IterableIterator<number>`\n+\n+#### `range3d(x1: number, x2: number, y1: number, y2: number, z1: number, z2: number, stepx?: number, stepy?: number, stepz?: number): IterableIterator<number>`\n+\n#### `repeat<T>(x: T, n?: number): IterableIterator<T>`\n#### `repeatedly<T>(fn: () => T, n?: number): IterableIterator<T>`\n",
"new_path": "packages/transducers/README.md",
"old_path": "packages/transducers/README.md"
},
{
"change_type": "MODIFY",
"diff": "@@ -32,6 +32,7 @@ export * from \"./xform/base64\";\nexport * from \"./xform/benchmark\";\nexport * from \"./xform/bits\";\nexport * from \"./xform/cat\";\n+export * from \"./xform/convolve\";\nexport * from \"./xform/dedupe\";\nexport * from \"./xform/delayed\";\nexport * from \"./xform/distinct\";\n@@ -55,6 +56,7 @@ export * from \"./xform/map-nth\";\nexport * from \"./xform/map\";\nexport * from \"./xform/mapcat\";\nexport * from \"./xform/moving-average\";\n+export * from \"./xform/moving-median\";\nexport * from \"./xform/multiplex\";\nexport * from \"./xform/multiplex-obj\";\nexport * from \"./xform/noop\";\n",
"new_path": "packages/transducers/src/index.ts",
"old_path": "packages/transducers/src/index.ts"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
chore(transducers): update re-exports, readme
| 1
|
chore
|
transducers
|
679,913
|
18.02.2018 20:08:27
| 0
|
d6d3310d404eb4bad4be7f6c830d9553318c300d
|
feat(examples): add cellular automata example
|
[
{
"change_type": "ADD",
"diff": "+# Cellular automata\n+\n+[Live demo](http://demo.thi.ng/umbrella/cellular-automata/)\n+\n+```\n+git clone https://github.com/thi-ng/umbrella.git\n+cd umbrella/examples/cellular-automata\n+yarn install\n+yarn dev\n+```\n+\n+Once webpack has completed building, refresh your browser...\n+\n+## Example configurations\n+\n+- [Conway (default)](http://demo.thi.ng/umbrella/cellular-automata/#000100000-001100000)\n+- [Maze](http://demo.thi.ng/umbrella/cellular-automata/#000100000-001110000)\n+- [Disappearing maze](http://demo.thi.ng/umbrella/cellular-automata/#000111111-000001111)\n+- [Blobs](http://demo.thi.ng/umbrella/cellular-automata/#000010000-000011111)\n",
"new_path": "examples/cellular-automata/README.md",
"old_path": null
},
{
"change_type": "ADD",
"diff": "+<!DOCTYPE html>\n+<html>\n+\n+<head>\n+ <style>\n+ html,\n+ pre {\n+ font: 14px/11px monospace;\n+ }\n+\n+ label {\n+ display: inline-block;\n+ width: 5rem;\n+ }\n+ </style>\n+</head>\n+\n+<body>\n+ <div id=\"app\"></div>\n+ <script type=\"text/javascript\" src=\"bundle.js\"></script>\n+</body>\n+\n+</html>\n\\ No newline at end of file\n",
"new_path": "examples/cellular-automata/index.html",
"old_path": null
},
{
"change_type": "ADD",
"diff": "+{\n+ \"name\": \"game-of-life\",\n+ \"version\": \"0.0.1\",\n+ \"repository\": \"https://github.com/thi-ng/umbrella\",\n+ \"author\": \"Karsten Schmidt <k+npm@thi.ng>\",\n+ \"license\": \"Apache-2.0\",\n+ \"scripts\": {\n+ \"build\": \"yarn clean && webpack\",\n+ \"clean\": \"rm -rf bundle.*\",\n+ \"dev\": \"open index.html && webpack -w\"\n+ },\n+ \"devDependencies\": {\n+ \"ts-loader\": \"^3.3.1\",\n+ \"typescript\": \"^2.7.1\",\n+ \"webpack\": \"^3.10.0\"\n+ },\n+ \"dependencies\": {\n+ \"@thi.ng/api\": \"^2.0.2\",\n+ \"@thi.ng/hiccup-dom\": \"^1.0.4\",\n+ \"@thi.ng/rstream\": \"^0.9.4\",\n+ \"@thi.ng/transducers\": \"^1.1.0\"\n+ }\n+}\n\\ No newline at end of file\n",
"new_path": "examples/cellular-automata/package.json",
"old_path": null
},
{
"change_type": "ADD",
"diff": "+import { start } from \"@thi.ng/hiccup-dom\";\n+\n+import { transduce } from \"@thi.ng/transducers/transduce\";\n+import { comp } from \"@thi.ng/transducers/func/comp\";\n+import { range2d } from \"@thi.ng/transducers/iter/range2d\";\n+import { repeatedly } from \"@thi.ng/transducers/iter/repeatedly\";\n+import { push } from \"@thi.ng/transducers/rfn/push\";\n+import { str } from \"@thi.ng/transducers/rfn/str\";\n+import { bits } from \"@thi.ng/transducers/xform/bits\";\n+import { convolve2d, buildKernel2d } from \"@thi.ng/transducers/xform/convolve\";\n+import { map } from \"@thi.ng/transducers/xform/map\";\n+import { multiplex } from \"@thi.ng/transducers/xform/multiplex\";\n+import { partition } from \"@thi.ng/transducers/xform/partition\";\n+\n+const W = 128;\n+const H = 48;\n+\n+// 3x3 convolution kernel (Moore neighborhood)\n+const kernel = buildKernel2d([1, 1, 1, 1, 0, 1, 1, 1, 1], 3, 3);\n+// seed grid with 50% noise\n+let grid = [...repeatedly(() => Math.random() < 0.5 ? 1 : 0, W * H)];\n+// Conway CA default state rules [[dead], [alive]]\n+// (essentially this is a compressed finite state machine)\n+let rules = [[0, 0, 0, 1, 0, 0, 0, 0, 0], [0, 0, 1, 1, 0, 0, 0, 0, 0]];\n+\n+// parse rules from location hash (2 groups of 9 bits each)\n+if (location.hash.length === 20) {\n+ rules = transduce(comp(map((x: string) => parseInt(x, 2)), bits(9), partition(9)), push(), location.hash.substr(1).split(\"-\"));\n+}\n+\n+// apply convolution & CA rules\n+// this produces the next generation of the CA\n+// we're using `multiplex` to produce a tuple of `[orig-cell-value, neighbor-count]`\n+export const convolve = (src, wrap = true) =>\n+ transduce(\n+ comp(\n+ multiplex(map((p) => src[p[0] + p[1] * W]), convolve2d(src, W, H, kernel, wrap)),\n+ map(([alive, neighbors]) => rules[alive][neighbors])\n+ ),\n+ push(),\n+ range2d(W, H)\n+ );\n+\n+// format grid values as string\n+const format = (src) =>\n+ transduce(\n+ comp(\n+ map((x: number) => x ? \"\\u2588\" : \" \"),\n+ partition(W),\n+ map(x => x.join(\"\"))\n+ ),\n+ str(\"\\n\"),\n+ src\n+ );\n+\n+// event handler for rule edits\n+const setRule = (i, j, s) => {\n+ rules[i][j] = s ? 1 : 0;\n+ location.hash = rules[0].join(\"\") + \"-\" + rules[1].join(\"\");\n+};\n+\n+// single checkbox component\n+const checkbox = (x, onchange) => [\"input\", { type: \"checkbox\", checked: !!x, onchange }];\n+\n+// component for single CA rule group (alive / dead FSM)\n+const ruleBoxes = (prefix, i) =>\n+ [\"div\",\n+ [\"label\", prefix],\n+ ...rules[i].map((rule, j) => checkbox(rule, (e) => setRule(i, j, e.target.checked))),\n+ ];\n+\n+// define & start main app component\n+start(\"app\", () => {\n+ return [\"div\",\n+ ruleBoxes(\"birth\", 0),\n+ ruleBoxes(\"survive\", 1),\n+ [\"pre\", format(grid = convolve(grid))]\n+ ];\n+});\n",
"new_path": "examples/cellular-automata/src/index.ts",
"old_path": null
},
{
"change_type": "ADD",
"diff": "+{\n+ \"extends\": \"../../tsconfig.json\",\n+ \"compilerOptions\": {\n+ \"outDir\": \".\"\n+ },\n+ \"include\": [\n+ \"./src/**/*.ts\"\n+ ]\n+}\n",
"new_path": "examples/cellular-automata/tsconfig.json",
"old_path": null
},
{
"change_type": "ADD",
"diff": "+module.exports = {\n+ entry: \"./src/index.ts\",\n+ output: {\n+ path: __dirname,\n+ filename: \"bundle.js\"\n+ },\n+ resolve: {\n+ extensions: [\".ts\", \".js\"]\n+ },\n+ module: {\n+ loaders: [{ test: /\\.ts$/, loader: \"ts-loader\" }]\n+ },\n+ node: {\n+ process: false,\n+ setImmediate: false\n+ }\n+};\n",
"new_path": "examples/cellular-automata/webpack.config.js",
"old_path": null
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
feat(examples): add cellular automata example
| 1
|
feat
|
examples
|
679,913
|
18.02.2018 20:24:22
| 0
|
5c106cfa3df7b37eea277ae32c416861e90b907a
|
docs(examples): add more CA config links
|
[
{
"change_type": "MODIFY",
"diff": "@@ -17,3 +17,6 @@ Once webpack has completed building, refresh your browser...\n- [Maze](http://demo.thi.ng/umbrella/cellular-automata/#000100000-001110000)\n- [Disappearing maze](http://demo.thi.ng/umbrella/cellular-automata/#000111111-000001111)\n- [Blobs](http://demo.thi.ng/umbrella/cellular-automata/#000010000-000011111)\n+- [Dots](http://demo.thi.ng/umbrella/cellular-automata/#000001111-111111110)\n+- [Growth](http://demo.thi.ng/umbrella/cellular-automata/#000101111-000001111)\n+- [Noisy growth](http://demo.thi.ng/umbrella/cellular-automata/#100101000-000001111)\n",
"new_path": "examples/cellular-automata/README.md",
"old_path": "examples/cellular-automata/README.md"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
docs(examples): add more CA config links
| 1
|
docs
|
examples
|
679,913
|
18.02.2018 20:36:18
| 0
|
ca1caae51652784ea7d8cbb8d9188405d895acf0
|
refactor(rstream): don't throw in unsubscribe() if no parent
|
[
{
"change_type": "MODIFY",
"diff": "@@ -81,9 +81,8 @@ export class Subscription<A, B> implements\nif (!sub) {\nif (this.parent) {\nreturn this.parent.unsubscribe(this);\n- } else {\n- throw new Error(\"subscription has no parent\");\n}\n+ return true;\n}\nif (this.subs) {\nDEBUG && console.log(this.id, \"unsub\", sub.id);\n",
"new_path": "packages/rstream/src/subscription.ts",
"old_path": "packages/rstream/src/subscription.ts"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
refactor(rstream): don't throw in unsubscribe() if no parent
| 1
|
refactor
|
rstream
|
679,913
|
18.02.2018 20:49:48
| 0
|
4942e2ea232878a08a3189ffd60bb38192ec109b
|
feat(rstream): fix support infinite StreamMerge's, update ctor
BREAKING CHANGE: StreamMerge ctor now accepts an options object
only (`StreamMergeOpts`).
|
[
{
"change_type": "MODIFY",
"diff": "@@ -74,11 +74,13 @@ setTimeout(()=> raf.done(), 10000);\n### Stream merging\n```typescript\n-new rs.StreamMerge([\n+new rs.StreamMerge({\n+ src: [\nrs.fromEvent(document, \"mousemove\"),\nrs.fromEvent(document, \"mousedown\"),\nrs.fromEvent(document, \"mouseup\"),\n-])\n+ ]\n+})\n// add event transformer\n.subscribe(tx.map((e) => [e.type, [e.clientX, e.clientY]]))\n// add debug subscription\n@@ -134,8 +136,8 @@ hist.redo(); // 1st\n// theme: light\n// { theme: 'light', mode: false }\n-// update another part of the app state (SPREAD, DON'T MUTATE!)\n-app.swap((state) => ({...state, session: {user: \"asterix\"}}));\n+// update another part of the app state (DON'T MUTATE!)\n+app.swap((state) => atom.setIn(state, \"session.user\", \"asterix\"));\n// user: asterix\n// { ui: { theme: 'light', mode: false },\n// foo: 'bar',\n",
"new_path": "packages/rstream/README.md",
"old_path": "packages/rstream/README.md"
},
{
"change_type": "MODIFY",
"diff": "-import { isFunction } from \"@thi.ng/checks/is-function\";\n-import { isString } from \"@thi.ng/checks/is-string\";\n+import { IID } from \"@thi.ng/api/api\";\nimport { Transducer } from \"@thi.ng/transducers/api\";\nimport { ISubscribable, State } from \"./api\";\nimport { Subscription } from \"./subscription\";\n+export interface StreamMergeOpts<A, B> extends IID<string> {\n+ src: Iterable<ISubscribable<A>>;\n+ xform: Transducer<A, B>;\n+ close: boolean;\n+}\n+\n+/**\n+ * Subscription type consuming inputs from multiple inputs and passing\n+ * received values on to any subscribers. Input streams can be added and\n+ * removed dynamically. By default, the StreamMerge calls `done()` when\n+ * the last active input is done, but this behavior can be overridden via\n+ * the `close` constructor option (set to `false`).\n+ */\nexport class StreamMerge<A, B> extends Subscription<A, B> {\nsources: ISubscribable<A>[];\nwrappedSources: Subscription<A, any>[];\n+ autoClose: boolean;\n- constructor(sources: Iterable<ISubscribable<A>>, id?: string);\n- constructor(xform: Transducer<A, B>, id?: string);\n- constructor(sources: Iterable<ISubscribable<A>>, xform: Transducer<A, B>, id?: string);\n- constructor(...args: any[]) {\n- let id = isString(args[args.length - 1]) ? args.pop() : `streammerge-${Subscription.NEXT_ID++}`;\n- let src, xform;\n- switch (args.length) {\n- case 2:\n- src = args[0];\n- xform = args[1];\n- break;\n- case 1:\n- if (isFunction(args[0])) {\n- xform = args[0];\n- } else {\n- src = args[0];\n- }\n- break;\n- default:\n- throw new Error(`illegal arity ${args.length}`);\n- }\n- super(null, xform, null, id);\n+ constructor(opts?: Partial<StreamMergeOpts<A, B>>) {\n+ opts = opts || {};\n+ super(null, opts.xform, null, opts.id || `streammerge-${Subscription.NEXT_ID++}`);\nthis.sources = [];\nthis.wrappedSources = [];\n- if (src) {\n- for (let s of src) {\n- this.add(s);\n- }\n+ this.autoClose = opts.close !== false;\n+ if (opts.src) {\n+ this.addAll(opts.src);\n}\n}\nadd(src: ISubscribable<A>) {\nthis.ensureState();\n- this.sources.push(src);\nthis.wrappedSources.push(\nsrc.subscribe({\nnext: (x) => this.next(x),\ndone: () => this.markDone(src)\n}));\n+ this.sources.push(src);\n+ }\n+\n+ addAll(src: Iterable<ISubscribable<A>>) {\n+ for (let s of src) {\n+ this.add(s);\n+ }\n}\nremove(src: ISubscribable<A>) {\n@@ -60,6 +59,12 @@ export class StreamMerge<A, B> extends Subscription<A, B> {\n}\n}\n+ removeAll(src: Iterable<ISubscribable<A>>) {\n+ for (let s of src) {\n+ this.remove(s);\n+ }\n+ }\n+\nunsubscribe(sub?: Subscription<B, any>) {\nif (!sub) {\nfor (let s of this.wrappedSources) {\n@@ -86,7 +91,7 @@ export class StreamMerge<A, B> extends Subscription<A, B> {\nprotected markDone(src: ISubscribable<A>) {\nthis.remove(src);\n- if (!this.sources.length) {\n+ if (this.autoClose && !this.sources.length) {\nthis.done();\n}\n}\n",
"new_path": "packages/rstream/src/stream-merge.ts",
"old_path": "packages/rstream/src/stream-merge.ts"
},
{
"change_type": "MODIFY",
"diff": "@@ -21,11 +21,13 @@ describe(\"StreamMerge\", () => {\n};\nbeforeEach(() => {\n- src = new rs.StreamMerge<number, number>([\n+ src = new rs.StreamMerge<number, number>({\n+ src: [\nrs.fromIterable([1, 2]),\nrs.fromIterable([10, 20, 30, 40]),\nrs.fromIterable([100, 200, 300])\n- ]);\n+ ]\n+ });\n});\nit(\"merges all inputs\", (done) => {\n@@ -35,12 +37,20 @@ describe(\"StreamMerge\", () => {\n});\nit(\"merges dynamic inputs\", (done) => {\n- src = new rs.StreamMerge([]);\n+ src = new rs.StreamMerge();\nsrc.add(rs.fromIterable([1, 2, 3, 4], 10));\nsrc.add(rs.fromIterable([10, 20], 5));\nsrc.subscribe(check([1, 2, 3, 4, 10, 20], done));\n});\n+ it(\"merges dynamic inputs (synchronous)\", (done) => {\n+ src = new rs.StreamMerge({ close: false });\n+ src.subscribe(check([1, 2, 3, 4, 10, 20], done));\n+ src.add(rs.fromIterableSync([1, 2, 3, 4]));\n+ src.add(rs.fromIterableSync([10, 20]));\n+ src.done();\n+ });\n+\nit(\"stops when no more subs\", () => {\nassert(src.getState() === rs.State.IDLE);\nlet sub1 = src.subscribe({});\n@@ -52,13 +62,13 @@ describe(\"StreamMerge\", () => {\n});\nit(\"applies transducer\", (done) => {\n- src = new rs.StreamMerge<number, number>(\n- [\n+ src = new rs.StreamMerge<number, number>({\n+ src: [\nrs.fromIterable([1, 2]),\nrs.fromIterable([10, 20])\n],\n- tx.mapcat((x: number) => [x, x + 1])\n- );\n+ xform: tx.mapcat((x: number) => [x, x + 1])\n+ });\nsrc.subscribe(check([1, 2, 2, 3, 10, 11, 20, 21], done));\n});\n",
"new_path": "packages/rstream/test/stream-merge.ts",
"old_path": "packages/rstream/test/stream-merge.ts"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
feat(rstream): fix #8, support infinite StreamMerge's, update ctor
BREAKING CHANGE: StreamMerge ctor now accepts an options object
only (`StreamMergeOpts`).
| 1
|
feat
|
rstream
|
679,913
|
18.02.2018 20:56:16
| 0
|
5a728cb974552696458f70170d6137fefea9d90e
|
refactor(rstream-log): update Logger ctor due to changes in StreamMerge
mark Logger instance as infinite stream merge (non-closing)
|
[
{
"change_type": "MODIFY",
"diff": "@@ -25,7 +25,7 @@ export class Logger extends StreamMerge<LogEntry, LogEntry> implements\nsrc = args[1];\nlevel = args[2] !== undefined ? args[2] : level;\n}\n- super(src, id);\n+ super({ src, id, close: false });\nthis.level = level;\n}\n",
"new_path": "packages/rstream-log/src/logger.ts",
"old_path": "packages/rstream-log/src/logger.ts"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
refactor(rstream-log): update Logger ctor due to changes in StreamMerge
- mark Logger instance as infinite stream merge (non-closing)
| 1
|
refactor
|
rstream-log
|
679,913
|
18.02.2018 22:51:30
| 0
|
fde83d599b8834951ed7d4dd3b23807bdba18470
|
build(examples): update example deps
|
[
{
"change_type": "MODIFY",
"diff": "\"webpack\": \"^3.11.0\"\n},\n\"dependencies\": {\n- \"@thi.ng/api\": \"^2.0.2\",\n- \"@thi.ng/hiccup-dom\": \"^1.0.4\",\n- \"@thi.ng/rstream\": \"^0.9.4\",\n- \"@thi.ng/transducers\": \"^1.1.0\"\n+ \"@thi.ng/api\": \"^2.0.3\",\n+ \"@thi.ng/hiccup-dom\": \"^1.0.5\",\n+ \"@thi.ng/rstream\": \"^1.0.0\",\n+ \"@thi.ng/transducers\": \"^1.2.0\"\n}\n}\n\\ No newline at end of file\n",
"new_path": "examples/cellular-automata/package.json",
"old_path": "examples/cellular-automata/package.json"
},
{
"change_type": "MODIFY",
"diff": "\"webpack\": \"^3.11.0\"\n},\n\"dependencies\": {\n- \"@thi.ng/hiccup-dom\": \"^1.0.1\"\n+ \"@thi.ng/hiccup-dom\": \"^1.0.5\"\n}\n}\n\\ No newline at end of file\n",
"new_path": "examples/dashboard/package.json",
"old_path": "examples/dashboard/package.json"
},
{
"change_type": "MODIFY",
"diff": "\"webpack\": \"^3.11.0\"\n},\n\"dependencies\": {\n- \"@thi.ng/hiccup-dom\": \"^1.0.1\"\n+ \"@thi.ng/hiccup-dom\": \"^1.0.5\"\n}\n}\n\\ No newline at end of file\n",
"new_path": "examples/hdom-basics/package.json",
"old_path": "examples/hdom-basics/package.json"
},
{
"change_type": "MODIFY",
"diff": "\"webpack\": \"^3.11.0\"\n},\n\"dependencies\": {\n- \"@thi.ng/api\": \"^2.0.1\",\n- \"@thi.ng/hiccup-dom\": \"^1.0.1\",\n- \"@thi.ng/rstream\": \"^0.9.3\",\n- \"@thi.ng/transducers\": \"^1.0.7\"\n+ \"@thi.ng/api\": \"^2.0.3\",\n+ \"@thi.ng/hiccup-dom\": \"^1.0.5\",\n+ \"@thi.ng/rstream\": \"^1.0.0\",\n+ \"@thi.ng/transducers\": \"^1.2.0\"\n}\n}\n\\ No newline at end of file\n",
"new_path": "examples/hdom-benchmark/package.json",
"old_path": "examples/hdom-benchmark/package.json"
},
{
"change_type": "MODIFY",
"diff": "\"webpack\": \"^3.11.0\"\n},\n\"dependencies\": {\n- \"@thi.ng/checks\": \"^1.1.6\",\n- \"@thi.ng/hiccup-dom\": \"^1.0.1\"\n+ \"@thi.ng/checks\": \"^1.2.1\",\n+ \"@thi.ng/hiccup-dom\": \"^1.0.5\"\n}\n}\n\\ No newline at end of file\n",
"new_path": "examples/json-components/package.json",
"old_path": "examples/json-components/package.json"
},
{
"change_type": "MODIFY",
"diff": "\"webpack\": \"^3.11.0\"\n},\n\"dependencies\": {\n- \"@thi.ng/hiccup-dom\": \"^1.0.1\",\n- \"@thi.ng/transducers\": \"^1.0.7\"\n+ \"@thi.ng/hiccup-dom\": \"^1.0.5\",\n+ \"@thi.ng/transducers\": \"^1.2.0\"\n}\n}\n\\ No newline at end of file\n",
"new_path": "examples/svg-particles/package.json",
"old_path": "examples/svg-particles/package.json"
},
{
"change_type": "MODIFY",
"diff": "\"webpack\": \"^3.11.0\"\n},\n\"dependencies\": {\n- \"@thi.ng/atom\": \"^0.5.2\",\n- \"@thi.ng/hiccup-dom\": \"^1.0.1\",\n- \"@thi.ng/transducers\": \"^1.0.7\"\n+ \"@thi.ng/atom\": \"^0.6.0\",\n+ \"@thi.ng/hiccup-dom\": \"^1.0.5\",\n+ \"@thi.ng/transducers\": \"^1.2.0\"\n}\n}\n\\ No newline at end of file\n",
"new_path": "examples/todo-list/package.json",
"old_path": "examples/todo-list/package.json"
},
{
"change_type": "MODIFY",
"diff": "\"webpack\": \"^3.11.0\"\n},\n\"dependencies\": {\n- \"@thi.ng/hiccup-dom\": \"^1.0.3\"\n+ \"@thi.ng/hiccup-dom\": \"^1.0.5\"\n}\n}\n\\ No newline at end of file\n",
"new_path": "examples/webgl/package.json",
"old_path": "examples/webgl/package.json"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
build(examples): update example deps
| 1
|
build
|
examples
|
679,913
|
18.02.2018 23:14:12
| 0
|
df93af6d9efd39f2ad58678755b0ba35e9ff44bc
|
feat(examples): add randomize buttons (CA)
|
[
{
"change_type": "MODIFY",
"diff": "@@ -15,19 +15,29 @@ import { partition } from \"@thi.ng/transducers/xform/partition\";\nconst W = 128;\nconst H = 48;\n+let grid;\n+let rules;\n// 3x3 convolution kernel (Moore neighborhood)\nconst kernel = buildKernel2d([1, 1, 1, 1, 0, 1, 1, 1, 1], 3, 3);\n-// seed grid with 50% noise\n-let grid = [...repeatedly(() => Math.random() < 0.5 ? 1 : 0, W * H)];\n-// Conway CA default state rules [[dead], [alive]]\n-// (essentially this is a compressed finite state machine)\n-let rules = [[0, 0, 0, 1, 0, 0, 0, 0, 0], [0, 0, 1, 1, 0, 0, 0, 0, 0]];\n// parse rules from location hash (2 groups of 9 bits each)\n+// (essentially this is a compressed finite state machine)\nif (location.hash.length === 20) {\nrules = transduce(comp(map((x: string) => parseInt(x, 2)), bits(9), partition(9)), push(), location.hash.substr(1).split(\"-\"));\n+} else {\n+ // Conway CA default state rules [[dead], [alive]]\n+ rules = [[0, 0, 0, 1, 0, 0, 0, 0, 0], [0, 0, 1, 1, 0, 0, 0, 0, 0]];\n}\n+const setHash = () => (location.hash = rules[0].join(\"\") + \"-\" + rules[1].join(\"\"));\n+const randomSeq = (num, prob = 0.5) => [...repeatedly(() => Math.random() < prob ? 1 : 0, num)];\n+const randomizeGrid = (prob = 0.5) => (grid = randomSeq(W * H, prob));\n+const randomizeRules = () => {\n+ rules = [randomSeq(9), randomSeq(9)];\n+ randomizeGrid();\n+ setHash();\n+};\n+\n// apply convolution & CA rules\n// this produces the next generation of the CA\n// we're using `multiplex` to produce a tuple of `[orig-cell-value, neighbor-count]`\n@@ -56,7 +66,7 @@ const format = (src) =>\n// event handler for rule edits\nconst setRule = (i, j, s) => {\nrules[i][j] = s ? 1 : 0;\n- location.hash = rules[0].join(\"\") + \"-\" + rules[1].join(\"\");\n+ setHash();\n};\n// single checkbox component\n@@ -69,11 +79,16 @@ const ruleBoxes = (prefix, i) =>\n...rules[i].map((rule, j) => checkbox(rule, (e) => setRule(i, j, e.target.checked))),\n];\n+// seed grid with noise\n+randomizeGrid();\n+\n// define & start main app component\nstart(\"app\", () => {\nreturn [\"div\",\nruleBoxes(\"birth\", 0),\nruleBoxes(\"survive\", 1),\n+ [\"div\", [\"button\", { onclick: () => randomizeRules() }, \"randomize rules\"]],\n+ [\"div\", [\"button\", { onclick: () => randomizeGrid() }, \"reset grid\"]],\n[\"pre\", format(grid = convolve(grid))]\n];\n});\n",
"new_path": "examples/cellular-automata/src/index.ts",
"old_path": "examples/cellular-automata/src/index.ts"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
feat(examples): add randomize buttons (CA)
| 1
|
feat
|
examples
|
807,849
|
19.02.2018 12:20:04
| 28,800
|
c78ba56aee7ce3ab42715e50ea8d5fc317ba4006
|
refactor(VersionSerializer): inline parser, use npm-package-arg
GitVersionParser is now just an instance method of VersionSerializer,
with full coverage and more robust npm-package-arg usage.
|
[
{
"change_type": "MODIFY",
"diff": "\"glob-parent\": \"^3.1.0\",\n\"globby\": \"^7.1.1\",\n\"graceful-fs\": \"^4.1.11\",\n- \"hosted-git-info\": \"^2.5.0\",\n\"import-local\": \"^1.0.0\",\n\"inquirer\": \"^5.1.0\",\n\"is-ci\": \"^1.0.10\",\n",
"new_path": "package.json",
"old_path": "package.json"
},
{
"change_type": "MODIFY",
"diff": "@@ -6,7 +6,6 @@ const log = require(\"npmlog\");\nconst ChildProcessUtilities = require(\"./ChildProcessUtilities\");\nconst GitUtilities = require(\"./GitUtilities\");\n-const GitVersionParser = require(\"./GitVersionParser\");\nconst PackageGraph = require(\"./PackageGraph\");\nconst Repository = require(\"./Repository\");\nconst writeLogFile = require(\"./utils/write-log-file\");\n@@ -280,12 +279,10 @@ class Command {\nconst packageGraph = new PackageGraph(packages, { graphType: \"allDependencies\" });\nif (useGitVersion) {\n- const versionParser = new GitVersionParser(gitVersionPrefix);\n-\npackages.forEach(pkg => {\npkg.versionSerializer = new VersionSerializer({\nlocalDependencies: packageGraph.get(pkg.name).localDependencies,\n- versionParser,\n+ tagVersionPrefix: gitVersionPrefix,\n});\n});\n}\n",
"new_path": "src/Command.js",
"old_path": "src/Command.js"
},
{
"change_type": "DELETE",
"diff": "-\"use strict\";\n-\n-const escapeStringRegexp = require(\"escape-string-regexp\");\n-const hostedGitInfo = require(\"hosted-git-info\");\n-\n-class GitVersionParser {\n- constructor(versionPrefix = \"v\") {\n- this._gitUrlPattern = new RegExp(`(.+?#${escapeStringRegexp(versionPrefix)})(.+)$`);\n- }\n-\n- parseVersion(version) {\n- const gitInfo = hostedGitInfo.fromUrl(version);\n- let targetMatches;\n-\n- if (gitInfo && gitInfo.committish) {\n- targetMatches = this._gitUrlPattern.exec(version);\n- }\n-\n- return {\n- prefix: targetMatches ? targetMatches[1] : null,\n- version: targetMatches ? targetMatches[2] : version,\n- };\n- }\n-}\n-\n-module.exports = GitVersionParser;\n",
"new_path": null,
"old_path": "src/GitVersionParser.js"
},
{
"change_type": "MODIFY",
"diff": "\"use strict\";\n+const escapeStringRegexp = require(\"escape-string-regexp\");\n+const npa = require(\"npm-package-arg\");\n+\nclass VersionSerializer {\n- constructor({ localDependencies, versionParser }) {\n+ constructor({ localDependencies, tagVersionPrefix = \"v\" }) {\nthis._localDependencies = localDependencies;\n- this._versionParser = versionParser;\n- this._dependenciesKeys = [\"dependencies\", \"devDependencies\"];\n+ this._gitUrlPattern = new RegExp(`(.+?#${escapeStringRegexp(tagVersionPrefix)})(.+)$`);\nthis._strippedPrefixes = new Map();\n}\nserialize(pkg) {\n- this._dependenciesKeys.forEach(key => {\n- this._prependPrefix(pkg[key] || {});\n- });\n+ this._prependPrefix(pkg.dependencies || {});\n+ this._prependPrefix(pkg.devDependencies || {});\nreturn pkg;\n}\ndeserialize(pkg) {\n- this._dependenciesKeys.forEach(key => {\n- this._stripPrefix(pkg[key] || {});\n- });\n+ this._stripPrefix(pkg.dependencies || {});\n+ this._stripPrefix(pkg.devDependencies || {});\nreturn pkg;\n}\n@@ -37,7 +37,7 @@ class VersionSerializer {\n_stripPrefix(dependencies) {\nObject.keys(dependencies).forEach(name => {\nif (this._localDependencies.has(name)) {\n- const result = this._versionParser.parseVersion(dependencies[name]);\n+ const result = this._parseVersion(name, dependencies[name]);\nif (result.prefix) {\n// eslint-disable-next-line no-param-reassign\n@@ -47,6 +47,22 @@ class VersionSerializer {\n}\n});\n}\n+\n+ _parseVersion(name, version) {\n+ // passing name to disambiguate deprecated \"git scp\"-style URLs\n+ const result = npa.resolve(name, version);\n+\n+ let targetMatches;\n+\n+ if (result.gitCommittish) {\n+ targetMatches = this._gitUrlPattern.exec(version);\n+ }\n+\n+ return {\n+ prefix: targetMatches ? targetMatches[1] : null,\n+ version: targetMatches ? targetMatches[2] : version,\n+ };\n+ }\n}\nmodule.exports = VersionSerializer;\n",
"new_path": "src/VersionSerializer.js",
"old_path": "src/VersionSerializer.js"
},
{
"change_type": "DELETE",
"diff": "-\"use strict\";\n-\n-// file under test\n-const GitVersionParser = require(\"../src/GitVersionParser\");\n-\n-describe(\"GitVersionParser\", () => {\n- describe(\"parseVersion - without prefix\", () => {\n- const parser = new GitVersionParser(\"\");\n-\n- it(\"should work for semver version\", () => {\n- expect(parser.parseVersion(\"0.0.2\")).toEqual({\n- prefix: null,\n- version: \"0.0.2\",\n- });\n-\n- expect(parser.parseVersion(\"~0.0.2\")).toEqual({\n- prefix: null,\n- version: \"~0.0.2\",\n- });\n- });\n-\n- it(\"should work for git url\", () => {\n- expect(parser.parseVersion(\"github:user-foo/project-foo#v0.0.1\")).toEqual({\n- prefix: \"github:user-foo/project-foo#\",\n- version: \"v0.0.1\",\n- });\n-\n- expect(parser.parseVersion(\"git@github.com:user-foo/project-foo#0.0.5\")).toEqual({\n- prefix: \"git@github.com:user-foo/project-foo#\",\n- version: \"0.0.5\",\n- });\n- });\n- });\n-\n- describe(\"parseVersion - with version prefix\", () => {\n- const parser = new GitVersionParser(\"v\");\n-\n- it(\"should work for semver version\", () => {\n- expect(parser.parseVersion(\"0.0.2\")).toEqual({\n- prefix: null,\n- version: \"0.0.2\",\n- });\n-\n- expect(parser.parseVersion(\"~0.0.2\")).toEqual({\n- prefix: null,\n- version: \"~0.0.2\",\n- });\n- });\n-\n- it(\"should work for git url\", () => {\n- expect(parser.parseVersion(\"github:user-foo/project-foo#v0.0.1\")).toEqual({\n- prefix: \"github:user-foo/project-foo#v\",\n- version: \"0.0.1\",\n- });\n-\n- expect(parser.parseVersion(\"git@github.com:user-foo/project-foo#0.0.5\")).toEqual({\n- prefix: null,\n- version: \"git@github.com:user-foo/project-foo#0.0.5\",\n- });\n-\n- expect(parser.parseVersion(\"git@github.com:user-foo/project-foo#v0.0.5\")).toEqual({\n- prefix: \"git@github.com:user-foo/project-foo#v\",\n- version: \"0.0.5\",\n- });\n- });\n- });\n-});\n",
"new_path": null,
"old_path": "test/GitVersionParser.js"
},
{
"change_type": "MODIFY",
"diff": "const VersionSerializer = require(\"../src/VersionSerializer\");\ndescribe(\"VersionSerializer\", () => {\n- let serializer;\n-\n- beforeEach(() => {\n- const parser = {\n- parseVersion(version) {\n- const chunks = version.split(\"#\");\n- return {\n- prefix: chunks.length > 1 ? `${chunks[0]}#` : null,\n- version: chunks.length > 1 ? chunks[1] : version,\n- };\n- },\n- };\n- serializer = new VersionSerializer({\n+ describe(\"deserialize\", () => {\n+ it(\"should not touch versions parser does not recognize\", () => {\n+ const serializer = new VersionSerializer({\nlocalDependencies: new Set([\"my-package-1\", \"my-package-2\", \"my-package-3\"]),\n- versionParser: parser,\n+ tagVersionPrefix: \"v\", // default\n});\n- });\n-\n- describe(\"deserialize\", () => {\n- it(\"should use version parser for inter-package dependencies only\", () => {\n- const mockParser = {\n- parseVersion: jest.fn().mockReturnValue({\n- prefix: null,\n- version: \"0.0.1\",\n- }),\n+ const pkg = {\n+ name: \"my-package-1\",\n+ dependencies: {\n+ \"external-dep\": \"^1.0.0\",\n+ \"my-package-2\": \"^1.0.0\",\n+ \"my-package-3\": \"^1.0.0\",\n+ },\n};\n- serializer = new VersionSerializer({\n+ expect(serializer.deserialize(pkg)).toEqual(pkg);\n+ });\n+\n+ it(\"should extract versions recognized by parser\", () => {\n+ const serializer = new VersionSerializer({\nlocalDependencies: new Set([\"my-package-1\", \"my-package-2\", \"my-package-3\"]),\n- versionParser: mockParser,\n});\n- const pkg = {\n+ expect(\n+ serializer.deserialize({\nname: \"my-package-1\",\nversion: \"1.0.0\",\ndependencies: {\n- \"my-dependency\": \"^1.0.0\",\n+ \"external-dep\": \"github:org/external-dep#v1.0.0\",\n},\ndevDependencies: {\n- \"my-package-2\": \"^1.0.0\",\n- \"my-package-3\": \"^1.0.0\",\n+ // shorthand\n+ \"my-package-2\": \"github:user/my-package-2#v1.0.0\",\n+ // deprecated \"git scp\"-style\n+ \"my-package-3\": \"git@github.com:user/my-package-3#v1.0.0\",\n},\npeerDependencies: {\n\"my-package-3\": \">=1.0.0\",\n},\n- };\n-\n- serializer.deserialize(pkg);\n- expect(mockParser.parseVersion).toHaveBeenCalledTimes(2);\n- });\n-\n- it(\"should not touch versions parser does not recognize\", () => {\n- const pkg = {\n+ })\n+ ).toEqual({\nname: \"my-package-1\",\n+ version: \"1.0.0\",\ndependencies: {\n- \"my-dependency\": \"^1.0.0\",\n+ \"external-dep\": \"github:org/external-dep#v1.0.0\",\n},\ndevDependencies: {\n- \"my-package-2\": \"^1.0.0\",\n- \"my-package-3\": \"^1.0.0\",\n+ \"my-package-2\": \"1.0.0\",\n+ \"my-package-3\": \"1.0.0\",\n},\npeerDependencies: {\n\"my-package-3\": \">=1.0.0\",\n},\n- };\n+ });\n+ });\n- expect(serializer.deserialize(pkg)).toEqual(pkg);\n+ it(\"supports custom tag version prefix\", () => {\n+ const serializer = new VersionSerializer({\n+ localDependencies: new Set([\"my-package-1\", \"my-package-2\", \"my-package-3\"]),\n+ tagVersionPrefix: \"\",\n});\n- it(\"should extract versions recognized by parser\", () => {\nexpect(\nserializer.deserialize({\nname: \"my-package-1\",\nversion: \"1.0.0\",\n- dependencies: {\n- \"my-dependency\": \"dont-touch-this#1.0.0\",\n- },\ndevDependencies: {\n- \"my-package-2\": \"bbb#1.0.0\",\n- \"my-package-3\": \"ccc#1.0.0\",\n+ \"external-dep\": \"github:org/external-dep#1.0.0\",\n+ // shorthand\n+ \"my-package-2\": \"github:user/my-package-2#1.0.0\",\n+ // deprecated \"git scp\"-style\n+ \"my-package-3\": \"git@github.com:user/my-package-3#1.0.0\",\n},\npeerDependencies: {\n\"my-package-3\": \">=1.0.0\",\n@@ -92,10 +84,8 @@ describe(\"VersionSerializer\", () 
=> {\n).toEqual({\nname: \"my-package-1\",\nversion: \"1.0.0\",\n- dependencies: {\n- \"my-dependency\": \"dont-touch-this#1.0.0\",\n- },\ndevDependencies: {\n+ \"external-dep\": \"github:org/external-dep#1.0.0\",\n\"my-package-2\": \"1.0.0\",\n\"my-package-3\": \"1.0.0\",\n},\n@@ -108,13 +98,15 @@ describe(\"VersionSerializer\", () => {\ndescribe(\"serialize\", () => {\nit(\"should not touch versions parser does not recognize\", () => {\n+ const serializer = new VersionSerializer({\n+ localDependencies: new Set([\"my-package-1\", \"my-package-2\", \"my-package-3\"]),\n+ tagVersionPrefix: \"v\", // default\n+ });\nconst pkg = {\nname: \"my-package-1\",\nversion: \"1.0.0\",\n- dependencies: {\n- \"my-dependency\": \"^1.0.0\",\n- },\ndevDependencies: {\n+ \"external-dep\": \"^1.0.0\",\n\"my-package-2\": \"^1.0.0\",\n\"my-package-3\": \"^1.0.0\",\n},\n@@ -127,16 +119,22 @@ describe(\"VersionSerializer\", () => {\n});\nit(\"should write back version strings transformed by deserialize\", () => {\n+ const serializer = new VersionSerializer({\n+ localDependencies: new Set([\"my-package-1\", \"my-package-2\", \"my-package-3\"]),\n+ });\n+\n// since serializer is stateful, version prefixes will be stored in its state\nserializer.deserialize({\nname: \"my-package-1\",\nversion: \"1.0.0\",\ndependencies: {\n- \"my-dependency\": \"dont-touch-this#1.0.0\",\n+ \"external-dep\": \"github:org/external-dep#v1.0.0\",\n+ // normalized by npm-package-arg\n+ \"my-package-2\": \"git+ssh://git@github.com:user/my-package-2#v1.0.0\",\n},\ndevDependencies: {\n- \"my-package-2\": \"bbb#1.0.0\",\n- \"my-package-3\": \"ccc#1.0.0\",\n+ // default sshurl with optional .git suffix\n+ \"my-package-3\": \"ssh://git@github.com:user/my-package-3.git#v1.0.0\",\n},\npeerDependencies: {\n\"my-package-3\": \">=1.0.0\",\n@@ -149,10 +147,10 @@ describe(\"VersionSerializer\", () => {\nname: \"my-package-1\",\nversion: \"1.0.0\",\ndependencies: {\n- \"my-dependency\": \"dont-touch-this#1.0.0\",\n+ \"external-dep\": \"github:org/external-dep#v1.0.0\",\n+ \"my-package-2\": \"1.0.0\",\n},\ndevDependencies: {\n- \"my-package-2\": \"1.0.0\",\n\"my-package-3\": \"1.0.0\",\n},\npeerDependencies: {\n@@ -163,16 +161,57 @@ describe(\"VersionSerializer\", () => {\nname: \"my-package-1\",\nversion: \"1.0.0\",\ndependencies: {\n- \"my-dependency\": \"dont-touch-this#1.0.0\",\n+ \"external-dep\": \"github:org/external-dep#v1.0.0\",\n+ \"my-package-2\": \"git+ssh://git@github.com:user/my-package-2#v1.0.0\",\n},\ndevDependencies: {\n- \"my-package-2\": \"bbb#1.0.0\",\n- \"my-package-3\": \"ccc#1.0.0\",\n+ \"my-package-3\": \"ssh://git@github.com:user/my-package-3.git#v1.0.0\",\n},\npeerDependencies: {\n\"my-package-3\": \">=1.0.0\",\n},\n});\n});\n+\n+ it(\"supports custom tag version prefix\", () => {\n+ const serializer = new VersionSerializer({\n+ localDependencies: new Set([\"my-package-1\", \"my-package-2\", \"my-package-3\"]),\n+ tagVersionPrefix: \"\",\n+ });\n+\n+ // since serializer is stateful, version prefixes will be stored in its state\n+ serializer.deserialize({\n+ name: \"my-package-1\",\n+ version: \"1.0.0\",\n+ dependencies: {\n+ \"external-dep\": \"github:org/external-dep#1.0.0\",\n+ // normalized by npm-package-arg\n+ \"my-package-2\": \"git+ssh://git@github.com:user/my-package-2#1.0.0\",\n+ // default sshurl with optional .git suffix\n+ \"my-package-3\": \"ssh://git@github.com:user/my-package-3.git#1.0.0\",\n+ },\n+ });\n+\n+ // the preserved prefixes should be written back\n+ expect(\n+ serializer.serialize({\n+ name: 
\"my-package-1\",\n+ version: \"1.0.0\",\n+ dependencies: {\n+ \"external-dep\": \"github:org/external-dep#1.0.0\",\n+ \"my-package-2\": \"1.0.0\",\n+ \"my-package-3\": \"1.0.0\",\n+ },\n+ })\n+ ).toEqual({\n+ name: \"my-package-1\",\n+ version: \"1.0.0\",\n+ dependencies: {\n+ \"external-dep\": \"github:org/external-dep#1.0.0\",\n+ \"my-package-2\": \"git+ssh://git@github.com:user/my-package-2#1.0.0\",\n+ \"my-package-3\": \"ssh://git@github.com:user/my-package-3.git#1.0.0\",\n+ },\n+ });\n+ });\n});\n});\n",
"new_path": "test/VersionSerializer.js",
"old_path": "test/VersionSerializer.js"
}
] |
JavaScript
|
MIT License
|
lerna/lerna
|
refactor(VersionSerializer): inline parser, use npm-package-arg
GitVersionParser is now just an instance method of VersionSerializer,
with full coverage and more robust npm-package-arg usage.
| 1
|
refactor
|
VersionSerializer
|
807,849
|
19.02.2018 13:04:38
| 28,800
|
6efe31b0c6e971a4959b8d17013de23e6dfa3ee8
|
refactor(LsCommand): use command lifecycle, instance methods
|
[
{
"change_type": "MODIFY",
"diff": "@@ -36,36 +36,59 @@ class LsCommand extends Command {\n}\ninitialize(callback) {\n- // Nothing to do...\n- callback(null, true);\n- }\n-\n- execute(callback) {\n- const formattedPackages = this.filteredPackages.map(pkg => ({\n+ this.resultList = this.filteredPackages.map(pkg => ({\nname: pkg.name,\nversion: pkg.version,\nprivate: pkg.private,\n}));\n+ callback(null, true);\n+ }\n+\n+ execute(callback) {\n+ let result;\n+\nif (this.options.json) {\n- output(JSON.stringify(formattedPackages, null, 2));\n+ result = this.formatJSON();\n} else {\n- formattedPackages.forEach(pkg => {\n- pkg.version = pkg.version ? chalk.grey(`v${pkg.version}`) : chalk.yellow(\"MISSING\");\n- pkg.private = pkg.private ? `(${chalk.red(\"private\")})` : \"\";\n+ result = this.formatColumns();\n+ }\n+\n+ output(result);\n+\n+ callback(null, true);\n+ }\n+\n+ formatJSON() {\n+ return JSON.stringify(this.resultList, null, 2);\n+ }\n+\n+ formatColumns() {\n+ const formattedResults = this.resultList.map(result => {\n+ const formatted = {\n+ name: result.name,\n+ };\n+\n+ if (result.version) {\n+ formatted.version = chalk.grey(`v${result.version}`);\n+ } else {\n+ formatted.version = chalk.yellow(\"MISSING\");\n+ }\n+\n+ if (result.private) {\n+ formatted.private = `(${chalk.red(\"private\")})`;\n+ }\n+\n+ return formatted;\n});\n- output(\n- columnify(formattedPackages, {\n+\n+ return columnify(formattedResults, {\nshowHeaders: false,\nconfig: {\nversion: {\nalign: \"right\",\n},\n},\n- })\n- );\n- }\n-\n- callback(null, true);\n+ });\n}\n}\n",
"new_path": "src/commands/LsCommand.js",
"old_path": "src/commands/LsCommand.js"
}
] |
JavaScript
|
MIT License
|
lerna/lerna
|
refactor(LsCommand): use command lifecycle, instance methods
| 1
|
refactor
|
LsCommand
|
807,849
|
19.02.2018 13:06:49
| 28,800
|
407cae62cfbc5eddb10eca3095c112bea3b0a976
|
fix(LsCommand): add 'list' alias
|
[
{
"change_type": "MODIFY",
"diff": "@@ -13,7 +13,9 @@ exports.handler = function handler(argv) {\nexports.command = \"ls\";\n-exports.describe = \"List all public packages\";\n+exports.aliases = [\"list\"];\n+\n+exports.describe = \"List local packages\";\nexports.builder = {\njson: {\n",
"new_path": "src/commands/LsCommand.js",
"old_path": "src/commands/LsCommand.js"
}
] |
JavaScript
|
MIT License
|
lerna/lerna
|
fix(LsCommand): add 'list' alias
| 1
|
fix
|
LsCommand
|
679,913
|
19.02.2018 13:32:30
| 0
|
511c4980a38b82e86c1351132d33147f1160ef4b
|
feat(hiccup-dom-components): initial import
|
[
{
"change_type": "ADD",
"diff": "+build\n+coverage\n+dev\n+doc\n+src*\n+test\n+.nyc_output\n+tsconfig.json\n+*.tgz\n+*.html\n",
"new_path": "packages/hiccup-dom-components/.npmignore",
"old_path": null
},
{
"change_type": "ADD",
"diff": "+ Apache License\n+ Version 2.0, January 2004\n+ http://www.apache.org/licenses/\n+\n+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n+\n+ 1. Definitions.\n+\n+ \"License\" shall mean the terms and conditions for use, reproduction,\n+ and distribution as defined by Sections 1 through 9 of this document.\n+\n+ \"Licensor\" shall mean the copyright owner or entity authorized by\n+ the copyright owner that is granting the License.\n+\n+ \"Legal Entity\" shall mean the union of the acting entity and all\n+ other entities that control, are controlled by, or are under common\n+ control with that entity. For the purposes of this definition,\n+ \"control\" means (i) the power, direct or indirect, to cause the\n+ direction or management of such entity, whether by contract or\n+ otherwise, or (ii) ownership of fifty percent (50%) or more of the\n+ outstanding shares, or (iii) beneficial ownership of such entity.\n+\n+ \"You\" (or \"Your\") shall mean an individual or Legal Entity\n+ exercising permissions granted by this License.\n+\n+ \"Source\" form shall mean the preferred form for making modifications,\n+ including but not limited to software source code, documentation\n+ source, and configuration files.\n+\n+ \"Object\" form shall mean any form resulting from mechanical\n+ transformation or translation of a Source form, including but\n+ not limited to compiled object code, generated documentation,\n+ and conversions to other media types.\n+\n+ \"Work\" shall mean the work of authorship, whether in Source or\n+ Object form, made available under the License, as indicated by a\n+ copyright notice that is included in or attached to the work\n+ (an example is provided in the Appendix below).\n+\n+ \"Derivative Works\" shall mean any work, whether in Source or Object\n+ form, that is based on (or derived from) the Work and for which the\n+ editorial revisions, annotations, elaborations, or other modifications\n+ represent, as a whole, an original work of authorship. For the purposes\n+ of this License, Derivative Works shall not include works that remain\n+ separable from, or merely link (or bind by name) to the interfaces of,\n+ the Work and Derivative Works thereof.\n+\n+ \"Contribution\" shall mean any work of authorship, including\n+ the original version of the Work and any modifications or additions\n+ to that Work or Derivative Works thereof, that is intentionally\n+ submitted to Licensor for inclusion in the Work by the copyright owner\n+ or by an individual or Legal Entity authorized to submit on behalf of\n+ the copyright owner. For the purposes of this definition, \"submitted\"\n+ means any form of electronic, verbal, or written communication sent\n+ to the Licensor or its representatives, including but not limited to\n+ communication on electronic mailing lists, source code control systems,\n+ and issue tracking systems that are managed by, or on behalf of, the\n+ Licensor for the purpose of discussing and improving the Work, but\n+ excluding communication that is conspicuously marked or otherwise\n+ designated in writing by the copyright owner as \"Not a Contribution.\"\n+\n+ \"Contributor\" shall mean Licensor and any individual or Legal Entity\n+ on behalf of whom a Contribution has been received by Licensor and\n+ subsequently incorporated within the Work.\n+\n+ 2. Grant of Copyright License. 
Subject to the terms and conditions of\n+ this License, each Contributor hereby grants to You a perpetual,\n+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n+ copyright license to reproduce, prepare Derivative Works of,\n+ publicly display, publicly perform, sublicense, and distribute the\n+ Work and such Derivative Works in Source or Object form.\n+\n+ 3. Grant of Patent License. Subject to the terms and conditions of\n+ this License, each Contributor hereby grants to You a perpetual,\n+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n+ (except as stated in this section) patent license to make, have made,\n+ use, offer to sell, sell, import, and otherwise transfer the Work,\n+ where such license applies only to those patent claims licensable\n+ by such Contributor that are necessarily infringed by their\n+ Contribution(s) alone or by combination of their Contribution(s)\n+ with the Work to which such Contribution(s) was submitted. If You\n+ institute patent litigation against any entity (including a\n+ cross-claim or counterclaim in a lawsuit) alleging that the Work\n+ or a Contribution incorporated within the Work constitutes direct\n+ or contributory patent infringement, then any patent licenses\n+ granted to You under this License for that Work shall terminate\n+ as of the date such litigation is filed.\n+\n+ 4. Redistribution. You may reproduce and distribute copies of the\n+ Work or Derivative Works thereof in any medium, with or without\n+ modifications, and in Source or Object form, provided that You\n+ meet the following conditions:\n+\n+ (a) You must give any other recipients of the Work or\n+ Derivative Works a copy of this License; and\n+\n+ (b) You must cause any modified files to carry prominent notices\n+ stating that You changed the files; and\n+\n+ (c) You must retain, in the Source form of any Derivative Works\n+ that You distribute, all copyright, patent, trademark, and\n+ attribution notices from the Source form of the Work,\n+ excluding those notices that do not pertain to any part of\n+ the Derivative Works; and\n+\n+ (d) If the Work includes a \"NOTICE\" text file as part of its\n+ distribution, then any Derivative Works that You distribute must\n+ include a readable copy of the attribution notices contained\n+ within such NOTICE file, excluding those notices that do not\n+ pertain to any part of the Derivative Works, in at least one\n+ of the following places: within a NOTICE text file distributed\n+ as part of the Derivative Works; within the Source form or\n+ documentation, if provided along with the Derivative Works; or,\n+ within a display generated by the Derivative Works, if and\n+ wherever such third-party notices normally appear. The contents\n+ of the NOTICE file are for informational purposes only and\n+ do not modify the License. You may add Your own attribution\n+ notices within Derivative Works that You distribute, alongside\n+ or as an addendum to the NOTICE text from the Work, provided\n+ that such additional attribution notices cannot be construed\n+ as modifying the License.\n+\n+ You may add Your own copyright statement to Your modifications and\n+ may provide additional or different license terms and conditions\n+ for use, reproduction, or distribution of Your modifications, or\n+ for any such Derivative Works as a whole, provided Your use,\n+ reproduction, and distribution of the Work otherwise complies with\n+ the conditions stated in this License.\n+\n+ 5. Submission of Contributions. 
Unless You explicitly state otherwise,\n+ any Contribution intentionally submitted for inclusion in the Work\n+ by You to the Licensor shall be under the terms and conditions of\n+ this License, without any additional terms or conditions.\n+ Notwithstanding the above, nothing herein shall supersede or modify\n+ the terms of any separate license agreement you may have executed\n+ with Licensor regarding such Contributions.\n+\n+ 6. Trademarks. This License does not grant permission to use the trade\n+ names, trademarks, service marks, or product names of the Licensor,\n+ except as required for reasonable and customary use in describing the\n+ origin of the Work and reproducing the content of the NOTICE file.\n+\n+ 7. Disclaimer of Warranty. Unless required by applicable law or\n+ agreed to in writing, Licensor provides the Work (and each\n+ Contributor provides its Contributions) on an \"AS IS\" BASIS,\n+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n+ implied, including, without limitation, any warranties or conditions\n+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n+ PARTICULAR PURPOSE. You are solely responsible for determining the\n+ appropriateness of using or redistributing the Work and assume any\n+ risks associated with Your exercise of permissions under this License.\n+\n+ 8. Limitation of Liability. In no event and under no legal theory,\n+ whether in tort (including negligence), contract, or otherwise,\n+ unless required by applicable law (such as deliberate and grossly\n+ negligent acts) or agreed to in writing, shall any Contributor be\n+ liable to You for damages, including any direct, indirect, special,\n+ incidental, or consequential damages of any character arising as a\n+ result of this License or out of the use or inability to use the\n+ Work (including but not limited to damages for loss of goodwill,\n+ work stoppage, computer failure or malfunction, or any and all\n+ other commercial damages or losses), even if such Contributor\n+ has been advised of the possibility of such damages.\n+\n+ 9. Accepting Warranty or Additional Liability. While redistributing\n+ the Work or Derivative Works thereof, You may choose to offer,\n+ and charge a fee for, acceptance of support, warranty, indemnity,\n+ or other liability obligations and/or rights consistent with this\n+ License. However, in accepting such obligations, You may act only\n+ on Your own behalf and on Your sole responsibility, not on behalf\n+ of any other Contributor, and only if You agree to indemnify,\n+ defend, and hold each Contributor harmless for any liability\n+ incurred by, or claims asserted against, such Contributor by reason\n+ of your accepting any such warranty or additional liability.\n+\n+ END OF TERMS AND CONDITIONS\n+\n+ APPENDIX: How to apply the Apache License to your work.\n+\n+ To apply the Apache License to your work, attach the following\n+ boilerplate notice, with the fields enclosed by brackets \"{}\"\n+ replaced with your own identifying information. (Don't include\n+ the brackets!) The text should be enclosed in the appropriate\n+ comment syntax for the file format. 
We also recommend that a\n+ file or class name and description of purpose be included on the\n+ same \"printed page\" as the copyright notice for easier\n+ identification within third-party archives.\n+\n+ Copyright {yyyy} {name of copyright owner}\n+\n+ Licensed under the Apache License, Version 2.0 (the \"License\");\n+ you may not use this file except in compliance with the License.\n+ You may obtain a copy of the License at\n+\n+ http://www.apache.org/licenses/LICENSE-2.0\n+\n+ Unless required by applicable law or agreed to in writing, software\n+ distributed under the License is distributed on an \"AS IS\" BASIS,\n+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ See the License for the specific language governing permissions and\n+ limitations under the License.\n",
"new_path": "packages/hiccup-dom-components/LICENSE",
"old_path": null
},
{
"change_type": "ADD",
"diff": "+# @thi.ng/hiccup-dom-components\n+\n+[](https://www.npmjs.com/package/@thi.ng/hiccup-dom-components)\n+\n+## About\n+\n+A growing collection of unstyled, re-usable & customizable components for use\n+with\n+[@thi.ng/hiccup-dom](https://github.com/thi-ng/umbrella/tree/master/packages/hiccup-dom)\n+&\n+[@thi.ng/hiccup](https://github.com/thi-ng/umbrella/tree/master/packages/hiccup).\n+\n+**Status:** Pre-alpha\n+\n+## Installation\n+\n+```\n+yarn add @thi.ng/hiccup-dom-components\n+```\n+\n+## Usage examples\n+\n+```typescript\n+import * as hdc from \"@thi.ng/hiccup-dom-components\";\n+```\n+\n+### Canvas\n+\n+- [Canvas types](./src/canvas.ts)\n+\n+### Form elements\n+\n+- [Dropdown](./src/dropdown.ts)\n+\n+### Links\n+\n+- [Link types](./src/link.ts)\n+\n+### SVG\n+\n+- [SVG elements](./src/svg.ts)\n+\n+## Authors\n+\n+- Karsten Schmidt\n+\n+## License\n+\n+© 2018 Karsten Schmidt // Apache Software License 2.0\n",
"new_path": "packages/hiccup-dom-components/README.md",
"old_path": null
},
{
"change_type": "ADD",
"diff": "+{\n+ \"name\": \"@thi.ng/hiccup-dom-components\",\n+ \"version\": \"0.0.1\",\n+ \"description\": \"TODO\",\n+ \"main\": \"./index.js\",\n+ \"typings\": \"./index.d.ts\",\n+ \"repository\": \"https://github.com/thi-ng/umbrella\",\n+ \"author\": \"Karsten Schmidt <k+npm@thi.ng>\",\n+ \"license\": \"Apache-2.0\",\n+ \"scripts\": {\n+ \"build\": \"yarn run clean && tsc --declaration\",\n+ \"clean\": \"rm -rf *.js *.d.ts build doc\",\n+ \"doc\": \"node_modules/.bin/typedoc --mode modules --out doc src\",\n+ \"pub\": \"yarn run build && yarn publish --access public\",\n+ \"test\": \"rm -rf build && tsc -p test && mocha build/test/*.js\"\n+ },\n+ \"devDependencies\": {\n+ \"@types/mocha\": \"^2.2.48\",\n+ \"@types/node\": \"^9.4.6\",\n+ \"mocha\": \"^5.0.1\",\n+ \"ts-loader\": \"^3.5.0\",\n+ \"typedoc\": \"^0.10.0\",\n+ \"typescript\": \"^2.7.2\",\n+ \"webpack\": \"^3.11.0\"\n+ },\n+ \"dependencies\": {},\n+ \"keywords\": [\n+ \"ES6\",\n+ \"typescript\"\n+ ],\n+ \"publishConfig\": {\n+ \"access\": \"public\"\n+ }\n+}\n\\ No newline at end of file\n",
"new_path": "packages/hiccup-dom-components/package.json",
"old_path": null
},
{
"change_type": "ADD",
"diff": "+export interface CanvasOpts {\n+ width: number;\n+ height: number;\n+ [id: string]: any;\n+}\n+\n+const _canvas = (type, init, update, attribs, opts) => {\n+ let ctx;\n+ let frame = 0;\n+ return [{\n+ init(el: HTMLCanvasElement) {\n+ ctx = el.getContext(type, opts);\n+ init(ctx);\n+ },\n+ render() {\n+ ctx && update(ctx, frame++);\n+ return [\"canvas\", attribs]\n+ }\n+ }];\n+};\n+\n+export const canvasWebGL = (\n+ init: (gl: WebGLRenderingContext) => void,\n+ update: (gl: WebGLRenderingContext, frame: number) => void,\n+ attribs: CanvasOpts,\n+ glopts?: WebGLContextAttributes) =>\n+ _canvas(\"webgl\", init, update, attribs, glopts);\n+\n+export const canvas2D = (\n+ init: (gl: CanvasRenderingContext2D) => void,\n+ update: (gl: CanvasRenderingContext2D, frame: number) => void,\n+ attribs: CanvasOpts,\n+ ctxopts?: Canvas2DContextAttributes) =>\n+ _canvas(\"2d\", init, update, attribs, ctxopts);\n",
"new_path": "packages/hiccup-dom-components/src/canvas.ts",
"old_path": null
},
{
"change_type": "ADD",
"diff": "+export interface DropDownOption extends Array<any> {\n+ [0]: string | number;\n+ [1]: string;\n+ [2]?: boolean\n+};\n+\n+export interface DropDownOptionGroup extends Array<any> {\n+ [0]: { label?: string, [id: string]: any },\n+ [1]: DropDownOption[];\n+}\n+\n+export const option = ([value, label, disabled]: DropDownOption, sel: string | number) =>\n+ [\n+ \"option\",\n+ { value, disabled: !!disabled, selected: value === sel },\n+ label\n+ ];\n+\n+export const optgroup = (attribs: any, options: DropDownOption[], sel?: string | number) =>\n+ [\n+ \"optgroup\",\n+ (attribs.label = attribs.label || \"--\", attribs),\n+ ...options.map((o) => option(o, sel))\n+ ];\n+\n+export const dropdown = (attribs: any, options: DropDownOption[], sel?: string | number) =>\n+ [\n+ \"select\",\n+ attribs,\n+ ...options.map((o) => option(o, sel))\n+ ];\n+\n+export const groupedDropdown = (attribs: any, groups: DropDownOptionGroup[], sel?: string | number) =>\n+ [\n+ \"select\",\n+ attribs,\n+ ...groups.map((o) => optgroup(o[0], o[1], sel))\n+ ];\n",
"new_path": "packages/hiccup-dom-components/src/dropdown.ts",
"old_path": null
},
{
"change_type": "ADD",
"diff": "+export * from \"./canvas\";\n+export * from \"./dropdown\";\n+export * from \"./link\";\n+export * from \"./svg\";\n",
"new_path": "packages/hiccup-dom-components/src/index.ts",
"old_path": null
},
{
"change_type": "ADD",
"diff": "+import { isString } from \"@thi.ng/checks/is-string\";\n+\n+export const link = (attribs: any, body: any) =>\n+ [\"a\", isString(attribs) ? { href: attribs } : attribs, body];\n+\n+export const appLink = (attribs: any, body: any, onclick: (e: Event) => void) =>\n+ link(\n+ {\n+ ...attribs,\n+ href: \"#\",\n+ onclick: (e) => { e.preventDefault(); onclick(e); }\n+ },\n+ body);\n",
"new_path": "packages/hiccup-dom-components/src/link.ts",
"old_path": null
},
{
"change_type": "ADD",
"diff": "+import { SVG_NS } from \"@thi.ng/hiccup/api\";\n+\n+let PRECISION = 2;\n+\n+export const setPrecision = (n: number) => (PRECISION = n);\n+\n+export const svgdoc = (attribs, ...body) => [\n+ \"svg\",\n+ Object.assign(attribs, { xmlns: SVG_NS }),\n+ ...body\n+];\n+\n+export const ff = (x: number) => x.toFixed(PRECISION);\n+export const point = (p: number[]) => ff(p[0]) + \",\" + ff(p[1]);\n+\n+export const defs = (...defs) => [\"defs\", ...defs];\n+\n+export const circle = (p: number[], r = 1, attr?) =>\n+ [\n+ \"circle\",\n+ Object.assign({\n+ cx: ff(p[0]),\n+ cy: ff(p[1]),\n+ r: ff(r),\n+ }, attr)\n+ ];\n+\n+export const rect = (p: number[], width = 1, height = 1, attr?) =>\n+ [\n+ \"rect\",\n+ Object.assign({\n+ x: ff(p[0]),\n+ y: ff(p[1]),\n+ width: ff(width),\n+ height: ff(height),\n+ }, attr)\n+ ];\n+\n+export const polyline = (points: number[][], attr?) =>\n+ [\n+ \"polyline\",\n+ Object.assign({ points: points.map(point).join(\" \") }, attr)\n+ ];\n+\n+export const polygon = (points: number[][], attr?) =>\n+ [\n+ \"polygon\",\n+ Object.assign({ points: points.map(point).join(\" \") }, attr)\n+ ];\n+\n+export const text = (body: string, p: number[], attr?) =>\n+ [\"text\",\n+ {\n+ x: ff(p[0]),\n+ y: ff(p[1]),\n+ ...attr\n+ },\n+ body\n+ ];\n",
"new_path": "packages/hiccup-dom-components/src/svg.ts",
"old_path": null
},
{
"change_type": "ADD",
"diff": "+// import * as assert from \"assert\";\n+// import * as hc from \"../src/index\";\n+\n+describe(\"hiccup-dom-components\", () => {\n+ it(\"tests pending\");\n+});\n",
"new_path": "packages/hiccup-dom-components/test/index.ts",
"old_path": null
},
{
"change_type": "ADD",
"diff": "+{\n+ \"extends\": \"../../../tsconfig.json\",\n+ \"compilerOptions\": {\n+ \"outDir\": \"../build\"\n+ },\n+ \"include\": [\n+ \"./**/*.ts\",\n+ \"../src/**/*.ts\"\n+ ]\n+}\n",
"new_path": "packages/hiccup-dom-components/test/tsconfig.json",
"old_path": null
},
{
"change_type": "ADD",
"diff": "+{\n+ \"extends\": \"../../tsconfig.json\",\n+ \"compilerOptions\": {\n+ \"outDir\": \".\"\n+ },\n+ \"include\": [\n+ \"./src/**/*.ts\"\n+ ]\n+}\n",
"new_path": "packages/hiccup-dom-components/tsconfig.json",
"old_path": null
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
feat(hiccup-dom-components): initial import
| 1
|
feat
|
hiccup-dom-components
|
679,913
|
19.02.2018 13:34:20
| 0
|
2394396e490b41e7a99c6b19a94e3a2f16815dc8
|
feat(examples): add CA presets, refactor rule handling
|
[
{
"change_type": "MODIFY",
"diff": "\"dependencies\": {\n\"@thi.ng/api\": \"^2.0.3\",\n\"@thi.ng/hiccup-dom\": \"^1.0.5\",\n+ \"@thi.ng/hiccup-dom-components\": \"^0.0.1\",\n\"@thi.ng/rstream\": \"^1.0.0\",\n\"@thi.ng/transducers\": \"^1.2.0\"\n}\n",
"new_path": "examples/cellular-automata/package.json",
"old_path": "examples/cellular-automata/package.json"
},
{
"change_type": "MODIFY",
"diff": "import { start } from \"@thi.ng/hiccup-dom\";\n+import { dropdown, DropDownOption } from \"@thi.ng/hiccup-dom-components/dropdown\";\nimport { transduce } from \"@thi.ng/transducers/transduce\";\nimport { comp } from \"@thi.ng/transducers/func/comp\";\n@@ -15,21 +16,46 @@ import { partition } from \"@thi.ng/transducers/xform/partition\";\nconst W = 128;\nconst H = 48;\n+const presets: DropDownOption[] = [\n+ [\"\", \"custom\"],\n+ [\"000100000-001100000\", \"conway\"],\n+ [\"000100000-001110000\", \"maze #1\"],\n+ [\"000111111-000001111\", \"maze #2\"],\n+ [\"000001111-111111110\", \"dots\"],\n+ [\"000101111-000001111\", \"growth\"],\n+ [\"000001011-001011111\", \"organic\"],\n+ [\"000010011-000011111\", \"angular\"],\n+];\n+\nlet grid;\nlet rules;\n// 3x3 convolution kernel (Moore neighborhood)\nconst kernel = buildKernel2d([1, 1, 1, 1, 0, 1, 1, 1, 1], 3, 3);\n-// parse rules from location hash (2 groups of 9 bits each)\n-// (essentially this is a compressed finite state machine)\n-if (location.hash.length === 20) {\n- rules = transduce(comp(map((x: string) => parseInt(x, 2)), bits(9), partition(9)), push(), location.hash.substr(1).split(\"-\"));\n-} else {\n- // Conway CA default state rules [[dead], [alive]]\n- rules = [[0, 0, 0, 1, 0, 0, 0, 0, 0], [0, 0, 1, 1, 0, 0, 0, 0, 0]];\n+const setHash = () => (location.hash = rules[0].join(\"\") + \"-\" + rules[1].join(\"\"));\n+\n+// parse rules from string (e.g. location hash): 2 groups of 9 bits each\n+// (essentially these rules are a compressed finite state machine)\n+const parseRules = (raw) =>\n+ transduce(\n+ comp(\n+ map((x: string) => parseInt(x, 2)),\n+ bits(9),\n+ partition(9)\n+ ),\n+ push(),\n+ raw.split(\"-\")\n+ );\n+\n+const applyRules = (raw) => {\n+ if (raw.length === 19) {\n+ rules = parseRules(raw);\n+ randomizeGrid();\n+ setHash();\n}\n+};\n-const setHash = () => (location.hash = rules[0].join(\"\") + \"-\" + rules[1].join(\"\"));\n+// create random bit sequence w/ ones appearing in given probability\nconst randomSeq = (num, prob = 0.5) => [...repeatedly(() => Math.random() < prob ? 1 : 0, num)];\nconst randomizeGrid = (prob = 0.5) => (grid = randomSeq(W * H, prob));\nconst randomizeRules = () => {\n@@ -79,16 +105,19 @@ const ruleBoxes = (prefix, i) =>\n...rules[i].map((rule, j) => checkbox(rule, (e) => setRule(i, j, e.target.checked))),\n];\n-// seed grid with noise\n-randomizeGrid();\n+// Use Conway CA default state rules [[dead], [alive]] if no preset present in hash\n+applyRules(location.hash.length === 20 ? location.hash.substr(1) : presets[1][0]);\n// define & start main app component\nstart(\"app\", () => {\nreturn [\"div\",\nruleBoxes(\"birth\", 0),\nruleBoxes(\"survive\", 1),\n- [\"div\", [\"button\", { onclick: () => randomizeRules() }, \"randomize rules\"]],\n- [\"div\", [\"button\", { onclick: () => randomizeGrid() }, \"reset grid\"]],\n+ [\"div\",\n+ [\"button\", { onclick: () => randomizeRules() }, \"randomize rules\"],\n+ [\"button\", { onclick: () => randomizeGrid() }, \"reset grid\"],\n+ dropdown({ onchange: (e) => applyRules(e.target.value) }, presets, location.hash.substr(1))\n+ ],\n[\"pre\", format(grid = convolve(grid))]\n];\n});\n",
"new_path": "examples/cellular-automata/src/index.ts",
"old_path": "examples/cellular-automata/src/index.ts"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
feat(examples): add CA presets, refactor rule handling
| 1
|
feat
|
examples
|
679,913
|
19.02.2018 13:44:20
| 0
|
ac831234786db57a3e49fd19b6fccf35667ee82b
|
build(hiccup-dom-components): add missing deps
|
[
{
"change_type": "MODIFY",
"diff": "\"typescript\": \"^2.7.2\",\n\"webpack\": \"^3.11.0\"\n},\n- \"dependencies\": {},\n+ \"dependencies\": {\n+ \"@thi.ng/checks\": \"^1.2.1\",\n+ \"@thi.ng/hiccup\": \"^1.0.2\"\n+ },\n\"keywords\": [\n\"ES6\",\n\"typescript\"\n",
"new_path": "packages/hiccup-dom-components/package.json",
"old_path": "packages/hiccup-dom-components/package.json"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
build(hiccup-dom-components): add missing deps
| 1
|
build
|
hiccup-dom-components
|
679,913
|
19.02.2018 13:49:26
| 0
|
3627087baec94553412e112ece15c0113dd2e349
|
docs: add hiccup-dom-components to main readme
|
[
{
"change_type": "MODIFY",
"diff": "@@ -31,6 +31,7 @@ All packages are:\n| [`@thi.ng/diff`](./packages/diff) | [](https://www.npmjs.com/package/@thi.ng/diff) | [changelog](./packages/diff/CHANGELOG.md) |\n| [`@thi.ng/hiccup`](./packages/hiccup) | [](https://www.npmjs.com/package/@thi.ng/hiccup) | [changelog](./packages/hiccup/CHANGELOG.md) |\n| [`@thi.ng/hiccup-dom`](./packages/hiccup-dom) | [](https://www.npmjs.com/package/@thi.ng/hiccup-dom) | [changelog](./packages/hiccup-dom/CHANGELOG.md) |\n+| [`@thi.ng/hiccup-dom-components`](./packages/hiccup-dom-components) | [](https://www.npmjs.com/package/@thi.ng/hiccup-dom-components) | [changelog](./packages/hiccup-dom-components/CHANGELOG.md) |\n| [`@thi.ng/iterators`](./packages/iterators) | [](https://www.npmjs.com/package/@thi.ng/iterators) | [changelog](./packages/iterators/CHANGELOG.md) |\n| [`@thi.ng/rle-pack`](./packages/rle-pack) | [](https://www.npmjs.com/package/@thi.ng/rle-pack) | [changelog](./packages/rle-pack/CHANGELOG.md) |\n| [`@thi.ng/rstream`](./packages/rstream) | [](https://www.npmjs.com/package/@thi.ng/rstream) | [changelog](./packages/rstream/CHANGELOG.md) |\n",
"new_path": "README.md",
"old_path": "README.md"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
docs: add hiccup-dom-components to main readme
| 1
|
docs
| null |
679,913
|
19.02.2018 13:54:58
| 0
|
9c8e877ac7069da73a68307e120a30444830f523
|
build(examples): update CA deps
|
[
{
"change_type": "MODIFY",
"diff": "\"dependencies\": {\n\"@thi.ng/api\": \"^2.0.3\",\n\"@thi.ng/hiccup-dom\": \"^1.0.5\",\n- \"@thi.ng/hiccup-dom-components\": \"^0.0.1\",\n- \"@thi.ng/rstream\": \"^1.0.0\",\n+ \"@thi.ng/hiccup-dom-components\": \"^0.1.0\",\n\"@thi.ng/transducers\": \"^1.2.0\"\n}\n}\n\\ No newline at end of file\n",
"new_path": "examples/cellular-automata/package.json",
"old_path": "examples/cellular-automata/package.json"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
build(examples): update CA deps
| 1
|
build
|
examples
|
679,913
|
19.02.2018 14:04:13
| 0
|
77c95322ebeea91ff7ec56905e8d807ddd44e6ae
|
refactor(examples): update webgl demo
re-use canvas component from hiccup-dom-components
|
[
{
"change_type": "MODIFY",
"diff": "\"webpack\": \"^3.11.0\"\n},\n\"dependencies\": {\n- \"@thi.ng/hiccup-dom\": \"^1.0.5\"\n+ \"@thi.ng/hiccup-dom\": \"^1.0.5\",\n+ \"@thi.ng/hiccup-dom-components\": \"^0.1.0\"\n}\n}\n\\ No newline at end of file\n",
"new_path": "examples/webgl/package.json",
"old_path": "examples/webgl/package.json"
},
{
"change_type": "MODIFY",
"diff": "import { start } from \"@thi.ng/hiccup-dom\";\n-\n-// reusable GL canvas component\n-const glcanvas = (init, update, attribs) => {\n- let gl: WebGLRenderingContext;\n- let frame = 0;\n- return [{\n- init(el: HTMLCanvasElement) {\n- gl = el.getContext(\"webgl\");\n- init(gl);\n- },\n- render() {\n- gl && update(gl, frame++);\n- return [\"canvas\", attribs]\n- }\n- }];\n-};\n+import { canvasWebGL } from \"@thi.ng/hiccup-dom-components/canvas\";\n// canvas init hook\nconst initGL = (_: WebGLRenderingContext) => {\n@@ -37,7 +22,7 @@ start(\ndocument.getElementById(\"app\"),\n// instantiate multiple canvases w/ different configs\n[\"div\",\n- glcanvas(initGL, updateGL(0, 0.01), { width: 100, height: 100 }),\n- glcanvas(initGL, updateGL(200, 0.025), { width: 100, height: 100 }),\n- glcanvas(initGL, updateGL(400, 0.05), { width: 100, height: 100 })]\n+ canvasWebGL(initGL, updateGL(0, 0.01), { width: 100, height: 100 }),\n+ canvasWebGL(initGL, updateGL(200, 0.025), { width: 100, height: 100 }),\n+ canvasWebGL(initGL, updateGL(400, 0.05), { width: 100, height: 100 })]\n);\n",
"new_path": "examples/webgl/src/index.ts",
"old_path": "examples/webgl/src/index.ts"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
refactor(examples): update webgl demo
- re-use canvas component from hiccup-dom-components
| 1
|
refactor
|
examples
|
815,745
|
19.02.2018 14:23:03
| -7,200
|
d0295fa94f62c08f810b236113f374998fd73055
|
feat(tags): support promise while creating new tag
* feat(tags): support promise while creating new tag
closes
|
[
{
"change_type": "MODIFY",
"diff": "@@ -119,27 +119,27 @@ map: {\n## API\n| Input | Type | Default | Required | Description |\n| ------------- | ------------- | ------------- | ------------- | ------------- |\n-| [items] | Array<NgOption> | `[]` | yes | Items array |\n-| bindLabel | string | `label` | no | Object property to use for label. Default `label` |\n-| bindValue | string | `-` | no | Object property to use for selected model. By default binds to whole object. |\n-| [clearable] | boolean | `true` | no | Allow to clear selected value. Default `true`|\n-| [markFirst] | boolean | `true` | no | Marks first item as focused when opening/filtering. Default `true`|\n-| [searchable] | boolean | `true` | no | Allow to search for value. Default `true`|\n-| multiple | boolean | `false` | no | Allows to select multiple items. |\n-| maxSelectedItems | number | none | no | When multiple = true, allows to set a limit number of selection. |\n-| [addTag] | Function or boolean | `false` | no | Allows to create custom options. Using boolean simply adds tag with value as bindLabel. If you want custom properties add function which returns object. |\n-| placeholder | string | `-` | no | Placeholder text. |\n-| notFoundText | string | `No items found` | no | Set custom text when filter returns empty result |\n-| typeToSearchText | string | `Type to search` | no | Set custom text when using Typeahead |\n-| clearAllText | string | `Clear all` | no | Set custom text for clear all icon title |\n-| addTagText | string | `Add item` | no | Set custom text when using tagging |\n-| loadingText | string | `Loading...` | no | Set custom text when for loading items |\n-| [typeahead] | Subject | `-` | no | Custom autocomplete or filter. |\n-| [disableVirtualScroll] | boolean | false | no | Disable virtual scroll |\n+| [items] | `Array<NgOption>` | `[]` | yes | Items array |\n+| bindLabel | `string` | `label` | no | Object property to use for label. Default `label` |\n+| bindValue | `string` | `-` | no | Object property to use for selected model. By default binds to whole object. |\n+| [clearable] | `boolean` | `true` | no | Allow to clear selected value. Default `true`|\n+| [markFirst] | `boolean` | `true` | no | Marks first item as focused when opening/filtering. Default `true`|\n+| [searchable] | `boolean` | `true` | no | Allow to search for value. Default `true`|\n+| multiple | `boolean` | `false` | no | Allows to select multiple items. |\n+| maxSelectedItems | `number` | none | no | When multiple = true, allows to set a limit number of selection. |\n+| [addTag] | `boolean | ((term: string) => any | Promise<any>)` | `false` | no | Allows to create custom options. Using boolean simply adds tag with value as bindLabel. If you want custom properties add function which returns object or Promise. |\n+| placeholder | `string` | `-` | no | Placeholder text. |\n+| notFoundText | `string` | `No items found` | no | Set custom text when filter returns empty result |\n+| typeToSearchText | `string` | `Type to search` | no | Set custom text when using Typeahead |\n+| clearAllText | `string` | `Clear all` | no | Set custom text for clear all icon title |\n+| addTagText | `string` | `Add item` | no | Set custom text when using tagging |\n+| loadingText | `string` | `Loading...` | no | Set custom text when for loading items |\n+| [typeahead] | `Subject` | `-` | no | Custom autocomplete or filter. 
|\n+| [disableVirtualScroll] | `boolean` | false | no | Disable virtual scroll |\n| dropdownPosition | `bottom`,`top`,`auto` | `bottom` | no | Set the dropdown position on open |\n-| appendTo | string | null | no | Append drodown to body or any other element using css selector |\n-| loading | boolean | `-` | no | you can set the loading state from the outside (e.g. async items loading) |\n-| closeOnSelect | boolean | true | no | whether to close the menu when a value is selected |\n+| appendTo | `string` | null | no | Append drodown to body or any other element using css selector |\n+| loading | `boolean` | `-` | no | you can set the loading state from the outside (e.g. async items loading) |\n+| closeOnSelect | `boolean` | true | no | whether to close the menu when a value is selected |\n| Output | Description |\n| ------------- | ------------- |\n",
"new_path": "README.md",
"old_path": "README.md"
},
{
"change_type": "MODIFY",
"diff": "@@ -1012,6 +1012,26 @@ describe('NgSelectComponent', function () {\nid: 'custom tag', name: 'custom tag', custom: true\n}));\n}));\n+\n+ it('should select custom tag with promise', fakeAsync(() => {\n+ let fixture = createTestingModule(\n+ NgSelectBasicTestCmp,\n+ `<ng-select [items]=\"cities\"\n+ bindLabel=\"name\"\n+ [addTag]=\"tagFuncPromise\"\n+ placeholder=\"select value\"\n+ [(ngModel)]=\"selectedCity\">\n+ </ng-select>`);\n+\n+ tickAndDetectChanges(fixture);\n+ fixture.componentInstance.select.onFilter('server side tag');\n+ tickAndDetectChanges(fixture);\n+ triggerKeyDownEvent(getNgSelectElement(fixture), KeyCode.Enter);\n+ tick();\n+ expect(<any>fixture.componentInstance.selectedCity).toEqual(jasmine.objectContaining({\n+ id: 5, name: 'server side tag', valid: true\n+ }));\n+ }));\n});\ndescribe('Placeholder', () => {\n@@ -1514,6 +1534,11 @@ class NgSelectBasicTestCmp {\ntagFunc(term) {\nreturn { id: term, name: term, custom: true }\n}\n+ tagFuncPromise(term) {\n+ return Promise.resolve({\n+ id: 5, name: term, valid: true\n+ });\n+ };\n}\n@Component({\n",
"new_path": "src/ng-select/ng-select.component.spec.ts",
"old_path": "src/ng-select/ng-select.component.spec.ts"
},
{
"change_type": "MODIFY",
"diff": "@@ -82,7 +82,7 @@ export class NgSelectComponent implements OnInit, OnDestroy, OnChanges, AfterVie\n@Input() maxSelectedItems: number;\n@Input() @HostBinding('class.typeahead') typeahead: Subject<string>;\n@Input() @HostBinding('class.ng-multiple') multiple = false;\n- @Input() @HostBinding('class.taggable') addTag: boolean | ((term: string) => NgOption) = false;\n+ @Input() @HostBinding('class.taggable') addTag: boolean | ((term: string) => any | Promise<any>) = false;\n@Input() @HostBinding('class.searchable') searchable = true;\n// output events\n@@ -335,15 +335,19 @@ export class NgSelectComponent implements OnInit, OnDestroy, OnChanges, AfterVie\n}\nselectTag() {\n- let tag = {};\n+ let tag;\nif (this.addTag instanceof Function) {\n- tag = this.addTag(this.filterValue)\n+ tag = this.addTag(this.filterValue);\n} else {\n- tag[this.bindLabel] = this.filterValue;\n+ tag = { [this.bindLabel]: this.filterValue };\n}\n- const item = this.itemsList.addItem(tag);\n- this.select(item);\n+ if (tag instanceof Promise) {\n+ tag.then(newTag => this.select(this.itemsList.addItem(newTag)))\n+ .catch(() => { });\n+ } else if (tag) {\n+ this.select(this.itemsList.addItem(tag));\n+ }\n}\nshowClear() {\n",
"new_path": "src/ng-select/ng-select.component.ts",
"old_path": "src/ng-select/ng-select.component.ts"
}
] |
TypeScript
|
MIT License
|
ng-select/ng-select
|
feat(tags): support promise while creating new tag (#273)
* feat(tags): support promise while creating new tag
closes #220
| 1
|
feat
|
tags
|
679,913
|
19.02.2018 15:52:22
| 0
|
193058d54678f76da88ceaec88bd427f8e327b7c
|
feat(transducers): add lookup1d/2d/3d helpers, update re-exports
|
[
{
"change_type": "ADD",
"diff": "+/**\n+ * Returns function accepting a single index arg used to\n+ * lookup value in given array. No bounds checks are done.\n+ *\n+ * ```\n+ * [...iterator(map(lookup1d([10, 20, 30])), [2,0,1])]\n+ * // [ 30, 10, 20 ]\n+ * ```\n+ *\n+ * @param src source data\n+ */\n+export function lookup1d<T>(src: T[]) {\n+ return (i: number) => src[i];\n+}\n+\n+/**\n+ * Returns function accepting a single `[x, y]` index tuple,\n+ * used to lookup value in given array. Useful for transducers\n+ * processing 2D data. **Note**: The source data MUST be in\n+ * row major linearized format, i.e. 1D representation of 2D data\n+ * (pixel buffer). No bounds checks are done.\n+ *\n+ * ```\n+ * [...iterator(map(lookup2d([...range(9)], 3)), range2d(2, -1, 0, 3))]\n+ * // [ 2, 1, 0, 5, 4, 3, 8, 7, 6 ]\n+ * ```\n+ *\n+ * @param src source data\n+ * @param width number of items along X (columns)\n+ */\n+export function lookup2d<T>(src: T[], width: number) {\n+ return (i: number[]) => src[i[0] + i[1] * width];\n+}\n+\n+/**\n+ * Same as `lookup2d()`, but for 3D data. The index ordering of the\n+ * source data MUST be in Z, Y, X order (i.e. a stack of row major 2D slices).\n+ * No bounds checks are done.\n+ *\n+ * @param src source data\n+ * @param width number of items along X (columns)\n+ * @param height number of items along Y (rows)\n+ */\n+export function lookup3d<T>(src: T[], width: number, height: number) {\n+ const stridez = width * height;\n+ return (i: number[]) => src[i[0] + i[1] * width + i[2] * stridez];\n+}\n",
"new_path": "packages/transducers/src/func/lookup.ts",
"old_path": null
},
{
"change_type": "MODIFY",
"diff": "@@ -94,6 +94,7 @@ export * from \"./func/hex\";\nexport * from \"./func/identity\";\nexport * from \"./func/juxt\";\nexport * from \"./func/key-selector\";\n+export * from \"./func/lookup\";\nexport * from \"./func/odd\";\nexport * from \"./func/renamer\";\nexport * from \"./func/swizzler\";\n",
"new_path": "packages/transducers/src/index.ts",
"old_path": "packages/transducers/src/index.ts"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
feat(transducers): add lookup1d/2d/3d helpers, update re-exports
| 1
|
feat
|
transducers
|
679,913
|
19.02.2018 15:58:05
| 0
|
9b664d253ce2a7fc70756bcdf91e516e4e7dbbd9
|
refactor(examples): simplify CA transducers, update rule format/handling
|
[
{
"change_type": "MODIFY",
"diff": "@@ -2,7 +2,9 @@ import { start } from \"@thi.ng/hiccup-dom\";\nimport { dropdown, DropDownOption } from \"@thi.ng/hiccup-dom-components/dropdown\";\nimport { transduce } from \"@thi.ng/transducers/transduce\";\n+import { step } from \"@thi.ng/transducers/step\";\nimport { comp } from \"@thi.ng/transducers/func/comp\";\n+import { lookup2d } from \"@thi.ng/transducers/func/lookup\";\nimport { range2d } from \"@thi.ng/transducers/iter/range2d\";\nimport { repeatedly } from \"@thi.ng/transducers/iter/repeatedly\";\nimport { push } from \"@thi.ng/transducers/rfn/push\";\n@@ -18,38 +20,37 @@ const H = 48;\nconst presets: DropDownOption[] = [\n[\"\", \"custom\"],\n- [\"000100000-001100000\", \"conway\"],\n- [\"000100000-001110000\", \"maze #1\"],\n- [\"000111111-000001111\", \"maze #2\"],\n- [\"000001111-111111110\", \"dots\"],\n- [\"000101111-000001111\", \"growth\"],\n- [\"000001011-001011111\", \"organic\"],\n- [\"000010011-000011111\", \"angular\"],\n+ [\"000100000001100000\", \"conway\"],\n+ [\"000100000001110000\", \"maze #1\"],\n+ [\"000111111000001111\", \"maze #2\"],\n+ [\"000001111111111110\", \"dots\"],\n+ [\"000101111000001111\", \"growth\"],\n+ [\"000001011001011111\", \"organic\"],\n+ [\"000010011000011111\", \"angular\"],\n];\n-let grid;\n-let rules;\n+// container for cell states\n+let grid: number[];\n+// CA rules are stored in a linearized 2x9 array: 2 groups of 9 bits each\n+// essentially these rules are a compressed finite state machine\n+let rules: number[];\n// 3x3 convolution kernel (Moore neighborhood)\nconst kernel = buildKernel2d([1, 1, 1, 1, 0, 1, 1, 1, 1], 3, 3);\n-const setHash = () => (location.hash = rules[0].join(\"\") + \"-\" + rules[1].join(\"\"));\n+const setHash = () => (location.hash = rules.join(\"\"));\n-// parse rules from string (e.g. location hash): 2 groups of 9 bits each\n-// (essentially these rules are a compressed finite state machine)\n-const parseRules = (raw) =>\n- transduce(\n+// build transducer to parse rules from string (e.g. location hash or preset)\n+// (an older version used a preset format w/ \"-\" to separate rule groups)\n+const parseRules = step(\ncomp(\n- map((x: string) => parseInt(x, 2)),\n- bits(9),\n- partition(9)\n- ),\n- push(),\n- raw.split(\"-\")\n+ map((x: string) => parseInt(x.replace(\"-\", \"\"), 2)),\n+ bits(18)\n+ )\n);\nconst applyRules = (raw) => {\n- if (raw.length === 19) {\n- rules = parseRules(raw);\n+ if (raw.length >= 18) {\n+ rules = <number[]>parseRules(raw);\nrandomizeGrid();\nsetHash();\n}\n@@ -59,39 +60,41 @@ const applyRules = (raw) => {\nconst randomSeq = (num, prob = 0.5) => [...repeatedly(() => Math.random() < prob ? 1 : 0, num)];\nconst randomizeGrid = (prob = 0.5) => (grid = randomSeq(W * H, prob));\nconst randomizeRules = () => {\n- rules = [randomSeq(9), randomSeq(9)];\n+ rules = randomSeq(18);\nrandomizeGrid();\nsetHash();\n};\n-// apply convolution & CA rules\n+// apply convolution & CA rules (in basically 2 lines of code, i.e. 
the transducer part!!)\n// this produces the next generation of the CA\n-// we're using `multiplex` to produce a tuple of `[orig-cell-value, neighbor-count]`\n-export const convolve = (src, wrap = true) =>\n+// we're using `multiplex` to run 2 transducers in parallel and\n+// produce a tuple of `[neighbor-count, orig-cell-value]`\n+// this tuple is then used to lookup the next cell state using the current rule set\n+export const convolve = (src: number[], rules: number[], width: number, height: number, rstride = 9, wrap = true) =>\ntransduce(\ncomp(\n- multiplex(map((p) => src[p[0] + p[1] * W]), convolve2d(src, W, H, kernel, wrap)),\n- map(([alive, neighbors]) => rules[alive][neighbors])\n+ multiplex(convolve2d(src, width, height, kernel, wrap), map(lookup2d(src, width))),\n+ map(lookup2d(rules, rstride))\n),\npush(),\n- range2d(W, H)\n+ range2d(width, height)\n);\n// format grid values as string\n-const format = (src) =>\n+const format = (src: number[], width: number, fill = \"\\u2588\", empty = \" \") =>\ntransduce(\ncomp(\n- map((x: number) => x ? \"\\u2588\" : \" \"),\n- partition(W),\n- map(x => x.join(\"\"))\n+ map((x: number) => x ? fill : empty),\n+ partition(width),\n+ map((x) => x.join(\"\"))\n),\nstr(\"\\n\"),\nsrc\n);\n// event handler for rule edits\n-const setRule = (i, j, s) => {\n- rules[i][j] = s ? 1 : 0;\n+const setRule = (i: number, j: number, s: number, rstride = 9) => {\n+ rules[i * rstride + j] = s ? 1 : 0;\nsetHash();\n};\n@@ -99,14 +102,16 @@ const setRule = (i, j, s) => {\nconst checkbox = (x, onchange) => [\"input\", { type: \"checkbox\", checked: !!x, onchange }];\n// component for single CA rule group (alive / dead FSM)\n-const ruleBoxes = (prefix, i) =>\n+const ruleBoxes = (prefix, i, rstride = 9) =>\n[\"div\",\n[\"label\", prefix],\n- ...rules[i].map((rule, j) => checkbox(rule, (e) => setRule(i, j, e.target.checked))),\n+ ...rules\n+ .slice(i * rstride, (i + 1) * rstride)\n+ .map((rule, j) => checkbox(rule, (e) => setRule(i, j, e.target.checked))),\n];\n// Use Conway CA default state rules [[dead], [alive]] if no preset present in hash\n-applyRules(location.hash.length === 20 ? location.hash.substr(1) : presets[1][0]);\n+applyRules(location.hash.length > 18 ? location.hash.substr(1) : presets[1][0]);\n// define & start main app component\nstart(\"app\", () => {\n@@ -118,6 +123,6 @@ start(\"app\", () => {\n[\"button\", { onclick: () => randomizeGrid() }, \"reset grid\"],\ndropdown({ onchange: (e) => applyRules(e.target.value) }, presets, location.hash.substr(1))\n],\n- [\"pre\", format(grid = convolve(grid))]\n+ [\"pre\", format(grid = convolve(grid, rules, W, H), W)]\n];\n});\n",
"new_path": "examples/cellular-automata/src/index.ts",
"old_path": "examples/cellular-automata/src/index.ts"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
refactor(examples): simplify CA transducers, update rule format/handling
| 1
|
refactor
|
examples
|
679,913
|
19.02.2018 15:59:25
| 0
|
31a4710aa72d24b152ca43c4e08ff6dd2a10a45c
|
build(examples): update CA deps (transducers)
|
[
{
"change_type": "MODIFY",
"diff": "\"@thi.ng/api\": \"^2.0.3\",\n\"@thi.ng/hiccup-dom\": \"^1.0.5\",\n\"@thi.ng/hiccup-dom-components\": \"^0.1.0\",\n- \"@thi.ng/transducers\": \"^1.2.0\"\n+ \"@thi.ng/transducers\": \"^1.3.0\"\n}\n}\n\\ No newline at end of file\n",
"new_path": "examples/cellular-automata/package.json",
"old_path": "examples/cellular-automata/package.json"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
build(examples): update CA deps (transducers)
| 1
|
build
|
examples
|
807,849
|
19.02.2018 18:31:01
| 28,800
|
42dc70c10a1126752ca9087709c9ec59e5d88d2f
|
fix: move --registry option to commands that use it
|
[
{
"change_type": "MODIFY",
"diff": "@@ -61,11 +61,6 @@ const builder = {\nInclude all transitive dependencies when running a command, regardless of --scope, --since or --ignore.\n`,\n},\n- registry: {\n- describe: \"Use the specified registry for all npm client operations.\",\n- type: \"string\",\n- requiresArg: true,\n- },\n\"reject-cycles\": {\ndescribe: \"Fail if a cycle is detected among dependencies\",\ntype: \"boolean\",\n",
"new_path": "src/Command.js",
"old_path": "src/Command.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -64,6 +64,12 @@ exports.builder = yargs =>\ntype: \"string\",\nrequiresArg: true,\n},\n+ registry: {\n+ group: \"Command Options:\",\n+ describe: \"Use the specified registry for all npm client operations.\",\n+ type: \"string\",\n+ requiresArg: true,\n+ },\n});\nclass BootstrapCommand extends Command {\n",
"new_path": "src/commands/BootstrapCommand.js",
"old_path": "src/commands/BootstrapCommand.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -112,6 +112,12 @@ exports.builder = {\ntype: \"string\",\nrequiresArg: true,\n},\n+ registry: {\n+ group: \"Command Options:\",\n+ describe: \"Use the specified registry for all npm client operations.\",\n+ type: \"string\",\n+ requiresArg: true,\n+ },\npreid: {\ngroup: \"Command Options:\",\ndescribe: \"Specify the prerelease identifier (major.minor.patch-pre).\",\n@@ -168,7 +174,7 @@ class PublishCommand extends Command {\nthis.npmConfig = {\nnpmClient: this.options.npmClient || \"npm\",\n- registry: this.npmRegistry,\n+ registry: this.options.registry,\n};\nif (this.options.useGitVersion && !this.options.exact) {\n@@ -742,10 +748,10 @@ class PublishCommand extends Command {\nremoveTempTag(pkg) {\nreturn Promise.resolve()\n- .then(() => npmDistTag.check(pkg, \"lerna-temp\", this.npmRegistry))\n+ .then(() => npmDistTag.check(pkg, \"lerna-temp\", this.npmConfig.registry))\n.then(exists => {\nif (exists) {\n- return npmDistTag.remove(pkg, \"lerna-temp\", this.npmRegistry);\n+ return npmDistTag.remove(pkg, \"lerna-temp\", this.npmConfig.registry);\n}\n});\n}\n@@ -754,7 +760,7 @@ class PublishCommand extends Command {\nconst distTag = this.getDistTag();\nconst version = this.options.canary ? pkg.version : this.updatesVersions.get(pkg.name);\n- return this.removeTempTag(pkg).then(() => npmDistTag.add(pkg, version, distTag, this.npmRegistry));\n+ return this.removeTempTag(pkg).then(() => npmDistTag.add(pkg, version, distTag, this.npmConfig.registry));\n}\ngetDistTag() {\n",
"new_path": "src/commands/PublishCommand.js",
"old_path": "src/commands/PublishCommand.js"
}
] |
JavaScript
|
MIT License
|
lerna/lerna
|
fix: move --registry option to commands that use it
| 1
|
fix
| null |
815,745
|
19.02.2018 21:38:07
| -7,200
|
f3cfc7acd8e95eaf060e5e1423d24db9e967dba2
|
feat(template): expose label in ng-label-tmp
closes
|
[
{
"change_type": "MODIFY",
"diff": "<ng-template\n[ngTemplateOutlet]=\"labelTemplate || defaultLabelTemplate\"\n- [ngTemplateOutletContext]=\"{ item: item.value, clear: clearItem }\">\n+ [ngTemplateOutletContext]=\"{ item: item.value, clear: clearItem, label: item.label }\">\n</ng-template>\n</div>\n",
"new_path": "src/ng-select/ng-select.component.html",
"old_path": "src/ng-select/ng-select.component.html"
},
{
"change_type": "MODIFY",
"diff": "@@ -343,7 +343,7 @@ export class NgSelectComponent implements OnInit, OnDestroy, OnChanges, AfterVie\n}\nif (tag instanceof Promise) {\n- tag.then(newTag => this.select(this.itemsList.addItem(newTag)))\n+ tag.then(item => this.select(this.itemsList.addItem(item)))\n.catch(() => { });\n} else if (tag) {\nthis.select(this.itemsList.addItem(tag));\n",
"new_path": "src/ng-select/ng-select.component.ts",
"old_path": "src/ng-select/ng-select.component.ts"
}
] |
TypeScript
|
MIT License
|
ng-select/ng-select
|
feat(template): expose label in ng-label-tmp
closes #274
| 1
|
feat
|
template
|
791,690
|
20.02.2018 13:16:31
| 28,800
|
2796bbd98f6ba6a5a902a70c19558f11ede34717
|
extension: fix issue where gatherers cannot be found
|
[
{
"change_type": "MODIFY",
"diff": "@@ -136,7 +136,7 @@ gulp.task('browserify-lighthouse', () => {\nbundle = bundle.require(audit, {expose: audit.replace(corePath, '../')});\n});\ngatherers.forEach(gatherer => {\n- bundle = bundle.require(gatherer, {expose: gatherer.replace(driverPath, './')});\n+ bundle = bundle.require(gatherer, {expose: gatherer.replace(driverPath, '../gather/')});\n});\ncomputedArtifacts.forEach(artifact => {\nbundle = bundle.require(artifact, {expose: artifact.replace(corePath, './')});\n",
"new_path": "lighthouse-extension/gulpfile.js",
"old_path": "lighthouse-extension/gulpfile.js"
}
] |
JavaScript
|
Apache License 2.0
|
googlechrome/lighthouse
|
extension: fix issue where gatherers cannot be found (#4592)
| 1
|
extension
| null |
791,690
|
20.02.2018 13:26:19
| 28,800
|
ee98193ee97ff81ed9142421cc468f32626b8463
|
docs(programmatic): add caveat about removing artifacts
|
[
{
"change_type": "MODIFY",
"diff": "@@ -14,8 +14,11 @@ const chromeLauncher = require('chrome-launcher');\nfunction launchChromeAndRunLighthouse(url, opts, config = null) {\nreturn chromeLauncher.launch({chromeFlags: opts.chromeFlags}).then(chrome => {\nopts.port = chrome.port;\n- return lighthouse(url, opts, config).then(results =>\n- chrome.kill().then(() => results));\n+ return lighthouse(url, opts, config).then(results => {\n+ // The gathered artifacts are typically removed as they can be quite large (~50MB+)\n+ delete results.artifacts;\n+ return chrome.kill().then(() => results)\n+ });\n});\n}\n",
"new_path": "docs/readme.md",
"old_path": "docs/readme.md"
}
] |
JavaScript
|
Apache License 2.0
|
googlechrome/lighthouse
|
docs(programmatic): add caveat about removing artifacts (#4540)
| 1
|
docs
|
programmatic
|
791,719
|
20.02.2018 18:00:21
| 28,800
|
860550870a3e9959d5efcbb8fac14eb93bcbda98
|
core(seo): link to "meta description" reference
|
[
{
"change_type": "MODIFY",
"diff": "@@ -18,7 +18,7 @@ class Description extends Audit {\nfailureDescription: 'Document does not have a meta description',\nhelpText: 'Meta descriptions may be included in search results to concisely summarize ' +\n'page content. ' +\n- '[Learn more](https://support.google.com/webmasters/answer/35624?hl=en#1).',\n+ '[Learn more](https://developers.google.com/web/tools/lighthouse/audits/description).',\nrequiredArtifacts: ['MetaDescription'],\n};\n}\n",
"new_path": "lighthouse-core/audits/seo/meta-description.js",
"old_path": "lighthouse-core/audits/seo/meta-description.js"
}
] |
JavaScript
|
Apache License 2.0
|
googlechrome/lighthouse
|
core(seo): link to "meta description" reference (#4566)
| 1
|
core
|
seo
|
791,719
|
21.02.2018 11:57:59
| 28,800
|
429fff796de2217f7c2b0c9a0273fd6d0ecec856
|
report: add docs link to bootup-time audit
|
[
{
"change_type": "MODIFY",
"diff": "@@ -21,8 +21,9 @@ class BootupTime extends Audit {\nname: 'bootup-time',\ndescription: 'JavaScript boot-up time',\nfailureDescription: 'JavaScript boot-up time is too high',\n- helpText: 'Consider reducing the time spent parsing, compiling and executing JS. ' +\n- 'You may find delivering smaller JS payloads helps with this.',\n+ helpText: 'Consider reducing the time spent parsing, compiling, and executing JS. ' +\n+ 'You may find delivering smaller JS payloads helps with this. [Learn ' +\n+ 'more](https://developers.google.com/web/lighthouse/audits/bootup).',\nrequiredArtifacts: ['traces'],\n};\n}\n",
"new_path": "lighthouse-core/audits/bootup-time.js",
"old_path": "lighthouse-core/audits/bootup-time.js"
}
] |
JavaScript
|
Apache License 2.0
|
googlechrome/lighthouse
|
report: add docs link to bootup-time audit (#4537)
| 1
|
report
| null |
791,731
|
21.02.2018 13:44:41
| 28,800
|
7226fcd8dd9fc2422f15180b6e83b5925a1a24d9
|
docs(headless-chrome): switch to prioritize headless
|
[
{
"change_type": "MODIFY",
"diff": "# Running Lighthouse using headless Chrome\n-For now, we recommend running Chrome with xvfb. See below.\n+## CLI (headless)\n+\n+Setup:\n+\n+```sh\n+# get node 6\n+curl -sL https://deb.nodesource.com/setup_6.x | sudo -E bash - &&\\\n+sudo apt-get install -y nodejs\n+\n+# get chromium (stable)\n+apt-get install chromium-browser\n+\n+# install lighthouse\n+npm i -g lighthouse\n+```\n+\n+Kick off run of Lighthouse using headless Chrome:\n+\n+```sh\n+lighthouse --chrome-flags=\"--headless\" https://github.com\n+```\n## CLI (xvfb)\n-Chrome + xvfb is the stable solution we recommend. These steps worked on Debian Jessie:\n+Alternatively, you can run full Chrome + xvfb instead of headless mode. These steps worked on Debian Jessie:\n```sh\n# get node 6\n@@ -35,32 +55,7 @@ xvfb-run --server-args='-screen 0, 1024x768x16' \\\nlighthouse --port=9222 https://github.com\n```\n-## CLI (headless)\n-\n-> **Note**: Headless Chrome still has a few bugs to work out. For example, [network emulation](https://bugs.chromium.org/p/chromium/issues/detail?id=728451) is not supported yet.\n-This can affect the accuracy of performance scores returned by Lighthouse.\n-\n-Setup:\n-\n-```sh\n-# get node 6\n-curl -sL https://deb.nodesource.com/setup_6.x | sudo -E bash - &&\\\n-sudo apt-get install -y nodejs\n-\n-# get chromium (stable)\n-apt-get install chromium-browser\n-\n-# install lighthouse\n-npm i -g lighthouse\n-```\n-\n-Kick off run of Lighthouse using headless Chrome:\n-\n-```sh\n-lighthouse --chrome-flags=\"--headless\" https://github.com\n-```\n-\n-## Node\n+## Node module\nInstall:\n",
"new_path": "docs/headless-chrome.md",
"old_path": "docs/headless-chrome.md"
}
] |
JavaScript
|
Apache License 2.0
|
googlechrome/lighthouse
|
docs(headless-chrome): switch to prioritize headless (#4553)
| 1
|
docs
|
headless-chrome
|
807,849
|
21.02.2018 18:49:23
| 28,800
|
ede6f5f055bf0821ae635e52ac9d6c590f514812
|
refactor(BootstrapCommand): use more package graph during hoist
Removes src/util/has-matching-dependency.js
Adds non-matching semver local nodes to externalDependencies map
|
[
{
"change_type": "MODIFY",
"diff": "@@ -129,6 +129,9 @@ class PackageGraph extends Map {\n// a local file: specifier OR a matching semver\ncurrentNode.localDependencies.set(depName, resolved);\ndepNode.localDependents.set(currentName, currentNode);\n+ } else {\n+ // non-matching semver of a local dependency\n+ currentNode.externalDependencies.set(depName, resolved);\n}\n});\n});\n",
"new_path": "src/PackageGraph.js",
"old_path": "src/PackageGraph.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -4,7 +4,6 @@ const async = require(\"async\");\nconst dedent = require(\"dedent\");\nconst getPort = require(\"get-port\");\nconst path = require(\"path\");\n-const semver = require(\"semver\");\nconst Command = require(\"../Command\");\nconst FileSystemUtilities = require(\"../FileSystemUtilities\");\n@@ -13,7 +12,6 @@ const npmRunScript = require(\"../utils/npm-run-script\");\nconst batchPackages = require(\"../utils/batch-packages\");\nconst matchPackageName = require(\"../utils/match-package-name\");\nconst hasDependencyInstalled = require(\"../utils/has-dependency-installed\");\n-const hasMatchingDependency = require(\"../utils/has-matching-dependency\");\nconst runParallelBatches = require(\"../utils/run-parallel-batches\");\nconst symlinkBinary = require(\"../utils/symlink-binary\");\nconst symlinkDependencies = require(\"../utils/symlink-dependencies\");\n@@ -290,180 +288,144 @@ class BootstrapCommand extends Command {\n}\n}\n- /**\n- * Determine if a dependency installed at the root satifies the requirements of the passed packages\n- * This helps to optimize the bootstrap process and skip dependencies that are already installed\n- * @param {String} dependency\n- * @param {Array.<String>} packages\n- */\n- dependencySatisfiesPackages(dependency, packages) {\n- const { version } = this.hoistedPackageJson(dependency) || {};\n- return packages.every(pkg => semver.satisfies(version, pkg.allDependencies[dependency]));\n- }\n-\n/**\n* Return a object of root and leaf dependencies to install\n* @returns {Object}\n*/\ngetDependenciesToInstall(tracker) {\n- // find package by name\n- const findPackage = (name, version) => {\n- const node = this.packageGraph.get(name);\n-\n- if (node && semver.satisfies(node.version, version)) {\n- return node.pkg;\n- }\n- };\n-\n// Configuration for what packages to hoist may be in lerna.json or it may\n// come in as command line options.\nconst { hoist, nohoist } = this.options;\n+ const rootPkg = this.repository.package;\nif (hoist) {\ntracker.verbose(\"hoist\", \"enabled for %j\", hoist);\n}\n// This will contain entries for each hoistable dependency.\n- const root = [];\n+ const rootSet = new Set();\n// This will map packages to lists of unhoistable dependencies\n- const leaves = {};\n+ const leaves = new Map();\n/**\n* Map of dependencies to install\n- * {\n- * <name>: {\n- * versions: {\n- * <version>: <# of dependents>\n- * },\n- * dependents: {\n- * <version>: [<dependent1>, <dependent2>, ...]\n- * }\n+ *\n+ * Map {\n+ * \"<externalName>\": Map {\n+ * \"<versionRange>\": Set { \"<dependent1>\", \"<dependent2>\", ... 
}\n* }\n* }\n*\n* Example:\n*\n- * {\n- * react: {\n- * versions: {\n- * \"15.x\": 3,\n- * \"^0.14.0\": 1\n- * },\n- * dependents: {\n- * \"15.x\": [\"my-component1\", \"my-component2\", \"my-component3\"],\n- * \"^0.14.0\": [\"my-component4\"],\n- * }\n+ * Map {\n+ * \"react\": Map {\n+ * \"15.x\": Set { \"my-component1\", \"my-component2\", \"my-component3\" },\n+ * \"^0.14.0\": Set { \"my-component4\" },\n* }\n* }\n*/\n- const depsToInstall = {};\n-\n- Object.keys(this.repository.package.allDependencies).forEach(name => {\n- const version = this.repository.package.allDependencies[name];\n- depsToInstall[name] = {\n- versions: { [version]: 0 },\n- dependents: { [version]: [] },\n- };\n- });\n-\n- // get the map of external dependencies to install\n- this.filteredPackages.forEach(pkg => {\n- // for all package dependencies\n- Object.keys(pkg.allDependencies)\n+ const depsToInstall = new Map();\n+ const filteredNodes = new Map(\n+ this.filteredPackages.map(pkg => [pkg.name, this.packageGraph.get(pkg.name)])\n+ );\n- // map to package or normalized external dependency\n- .map(\n- name => findPackage(name, pkg.allDependencies[name]) || { name, version: pkg.allDependencies[name] }\n- )\n+ // collect root dependency versions\n+ const mergedRootDeps = Object.assign({}, rootPkg.devDependencies, rootPkg.dependencies);\n+ const rootExternalVersions = new Map(\n+ Object.keys(mergedRootDeps).map(externalName => [externalName, mergedRootDeps[externalName]])\n+ );\n- // match external and version mismatched local packages\n- .filter(dep => !findPackage(dep.name, dep.version) || !hasMatchingDependency(pkg, dep))\n+ // seed the root dependencies\n+ rootExternalVersions.forEach((version, externalName) => {\n+ const externalDependents = new Set();\n+ const record = new Map();\n- .forEach(({ name, version }) => {\n- // Get the object for this package, auto-vivifying.\n- const dep =\n- depsToInstall[name] ||\n- (depsToInstall[name] = {\n- versions: {},\n- dependents: {},\n+ record.set(version, externalDependents);\n+ depsToInstall.set(externalName, record);\n});\n- // Add this version if it's the first time we've seen it.\n- if (!dep.versions[version]) {\n- dep.versions[version] = 0;\n- dep.dependents[version] = [];\n- }\n+ // build a map of external dependencies to install\n+ for (const [leafName, leafNode] of filteredNodes) {\n+ for (const [externalName, resolved] of leafNode.externalDependencies) {\n+ // rawSpec is something like \"^1.2.3\"\n+ const version = resolved.rawSpec;\n+ const record =\n+ depsToInstall.get(externalName) || depsToInstall.set(externalName, new Map()).get(externalName);\n+ const externalDependents = record.get(version) || record.set(version, new Set()).get(version);\n- // Record the dependency on this version.\n- dep.versions[version] += 1;\n- dep.dependents[version].push(pkg.name);\n- });\n- });\n+ externalDependents.add(leafName);\n+ }\n+ }\n// determine where each dependency will be installed\n- Object.keys(depsToInstall).forEach(name => {\n- const { versions, dependents } = depsToInstall[name];\n-\n+ for (const [externalName, externalDependents] of depsToInstall) {\nlet rootVersion;\n- if (hoist && isHoistedPackage(name, hoist, nohoist)) {\n- // Get the most common version.\n- const commonVersion = Object.keys(versions).reduce((a, b) => (versions[a] > versions[b] ? 
a : b));\n+ if (hoist && isHoistedPackage(externalName, hoist, nohoist)) {\n+ const commonVersion = Array.from(externalDependents.keys()).reduce(\n+ (a, b) => (externalDependents.get(a).size > externalDependents.get(b).size ? a : b)\n+ );\n// Get the version required by the repo root (if any).\n// If the root doesn't have a dependency on this package then we'll\n// install the most common dependency there.\n- rootVersion = this.repository.package.allDependencies[name] || commonVersion;\n+ rootVersion = rootExternalVersions.get(externalName) || commonVersion;\nif (rootVersion !== commonVersion) {\ntracker.warn(\n\"EHOIST_ROOT_VERSION\",\n- `The repository root depends on ${name}@${rootVersion}, ` +\n- `which differs from the more common ${name}@${commonVersion}.`\n+ `The repository root depends on ${externalName}@${rootVersion}, ` +\n+ `which differs from the more common ${externalName}@${commonVersion}.`\n);\n}\n+ const dependents = Array.from(externalDependents.get(rootVersion)).map(\n+ leafName => this.packageGraph.get(leafName).pkg\n+ );\n+\n+ // remove collection so leaves don't repeat it\n+ externalDependents.delete(rootVersion);\n+\n// Install the best version we can in the repo root.\n// Even if it's already installed there we still need to make sure any\n// binaries are linked to the packages that depend on them.\n- root.push({\n- name,\n- dependents: (dependents[rootVersion] || []).map(dep => this.packageGraph.get(dep).pkg),\n- dependency: `${name}@${rootVersion}`,\n- isSatisfied: hasDependencyInstalled(this.repository.package, name, rootVersion),\n+ rootSet.add({\n+ name: externalName,\n+ dependents,\n+ dependency: `${externalName}@${rootVersion}`,\n+ isSatisfied: hasDependencyInstalled(rootPkg, externalName, rootVersion),\n});\n}\n// Add less common versions to package installs.\n- Object.keys(versions).forEach(version => {\n- // Only install deps that can't be hoisted in the leaves.\n- if (version === rootVersion) {\n- return;\n- }\n-\n- dependents[version].forEach(pkgName => {\n+ for (const [leafVersion, leafDependents] of externalDependents) {\n+ for (const leafName of leafDependents) {\nif (rootVersion) {\ntracker.warn(\n\"EHOIST_PKG_VERSION\",\n- `\"${pkgName}\" package depends on ${name}@${version}, ` +\n- `which differs from the hoisted ${name}@${rootVersion}.`\n+ `\"${leafName}\" package depends on ${externalName}@${leafVersion}, ` +\n+ `which differs from the hoisted ${externalName}@${rootVersion}.`\n);\n}\n+ const leafNode = this.packageGraph.get(leafName);\n+ const leafRecord = leaves.get(leafNode) || leaves.set(leafNode, new Set()).get(leafNode);\n+\n// only install dependency if it's not already installed\n- (leaves[pkgName] || (leaves[pkgName] = [])).push({\n- dependency: `${name}@${version}`,\n- isSatisfied: hasDependencyInstalled(this.packageGraph.get(pkgName).pkg, name),\n- });\n- });\n- });\n+ leafRecord.add({\n+ dependency: `${externalName}@${leafVersion}`,\n+ isSatisfied: hasDependencyInstalled(leafNode.pkg, externalName, leafVersion),\n});\n+ }\n+ }\n+ }\n- tracker.silly(\"root dependencies\", JSON.stringify(root, null, 2));\n+ tracker.silly(\"root dependencies\", JSON.stringify(rootSet, null, 2));\ntracker.silly(\"leaf dependencies\", JSON.stringify(leaves, null, 2));\n- return { root, leaves };\n+ return { rootSet, leaves };\n}\n/**\n@@ -473,14 +435,16 @@ class BootstrapCommand extends Command {\ninstallExternalDependencies(callback) {\nconst tracker = this.logger.newItem(\"install dependencies\");\n- const { leaves, root } = 
this.getDependenciesToInstall(tracker);\n+ const { leaves, rootSet } = this.getDependenciesToInstall(tracker);\n+ const rootPkg = this.repository.package;\nconst actions = [];\n// Start root install first, if any, since it's likely to take the longest.\n- if (Object.keys(root).length) {\n+ if (rootSet.size) {\n// If we have anything to install in the root then we'll install\n// _everything_ that needs to go there. This is important for\n// consistent behavior across npm clients.\n+ const root = Array.from(rootSet);\nconst depsToInstallInRoot = root.some(({ isSatisfied }) => !isSatisfied)\n? root.map(({ dependency }) => dependency)\n: [];\n@@ -490,11 +454,7 @@ class BootstrapCommand extends Command {\ntracker.info(\"hoist\", \"Installing hoisted dependencies into root\");\n}\n- npmInstall.dependencies(\n- this.repository.rootPath,\n- depsToInstallInRoot,\n- this.npmConfig,\n- installError => {\n+ npmInstall.dependencies(rootPkg.location, depsToInstallInRoot, this.npmConfig, installError => {\nif (installError) {\nreturn actionDone(installError);\n}\n@@ -504,6 +464,7 @@ class BootstrapCommand extends Command {\nasync.series(\nroot.map(({ name, dependents }) => itemDone => {\nconst { bin } = this.hoistedPackageJson(name) || {};\n+\nif (bin) {\nasync.series(\ndependents.map(pkg => linkDone => {\n@@ -523,17 +484,16 @@ class BootstrapCommand extends Command {\nactionDone(err);\n}\n);\n- }\n- );\n+ });\n});\n// Remove any hoisted dependencies that may have previously been\n// installed in package directories.\nactions.push(actionDone => {\n// Compute the list of candidate directories synchronously\n- const candidates = root.filter(pkg => pkg.dependents.length).reduce((list, { name, dependents }) => {\n+ const candidates = root.filter(dep => dep.dependents.length).reduce((list, { name, dependents }) => {\nconst dirs = dependents\n- .filter(pkg => pkg.nodeModulesLocation !== this.repository.nodeModulesLocation)\n+ .filter(pkg => pkg.nodeModulesLocation !== rootPkg.nodeModulesLocation)\n.map(pkg => path.join(pkg.nodeModulesLocation, name));\nreturn list.concat(dirs);\n@@ -572,19 +532,19 @@ class BootstrapCommand extends Command {\n});\n// Install anything that needs to go into the leaves.\n- Object.keys(leaves)\n- .map(pkgName => ({ pkg: this.packageGraph.get(pkgName).pkg, deps: leaves[pkgName] }))\n- .forEach(({ pkg, deps }) => {\n+ leaves.forEach((leafRecord, leafNode) => {\n+ const deps = Array.from(leafRecord);\n+\n// If we have any unsatisfied deps then we need to install everything.\n// This is important for consistent behavior across npm clients.\nif (deps.some(({ isSatisfied }) => !isSatisfied)) {\nactions.push(cb => {\nnpmInstall.dependencies(\n- pkg.location,\n+ leafNode.location,\ndeps.map(({ dependency }) => dependency),\nleafNpmConfig,\nerr => {\n- tracker.verbose(\"installed leaf\", pkg.name);\n+ tracker.verbose(\"installed leaf\", leafNode.name);\ntracker.completeWork(1);\ncb(err);\n}\n",
"new_path": "src/commands/BootstrapCommand.js",
"old_path": "src/commands/BootstrapCommand.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -9,19 +9,17 @@ module.exports = hasDependencyInstalled;\n/**\n* Determine if a dependency has already been installed for this package\n- * @param {Package} inPkg The Package instance to check for installed dependency\n+ * @param {Package} pkg The Package instance to check for installed dependency\n* @param {String} depName Name of the dependency\n- * @param {String} [version] Optional version to test with, defaults to existing spec\n+ * @param {String} needVersion version to test with\n* @returns {Boolean}\n*/\n-function hasDependencyInstalled(inPkg, depName, version) {\n- log.silly(\"hasDependencyInstalled\", inPkg.name, depName);\n-\n- const needVersion = version || inPkg.allDependencies[depName];\n+function hasDependencyInstalled(pkg, depName, needVersion) {\n+ log.silly(\"hasDependencyInstalled\", pkg.name, depName);\nlet retVal;\ntry {\n- const manifestLocation = path.join(inPkg.nodeModulesLocation, depName, \"package.json\");\n+ const manifestLocation = path.join(pkg.nodeModulesLocation, depName, \"package.json\");\nconst dependency = loadJsonFile.sync(manifestLocation);\nretVal = semver.satisfies(dependency.version, needVersion);\n",
"new_path": "src/utils/has-dependency-installed.js",
"old_path": "src/utils/has-dependency-installed.js"
},
{
"change_type": "DELETE",
"diff": "-\"use strict\";\n-\n-const dedent = require(\"dedent\");\n-const log = require(\"npmlog\");\n-const semver = require(\"semver\");\n-\n-module.exports = hasMatchingDependency;\n-\n-/**\n- * Determine if a dependency version satisfies the requirements of this package\n- * @param {Package} inPkg The Package instance to check for match\n- * @param {Package} dependency\n- * @returns {Boolean}\n- */\n-function hasMatchingDependency(inPkg, dependency) {\n- log.silly(\"hasMatchingDependency\", inPkg.name, dependency.name);\n-\n- const expectedVersion = inPkg.allDependencies[dependency.name];\n- const actualVersion = dependency.version;\n-\n- if (!expectedVersion) {\n- return false;\n- }\n-\n- // check if semantic versions are compatible\n- if (semver.satisfies(actualVersion, expectedVersion)) {\n- return true;\n- }\n-\n- log.warn(\n- inPkg.name,\n- dedent`\n- depends on \"${dependency.name}@${expectedVersion}\"\n- instead of \"${dependency.name}@${actualVersion}\"\n- `\n- );\n-\n- return false;\n-}\n",
"new_path": null,
"old_path": "src/utils/has-matching-dependency.js"
}
] |
JavaScript
|
MIT License
|
lerna/lerna
|
refactor(BootstrapCommand): use more package graph during hoist
- Removes src/util/has-matching-dependency.js
- Adds non-matching semver local nodes to externalDependencies map
| 1
|
refactor
|
BootstrapCommand
|
807,849
|
21.02.2018 18:54:01
| 28,800
|
d9acaee95540240296efc62024fd6d7ddd1888ad
|
refactor(PackageGraph): simplify constructor signature
|
[
{
"change_type": "MODIFY",
"diff": "@@ -270,7 +270,7 @@ class Command {\ntry {\nthis.packages = collectPackages({ rootPath, packageConfigs });\n- this.packageGraph = new PackageGraph(this.packages, { graphType: \"allDependencies\" });\n+ this.packageGraph = new PackageGraph(this.packages);\nthis.filteredPackages = filterPackages(this.packages, { scope, ignore });\n// The UpdatedPackagesCollector requires that filteredPackages be present prior to checking for\n",
"new_path": "src/Command.js",
"old_path": "src/Command.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -7,10 +7,9 @@ const semver = require(\"semver\");\n* Represents a node in a PackageGraph.\n* @constructor\n* @param {!<Package>} pkg - A Package object to build the node from.\n- * @param {!<String>} graphType - \"allDependencies\" or \"dependencies\"\n*/\nclass PackageGraphNode {\n- constructor(pkg, graphType) {\n+ constructor(pkg) {\nObject.defineProperties(this, {\n// immutable properties\nname: {\n@@ -26,11 +25,6 @@ class PackageGraphNode {\nreturn pkg.version;\n},\n},\n- graphDependencies: {\n- get() {\n- return pkg[graphType] || {};\n- },\n- },\npkg: {\nget() {\nreturn pkg;\n@@ -98,15 +92,14 @@ class PackageGraphNode {\n* A PackageGraph.\n* @constructor\n* @param {!Array.<Package>} packages An array of Packages to build the graph out of.\n- * @param {!Object} config\n- * @param {!String} config.graphType (\"allDependencies\" or \"dependencies\")\n+ * @param {String} graphType (\"allDependencies\" or \"dependencies\")\n* Pass \"dependencies\" to create a graph of only dependencies,\n* excluding the devDependencies that would normally be included.\n- * @param {Boolean} config.forceLocal Force all local dependencies to be linked.\n+ * @param {Boolean} forceLocal Force all local dependencies to be linked.\n*/\nclass PackageGraph extends Map {\n- constructor(packages, { graphType, forceLocal }) {\n- super(packages.map(pkg => [pkg.name, new PackageGraphNode(pkg, graphType)]));\n+ constructor(packages, graphType = \"allDependencies\", forceLocal) {\n+ super(packages.map(pkg => [pkg.name, new PackageGraphNode(pkg)]));\nconst satisfies = forceLocal\n? () => true\n@@ -114,7 +107,10 @@ class PackageGraph extends Map {\nsemver.satisfies(version, resolved.gitCommittish || resolved.gitRange || resolved.fetchSpec);\nthis.forEach((currentNode, currentName) => {\n- const { graphDependencies } = currentNode;\n+ const graphDependencies =\n+ graphType === \"dependencies\"\n+ ? 
Object.assign({}, currentNode.pkg.dependencies)\n+ : Object.assign({}, currentNode.pkg.devDependencies, currentNode.pkg.dependencies);\nObject.keys(graphDependencies).forEach(depName => {\nconst depNode = this.get(depName);\n@@ -178,21 +174,12 @@ class PackageGraph extends Map {\nconst cycleNodes = new Set();\nthis.forEach((currentNode, currentName) => {\n- // console.error(\"START %s\\n%O\", currentName, currentNode);\nconst seen = new Set();\n- if (currentNode.localDependencies.has(currentName)) {\n- // utterly ridiculous self -> self\n- seen.add(currentNode);\n- cyclePaths.add([currentName, currentName]);\n- }\n-\nconst visits = walk => (dependentNode, dependentName, siblingDependents) => {\nconst step = walk.concat(dependentName);\n- // console.warn(\"VISITS %O\", step);\nif (seen.has(dependentNode)) {\n- // console.info(\"SEEN:: %O\", [currentName, dependentName]);\nreturn;\n}\n@@ -202,7 +189,6 @@ class PackageGraph extends Map {\n// a direct cycle\ncycleNodes.add(currentNode);\ncyclePaths.add(step);\n- // console.error(\"DIRECT\", step);\nreturn;\n}\n@@ -217,19 +203,13 @@ class PackageGraph extends Map {\n.reverse()\n.concat(cycleDependentName);\n- // console.error(\"TRANSITIVE\", pathToCycle);\ncycleNodes.add(dependentNode);\ncyclePaths.add(pathToCycle);\n}\ndependentNode.localDependents.forEach(visits(step));\n- // console.warn(\"EXITED %O\", step);\n};\n- // currentNode.localDependents.forEach((topLevelNode, topLevelName, sibs) => {\n- // console.log(\"TOPLVL %O\\n%O\", [currentName, topLevelName], topLevelNode);\n- // visits([currentName])(topLevelNode, topLevelName, sibs);\n- // });\ncurrentNode.localDependents.forEach(visits([currentName]));\n});\n",
"new_path": "src/PackageGraph.js",
"old_path": "src/PackageGraph.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -121,9 +121,7 @@ class BootstrapCommand extends Command {\ntry {\nthis.batchedPackages = this.toposort\n- ? batchPackages(this.filteredPackages, {\n- rejectCycles,\n- })\n+ ? batchPackages(this.filteredPackages, rejectCycles)\n: [this.filteredPackages];\n} catch (e) {\nreturn callback(e);\n",
"new_path": "src/commands/BootstrapCommand.js",
"old_path": "src/commands/BootstrapCommand.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -79,9 +79,7 @@ class ExecCommand extends Command {\ntry {\nthis.batchedPackages = this.toposort\n- ? batchPackages(filteredPackages, {\n- rejectCycles: this.options.rejectCycles,\n- })\n+ ? batchPackages(filteredPackages, this.options.rejectCycles)\n: [filteredPackages];\n} catch (e) {\nreturn callback(e);\n",
"new_path": "src/commands/ExecCommand.js",
"old_path": "src/commands/ExecCommand.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -41,7 +41,7 @@ class LinkCommand extends Command {\nlet graph = this.packageGraph;\nif (this.options.forceLocal) {\n- graph = new PackageGraph(this.packages, { graphType: \"allDependencies\", forceLocal: true });\n+ graph = new PackageGraph(this.packages, \"allDependencies\", \"forceLocal\");\n}\nsymlinkDependencies(this.packages, graph, this.logger, callback);\n",
"new_path": "src/commands/LinkCommand.js",
"old_path": "src/commands/LinkCommand.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -223,12 +223,13 @@ class PublishCommand extends Command {\nthis.packagesToPublish = this.updates.map(({ package: pkg }) => pkg).filter(pkg => !pkg.private);\nthis.batchedPackagesToPublish = this.toposort\n- ? batchPackages(this.packagesToPublish, {\n+ ? batchPackages(\n+ this.packagesToPublish,\n+ this.options.rejectCycles,\n// Don't sort based on devDependencies because that would increase the chance of dependency cycles\n// causing less-than-ideal a publishing order.\n- graphType: \"dependencies\",\n- rejectCycles: this.options.rejectCycles,\n- })\n+ \"dependencies\"\n+ )\n: [this.packagesToPublish];\nconst tasks = [\n",
"new_path": "src/commands/PublishCommand.js",
"old_path": "src/commands/PublishCommand.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -89,9 +89,7 @@ class RunCommand extends Command {\ntry {\nthis.batchedPackages = this.toposort\n- ? batchPackages(this.packagesWithScript, {\n- rejectCycles: this.options.rejectCycles,\n- })\n+ ? batchPackages(this.packagesWithScript, this.options.rejectCycles)\n: [this.packagesWithScript];\n} catch (e) {\nreturn callback(e);\n",
"new_path": "src/commands/RunCommand.js",
"old_path": "src/commands/RunCommand.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -7,9 +7,9 @@ const ValidationError = require(\"./validation-error\");\nmodule.exports = batchPackages;\n-function batchPackages(packagesToBatch, { graphType = \"allDependencies\", rejectCycles }) {\n+function batchPackages(packagesToBatch, rejectCycles, graphType) {\n// create a new graph because we will be mutating it\n- const graph = new PackageGraph(packagesToBatch, { graphType });\n+ const graph = new PackageGraph(packagesToBatch, graphType);\nconst [cyclePaths, cycleNodes] = graph.partitionCycles();\nconst batches = [];\n",
"new_path": "src/utils/batch-packages.js",
"old_path": "src/utils/batch-packages.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -29,7 +29,7 @@ describe(\"PackageGraph\", () => {\n\"/path/to/package-2\"\n),\n];\n- const graph = new PackageGraph(packages, { graphType: \"allDependencies\" });\n+ const graph = new PackageGraph(packages, \"allDependencies\");\nexpect(graph.get(\"my-package-1\").localDependencies.size).toBe(0);\nexpect(graph.get(\"my-package-2\").localDependencies.has(\"my-package-1\")).toBe(true);\n@@ -58,7 +58,7 @@ describe(\"PackageGraph\", () => {\n\"/path/to/package-2\"\n),\n];\n- const graph = new PackageGraph(packages, { graphType: \"dependencies\" });\n+ const graph = new PackageGraph(packages, \"dependencies\");\nexpect(graph.get(\"my-package-1\").localDependencies.size).toBe(0);\nexpect(graph.get(\"my-package-2\").localDependencies.size).toBe(0);\n@@ -87,7 +87,7 @@ describe(\"PackageGraph\", () => {\n\"/path/to/package-2\"\n),\n];\n- const graph = new PackageGraph(packages, { graphType: \"allDependencies\" });\n+ const graph = new PackageGraph(packages);\nexpect(graph.get(\"my-package-2\").localDependencies.has(\"my-package-1\")).toBe(true);\n});\n",
"new_path": "test/PackageGraph.js",
"old_path": "test/PackageGraph.js"
}
] |
JavaScript
|
MIT License
|
lerna/lerna
|
refactor(PackageGraph): simplify constructor signature
| 1
|
refactor
|
PackageGraph
|
807,849
|
21.02.2018 18:56:19
| 28,800
|
228979cb04c7ecb4a18316ed96c9d1c9aca0844d
|
refactor(PackageGraph): move satisfies() method into node
|
[
{
"change_type": "MODIFY",
"diff": "@@ -86,6 +86,17 @@ class PackageGraphNode {\nthrow new Error(`unknown property \"${degreeType}\"`);\n}\n}\n+\n+ /**\n+ * Determine if the Node satisfies a resolved semver range.\n+ * @see https://github.com/npm/npm-package-arg#result-object\n+ *\n+ * @param {!Result} resolved npm-package-arg Result object\n+ * @returns {Boolean}\n+ */\n+ satisfies({ gitCommittish, gitRange, fetchSpec }) {\n+ return semver.satisfies(this.version, gitCommittish || gitRange || fetchSpec);\n+ }\n}\n/**\n@@ -101,11 +112,6 @@ class PackageGraph extends Map {\nconstructor(packages, graphType = \"allDependencies\", forceLocal) {\nsuper(packages.map(pkg => [pkg.name, new PackageGraphNode(pkg)]));\n- const satisfies = forceLocal\n- ? () => true\n- : (version, resolved) =>\n- semver.satisfies(version, resolved.gitCommittish || resolved.gitRange || resolved.fetchSpec);\n-\nthis.forEach((currentNode, currentName) => {\nconst graphDependencies =\ngraphType === \"dependencies\"\n@@ -121,7 +127,7 @@ class PackageGraph extends Map {\nreturn currentNode.externalDependencies.set(depName, resolved);\n}\n- if (resolved.fetchSpec === depNode.location || satisfies(depNode.version, resolved)) {\n+ if (forceLocal || resolved.fetchSpec === depNode.location || depNode.satisfies(resolved)) {\n// a local file: specifier OR a matching semver\ncurrentNode.localDependencies.set(depName, resolved);\ndepNode.localDependents.set(currentName, currentNode);\n",
"new_path": "src/PackageGraph.js",
"old_path": "src/PackageGraph.js"
}
] |
JavaScript
|
MIT License
|
lerna/lerna
|
refactor(PackageGraph): move satisfies() method into node
| 1
|
refactor
|
PackageGraph
|
807,849
|
21.02.2018 18:57:49
| 28,800
|
f063950336e4c5a069208326bfa470780e3a3973
|
refactor(Package): remove unused allDependencies getter
|
[
{
"change_type": "MODIFY",
"diff": "@@ -69,11 +69,6 @@ class Package {\nreturn pkg.peerDependencies;\n},\n},\n- allDependencies: {\n- get() {\n- return Object.assign({}, pkg.devDependencies, pkg.dependencies);\n- },\n- },\n// immutable\nbin: {\nvalue:\n",
"new_path": "src/Package.js",
"old_path": "src/Package.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -105,20 +105,6 @@ describe(\"Package\", () => {\n});\n});\n- describe(\"get .allDependencies\", () => {\n- it(\"should return the combined dependencies\", () => {\n- const pkg = factory({\n- dependencies: { \"my-dependency\": \"^1.0.0\" },\n- devDependencies: { \"my-dev-dependency\": \"^1.0.0\" },\n- peerDependencies: { \"my-peer-dependency\": \">=1.0.0\" },\n- });\n- expect(pkg.allDependencies).toEqual({\n- \"my-dependency\": \"^1.0.0\",\n- \"my-dev-dependency\": \"^1.0.0\",\n- });\n- });\n- });\n-\ndescribe(\"get .scripts\", () => {\nit(\"should return the scripts\", () => {\nconst pkg = factory({\n",
"new_path": "test/Package.js",
"old_path": "test/Package.js"
}
] |
JavaScript
|
MIT License
|
lerna/lerna
|
refactor(Package): remove unused allDependencies getter
| 1
|
refactor
|
Package
|
807,849
|
21.02.2018 18:58:10
| 28,800
|
8122745d2ebb70f93420641bb61e401cf9a69dee
|
refactor(Repository): remove unused nodeModulesLocation getter
|
[
{
"change_type": "MODIFY",
"diff": "@@ -49,10 +49,6 @@ class Repository {\nreturn this.lernaJson.version;\n}\n- get nodeModulesLocation() {\n- return path.join(this.rootPath, \"node_modules\");\n- }\n-\nget packageConfigs() {\nif (this.lernaJson.useWorkspaces) {\nif (!this.packageJson.workspaces) {\n",
"new_path": "src/Repository.js",
"old_path": "src/Repository.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -105,13 +105,6 @@ describe(\"Repository\", () => {\n});\n});\n- describe(\"get .nodeModulesLocation\", () => {\n- it(\"returns the root node_modules location\", () => {\n- const repo = new Repository(testDir);\n- expect(repo.nodeModulesLocation).toBe(path.join(testDir, \"node_modules\"));\n- });\n- });\n-\ndescribe(\"get .packageConfigs\", () => {\nit(\"returns the default packageConfigs\", () => {\nconst repo = new Repository(testDir);\n",
"new_path": "test/Repository.js",
"old_path": "test/Repository.js"
}
] |
JavaScript
|
MIT License
|
lerna/lerna
|
refactor(Repository): remove unused nodeModulesLocation getter
| 1
|
refactor
|
Repository
|
448,043
|
21.02.2018 20:22:40
| -46,800
|
9baa0bc8ce95d49fa6cbb2066d254bf1e521976c
|
fix: pass 'setParentNodes' when calling 'createCompilerHost'
|
[
{
"change_type": "MODIFY",
"diff": "@@ -13,7 +13,8 @@ export function createCompilerHostForSynthesizedSourceFiles(\nsourceFiles: ts.SourceFile[],\ncompilerOptions: ts.CompilerOptions\n): ts.CompilerHost {\n- const wrapped = ts.createCompilerHost(compilerOptions);\n+ // FIX(#625): pass `setParentNodes` to the \"synthesized\" compiler host\n+ const wrapped = ts.createCompilerHost(compilerOptions, /* setParentNodes */ true);\nreturn {\n...wrapped,\n",
"new_path": "src/lib/ts/synthesized-compiler-host.ts",
"old_path": "src/lib/ts/synthesized-compiler-host.ts"
}
] |
TypeScript
|
MIT License
|
ng-packagr/ng-packagr
|
fix: pass 'setParentNodes' when calling 'createCompilerHost' (#625)
| 1
|
fix
| null |
815,745
|
21.02.2018 21:47:55
| -7,200
|
8a23b710b8075b73ef838799aafddfb26a602cb9
|
fix: mark first item when selected is not among options
fixes
|
[
{
"change_type": "MODIFY",
"diff": "@@ -123,8 +123,9 @@ export class ItemsList {\nreturn;\n}\n- if (this._lastSelectedItem) {\n- this._markedIndex = this._filteredItems.indexOf(this._lastSelectedItem);\n+ const indexOfLastSelected = this._filteredItems.indexOf(this._lastSelectedItem);\n+ if (this._lastSelectedItem && indexOfLastSelected > -1) {\n+ this._markedIndex = indexOfLastSelected;\n} else {\nthis._markedIndex = markDefault ? 0 : -1;\n}\n@@ -184,8 +185,8 @@ export class ItemsList {\nprivate _getDefaultFilterFunc(term: string) {\nreturn (option: NgOption) => {\nreturn searchHelper.stripSpecialChars(option.label ? option.label.toString() : '')\n- .toUpperCase()\n- .indexOf(searchHelper.stripSpecialChars(term).toUpperCase()) > -1;\n+ .toLowerCase()\n+ .indexOf(searchHelper.stripSpecialChars(term).toLowerCase()) > -1;\n};\n}\n",
"new_path": "src/ng-select/items-list.ts",
"old_path": "src/ng-select/items-list.ts"
},
{
"change_type": "MODIFY",
"diff": "@@ -24,8 +24,8 @@ export class NgOptionHighlightDirective implements OnChanges {\n}\nlet indexOfTerm: number;\nindexOfTerm = searchHelper.stripSpecialChars(label)\n- .toUpperCase()\n- .indexOf(searchHelper.stripSpecialChars(this.term).toUpperCase());\n+ .toLowerCase()\n+ .indexOf(searchHelper.stripSpecialChars(this.term).toLowerCase());\nif (indexOfTerm > -1) {\nthis._setInnerHtml(\nlabel.substring(0, indexOfTerm)\n",
"new_path": "src/ng-select/ng-option-highlight.directive.ts",
"old_path": "src/ng-select/ng-option-highlight.directive.ts"
},
{
"change_type": "MODIFY",
"diff": "@@ -1065,9 +1065,8 @@ describe('NgSelectComponent', function () {\n[(ngModel)]=\"selectedCity\">\n</ng-select>`);\n- tick(200);\nfixture.componentInstance.select.onFilter('vilnius');\n- tick(200);\n+ tick();\nconst result = [jasmine.objectContaining({\nvalue: { id: 1, name: 'Vilnius' }\n@@ -1118,9 +1117,29 @@ describe('NgSelectComponent', function () {\n[(ngModel)]=\"selectedCity\">\n</ng-select>`);\n- tick(200);\nfixture.componentInstance.select.onFilter('pab');\n- tick(200);\n+ tick();\n+\n+ const result = jasmine.objectContaining({\n+ value: fixture.componentInstance.cities[2]\n+ });\n+ expect(fixture.componentInstance.select.itemsList.markedItem).toEqual(result)\n+ triggerKeyDownEvent(getNgSelectElement(fixture), KeyCode.Enter);\n+ expect(fixture.componentInstance.select.selectedItems).toEqual([result]);\n+ }));\n+\n+ it('should mark first item on filter when selected is not among filtered items', fakeAsync(() => {\n+ fixture = createTestingModule(\n+ NgSelectFilterTestCmp,\n+ `<ng-select [items]=\"cities\"\n+ bindLabel=\"name\"\n+ [(ngModel)]=\"selectedCity\">\n+ </ng-select>`);\n+\n+ fixture.componentInstance.selectedCity = fixture.componentInstance.cities[0];\n+ fixture.detectChanges();\n+ fixture.componentInstance.select.onFilter('pab');\n+ tick();\nconst result = jasmine.objectContaining({\nvalue: fixture.componentInstance.cities[2]\n@@ -1139,9 +1158,8 @@ describe('NgSelectComponent', function () {\n[(ngModel)]=\"selectedCity\">\n</ng-select>`);\n- tick(200);\nfixture.componentInstance.select.onFilter('pab');\n- tick(200);\n+ tick();\nexpect(fixture.componentInstance.select.itemsList.markedItem).toEqual(undefined)\n}));\n",
"new_path": "src/ng-select/ng-select.component.spec.ts",
"old_path": "src/ng-select/ng-select.component.spec.ts"
}
] |
TypeScript
|
MIT License
|
ng-select/ng-select
|
fix: mark first item when selected is not among options (#281)
fixes #278
| 1
|
fix
| null |
815,745
|
22.02.2018 18:47:48
| -7,200
|
a5e2d1674f9072a0587f14ecde759fc37573ba61
|
feat(ng-option): support for disabled attribute
closes
|
[
{
"change_type": "MODIFY",
"diff": "@@ -65,12 +65,14 @@ import { Observable } from 'rxjs/Observable';\n<p>\nIf you have simple use case, you can omit items array and bind options directly in html using <b>ng-option</b> component.\n</p>\n+ <button type=\"button\" class=\"btn btn-secondary btn-sm\" (click)=\"disable = !disable\">Toggle disabled</button>\n+ <hr/>\n---html,true\n<ng-select [searchable]=\"false\" [(ngModel)]=\"staticValue\">\n- <ng-option [value]=\"'Volvo'\">Volvo</ng-option>\n- <ng-option [value]=\"'Saab'\">Saab</ng-option>\n- <ng-option [value]=\"'Opel'\">Opel</ng-option>\n- <ng-option [value]=\"'Audi'\">Audi</ng-option>\n+ <ng-option value=\"Volvo\">Volvo</ng-option>\n+ <ng-option [disabled]=\"disable\" value=\"Saab\">Saab</ng-option>\n+ <ng-option value=\"Opel\">Opel</ng-option>\n+ <ng-option value=\"Audi\">Audi</ng-option>\n</ng-select>\n---\n<br />Selected: {{staticValue | json}}\n@@ -84,6 +86,7 @@ export class DataSourceComponent {\nselectedSimpleItem = 'Two';\nsimpleItems = [];\n+ disable = true;\nconstructor(private dataService: DataService) { }\n",
"new_path": "demo/app/examples/data-source.component.ts",
"old_path": "demo/app/examples/data-source.component.ts"
},
{
"change_type": "MODIFY",
"diff": "-import {Directive, ElementRef, Input, OnChanges, Renderer2} from '@angular/core';\nimport * as searchHelper from './search-helper';\n+import {\n+ Directive,\n+ ElementRef,\n+ Input,\n+ OnChanges,\n+ Renderer2\n+} from '@angular/core';\n@Directive({\nselector: '[ngOptionHighlight]'\n@@ -9,21 +15,20 @@ export class NgOptionHighlightDirective implements OnChanges {\n@Input('ngOptionHighlight') term: string;\n@Input('innerHTML') label: any;\n- constructor(private elementRef: ElementRef, private renderer: Renderer2) {\n- }\n+ constructor(private elementRef: ElementRef, private renderer: Renderer2) { }\nngOnChanges(): void {\nthis._highlightLabelWithSearchTerm();\n}\nprivate _highlightLabelWithSearchTerm(): void {\n- let label: string = this.label ? this.label.toString() : '';\n+ const label: string = this.label ? this.label.toString() : '';\nif (!label || !this.term) {\nthis._setInnerHtml(label);\nreturn;\n}\n- let indexOfTerm: number;\n- indexOfTerm = searchHelper.stripSpecialChars(label)\n+\n+ const indexOfTerm = searchHelper.stripSpecialChars(label)\n.toLowerCase()\n.indexOf(searchHelper.stripSpecialChars(this.term).toLowerCase());\nif (indexOfTerm > -1) {\n",
"new_path": "src/ng-select/ng-option-highlight.directive.ts",
"old_path": "src/ng-select/ng-option-highlight.directive.ts"
},
{
"change_type": "MODIFY",
"diff": "-import { Component, Input, ElementRef } from '@angular/core';\n+import {\n+ ChangeDetectionStrategy,\n+ Component,\n+ ElementRef,\n+ Input,\n+ OnChanges,\n+ SimpleChanges\n+} from '@angular/core';\n+import { Subject } from 'rxjs/Subject';\n@Component({\nselector: 'ng-option',\n+ changeDetection: ChangeDetectionStrategy.OnPush,\ntemplate: `<ng-content></ng-content>`\n})\n-export class NgOptionComponent {\n+export class NgOptionComponent implements OnChanges {\n+\n@Input() value: any;\n+ @Input()\n+ get disabled() { return this._disabled; }\n+ set disabled(value: any) { this._disabled = this._isDisabled(value) }\n+\n+ readonly stateChange$ = new Subject<{ value: any, disabled: boolean }>();\n+ private _disabled = false;\n+\n+ constructor(public elementRef: ElementRef) { }\n+\n+ ngOnChanges(changes: SimpleChanges) {\n+ if (changes.disabled) {\n+ this.stateChange$.next({\n+ value: this.value,\n+ disabled: this._disabled\n+ });\n+ }\n+ }\n- constructor(public elementRef: ElementRef) {\n+ private _isDisabled(value) {\n+ return value != null && `${value}` !== 'false';\n}\n}\n",
"new_path": "src/ng-select/ng-option.component.ts",
"old_path": "src/ng-select/ng-option.component.ts"
},
{
"change_type": "MODIFY",
"diff": "@@ -867,12 +867,28 @@ describe('NgSelectComponent', function () {\nconst items = fixture.componentInstance.select.itemsList.items;\nexpect(items.length).toBe(2);\nexpect(items[0]).toEqual(jasmine.objectContaining({\n- value: { label: 'Yes', value: true }\n+ value: { label: 'Yes', value: true, disabled: false }\n}));\nexpect(items[1]).toEqual(jasmine.objectContaining({\n- value: { label: 'No', value: false }\n+ value: { label: 'No', value: false, disabled: false }\n}));\n}));\n+\n+ it('should update ng-option state', fakeAsync(() => {\n+ const fixture = createTestingModule(\n+ NgSelectBasicTestCmp,\n+ `<ng-select [(ngModel)]=\"selectedCity\">\n+ <ng-option [disabled]=\"disabled\" [value]=\"true\">Yes</ng-option>\n+ <ng-option [value]=\"false\">No</ng-option>\n+ </ng-select>`);\n+\n+ tickAndDetectChanges(fixture);\n+ const items = fixture.componentInstance.select.itemsList.items;\n+ expect(items[0].disabled).toBeFalsy();\n+ fixture.componentInstance.disabled = true;\n+ tickAndDetectChanges(fixture);\n+ expect(items[0].disabled).toBeTruthy();\n+ }));\n});\ndescribe('Multiple', () => {\n@@ -1542,6 +1558,7 @@ class NgSelectBasicTestCmp {\n@ViewChild(NgSelectComponent) select: NgSelectComponent;\nselectedCity: { id: number; name: string };\nmultiple = false;\n+ disabled = false;\ndropdownPosition = 'bottom';\ncitiesLoading = false;\ncities = [\n",
"new_path": "src/ng-select/ng-select.component.spec.ts",
"old_path": "src/ng-select/ng-select.component.spec.ts"
},
{
"change_type": "MODIFY",
"diff": "@@ -34,6 +34,8 @@ import { VirtualScrollComponent } from './virtual-scroll.component';\nimport { NgOption, KeyCode, NgSelectConfig } from './ng-select.types';\nimport { ItemsList } from './items-list';\nimport { Subject } from 'rxjs/Subject';\n+import { merge } from 'rxjs/observable/merge';\n+import { takeUntil, startWith } from 'rxjs/operators';\nimport { NgOptionComponent } from './ng-option.component';\nexport const NG_SELECT_DEFAULT_CONFIG = new InjectionToken<NgSelectConfig>('ng-select-default-options');\n@@ -122,6 +124,7 @@ export class NgSelectComponent implements OnInit, OnDestroy, OnChanges, AfterVie\nprivate _defaultValue = 'value';\nprivate _typeaheadLoading = false;\n+ private readonly _destroy$ = new Subject<void>();\nprivate _onChange = (_: NgOption) => { };\nprivate _onTouched = () => { };\nprivate _disposeDocumentClickListener = () => { };\n@@ -179,12 +182,13 @@ export class NgSelectComponent implements OnInit, OnDestroy, OnChanges, AfterVie\n}\nngOnDestroy() {\n- this.changeDetectorRef.detach();\nthis._disposeDocumentClickListener();\nthis._disposeDocumentResizeListener();\nif (this.appendTo) {\nthis.elementRef.nativeElement.appendChild(this.dropdownPanel.nativeElement);\n}\n+ this._destroy$.next();\n+ this._destroy$.complete();\n}\n@HostListener('keydown', ['$event'])\n@@ -437,22 +441,38 @@ export class NgSelectComponent implements OnInit, OnDestroy, OnChanges, AfterVie\nprivate _setItemsFromNgOptions() {\nthis.bindLabel = this.bindLabel || this._defaultLabel;\nthis.bindValue = this.bindValue || this._defaultValue;\n+\nconst handleNgOptions = (options: QueryList<NgOptionComponent>) => {\nthis.items = options.map(option => ({\nvalue: option.value,\n- label: option.elementRef.nativeElement.innerHTML\n+ label: option.elementRef.nativeElement.innerHTML,\n+ disabled: option.disabled\n}));\nthis.itemsList.setItems(this.items, false);\n-\nif (this._isDefined(this._ngModel)) {\nthis.itemsList.clearSelected();\nthis._selectWriteValue(this._ngModel);\n}\nthis.detectChanges();\n- };\n+ }\n- this.ngOptions.changes.subscribe(options => handleNgOptions(options));\n- handleNgOptions(this.ngOptions);\n+ const handleOptionChange = () => {\n+ const changedOrDestroyed = merge(this.ngOptions.changes, this._destroy$);\n+ merge(...this.ngOptions.map(option => option.stateChange$))\n+ .pipe(takeUntil(changedOrDestroyed))\n+ .subscribe(option => {\n+ const item = this.itemsList.findItem(option.value);\n+ item.disabled = option.disabled;\n+ this.changeDetectorRef.markForCheck();\n+ });\n+ }\n+\n+ this.ngOptions.changes\n+ .pipe(startWith(this.ngOptions), takeUntil(this._destroy$))\n+ .subscribe(options => {\n+ handleNgOptions(options);\n+ handleOptionChange();\n+ });\n}\nprivate _handleDocumentClick() {\n",
"new_path": "src/ng-select/ng-select.component.ts",
"old_path": "src/ng-select/ng-select.component.ts"
}
] |
TypeScript
|
MIT License
|
ng-select/ng-select
|
feat(ng-option): support for disabled attribute (#280)
closes #236
| 1
|
feat
|
ng-option
|
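The disabled-attribute support in the record above hinges on the `_isDisabled` coercion added to NgOptionComponent, which treats a bare attribute (empty string) and any value other than the string "false" as disabled. A standalone JavaScript sketch of that same check; the free-standing function form and example values are ours, in the component it is a private method:

// Mirrors the coercion in ng-option.component.ts: null/undefined and the
// string "false" are the only inputs that leave the option enabled.
function isDisabled(value) {
  return value != null && `${value}` !== "false";
}

isDisabled("");        // true  -> bare `disabled` attribute in the template
isDisabled(true);      // true  -> [disabled]="true"
isDisabled("false");   // false
isDisabled(null);      // false -> binding not set
isDisabled(undefined); // false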
573,195
|
23.02.2018 10:58:20
| 28,800
|
0919f26db5d5614c0b2fa2567ac2ed43ee70b6d5
|
fix: send numbers or bools as payloads
|
[
{
"change_type": "MODIFY",
"diff": "@@ -295,9 +295,23 @@ function patch(Response) {\n*/\nResponse.prototype.send = function send(code, body, headers) {\nvar self = this;\n- var args = Array.prototype.slice.call(arguments);\n- args.push(true); // Append format = true to __send invocation\n- return self.__send.apply(self, args);\n+ var sendArgs;\n+\n+ if (typeof code === 'number') {\n+ sendArgs = {\n+ code: code,\n+ body: body,\n+ headers: headers\n+ };\n+ } else {\n+ sendArgs = {\n+ body: code,\n+ headers: body\n+ };\n+ }\n+\n+ sendArgs.format = true;\n+ return self.__send(sendArgs);\n};\n/**\n@@ -317,9 +331,23 @@ function patch(Response) {\n*/\nResponse.prototype.sendRaw = function sendRaw(code, body, headers) {\nvar self = this;\n- var args = Array.prototype.slice.call(arguments);\n- args.push(false); // Append format = false to __send invocation\n- return self.__send.apply(self, args);\n+ var sendArgs;\n+\n+ if (typeof code === 'number') {\n+ sendArgs = {\n+ code: code,\n+ body: body,\n+ headers: headers\n+ };\n+ } else {\n+ sendArgs = {\n+ body: code,\n+ headers: body\n+ };\n+ }\n+\n+ sendArgs.format = false;\n+ return self.__send(sendArgs);\n};\n// eslint-disable-next-line jsdoc/check-param-names\n@@ -331,51 +359,20 @@ function patch(Response) {\n* providing headers.\n*\n* @private\n- * @param {Number} [code] - http status code\n- * @param {Object | Buffer | String | Error} [body] - the content to send\n- * @param {Object} [headers] - any add'l headers to set\n- * @param {Boolean} [format] - When false, skip formatting\n- * @returns {Object} returns the response object\n+ * @param {Object} opts - an option sobject\n+ * @param {Object | Buffer | String | Error} opts.body - the content to send\n+ * @param {Boolean} opts.format - When false, skip formatting\n+ * @param {Number} [opts.code] - http status code\n+ * @param {Object} [opts.headers] - any add'l headers to set\n+ * @returns {Object} - returns the response object\n*/\n- Response.prototype.__send = function __send() {\n+ Response.prototype.__send = function __send(opts) {\nvar self = this;\nvar isHead = self.req.method === 'HEAD';\nvar log = self.log;\n- var code, body, headers, format;\n-\n- // derive arguments from types, one by one\n- var index = 0;\n- // Check to see if the first argument is a status code\n- if (typeof arguments[index] === 'number') {\n- code = arguments[index++];\n- }\n-\n- // Check to see if the next argument is a body\n- if (\n- typeof arguments[index] === 'object' ||\n- typeof arguments[index] === 'string'\n- ) {\n- body = arguments[index++];\n- }\n-\n- // Check to see if the next argument is a collection of headers\n- if (typeof arguments[index] === 'object') {\n- headers = arguments[index++];\n- }\n-\n- // Check to see if the next argument is the format boolean\n- if (typeof arguments[index] === 'boolean') {\n- format = arguments[index++];\n- }\n-\n- // Ensure the function was provided with arguments of the proper types,\n- // if we reach this line and there are still arguments, either one of\n- // the optional arguments was of an invalid type or we were provided\n- // with too many arguments\n- assert(\n- arguments[index] === undefined,\n- 'Unknown argument: ' + arguments[index] + '\\nProvided: ' + arguments\n- );\n+ var code = opts.code;\n+ var body = opts.body;\n+ var headers = opts.headers || {};\n// Now lets try to derive values for optional arguments that we were not\n// provided, otherwise we choose sane defaults.\n@@ -397,7 +394,6 @@ function patch(Response) {\n// Set sane defaults for optional arguments if they were not 
provided\n// and we failed to derive their values\ncode = code || self.statusCode || 200;\n- headers = headers || {};\n// Populate our response object with the derived arguments\nself.statusCode = code;\n@@ -429,7 +425,7 @@ function patch(Response) {\n// if no formatting, assert that the value to be written is a string\n// or a buffer, then send it.\n- if (format === false) {\n+ if (opts.format === false) {\nassert.ok(\ntypeof body === 'string' || Buffer.isBuffer(body),\n'res.sendRaw() accepts only strings or buffers'\n",
"new_path": "lib/response.js",
"old_path": "lib/response.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -636,3 +636,54 @@ test('should support multiple set-cookie headers', function(t) {\nt.end();\n});\n});\n+\n+test('GH-1607: should send bools with explicit status code', function(t) {\n+ SERVER.get('/bool/:value', function(req, res, next) {\n+ res.send(200, req.params.value === 'true' ? true : false);\n+ return next();\n+ });\n+\n+ STRING_CLIENT.get(join(LOCALHOST, '/bool/false'), function(\n+ err,\n+ req,\n+ res,\n+ data\n+ ) {\n+ t.equal(data, 'false');\n+\n+ STRING_CLIENT.get(join(LOCALHOST, '/bool/true'), function(\n+ err2,\n+ req2,\n+ res2,\n+ data2\n+ ) {\n+ t.equal(data2, 'true');\n+ t.end();\n+ });\n+ });\n+});\n+\n+test('GH-1607: should send numbers with explicit status code', function(t) {\n+ SERVER.get('/zero', function(req, res, next) {\n+ res.send(200, 0);\n+ return next();\n+ });\n+\n+ SERVER.get('/one', function(req, res, next) {\n+ res.send(200, 1);\n+ return next();\n+ });\n+\n+ STRING_CLIENT.get(join(LOCALHOST, '/zero'), function(err, req, res, data) {\n+ t.equal(data, '0');\n+ STRING_CLIENT.get(join(LOCALHOST, '/one'), function(\n+ err2,\n+ req2,\n+ res2,\n+ data2\n+ ) {\n+ t.equal(data2, '1');\n+ t.end();\n+ });\n+ });\n+});\n",
"new_path": "test/response.test.js",
"old_path": "test/response.test.js"
}
] |
JavaScript
|
MIT License
|
restify/node-restify
|
fix: send numbers or bools as payloads (#1609)
| 1
|
fix
| null |
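The restify fix above works because `__send` no longer sniffs `arguments` one slot at a time (a path where falsy bodies such as 0 or false fell through); `send` and `sendRaw` now build an explicit options object keyed off whether the first argument is a number. A minimal sketch of just that dispatch, separate from restify's Response prototype and omitting the `format` flag the real methods also set:

// Illustrative only: the same argument normalization the patched send()/sendRaw() perform.
function normalizeSendArgs(code, body, headers) {
  if (typeof code === "number") {
    // explicit status-code form: send(200, body[, headers])
    return { code: code, body: body, headers: headers };
  }
  // body-first form: send(body[, headers])
  return { body: code, headers: body };
}

normalizeSendArgs(200, 0);       // { code: 200, body: 0, headers: undefined }
normalizeSendArgs(200, false);   // { code: 200, body: false, headers: undefined }
normalizeSendArgs({ ok: true }); // { body: { ok: true }, headers: undefined }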
807,849
|
23.02.2018 12:50:18
| 28,800
|
0782fd75378afb242a1ce2cccc8c2052bcde1d51
|
refactor(UpdatedPackagesCollector): extract utils/get-forced-packages
|
[
{
"change_type": "MODIFY",
"diff": "@@ -5,6 +5,7 @@ const path = require(\"path\");\nconst semver = require(\"semver\");\nconst GitUtilities = require(\"./GitUtilities\");\n+const getForcedPackages = require(\"./utils/get-forced-packages\");\nclass Update {\nconstructor(pkg) {\n@@ -12,27 +13,6 @@ class Update {\n}\n}\n-function getForcedPackages(forcePublish) {\n- // new Set(null) is equivalent to new Set([])\n- // i.e., an empty Set\n- let inputs = null;\n-\n- if (forcePublish === true) {\n- // --force-publish\n- inputs = [\"*\"];\n- } else if (typeof forcePublish === \"string\") {\n- // --force-publish=*\n- // --force-publish=foo\n- // --force-publish=foo,bar\n- inputs = forcePublish.split(\",\");\n- } else if (Array.isArray(forcePublish)) {\n- // --force-publish foo --force-publish baz\n- inputs = [...forcePublish];\n- }\n-\n- return new Set(inputs);\n-}\n-\nfunction makeDiffSince(rootPath, execOpts, ignorePatterns) {\nconst ignoreFilters = new Set(\nArray.from(ignorePatterns || []).map(p => minimatch.filter(`!${p}`, { matchBase: true }))\n",
"new_path": "src/UpdatedPackagesCollector.js",
"old_path": "src/UpdatedPackagesCollector.js"
},
{
"change_type": "ADD",
"diff": "+\"use strict\";\n+\n+module.exports = getForcedPackages;\n+\n+function getForcedPackages(forcePublish) {\n+ // new Set(null) is equivalent to new Set([])\n+ // i.e., an empty Set\n+ let inputs = null;\n+\n+ if (forcePublish === true) {\n+ // --force-publish\n+ inputs = [\"*\"];\n+ } else if (typeof forcePublish === \"string\") {\n+ // --force-publish=*\n+ // --force-publish=foo\n+ // --force-publish=foo,bar\n+ inputs = forcePublish.split(\",\");\n+ } else if (Array.isArray(forcePublish)) {\n+ // --force-publish foo --force-publish baz\n+ inputs = [...forcePublish];\n+ }\n+\n+ return new Set(inputs);\n+}\n",
"new_path": "src/utils/get-forced-packages.js",
"old_path": null
}
] |
JavaScript
|
MIT License
|
lerna/lerna
|
refactor(UpdatedPackagesCollector): extract utils/get-forced-packages
| 1
|
refactor
|
UpdatedPackagesCollector
|
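The extracted helper normalizes the three shapes `--force-publish` can arrive in from the CLI parser. Assuming the module path shown in the diff, its outputs look like this (a usage sketch, not part of the commit):

// Hypothetical usage of the extracted util (require path taken from the diff above).
const getForcedPackages = require("./src/utils/get-forced-packages");

getForcedPackages(true);           // Set { "*" }           --force-publish
getForcedPackages("*");            // Set { "*" }           --force-publish=*
getForcedPackages("foo,bar");      // Set { "foo", "bar" }  --force-publish=foo,bar
getForcedPackages(["foo", "baz"]); // Set { "foo", "baz" }  --force-publish foo --force-publish baz
getForcedPackages(undefined);      // Set {}                flag not passed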
807,849
|
23.02.2018 12:52:51
| 28,800
|
2f759825829fe459081ce884bf1c5aa8ec6f78f3
|
refactor(GitUtilities): diffSinceIn() should slash() location path
|
[
{
"change_type": "MODIFY",
"diff": "@@ -134,7 +134,8 @@ function describeTag(ref, opts) {\n}\nfunction diffSinceIn(since, location, opts) {\n- const formattedLocation = path.relative(opts.cwd, location).replace(/\\\\/g, \"/\");\n+ const formattedLocation = slash(path.relative(opts.cwd, location));\n+\nlog.silly(\"diffSinceIn\", since, formattedLocation);\nconst diff = ChildProcessUtilities.execSync(\n",
"new_path": "src/GitUtilities.js",
"old_path": "src/GitUtilities.js"
}
] |
JavaScript
|
MIT License
|
lerna/lerna
|
refactor(GitUtilities): diffSinceIn() should slash() location path
| 1
|
refactor
|
GitUtilities
|
807,849
|
23.02.2018 13:04:09
| 28,800
|
78da241a4f327b6759ad627c4e3ee235bf3cb193
|
refactor(UpdatedPackagesCollector): extract utils/make-diff-predicate
|
[
{
"change_type": "MODIFY",
"diff": "\"use strict\";\n-const minimatch = require(\"minimatch\");\n-const path = require(\"path\");\nconst semver = require(\"semver\");\nconst GitUtilities = require(\"./GitUtilities\");\nconst getForcedPackages = require(\"./utils/get-forced-packages\");\n+const makeDiffPredicate = require(\"./utils/make-diff-predicate\");\nclass Update {\nconstructor(pkg) {\n@@ -13,31 +12,6 @@ class Update {\n}\n}\n-function makeDiffSince(rootPath, execOpts, ignorePatterns) {\n- const ignoreFilters = new Set(\n- Array.from(ignorePatterns || []).map(p => minimatch.filter(`!${p}`, { matchBase: true }))\n- );\n-\n- return function hasDiffSinceThatIsntIgnored(pkg, commits) {\n- const folder = path.relative(rootPath, pkg.location);\n- const diff = GitUtilities.diffSinceIn(commits, pkg.location, execOpts);\n-\n- if (diff === \"\") {\n- return false;\n- }\n-\n- let changedFiles = diff.split(\"\\n\").map(file => file.replace(folder + path.sep, \"\"));\n-\n- if (ignoreFilters.size) {\n- for (const ignored of ignoreFilters) {\n- changedFiles = changedFiles.filter(ignored);\n- }\n- }\n-\n- return !!changedFiles.length;\n- };\n-}\n-\nclass UpdatedPackagesCollector {\nconstructor(command) {\nthis.execOpts = command.execOpts;\n@@ -111,10 +85,10 @@ class UpdatedPackagesCollector {\nif (!since || forced.has(\"*\")) {\nthis.packages.forEach(node => this.candidates.add(node));\n} else {\n- const hasDiffSinceThatIsntIgnored = makeDiffSince(rootPath, execOpts, ignorePatterns);\n+ const hasDiff = makeDiffPredicate(since, rootPath, execOpts, ignorePatterns);\nthis.packages.forEach((node, name) => {\n- if (forced.has(name) || hasDiffSinceThatIsntIgnored(node, since)) {\n+ if (forced.has(name) || hasDiff(node)) {\nthis.candidates.add(node);\n}\n});\n",
"new_path": "src/UpdatedPackagesCollector.js",
"old_path": "src/UpdatedPackagesCollector.js"
},
{
"change_type": "ADD",
"diff": "+\"use strict\";\n+\n+const minimatch = require(\"minimatch\");\n+const GitUtilities = require(\"../GitUtilities\");\n+\n+module.exports = makeDiffPredicate;\n+\n+function makeDiffPredicate(committish, rootPath, execOpts, ignorePatterns = []) {\n+ const ignoreFilters = new Set(\n+ Array.from(ignorePatterns).map(p => minimatch.filter(`!${p}`, { matchBase: true }))\n+ );\n+\n+ return function hasDiffSinceThatIsntIgnored(node) {\n+ const diff = GitUtilities.diffSinceIn(committish, node.location, execOpts);\n+\n+ if (diff === \"\") {\n+ return false;\n+ }\n+\n+ let changedFiles = diff.split(\"\\n\");\n+\n+ if (ignoreFilters.size) {\n+ for (const ignored of ignoreFilters) {\n+ changedFiles = changedFiles.filter(ignored);\n+ }\n+ }\n+\n+ return changedFiles.length > 0;\n+ };\n+}\n",
"new_path": "src/utils/make-diff-predicate.js",
"old_path": null
}
] |
JavaScript
|
MIT License
|
lerna/lerna
|
refactor(UpdatedPackagesCollector): extract utils/make-diff-predicate
| 1
|
refactor
|
UpdatedPackagesCollector
|
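The predicate above pairs a `git diff --name-only` call with minimatch-based ignore filters. The git call needs a repository, but the ignore step can be exercised on its own; a sketch of just that part, with the helper name and sample paths invented for illustration:

// Standalone illustration of the ignore-filter step inside hasDiffSinceThatIsntIgnored.
const minimatch = require("minimatch");

function filterIgnored(changedFiles, ignorePatterns = []) {
  const ignoreFilters = new Set(
    ignorePatterns.map(p => minimatch.filter(`!${p}`, { matchBase: true }))
  );
  let files = changedFiles.slice();
  for (const ignored of ignoreFilters) {
    files = files.filter(ignored);
  }
  return files;
}

// A package whose only change is an ignored file is not considered updated:
filterIgnored(["packages/foo/README.md"], ["*.md"]); // => []
filterIgnored(["packages/foo/index.js"], ["*.md"]);  // => ["packages/foo/index.js"]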
807,849
|
23.02.2018 13:06:04
| 28,800
|
12c2976d3d033d60c1ed475dbe3bbae92ce18cd8
|
refactor(UpdatedPackagesCollector): extract utils/collect-dependents
|
[
{
"change_type": "MODIFY",
"diff": "const semver = require(\"semver\");\nconst GitUtilities = require(\"./GitUtilities\");\n+const collectDependents = require(\"./utils/collect-dependents\");\nconst getForcedPackages = require(\"./utils/get-forced-packages\");\nconst makeDiffPredicate = require(\"./utils/make-diff-predicate\");\n@@ -35,7 +36,9 @@ class UpdatedPackagesCollector {\nthis.collectUpdatedPackages();\nthis.collectPrereleasedPackages();\n- this.collectTransitiveDependents();\n+\n+ const dependents = collectDependents(this.candidates);\n+ dependents.forEach(node => this.candidates.add(node));\nconst updates = [];\n@@ -110,41 +113,6 @@ class UpdatedPackagesCollector {\n}\n});\n}\n-\n- collectTransitiveDependents() {\n- const collected = new Set();\n-\n- this.candidates.forEach((currentNode, currentName) => {\n- if (currentNode.localDependents.size === 0) {\n- // no point diving into a non-existent tree\n- return;\n- }\n-\n- // depth-first search, whee\n- const seen = new Set();\n-\n- const visit = (dependentNode, dependentName, siblingDependents) => {\n- if (seen.has(dependentNode)) {\n- return;\n- }\n-\n- seen.add(dependentNode);\n-\n- if (dependentNode === currentNode || siblingDependents.has(currentName)) {\n- // a direct or transitive cycle, skip it\n- return;\n- }\n-\n- collected.add(dependentNode);\n-\n- dependentNode.localDependents.forEach(visit);\n- };\n-\n- currentNode.localDependents.forEach(visit);\n- });\n-\n- collected.forEach(node => this.candidates.add(node));\n- }\n}\nmodule.exports = UpdatedPackagesCollector;\n",
"new_path": "src/UpdatedPackagesCollector.js",
"old_path": "src/UpdatedPackagesCollector.js"
},
{
"change_type": "ADD",
"diff": "+\"use strict\";\n+\n+module.exports = collectDependents;\n+\n+function collectDependents(nodes) {\n+ const collected = new Set();\n+\n+ nodes.forEach(currentNode => {\n+ if (currentNode.localDependents.size === 0) {\n+ // no point diving into a non-existent tree\n+ return;\n+ }\n+\n+ // depth-first search\n+ const seen = new Set();\n+ const visit = (dependentNode, dependentName, siblingDependents) => {\n+ if (seen.has(dependentNode)) {\n+ return;\n+ }\n+\n+ seen.add(dependentNode);\n+\n+ if (dependentNode === currentNode || siblingDependents.has(currentNode.name)) {\n+ // a direct or transitive cycle, skip it\n+ return;\n+ }\n+\n+ collected.add(dependentNode);\n+\n+ dependentNode.localDependents.forEach(visit);\n+ };\n+\n+ currentNode.localDependents.forEach(visit);\n+ });\n+\n+ return collected;\n+}\n",
"new_path": "src/utils/collect-dependents.js",
"old_path": null
}
] |
JavaScript
|
MIT License
|
lerna/lerna
|
refactor(UpdatedPackagesCollector): extract utils/collect-dependents
| 1
|
refactor
|
UpdatedPackagesCollector
|
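The extracted `collectDependents` walks each node's `localDependents` map depth-first and skips direct or transitive cycles. A tiny sketch with two hand-built nodes; the object shape here is only the subset of a PackageGraphNode that the function actually reads:

// Illustrative graph: package-2 depends on package-1, so it is a dependent of package-1.
const collectDependents = require("./src/utils/collect-dependents"); // path from the diff above

const pkg1 = { name: "package-1", localDependents: new Map() };
const pkg2 = { name: "package-2", localDependents: new Map() };
pkg1.localDependents.set("package-2", pkg2);

// Starting from a changed package-1, its transitive dependents are collected:
const result = collectDependents(new Set([pkg1]));
console.log([...result].map(n => n.name)); // => ["package-2"]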
807,849
|
23.02.2018 13:07:49
| 28,800
|
f4bc1cbead302c64cdc25999423b6e88daeee382
|
refactor(GitUtilities): diffSinceIn() expects a committish, not name@version
|
[
{
"change_type": "MODIFY",
"diff": "@@ -133,14 +133,14 @@ function describeTag(ref, opts) {\nreturn description;\n}\n-function diffSinceIn(since, location, opts) {\n+function diffSinceIn(committish, location, opts) {\nconst formattedLocation = slash(path.relative(opts.cwd, location));\n- log.silly(\"diffSinceIn\", since, formattedLocation);\n+ log.silly(\"diffSinceIn\", committish, formattedLocation);\nconst diff = ChildProcessUtilities.execSync(\n\"git\",\n- [\"diff\", \"--name-only\", since, \"--\", formattedLocation],\n+ [\"diff\", \"--name-only\", committish, \"--\", formattedLocation],\nopts\n);\nlog.silly(\"diff\", diff);\n",
"new_path": "src/GitUtilities.js",
"old_path": "src/GitUtilities.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -252,10 +252,10 @@ describe(\"GitUtilities\", () => {\nChildProcessUtilities.execSync.mockReturnValueOnce(\"files\");\n- expect(GitUtilities.diffSinceIn(\"foo@1.0.0\", \"packages/foo\", opts)).toBe(\"files\");\n+ expect(GitUtilities.diffSinceIn(\"v1.0.0\", \"packages/foo\", opts)).toBe(\"files\");\nexpect(ChildProcessUtilities.execSync).lastCalledWith(\n\"git\",\n- [\"diff\", \"--name-only\", \"foo@1.0.0\", \"--\", \"packages/foo\"],\n+ [\"diff\", \"--name-only\", \"v1.0.0\", \"--\", \"packages/foo\"],\nopts\n);\n});\n",
"new_path": "test/GitUtilities.js",
"old_path": "test/GitUtilities.js"
}
] |
JavaScript
|
MIT License
|
lerna/lerna
|
refactor(GitUtilities): diffSinceIn() expects a committish, not name@version
| 1
|
refactor
|
GitUtilities
|
679,913
|
23.02.2018 13:34:11
| 0
|
f0fdfa10686c28e2765763cb839354b6b003982d
|
feat(transducers): add deepTransform & mapDeep xform
|
[
{
"change_type": "MODIFY",
"diff": "+import { IObjectOf } from \"@thi.ng/api/api\";\nimport { Reduced } from \"./reduced\";\nexport type Fn<A, B> = (x: A) => B;\n@@ -20,4 +21,9 @@ export type ConvolutionKernel1D = [number, number][];\nexport type ConvolutionKernel2D = [number, [number, number]][];\nexport type ConvolutionKernel3D = [number, [number, number, number]][];\n+export interface TransformSpec extends Array<any> {\n+ [0]: (x: any) => any;\n+ [1]?: IObjectOf<TransformSpec | ((x: any) => any)>;\n+}\n+\nexport const SEMAPHORE = Symbol(\"SEMAPHORE\");\n",
"new_path": "packages/transducers/src/api.ts",
"old_path": "packages/transducers/src/api.ts"
},
{
"change_type": "ADD",
"diff": "+import { isFunction } from \"@thi.ng/checks/is-function\";\n+\n+import { TransformSpec } from \"../api\";\n+\n+/**\n+ * Higher-order deep object transformer. Accepts a nested `spec`\n+ * array reflecting same key structure as the object to be mapped,\n+ * but with functions or sub-specs as their values.\n+ * Returns a new function, which when called, recursively applies\n+ * nested transformers in post traversal order and returns the result\n+ * of the root transformer given.\n+ *\n+ * The transform specs are given as arrays in this format:\n+ *\n+ * ```\n+ * [tx-function, {key1: [tx-function, {...}], key2: tx-fn}]\n+ * ```\n+ *\n+ * If a key in the spec has no further sub maps, its transform function\n+ * can be given directly without having to wrap it into the usual array\n+ * structure.\n+ *\n+ * ```\n+ * // source object to be transformed\n+ * src = {\n+ * meta: {\n+ * author: { name: \"Alice\", email: \"a@.b.com\" },\n+ * date: 1041510896000\n+ * },\n+ * type: \"post\",\n+ * title: \"Hello world\",\n+ * body: \"Ratione necessitatibus doloremque itaque.\"\n+ * };\n+ *\n+ * // deep transformation spec\n+ * spec = [\n+ * // root transform (called last)\n+ * ({type, meta, title, body}) => [\"div\", {class: type}, title, meta, body],\n+ * // object of transform sub-specs\n+ * {\n+ * meta: [\n+ * ({author, date}) => [\"div.meta\", author, `(${date})`],\n+ * {\n+ * author: ({email, name}) => [\"a\", {href: `mailto:${email}`}, name],\n+ * date: (d) => new Date(d).toLocaleString()\n+ * }\n+ * ],\n+ * title: (title) => [\"h1\", title]\n+ * }\n+ * ];\n+ *\n+ * // build transformer & apply to src\n+ * deepTransform(spec)(src);\n+ *\n+ * // [ \"div\",\n+ * // { class: \"article\" },\n+ * // [ \"h1\", \"Hello world\" ],\n+ * // [ \"div.meta\",\n+ * // [ \"a\", { href: \"mailto:a@.b.com\" }, \"Alice\" ],\n+ * // \"(1/2/2003, 12:34:56 PM)\" ],\n+ * // \"Ratione necessitatibus doloremque itaque.\" ]\n+ * ```\n+ *\n+ * @param spec transformation spec\n+ */\n+export function deepTransform(spec: TransformSpec): (x) => any {\n+ if (isFunction(spec)) {\n+ return <any>spec;\n+ }\n+ const mapfns = Object.keys(spec[1] || {}).reduce(\n+ (acc, k) => (acc[k] = deepTransform((<any>spec)[1][k]), acc),\n+ {}\n+ );\n+ return (x) => {\n+ const res = { ...x };\n+ for (let k in mapfns) {\n+ res[k] = mapfns[k](res[k]);\n+ }\n+ return spec[0](res);\n+ };\n+}\n",
"new_path": "packages/transducers/src/func/deep-transform.ts",
"old_path": null
},
{
"change_type": "MODIFY",
"diff": "@@ -55,6 +55,7 @@ export * from \"./xform/map-keys\";\nexport * from \"./xform/map-nth\";\nexport * from \"./xform/map\";\nexport * from \"./xform/mapcat\";\n+export * from \"./xform/map-deep\";\nexport * from \"./xform/moving-average\";\nexport * from \"./xform/moving-median\";\nexport * from \"./xform/multiplex\";\n@@ -87,6 +88,7 @@ export * from \"./xform/utf8\";\nexport * from \"./func/binary-search\";\nexport * from \"./func/comp\";\nexport * from \"./func/constantly\";\n+export * from \"./func/deep-transform\";\nexport * from \"./func/delay\";\nexport * from \"./func/ensure-iterable\";\nexport * from \"./func/even\";\n",
"new_path": "packages/transducers/src/index.ts",
"old_path": "packages/transducers/src/index.ts"
},
{
"change_type": "ADD",
"diff": "+import { Transducer, TransformSpec } from \"../api\";\n+import { deepTransform } from \"../func/deep-transform\";\n+\n+import { map } from \"./map\";\n+\n+/**\n+ * Same as `map(deepTransform(spec))`\n+ *\n+ * @param spec\n+ */\n+export function mapDeep(spec: TransformSpec): Transducer<any, any> {\n+ return map(deepTransform(spec));\n+}\n",
"new_path": "packages/transducers/src/xform/map-deep.ts",
"old_path": null
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
feat(transducers): add deepTransform & mapDeep xform
| 1
|
feat
|
transducers
|
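Beyond the hiccup example in the docstring, the same spec format works for any nested record. A smaller usage sketch, assuming `deepTransform` is re-exported from the package index as the diff adds:

// deepTransform builds a function; child transforms run first, the root transform last.
const { deepTransform } = require("@thi.ng/transducers");

const toCard = deepTransform([
  ({ name, dob }) => ["div.card", name, dob],   // root, sees already-transformed children
  {
    name: (n) => ["h2", n],                     // leaf spec given as a bare function
    dob: (epoch) => new Date(epoch).getFullYear()
  }
]);

toCard({ name: "Alice", dob: 1041510896000 });
// => ["div.card", ["h2", "Alice"], 2003]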
679,913
|
23.02.2018 13:47:56
| 0
|
223669b1e9ec1e76c5f36b2699ae57a61892ecba
|
test(transducers): add tests
|
[
{
"change_type": "MODIFY",
"diff": "@@ -24,7 +24,7 @@ import { TransformSpec } from \"../api\";\n* // source object to be transformed\n* src = {\n* meta: {\n- * author: { name: \"Alice\", email: \"a@.b.com\" },\n+ * author: { name: \"Alice\", email: \"a@b.com\" },\n* date: 1041510896000\n* },\n* type: \"post\",\n",
"new_path": "packages/transducers/src/func/deep-transform.ts",
"old_path": "packages/transducers/src/func/deep-transform.ts"
},
{
"change_type": "ADD",
"diff": "+import { deepTransform } from \"../src/func/deep-transform\";\n+\n+import * as assert from \"assert\";\n+\n+describe(\"deepTransform\", () => {\n+ it(\"transforms hiccup\", () => {\n+ assert.deepEqual(\n+ deepTransform(\n+ [\n+ ({ type, meta, title, body }) => [\"div\", { class: type }, title, meta, body],\n+ {\n+ meta: [\n+ ({ author, date }) => [\"div.meta\", author, `(${date})`],\n+ {\n+ author: ({ email, name }) => [\"a\", { href: `mailto:${email}` }, name],\n+ date: (d) => new Date(d).toLocaleString()\n+ }\n+ ],\n+ title: (title) => [\"h1\", title]\n+ }\n+ ]\n+ )(\n+ {\n+ meta: {\n+ author: { name: \"Alice\", email: \"a@b.com\" },\n+ date: 1041510896000\n+ },\n+ type: \"post\",\n+ title: \"Hello world\",\n+ body: \"Ratione necessitatibus doloremque itaque.\"\n+ }\n+ ),\n+ ['div',\n+ { class: 'post' },\n+ ['h1', 'Hello world'],\n+ ['div.meta', [\"a\", { href: \"mailto:a@b.com\" }, \"Alice\"], '(1/2/2003, 12:34:56 PM)'],\n+ 'Ratione necessitatibus doloremque itaque.']\n+ );\n+ });\n+});\n",
"new_path": "packages/transducers/test/map-deep.ts",
"old_path": null
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
test(transducers): add tests
| 1
|
test
|
transducers
|
679,913
|
23.02.2018 14:28:33
| 0
|
2a11ff6f19506642761876e642c2d5dcf6a25714
|
refactor(transducers): add TransformSubSpec, fix test, minor update docs
|
[
{
"change_type": "MODIFY",
"diff": "@@ -21,9 +21,11 @@ export type ConvolutionKernel1D = [number, number][];\nexport type ConvolutionKernel2D = [number, [number, number]][];\nexport type ConvolutionKernel3D = [number, [number, number, number]][];\n+export type TransformFn = (x: any) => any;\n+export type TransformSubSpec = IObjectOf<TransformSpec | TransformFn>;\nexport interface TransformSpec extends Array<any> {\n- [0]: (x: any) => any;\n- [1]?: IObjectOf<TransformSpec | ((x: any) => any)>;\n+ [0]: TransformFn;\n+ [1]?: TransformSubSpec;\n}\nexport const SEMAPHORE = Symbol(\"SEMAPHORE\");\n",
"new_path": "packages/transducers/src/api.ts",
"old_path": "packages/transducers/src/api.ts"
},
{
"change_type": "MODIFY",
"diff": "@@ -7,8 +7,8 @@ import { TransformSpec } from \"../api\";\n* array reflecting same key structure as the object to be mapped,\n* but with functions or sub-specs as their values.\n* Returns a new function, which when called, recursively applies\n- * nested transformers in post traversal order and returns the result\n- * of the root transformer given.\n+ * nested transformers in post-order traversal (child transformers\n+ * are run first) and returns the result of the root transformer.\n*\n* The transform specs are given as arrays in this format:\n*\n@@ -16,9 +16,9 @@ import { TransformSpec } from \"../api\";\n* [tx-function, {key1: [tx-function, {...}], key2: tx-fn}]\n* ```\n*\n- * If a key in the spec has no further sub maps, its transform function\n- * can be given directly without having to wrap it into the usual array\n- * structure.\n+ * If a key in the spec has no further sub maps, its transform\n+ * function can be given directly without having to wrap it into\n+ * the usual array structure.\n*\n* ```\n* // source object to be transformed\n",
"new_path": "packages/transducers/src/func/deep-transform.ts",
"old_path": "packages/transducers/src/func/deep-transform.ts"
},
{
"change_type": "MODIFY",
"diff": "@@ -13,7 +13,7 @@ describe(\"deepTransform\", () => {\n({ author, date }) => [\"div.meta\", author, `(${date})`],\n{\nauthor: ({ email, name }) => [\"a\", { href: `mailto:${email}` }, name],\n- date: (d) => new Date(d).toLocaleString()\n+ date: (epoch) => new Date(epoch).toISOString()\n}\n],\ntitle: (title) => [\"h1\", title]\n@@ -33,7 +33,7 @@ describe(\"deepTransform\", () => {\n['div',\n{ class: 'post' },\n['h1', 'Hello world'],\n- ['div.meta', [\"a\", { href: \"mailto:a@b.com\" }, \"Alice\"], '(1/2/2003, 12:34:56 PM)'],\n+ ['div.meta', [\"a\", { href: \"mailto:a@b.com\" }, \"Alice\"], '(2003-01-02T12:34:56.000Z)'],\n'Ratione necessitatibus doloremque itaque.']\n);\n});\n",
"new_path": "packages/transducers/test/map-deep.ts",
"old_path": "packages/transducers/test/map-deep.ts"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
refactor(transducers): add TransformSubSpec, fix test, minor update docs
| 1
|
refactor
|
transducers
|
807,849
|
23.02.2018 14:48:22
| 28,800
|
389a5e36f6856dfe23eafb15c8b33a4ee293f59c
|
refactor(UpdatedPackagesCollector): remove Update() wrapper, re-use PackageGraphNodes
|
[
{
"change_type": "MODIFY",
"diff": "@@ -276,7 +276,7 @@ class Command {\n// The UpdatedPackagesCollector requires that filteredPackages be present prior to checking for\n// updates. That's okay because it further filters based on what's already been filtered.\nif (typeof since === \"string\") {\n- const updated = new UpdatedPackagesCollector(this).getUpdates().map(update => update.package.name);\n+ const updated = new UpdatedPackagesCollector(this).getUpdates().map(({ pkg }) => pkg.name);\nthis.filteredPackages = this.filteredPackages.filter(pkg => updated.indexOf(pkg.name) > -1);\n}\n",
"new_path": "src/Command.js",
"old_path": "src/Command.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -7,12 +7,6 @@ const collectDependents = require(\"./utils/collect-dependents\");\nconst getForcedPackages = require(\"./utils/get-forced-packages\");\nconst makeDiffPredicate = require(\"./utils/make-diff-predicate\");\n-class Update {\n- constructor(pkg) {\n- this.package = pkg;\n- }\n-}\n-\nclass UpdatedPackagesCollector {\nconstructor(command) {\nthis.execOpts = command.execOpts;\n@@ -45,8 +39,7 @@ class UpdatedPackagesCollector {\nconst mapper = (node, name) => {\nthis.logger.verbose(\"has filtered update\", name);\n- // TODO: stop re-wrapping with a silly Update class\n- updates.push(new Update(node.pkg));\n+ updates.push(node);\n};\nif (this.options.canary) {\n",
"new_path": "src/UpdatedPackagesCollector.js",
"old_path": "src/UpdatedPackagesCollector.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -220,7 +220,7 @@ class PublishCommand extends Command {\nreturn callback(null, false);\n}\n- this.packagesToPublish = this.updates.map(({ package: pkg }) => pkg).filter(pkg => !pkg.private);\n+ this.packagesToPublish = this.updates.map(({ pkg }) => pkg).filter(pkg => !pkg.private);\nthis.batchedPackagesToPublish = this.toposort\n? batchPackages(\n@@ -271,9 +271,7 @@ class PublishCommand extends Command {\nresolveLocalDependencyLinks() {\n// resolve relative file: links to their actual version range\n- const updatesWithLocalLinks = this.updates\n- .map(({ package: pkg }) => this.packageGraph.get(pkg.name))\n- .filter(\n+ const updatesWithLocalLinks = this.updates.filter(\n({ localDependencies }) =>\nlocalDependencies.size &&\nArray.from(localDependencies.values()).some(({ type }) => type === \"directory\")\n@@ -364,7 +362,7 @@ class PublishCommand extends Command {\n}\nreduceVersions(getVersion) {\n- const iterator = (versionMap, { package: pkg }) =>\n+ const iterator = (versionMap, { pkg }) =>\nPromise.resolve(getVersion(pkg)).then(version => versionMap.set(pkg.name, version));\nreturn pReduce(this.updates, iterator, new Map());\n@@ -382,7 +380,7 @@ class PublishCommand extends Command {\nchain = chain.then(() => {\nconst globalVersion = this.repository.version;\n- this.updates.forEach(({ package: pkg }) => {\n+ this.updates.forEach(({ pkg }) => {\nif (semver.lt(pkg.version, globalVersion)) {\nthis.logger.verbose(\n\"publish\",\n@@ -468,7 +466,7 @@ class PublishCommand extends Command {\n}\nconfirmVersions() {\n- const changes = this.updates.map(({ package: pkg }) => {\n+ const changes = this.updates.map(({ pkg }) => {\nlet line = ` - ${pkg.name}: ${pkg.version} => ${this.updatesVersions.get(pkg.name)}`;\nif (pkg.private) {\nline += ` (${chalk.red(\"private\")})`;\n@@ -528,7 +526,7 @@ class PublishCommand extends Command {\nchain = chain.then(() =>\npMap(\nthis.updates,\n- ({ package: pkg }) =>\n+ ({ pkg, localDependencies }) =>\n// start the chain\nPromise.resolve()\n@@ -541,7 +539,7 @@ class PublishCommand extends Command {\npkg.version = this.updatesVersions.get(pkg.name);\n// update pkg dependencies\n- for (const [depName, resolved] of this.packageGraph.get(pkg.name).localDependencies) {\n+ for (const [depName, resolved] of localDependencies) {\nconst depVersion = this.updatesVersions.get(depName);\nif (depVersion && resolved.type !== \"directory\") {\n@@ -619,7 +617,7 @@ class PublishCommand extends Command {\n// run the postversion script for each update\nchain = chain.then(() => {\n- this.updates.forEach(({ package: pkg }) => this.runLifecycle(pkg, \"postversion\"));\n+ this.updates.forEach(({ pkg }) => this.runLifecycle(pkg, \"postversion\"));\n});\n// run postversion, if set, in the root directory\n@@ -629,7 +627,7 @@ class PublishCommand extends Command {\n}\ngitCommitAndTagVersionForUpdates() {\n- const tags = this.updates.map(({ package: pkg }) => `${pkg.name}@${this.updatesVersions.get(pkg.name)}`);\n+ const tags = this.updates.map(({ pkg }) => `${pkg.name}@${this.updatesVersions.get(pkg.name)}`);\nconst subject = this.options.message || \"Publish\";\nconst message = tags.reduce((msg, tag) => `${msg}${os.EOL} - ${tag}`, `${subject}${os.EOL}`);\n@@ -677,7 +675,7 @@ class PublishCommand extends Command {\n// if we skip temp tags we should tag with the proper value immediately\nconst distTag = this.options.tempTag ? 
\"lerna-temp\" : this.getDistTag();\n- this.updates.forEach(({ package: pkg }) => this.execScript(pkg, \"prepublish\"));\n+ this.updates.forEach(({ pkg }) => this.execScript(pkg, \"prepublish\"));\ntracker.addWork(this.packagesToPublish.length);\n",
"new_path": "src/commands/PublishCommand.js",
"old_path": "src/commands/PublishCommand.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -52,7 +52,7 @@ class UpdatedCommand extends Command {\n}\nexecute(callback) {\n- const updatedPackages = this.updates.map(update => update.package).map(pkg => ({\n+ const updatedPackages = this.updates.map(({ pkg }) => ({\nname: pkg.name,\nversion: pkg.version,\nprivate: pkg.private,\n",
"new_path": "src/commands/UpdatedCommand.js",
"old_path": "src/commands/UpdatedCommand.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -107,7 +107,7 @@ describe(\"ExecCommand\", () => {\nUpdatedPackagesCollector.prototype.getUpdates = jest.fn(() => [\n{\n- package: {\n+ pkg: {\nname: \"package-2\",\nlocation: path.join(testDir, \"packages/package-2\"),\n},\n",
"new_path": "test/ExecCommand.js",
"old_path": "test/ExecCommand.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -86,7 +86,7 @@ describe(\"RunCommand\", () => {\nUpdatedPackagesCollector.prototype.getUpdates = jest.fn(() => [\n{\n- package: {\n+ pkg: {\nname: \"package-3\",\nlocation: path.join(testDir, \"packages/package-3\"),\nscripts: { \"my-script\": \"echo package-3\" },\n",
"new_path": "test/RunCommand.js",
"old_path": "test/RunCommand.js"
}
] |
JavaScript
|
MIT License
|
lerna/lerna
|
refactor(UpdatedPackagesCollector): remove Update() wrapper, re-use PackageGraphNodes
| 1
|
refactor
|
UpdatedPackagesCollector
|
679,913
|
23.02.2018 14:51:46
| 0
|
b3891d521c1df5ba63343c6850666194b60b15eb
|
refactor(examples): simplify json-components
|
[
{
"change_type": "MODIFY",
"diff": "\"webpack\": \"^3.11.0\"\n},\n\"dependencies\": {\n- \"@thi.ng/checks\": \"^1.2.1\",\n- \"@thi.ng/hiccup-dom\": \"^1.0.5\"\n+ \"@thi.ng/hiccup-dom\": \"^1.0.5\",\n+ \"@thi.ng/transducers\": \"^1.4.0\"\n}\n}\n\\ No newline at end of file\n",
"new_path": "examples/json-components/package.json",
"old_path": "examples/json-components/package.json"
},
{
"change_type": "MODIFY",
"diff": "-import { isFunction } from \"@thi.ng/checks/is-function\";\nimport { start } from \"@thi.ng/hiccup-dom\";\n+import { TransformSubSpec } from \"@thi.ng/transducers/api\";\n+import { deepTransform } from \"@thi.ng/transducers/func/deep-transform\";\n// some dummy JSON records\nlet db = [\n@@ -43,38 +44,17 @@ const tags = (tags) => [\"ul.tags\", ...tags.map(tag)];\nconst title = (title, level = 3) => [`h${level}`, title];\nconst content = (body) => [\"div\", body];\n-// generic JSON object tree transformer\n-// called with a nested object spec reflecting the structure\n-// of the source data, returns composed component function,\n-// which calls all nested sub-components\n-const componentFromSpec = (spec) => {\n- if (isFunction(spec)) {\n- return spec;\n- }\n- const mapfns = Object.keys(spec[1]).reduce(\n- (acc, k) => (acc[k] = componentFromSpec(spec[1][k]), acc),\n- {}\n- );\n- return (x) => {\n- const res = {};\n- for (let k in mapfns) {\n- res[k] = x[k] != null ? mapfns[k](x[k]) : undefined;\n- }\n- return spec[0](res);\n- };\n-};\n-\n-// now build themed component functions for the above JSON object format\n+// now compose themed component functions for the above JSON object format\n// the spec below is is only partially complete and will be reused by\n// the two themes below (this is only for demo purposes and of course\n// one could supply completely different functions per theme, but KISS here... :)\n// the full spec is an array of this recursive structure:\n// [mapfn, {optional chid key specs...}]\n-// for leaf keys only a function needs to be given, no need to wrap in array\n+// for leaf keys only a function needs to be given, no need to wrap in array.\n// giving component functions the same name as their object keys\n// makes this format very succinct\n-const itemSpec = {\n+const itemSpec: TransformSubSpec = {\nmeta: [\nmeta,\n{\n@@ -87,9 +67,13 @@ const itemSpec = {\ncontent\n};\n-// build themed component instances\n-const itemLight = componentFromSpec([item(\"light\"), itemSpec]);\n-const itemDark = componentFromSpec([item(\"dark\"), itemSpec]);\n+// build themed component instances using @thi.ng/tranducers' deepTransform()\n+// deepTransform() is generic object tree transformer\n+// called with a nested object spec reflecting the structure\n+// of the source data, returns composed component function,\n+// which calls all nested transformer functions in post-order traversal\n+const itemLight = deepTransform([item(\"light\"), itemSpec]);\n+const itemDark = deepTransform([item(\"dark\"), itemSpec]);\n// simple text area editor for our JSON data\n// any change to the input should be immediately\n",
"new_path": "examples/json-components/src/index.ts",
"old_path": "examples/json-components/src/index.ts"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
refactor(examples): simplify json-components
| 1
|
refactor
|
examples
|
807,849
|
23.02.2018 15:03:58
| 28,800
|
a5acaf6e52eefe9c5915ddec7fed0d46222691b3
|
refactor(Package): updateDependency() => updateLocalDependency()
|
[
{
"change_type": "MODIFY",
"diff": "@@ -27,6 +27,7 @@ function shallowCopy(json) {\nclass Package {\nconstructor(pkg, location, rootPath = location) {\n+ // npa will throw an error if the name is invalid\nconst resolved = npa.resolve(pkg.name, path.relative(rootPath, location), rootPath);\nObject.defineProperties(this, {\n@@ -99,7 +100,7 @@ class Package {\n});\n}\n- updateDependency(resolved, depVersion, savePrefix) {\n+ updateLocalDependency(resolved, depVersion, savePrefix) {\nconst depName = resolved.name;\n// first, try runtime dependencies\n",
"new_path": "src/Package.js",
"old_path": "src/Package.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -283,7 +283,7 @@ class PublishCommand extends Command {\nconst depVersion = this.updatesVersions.get(depName) || this.packageGraph.get(depName).pkg.version;\n// it no longer matters if we mutate the shared Package instance\n- pkg.updateDependency(resolved, depVersion, this.savePrefix);\n+ pkg.updateLocalDependency(resolved, depVersion, this.savePrefix);\n}\nreturn writePkg(pkg.manifestLocation, pkg.toJSON()).then(() => pkg);\n@@ -544,7 +544,7 @@ class PublishCommand extends Command {\nif (depVersion && resolved.type !== \"directory\") {\n// don't overwrite local file: specifiers (yet)\n- pkg.updateDependency(resolved, depVersion, this.savePrefix);\n+ pkg.updateLocalDependency(resolved, depVersion, this.savePrefix);\n}\n}\n",
"new_path": "src/commands/PublishCommand.js",
"old_path": "src/commands/PublishCommand.js"
}
] |
JavaScript
|
MIT License
|
lerna/lerna
|
refactor(Package): updateDependency() => updateLocalDependency()
| 1
|
refactor
|
Package
|
807,849
|
23.02.2018 15:39:07
| 28,800
|
08b5f60d522fa4ed384919c3408f2f9c0865465c
|
refactor(UpdatedPackagesCollector): rename => utils/collect-packages
|
[
{
"change_type": "MODIFY",
"diff": "@@ -9,8 +9,8 @@ const GitUtilities = require(\"./GitUtilities\");\nconst PackageGraph = require(\"./PackageGraph\");\nconst Repository = require(\"./Repository\");\nconst writeLogFile = require(\"./utils/write-log-file\");\n-const UpdatedPackagesCollector = require(\"./UpdatedPackagesCollector\");\nconst collectPackages = require(\"./utils/collect-packages\");\n+const collectUpdates = require(\"./utils/collect-updates\");\nconst filterPackages = require(\"./utils/filter-packages\");\nconst ValidationError = require(\"./utils/validation-error\");\n@@ -273,10 +273,10 @@ class Command {\nthis.packageGraph = new PackageGraph(this.packages);\nthis.filteredPackages = filterPackages(this.packages, { scope, ignore });\n- // The UpdatedPackagesCollector requires that filteredPackages be present prior to checking for\n+ // collectUpdates requires that filteredPackages be present prior to checking for\n// updates. That's okay because it further filters based on what's already been filtered.\nif (typeof since === \"string\") {\n- const updated = new UpdatedPackagesCollector(this).getUpdates().map(({ pkg }) => pkg.name);\n+ const updated = collectUpdates(this).map(({ pkg }) => pkg.name);\nthis.filteredPackages = this.filteredPackages.filter(pkg => updated.indexOf(pkg.name) > -1);\n}\n",
"new_path": "src/Command.js",
"old_path": "src/Command.js"
},
{
"change_type": "DELETE",
"diff": "-\"use strict\";\n-\n-const semver = require(\"semver\");\n-\n-const GitUtilities = require(\"./GitUtilities\");\n-const collectDependents = require(\"./utils/collect-dependents\");\n-const getForcedPackages = require(\"./utils/get-forced-packages\");\n-const makeDiffPredicate = require(\"./utils/make-diff-predicate\");\n-\n-class UpdatedPackagesCollector {\n- constructor(command) {\n- this.execOpts = command.execOpts;\n- this.logger = command.logger;\n- this.rootPath = command.repository.rootPath;\n- this.options = command.options;\n-\n- if (command.filteredPackages.length === command.packageGraph.size) {\n- this.packages = command.packageGraph;\n- } else {\n- this.packages = new Map(\n- command.filteredPackages.map(({ name }) => [name, command.packageGraph.get(name)])\n- );\n- }\n- }\n-\n- getUpdates() {\n- this.logger.silly(\"getUpdates\");\n-\n- this.candidates = new Set();\n-\n- this.collectUpdatedPackages();\n- this.collectPrereleasedPackages();\n-\n- const dependents = collectDependents(this.candidates);\n- dependents.forEach(node => this.candidates.add(node));\n-\n- const updates = [];\n-\n- const mapper = (node, name) => {\n- this.logger.verbose(\"has filtered update\", name);\n-\n- updates.push(node);\n- };\n-\n- if (this.options.canary) {\n- this.packages.forEach(mapper);\n- } else {\n- this.packages.forEach((node, name) => {\n- if (this.candidates.has(node)) {\n- mapper(node, name);\n- }\n- });\n- }\n-\n- return updates;\n- }\n-\n- collectUpdatedPackages() {\n- this.logger.info(\"\", \"Checking for updated packages...\");\n-\n- const { execOpts, options, rootPath } = this;\n- const { canary, forcePublish, ignore: ignorePatterns } = options;\n- let { since } = options;\n-\n- if (GitUtilities.hasTags(execOpts)) {\n- if (canary) {\n- const sha = GitUtilities.getShortSHA(execOpts);\n-\n- // if it's a merge commit, it will return all the commits that were part of the merge\n- // ex: If `ab7533e` had 2 commits, ab7533e^..ab7533e would contain 2 commits + the merge commit\n- since = `${sha}^..${sha}`;\n- } else if (!since) {\n- since = GitUtilities.getLastTag(execOpts);\n- }\n- }\n-\n- this.logger.info(\"\", `Comparing with ${since || \"initial commit\"}.`);\n-\n- const forced = getForcedPackages(forcePublish);\n-\n- if (!since || forced.has(\"*\")) {\n- this.packages.forEach(node => this.candidates.add(node));\n- } else {\n- const hasDiff = makeDiffPredicate(since, rootPath, execOpts, ignorePatterns);\n-\n- this.packages.forEach((node, name) => {\n- if (forced.has(name) || hasDiff(node)) {\n- this.candidates.add(node);\n- }\n- });\n- }\n- }\n-\n- collectPrereleasedPackages() {\n- if ((this.options.cdVersion || \"\").startsWith(\"pre\")) {\n- return;\n- }\n-\n- this.logger.info(\"\", \"Checking for prereleased packages...\");\n-\n- // skip packages that have not been previously prereleased\n- this.packages.forEach((node, name) => {\n- if (semver.prerelease(node.version)) {\n- this.logger.verbose(\"prereleased\", name);\n- this.candidates.add(node);\n- }\n- });\n- }\n-}\n-\n-module.exports = UpdatedPackagesCollector;\n",
"new_path": null,
"old_path": "src/UpdatedPackagesCollector.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -19,7 +19,7 @@ const ConventionalCommitUtilities = require(\"../ConventionalCommitUtilities\");\nconst GitUtilities = require(\"../GitUtilities\");\nconst PromptUtilities = require(\"../PromptUtilities\");\nconst output = require(\"../utils/output\");\n-const UpdatedPackagesCollector = require(\"../UpdatedPackagesCollector\");\n+const collectUpdates = require(\"../utils/collect-updates\");\nconst npmDistTag = require(\"../utils/npm-dist-tag\");\nconst npmPublish = require(\"../utils/npm-publish\");\nconst npmRunScript = require(\"../utils/npm-run-script\");\n@@ -213,7 +213,7 @@ class PublishCommand extends Command {\n}\n}\n- this.updates = new UpdatedPackagesCollector(this).getUpdates();\n+ this.updates = collectUpdates(this);\nif (!this.updates.length) {\nthis.logger.info(\"No updated packages to publish.\");\n",
"new_path": "src/commands/PublishCommand.js",
"old_path": "src/commands/PublishCommand.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -5,7 +5,7 @@ const chalk = require(\"chalk\");\nconst Command = require(\"../Command\");\nconst output = require(\"../utils/output\");\nconst publishOptions = require(\"./PublishCommand\").builder;\n-const UpdatedPackagesCollector = require(\"../UpdatedPackagesCollector\");\n+const collectUpdates = require(\"../utils/collect-updates\");\nexports.handler = function handler(argv) {\n// eslint-disable-next-line no-use-before-define\n@@ -40,7 +40,7 @@ class UpdatedCommand extends Command {\n}\ninitialize(callback) {\n- this.updates = new UpdatedPackagesCollector(this).getUpdates();\n+ this.updates = collectUpdates(this);\nconst proceedWithUpdates = this.updates.length > 0;\n",
"new_path": "src/commands/UpdatedCommand.js",
"old_path": "src/commands/UpdatedCommand.js"
},
{
"change_type": "ADD",
"diff": "+\"use strict\";\n+\n+const semver = require(\"semver\");\n+\n+const GitUtilities = require(\"../GitUtilities\");\n+const collectDependents = require(\"./collect-dependents\");\n+const getForcedPackages = require(\"./get-forced-packages\");\n+const makeDiffPredicate = require(\"./make-diff-predicate\");\n+\n+module.exports = collectUpdates;\n+\n+function collectUpdates({ filteredPackages, packageGraph, options, rootPath, execOpts, logger }) {\n+ const packages =\n+ filteredPackages.length === packageGraph.size\n+ ? packageGraph\n+ : new Map(filteredPackages.map(({ name }) => [name, packageGraph.get(name)]));\n+\n+ logger.info(\"\", \"Checking for updated packages...\");\n+\n+ let { since: committish } = options;\n+\n+ if (GitUtilities.hasTags(execOpts)) {\n+ if (options.canary) {\n+ const sha = GitUtilities.getShortSHA(execOpts);\n+\n+ // if it's a merge commit, it will return all the commits that were part of the merge\n+ // ex: If `ab7533e` had 2 commits, ab7533e^..ab7533e would contain 2 commits + the merge commit\n+ committish = `${sha}^..${sha}`;\n+ } else if (!committish) {\n+ committish = GitUtilities.getLastTag(execOpts);\n+ }\n+ }\n+\n+ logger.info(\"\", `Comparing with ${committish || \"initial commit\"}.`);\n+\n+ const forced = getForcedPackages(options.forcePublish);\n+ let candidates;\n+\n+ if (!committish || forced.has(\"*\")) {\n+ candidates = new Set(packages.values());\n+ } else {\n+ candidates = new Set();\n+\n+ const hasDiff = makeDiffPredicate(committish, rootPath, execOpts, options.ignore);\n+ const needsBump = (options.cdVersion || \"\").startsWith(\"pre\")\n+ ? () => false\n+ : /* skip packages that have not been previously prereleased */\n+ node => semver.prerelease(node.version);\n+\n+ packages.forEach((node, name) => {\n+ if (forced.has(name) || needsBump(node) || hasDiff(node)) {\n+ candidates.add(node);\n+ }\n+ });\n+ }\n+\n+ const dependents = collectDependents(candidates);\n+ dependents.forEach(node => candidates.add(node));\n+\n+ if (options.canary || packages.size === candidates.size) {\n+ logger.verbose(\"updated\", \"(short-circuit)\");\n+\n+ return Array.from(candidates);\n+ }\n+\n+ const updates = [];\n+\n+ packages.forEach((node, name) => {\n+ if (candidates.has(node)) {\n+ logger.verbose(\"updated\", name);\n+\n+ updates.push(node);\n+ }\n+ });\n+\n+ return updates;\n+}\n",
"new_path": "src/utils/collect-updates.js",
"old_path": null
},
{
"change_type": "MODIFY",
"diff": "\"use strict\";\njest.mock(\"../src/ChildProcessUtilities\");\n-jest.mock(\"../src/UpdatedPackagesCollector\");\n+jest.mock(\"../src/utils/collect-updates\");\nconst path = require(\"path\");\n// mocked modules\nconst ChildProcessUtilities = require(\"../src/ChildProcessUtilities\");\n-const UpdatedPackagesCollector = require(\"../src/UpdatedPackagesCollector\");\n+const collectUpdates = require(\"../src/utils/collect-updates\");\n// helpers\nconst callsBack = require(\"./helpers/callsBack\");\n@@ -105,7 +105,7 @@ describe(\"ExecCommand\", () => {\nit(\"should filter packages that are not updated with --since\", async () => {\nconst testDir = await initFixture(\"ExecCommand/basic\");\n- UpdatedPackagesCollector.prototype.getUpdates = jest.fn(() => [\n+ collectUpdates.mockReturnValueOnce([\n{\npkg: {\nname: \"package-2\",\n",
"new_path": "test/ExecCommand.js",
"old_path": "test/ExecCommand.js"
},
{
"change_type": "MODIFY",
"diff": "\"use strict\";\njest.mock(\"../src/utils/npm-run-script\");\n+jest.mock(\"../src/utils/collect-updates\");\nconst path = require(\"path\");\n// mocked modules\nconst npmRunScript = require(\"../src/utils/npm-run-script\");\n-const UpdatedPackagesCollector = require(\"../src/UpdatedPackagesCollector\");\n+const collectUpdates = require(\"../src/utils/collect-updates\");\n// helpers\nconst callsBack = require(\"./helpers/callsBack\");\n@@ -84,7 +85,7 @@ describe(\"RunCommand\", () => {\nit(\"should filter packages that are not updated with --since\", async () => {\nconst testDir = await initFixture(\"RunCommand/basic\");\n- UpdatedPackagesCollector.prototype.getUpdates = jest.fn(() => [\n+ collectUpdates.mockReturnValueOnce([\n{\npkg: {\nname: \"package-3\",\n",
"new_path": "test/RunCommand.js",
"old_path": "test/RunCommand.js"
},
{
"change_type": "DELETE",
"diff": "-\"use strict\";\n-\n-jest.mock(\"../src/GitUtilities\");\n-\n-// mocked modules\n-const GitUtilities = require(\"../src/GitUtilities\");\n-\n-// file under test\n-const UpdatedPackagesCollector = require(\"../src/UpdatedPackagesCollector\");\n-\n-describe(\"UpdatedPackagesCollector\", () => {\n- GitUtilities.getShortSHA.mockReturnValue(\"deadbeef\");\n- GitUtilities.hasTags.mockReturnValue(true);\n- GitUtilities.diffSinceIn.mockReturnValue(\"\");\n- GitUtilities.getLastTag.mockReturnValue(\"lastTag\");\n-\n- describe(\".collectUpdatedPackages()\", () => {\n- const filteredPackages = [\n- { name: \"package-1\", location: \"location-1\" },\n- { name: \"package-2\", location: \"location-2\" },\n- ];\n- const packageGraph = new Map(filteredPackages.map(pkg => [pkg.name, pkg]));\n- const logger = {\n- silly: () => {},\n- info: () => {},\n- verbose: () => {},\n- };\n- const execOpts = { cwd: \"test-cwd\" };\n- const repository = {\n- rootPath: \"root-path\",\n- };\n-\n- it(\"should use the current SHA for commit ranges when the canary flag has been passed\", () => {\n- new UpdatedPackagesCollector({\n- options: {\n- canary: true,\n- },\n- execOpts,\n- repository,\n- logger,\n- filteredPackages,\n- packageGraph,\n- }).getUpdates();\n-\n- expect(GitUtilities.diffSinceIn).toBeCalledWith(\"deadbeef^..deadbeef\", \"location-1\", execOpts);\n- expect(GitUtilities.diffSinceIn).toBeCalledWith(\"deadbeef^..deadbeef\", \"location-2\", execOpts);\n- });\n-\n- it(\"should use the current SHA for commit ranges when the canary flag is a string\", () => {\n- new UpdatedPackagesCollector({\n- options: {\n- canary: \"my-tag\",\n- },\n- execOpts,\n- repository,\n- logger,\n- filteredPackages,\n- packageGraph,\n- }).getUpdates();\n-\n- expect(GitUtilities.diffSinceIn).toBeCalledWith(\"deadbeef^..deadbeef\", \"location-1\", execOpts);\n- expect(GitUtilities.diffSinceIn).toBeCalledWith(\"deadbeef^..deadbeef\", \"location-2\", execOpts);\n- });\n-\n- it(\"should use the last tag in non-canary mode for commit ranges when a repo has tags\", () => {\n- new UpdatedPackagesCollector({\n- options: {},\n- execOpts,\n- repository,\n- logger,\n- filteredPackages,\n- packageGraph,\n- }).getUpdates();\n-\n- expect(GitUtilities.diffSinceIn).toBeCalledWith(\"lastTag\", \"location-1\", execOpts);\n- expect(GitUtilities.diffSinceIn).toBeCalledWith(\"lastTag\", \"location-2\", execOpts);\n- });\n- });\n-});\n",
"new_path": null,
"old_path": "test/UpdatedPackagesCollector.js"
}
] |
JavaScript
|
MIT License
|
lerna/lerna
|
refactor(UpdatedPackagesCollector): rename => utils/collect-packages
| 1
|
refactor
|
UpdatedPackagesCollector
|
807,849
|
23.02.2018 15:49:47
| 28,800
|
aae27f18e90eae62b2db93220118dc2e2a886b6f
|
refactor(PackageGraph): remove unused (and confusing) graph properties
|
[
{
"change_type": "MODIFY",
"diff": "@@ -30,25 +30,6 @@ class PackageGraphNode {\nreturn pkg;\n},\n},\n- // graph-specific computed properties\n- indegree: {\n- get() {\n- // https://en.wikipedia.org/wiki/Directed_graph#Indegree_and_outdegree\n- return this.localDependencies.size;\n- },\n- },\n- outdegree: {\n- get() {\n- // https://en.wikipedia.org/wiki/Directed_graph#Indegree_and_outdegree\n- return this.localDependents.size;\n- },\n- },\n- degree: {\n- get() {\n- // https://en.wikipedia.org/wiki/Degree_(graph_theory)\n- return this.indegree + this.outdegree;\n- },\n- },\n});\nthis.externalDependencies = new Map();\n@@ -56,37 +37,6 @@ class PackageGraphNode {\nthis.localDependents = new Map();\n}\n- is(degreeType) {\n- // The mind-bending thing (just one?!) about dependency graphs\n- // is that the arrows (\"edges\") point in the reverse direction\n- // of the traditional implication of \"has a dependency on\" and\n- // \"is a dependent of\" relationship descriptions.\n- switch (degreeType) {\n- case \"source\":\n- // only local dependents\n- return this.indegree === 0;\n-\n- case \"sink\":\n- // only local dependencies\n- return this.outdegree === 0;\n-\n- case \"isolated\":\n- // no local dependencies or dependents\n- return this.degree === 0;\n-\n- case \"leaf\":\n- // exactly one local dependency OR dependent\n- return this.degree === 1;\n-\n- case \"internal\":\n- // more than one local dependency OR dependents\n- return !(this.indegree === 0 || this.outdegree === 0);\n-\n- default:\n- throw new Error(`unknown property \"${degreeType}\"`);\n- }\n- }\n-\n/**\n* Determine if the Node satisfies a resolved semver range.\n* @see https://github.com/npm/npm-package-arg#result-object\n",
"new_path": "src/PackageGraph.js",
"old_path": "src/PackageGraph.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -27,13 +27,13 @@ function batchPackages(packagesToBatch, rejectCycles, graphType) {\nwhile (graph.size) {\n// pick the current set of nodes _without_ localDependencies (aka it is a \"source\" node)\n- const batch = Array.from(graph.values()).filter(node => node.is(\"source\"));\n+ const batch = Array.from(graph.values()).filter(node => node.localDependencies.size === 0);\nlog.silly(\"batched\", batch);\n// batches are composed of Package instances, not PackageGraphNodes\nbatches.push(batch.map(node => node.pkg));\n- // pruning the graph changes the node.is(\"source\") evaluation\n+ // pruning the graph changes the node.localDependencies.size test\ngraph.prune(...batch);\n}\n",
"new_path": "src/utils/batch-packages.js",
"old_path": "src/utils/batch-packages.js"
}
] |
JavaScript
|
MIT License
|
lerna/lerna
|
refactor(PackageGraph): remove unused (and confusing) graph properties
| 1
|
refactor
|
PackageGraph
|
679,913
|
23.02.2018 21:58:01
| 0
|
214fe4d060f0930f27c940ac53bd180f4a7d1a93
|
feat(hiccup-dom-components): add gradient, group, path SVG funcs
add linearGradient/radialGradient
update number[] => ArrayLike<number>
|
[
{
"change_type": "MODIFY",
"diff": "import { SVG_NS } from \"@thi.ng/hiccup/api\";\n+export interface PathSegment extends Array<any> {\n+ [0]: string;\n+ [1]?: ArrayLike<number>[];\n+}\n+\nlet PRECISION = 2;\nexport const setPrecision = (n: number) => (PRECISION = n);\n-export const svgdoc = (attribs, ...body) => [\n+export const svgdoc = (attr, ...body) => [\n\"svg\",\n- Object.assign(attribs, { xmlns: SVG_NS }),\n+ Object.assign(attr, { xmlns: SVG_NS }),\n...body\n];\nexport const ff = (x: number) => x.toFixed(PRECISION);\n-export const point = (p: number[]) => ff(p[0]) + \",\" + ff(p[1]);\n+export const point = (p: ArrayLike<number>) => ff(p[0]) + \",\" + ff(p[1]);\nexport const defs = (...defs) => [\"defs\", ...defs];\n-export const circle = (p: number[], r = 1, attr?) =>\n+export const group = (attr, ...body) => [\"g\", attr, ...body];\n+\n+export const circle = (p: ArrayLike<number>, r = 1, attr?) =>\n[\n\"circle\",\nObject.assign({\n@@ -25,7 +32,7 @@ export const circle = (p: number[], r = 1, attr?) =>\n}, attr)\n];\n-export const rect = (p: number[], width = 1, height = 1, attr?) =>\n+export const rect = (p: ArrayLike<number>, width = 1, height = 1, attr?) =>\n[\n\"rect\",\nObject.assign({\n@@ -36,19 +43,28 @@ export const rect = (p: number[], width = 1, height = 1, attr?) =>\n}, attr)\n];\n-export const polyline = (points: number[][], attr?) =>\n+export const polyline = (points: ArrayLike<number>[], attr?) =>\n[\n\"polyline\",\nObject.assign({ points: points.map(point).join(\" \") }, attr)\n];\n-export const polygon = (points: number[][], attr?) =>\n+export const polygon = (points: ArrayLike<number>[], attr?) =>\n[\n\"polygon\",\nObject.assign({ points: points.map(point).join(\" \") }, attr)\n];\n-export const text = (body: string, p: number[], attr?) =>\n+export const path = (segments: PathSegment[], attr?) =>\n+ [\n+ \"path\",\n+ {\n+ ...attr,\n+ d: segments.map((seg) => seg[0] + seg[1].map(point).join(\",\")),\n+ }\n+ ];\n+\n+export const text = (body: string, p: ArrayLike<number>, attr?) =>\n[\"text\",\n{\nx: ff(p[0]),\n@@ -57,3 +73,25 @@ export const text = (body: string, p: number[], attr?) =>\n},\nbody\n];\n+\n+const gradient = (type: string, attribs: any, stops: [any, string][]) =>\n+ [type,\n+ attribs,\n+ ...stops.map(\n+ ([offset, col]) => [\"stop\", { offset, \"stop-color\": col }]\n+ )\n+ ];\n+\n+export const linearGradient = (id: string, x1, y1, x2, y2, stops: [any, string][]) =>\n+ gradient(\n+ \"linearGradient\",\n+ { id, x1, y1, x2, y2 },\n+ stops\n+ );\n+\n+export const radialGradient = (id: string, cx, cy, r, stops: [any, string][]) =>\n+ gradient(\n+ \"radialGradient\",\n+ { id, cx, cy, r },\n+ stops\n+ );\n",
"new_path": "packages/hiccup-dom-components/src/svg.ts",
"old_path": "packages/hiccup-dom-components/src/svg.ts"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
feat(hiccup-dom-components): add gradient, group, path SVG funcs
- add linearGradient/radialGradient
- update number[] => ArrayLike<number>
| 1
|
feat
|
hiccup-dom-components
|
679,913
|
24.02.2018 14:43:35
| 0
|
44f33dfc1fe30e309e460ab357bed609140caaee
|
feat(hiccup): add support for more SVG tags (66 total)
|
[
{
"change_type": "MODIFY",
"diff": "@@ -3,7 +3,7 @@ export const SVG_NS = \"http://www.w3.org/2000/svg\";\nexport const TAG_REGEXP = /^([^\\s\\.#]+)(?:#([^\\s\\.#]+))?(?:\\.([^\\s#]+))?$/;\n// tslint:disable-next-line\n-export const SVG_TAGS = \"svg circle clipPath defs ellipse g line linearGradient mask path pattern polygon polyline radialGradient rect stop symbol text\"\n+export const SVG_TAGS = \"animate animateColor animateMotion animateTransform circle clipPath color-profile defs desc discard ellipse feBlend feColorMatrix feComponentTransfer feComposite feConvolveMatrix feDiffuseLighting feDisplacementMap feDistantLight feDropShadow feFlood feFuncA feFuncB feFuncG feFuncR feGaussianBlur feImage feMerge feMergeNode feMorphology feOffset fePointLight feSpecularLighting feSpotLight feTile feTurbulence filter font foreignObject g image line linearGradient marker mask metadata mpath path pattern polygon polyline radialGradient rect set stop style svg switch symbol text textPath title tref tspan use view\"\n.split(\" \")\n.reduce((acc, x) => (acc[x] = 1, acc), {});\n",
"new_path": "packages/hiccup/src/api.ts",
"old_path": "packages/hiccup/src/api.ts"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
feat(hiccup): add support for more SVG tags (66 total)
| 1
|
feat
|
hiccup
|
679,913
|
26.02.2018 03:03:30
| 0
|
9824844aa2701da25003c65111c723c7f976de33
|
feat(transducers): add keys()/vals() iterators, refactor pairs()
|
[
{
"change_type": "MODIFY",
"diff": "@@ -106,6 +106,7 @@ export * from \"./iter/choices\";\nexport * from \"./iter/concat\";\nexport * from \"./iter/cycle\";\nexport * from \"./iter/iterate\";\n+export * from \"./iter/keys\";\nexport * from \"./iter/pairs\";\nexport * from \"./iter/range\";\nexport * from \"./iter/range2d\";\n@@ -114,3 +115,4 @@ export * from \"./iter/repeat\";\nexport * from \"./iter/repeatedly\";\nexport * from \"./iter/reverse\";\nexport * from \"./iter/tuples\";\n+export * from \"./iter/vals\";\n",
"new_path": "packages/transducers/src/index.ts",
"old_path": "packages/transducers/src/index.ts"
},
{
"change_type": "ADD",
"diff": "+export function* keys(x: any): IterableIterator<string> {\n+ for (let k in x) {\n+ if (x.hasOwnProperty(k)) {\n+ yield k;\n+ }\n+ }\n+}\n",
"new_path": "packages/transducers/src/iter/keys.ts",
"old_path": null
},
{
"change_type": "MODIFY",
"diff": "-export function* pairs(x: any): IterableIterator<[string, any]> {\n- for (let k of Object.keys(x)) {\n+import { IObjectOf } from \"@thi.ng/api/api\";\n+\n+export function* pairs<T>(x: IObjectOf<T>): IterableIterator<[string, T]> {\n+ for (let k in x) {\n+ if (x.hasOwnProperty(k)) {\nyield [k, x[k]];\n}\n}\n+}\n",
"new_path": "packages/transducers/src/iter/pairs.ts",
"old_path": "packages/transducers/src/iter/pairs.ts"
},
{
"change_type": "ADD",
"diff": "+import { IObjectOf } from \"@thi.ng/api/api\";\n+\n+export function* vals<T>(x: IObjectOf<T>): IterableIterator<T> {\n+ for (let k in x) {\n+ if (x.hasOwnProperty(k)) {\n+ yield x[k];\n+ }\n+ }\n+}\n",
"new_path": "packages/transducers/src/iter/vals.ts",
"old_path": null
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
feat(transducers): add keys()/vals() iterators, refactor pairs()
| 1
|
feat
|
transducers
|
679,913
|
26.02.2018 03:04:04
| 0
|
45d63858b4c9a2edf5be71736eb7350ecda6ee33
|
refactor(hiccup): sort void tags
|
[
{
"change_type": "MODIFY",
"diff": "export const SVG_NS = \"http://www.w3.org/2000/svg\";\n+export const XLINK_NS = \"http://www.w3.org/1999/xlink\";\nexport const TAG_REGEXP = /^([^\\s\\.#]+)(?:#([^\\s\\.#]+))?(?:\\.([^\\s#]+))?$/;\n@@ -8,7 +9,7 @@ export const SVG_TAGS = \"animate animateColor animateMotion animateTransform cir\n.reduce((acc, x) => (acc[x] = 1, acc), {});\n// tslint:disable-next-line\n-export const VOID_TAGS = \"area base br col command embed hr img input keygen link meta param source track wbr circle ellipse line path polygon polyline rect stop\"\n+export const VOID_TAGS = \"area base br circle col command ellipse embed hr img input keygen line link meta param path polygon polyline rect source stop track use wbr\"\n.split(\" \")\n.reduce((acc, x) => (acc[x] = 1, acc), {});\n",
"new_path": "packages/hiccup/src/api.ts",
"old_path": "packages/hiccup/src/api.ts"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
refactor(hiccup): sort void tags
| 1
|
refactor
|
hiccup
|
791,690
|
26.02.2018 13:51:37
| 28,800
|
d0d83efd263b3265a8d28b83e31ad58477e186f2
|
core(render-blocking): handle amp-style stylesheets
|
[
{
"change_type": "MODIFY",
"diff": "<link rel=\"stylesheet\" href=\"./dbw_tester.css?scriptActivated&delay=200\"> <!-- PASS: initiator is script -->\n</template>\n+<!-- AMP-style stylesheet script -->\n+<!-- based on https://github.com/ampproject/amphtml/blob/38f1bdf4cc385f9a25cf2979abf215952880b876/src/font-stylesheet-timeout.js#L87-L103 -->\n+<script type=\"text/javascript\">\n+ setTimeout(() => {\n+ const stylesheet = document.getElementById('amp-style-link');\n+ stylesheet.media = 'not-matching';\n+ stylesheet.onload = () => stylesheet.media = 'screen';\n+ stylesheet.parentNode.insertBefore(stylesheet, stylesheet.nextSibling);\n+ }, 1500);\n+</script>\n+\n<!-- Note: these will only fail when using the static-server.js, which supports the ?delay=true param.\nIf you're using your own server, the resource will load instantly and the\nstylesheets will be ignored for being below the threshold. -->\n<link rel=\"import\" href=\"./dbw_partial_a.html?delay=200\"> <!-- FAIL -->\n<link rel=\"import\" href=\"./dbw_partial_b.html?delay=200&isasync\" async> <!-- PASS -->\n+<!-- FAIL: render blocking but capped at 500ms -->\n+<link id=\"amp-style-link\" rel=\"stylesheet\" href=\"./dbw_tester.css?delay=3000&capped\">\n<!-- PASS: preload that's activated later does not block rendering. -->\n<link rel=\"preload\" href=\"./dbw_tester.css?delay=2000&async=true\" as=\"style\" onload=\"this.rel = 'stylesheet'\">\n-<!-- PASS: async stylesheet that loads after FCP -->\n+<!-- PASS: async stylesheet -->\n<link rel=\"stylesheet\" href=\"./dbw_tester.css?delay=3000&async=true\" disabled onload=\"this.disabled = false\">\n<!-- FAIL: block rendering -->\n</style>\n</template>\n+<!-- Force FCP to be ~5s out, necessary to make sure our render-blocking audits still work -->\n+<!-- even when other forces were responsible for delaying the render. -->\n+<script src=\"./fcp-delayer.js?delay=5000\"></script>\n</head>\n<body>\n",
"new_path": "lighthouse-cli/test/fixtures/dobetterweb/dbw_tester.html",
"old_path": "lighthouse-cli/test/fixtures/dobetterweb/dbw_tester.html"
},
{
"change_type": "MODIFY",
"diff": "@@ -15,12 +15,10 @@ module.exports = [\naudits: {\n'errors-in-console': {\nscore: false,\n- // TODO: should be 5 after m64 (see note in dbw_tester.js)\n- rawValue: 4,\n- displayValue: '4',\n+ rawValue: '>3',\ndetails: {\nitems: {\n- length: 4,\n+ length: '>3',\n},\n},\n},\n@@ -37,13 +35,13 @@ module.exports = [\nextendedInfo: {\nvalue: {\nresults: {\n- length: 16,\n+ length: 18,\n},\n},\n},\ndetails: {\nitems: {\n- length: 16,\n+ length: 18,\n},\n},\n},\n@@ -71,16 +69,17 @@ module.exports = [\n},\n'link-blocking-first-paint': {\nscore: 0,\n+ rawValue: '<3000',\nextendedInfo: {\nvalue: {\nresults: {\n- length: 4,\n+ length: 5,\n},\n},\n},\ndetails: {\nitems: {\n- length: 4,\n+ length: 5,\n},\n},\n},\n@@ -128,18 +127,17 @@ module.exports = [\nscore: false,\n},\n'script-blocking-first-paint': {\n- // TODO: re-enable score assertions when we have more flexible assertions like < 100\n- // score: 90,\n+ score: '<100',\nextendedInfo: {\nvalue: {\nresults: {\n- length: 1,\n+ length: 2,\n},\n},\n},\ndetails: {\nitems: {\n- length: 1,\n+ length: 2,\n},\n},\n},\n",
"new_path": "lighthouse-cli/test/smokehouse/dobetterweb/dbw-expectations.js",
"old_path": "lighthouse-cli/test/smokehouse/dobetterweb/dbw-expectations.js"
},
{
"change_type": "MODIFY",
"diff": "const Gatherer = require('../gatherer');\n-/* global document,window */\n+/* global document,window,HTMLLinkElement */\n+\n+function installMediaListener() {\n+ window.___linkMediaChanges = [];\n+ Object.defineProperty(HTMLLinkElement.prototype, 'media', {\n+ set: function(val) {\n+ window.___linkMediaChanges.push({\n+ href: this.href,\n+ media: val,\n+ msSinceHTMLEnd: Date.now() - window.performance.timing.responseEnd,\n+ matches: window.matchMedia(val).matches,\n+ });\n+\n+ return this.setAttribute('media', val);\n+ },\n+ });\n+}\n/* istanbul ignore next */\nfunction collectTagsThatBlockFirstPaint() {\n@@ -30,17 +46,19 @@ function collectTagsThatBlockFirstPaint() {\nconst tagList = [...document.querySelectorAll('link, head script[src]')]\n.filter(tag => {\nif (tag.tagName === 'SCRIPT') {\n- return !tag.hasAttribute('async') &&\n+ return (\n+ !tag.hasAttribute('async') &&\n!tag.hasAttribute('defer') &&\n!/^data:/.test(tag.src) &&\n- tag.getAttribute('type') !== 'module';\n+ tag.getAttribute('type') !== 'module'\n+ );\n}\n// Filter stylesheet/HTML imports that block rendering.\n// https://www.igvita.com/2012/06/14/debunking-responsive-css-performance-myths/\n// https://www.w3.org/TR/html-imports/#dfn-import-async-attribute\n- const blockingStylesheet = (tag.rel === 'stylesheet' &&\n- window.matchMedia(tag.media).matches && !tag.disabled);\n+ const blockingStylesheet =\n+ tag.rel === 'stylesheet' && window.matchMedia(tag.media).matches && !tag.disabled;\nconst blockingImport = tag.rel === 'import' && !tag.hasAttribute('async');\nreturn blockingStylesheet || blockingImport;\n})\n@@ -53,6 +71,7 @@ function collectTagsThatBlockFirstPaint() {\nrel: tag.rel,\nmedia: tag.media,\ndisabled: tag.disabled,\n+ mediaChanges: window.___linkMediaChanges.filter(item => item.href === tag.href),\n};\n});\nresolve(tagList);\n@@ -99,17 +118,33 @@ class TagsBlockingFirstPaint extends Gatherer {\nstatic findBlockingTags(driver, networkRecords) {\nconst scriptSrc = `(${collectTagsThatBlockFirstPaint.toString()}())`;\n+ const firstRequestEndTime = networkRecords.reduce(\n+ (min, record) => Math.min(min, record._endTime),\n+ Infinity\n+ );\nreturn driver.evaluateAsync(scriptSrc).then(tags => {\nconst requests = filteredAndIndexedByUrl(networkRecords);\nreturn tags.reduce((prev, tag) => {\nconst request = requests[tag.url];\nif (request && !request.isLinkPreload) {\n+ // Even if the request was initially blocking or appeared to be blocking once the\n+ // page was loaded, the media attribute could have been changed during load, capping the\n+ // amount of time it was render blocking. 
See https://github.com/GoogleChrome/lighthouse/issues/2832.\n+ const timesResourceBecameNonBlocking = (tag.mediaChanges || [])\n+ .filter(change => !change.matches)\n+ .map(change => change.msSinceHTMLEnd);\n+ const earliestNonBlockingTime = Math.min(...timesResourceBecameNonBlocking);\n+ const lastTimeResourceWasBlocking = Math.max(\n+ request.startTime,\n+ firstRequestEndTime + earliestNonBlockingTime / 1000\n+ );\n+\nprev.push({\ntag,\ntransferSize: request.transferSize || 0,\nstartTime: request.startTime,\n- endTime: request.endTime,\n+ endTime: Math.min(request.endTime, lastTimeResourceWasBlocking),\n});\n// Prevent duplicates from showing up again\n@@ -122,12 +157,19 @@ class TagsBlockingFirstPaint extends Gatherer {\n}\n/**\n- * @param {!Object} options\n+ * @param {!Object} context\n+ */\n+ beforePass(context) {\n+ return context.driver.evaluteScriptOnNewDocument(`(${installMediaListener.toString()})()`);\n+ }\n+\n+ /**\n+ * @param {!Object} context\n* @param {{networkRecords: !Array<!NetworkRecord>}} tracingData\n* @return {!Array<{tag: string, transferSize: number, startTime: number, endTime: number}>}\n*/\n- afterPass(options, tracingData) {\n- return TagsBlockingFirstPaint.findBlockingTags(options.driver, tracingData.networkRecords);\n+ afterPass(context, tracingData) {\n+ return TagsBlockingFirstPaint.findBlockingTags(context.driver, tracingData.networkRecords);\n}\n}\n",
"new_path": "lighthouse-core/gather/gatherers/dobetterweb/tags-blocking-first-paint.js",
"old_path": "lighthouse-core/gather/gatherers/dobetterweb/tags-blocking-first-paint.js"
}
] |
JavaScript
|
Apache License 2.0
|
googlechrome/lighthouse
|
core(render-blocking): handle amp-style stylesheets (#4555)
| 1
|
core
|
render-blocking
|
815,745
|
26.02.2018 22:09:24
| -7,200
|
a85c53f38f728040ff8b6a2fd017139d3b4efb37
|
fix: hide clear button on disable
|
[
{
"change_type": "MODIFY",
"diff": "@@ -1447,6 +1447,12 @@ describe('NgSelectComponent', function () {\ntickAndDetectChanges(fixture);\nexpect(fixture.componentInstance.select.isOpen).toBe(false);\n}));\n+\n+ it('clear button should not appear if select is disabled', fakeAsync(() => {\n+ fixture.componentInstance.select.setDisabledState(true);\n+ const el = fixture.debugElement.query(By.css('.ng-clear-zone'));\n+ expect(el).toBeNull();\n+ }));\n});\ndescribe('Arrow icon click', () => {\n",
"new_path": "src/ng-select/ng-select.component.spec.ts",
"old_path": "src/ng-select/ng-select.component.spec.ts"
},
{
"change_type": "MODIFY",
"diff": "@@ -259,6 +259,7 @@ export class NgSelectComponent implements OnInit, OnDestroy, OnChanges, AfterVie\nsetDisabledState(isDisabled: boolean): void {\nthis.isDisabled = isDisabled;\n+ this.detectChanges();\n}\ntoggle() {\n",
"new_path": "src/ng-select/ng-select.component.ts",
"old_path": "src/ng-select/ng-select.component.ts"
}
] |
TypeScript
|
MIT License
|
ng-select/ng-select
|
fix: hide clear button on disable (#295)
| 1
|
fix
| null |
807,849
|
27.02.2018 12:19:44
| 28,800
|
43fd95434c952addd1a84cdf3e00987d72ae3e07
|
refactor(Command): run preparations inside Promise chain
|
[
{
"change_type": "MODIFY",
"diff": "@@ -270,7 +270,7 @@ class Command {\n}\nconst { rootPath, packageConfigs } = this.repository;\n- const { scope, ignore, registry, since } = this.options;\n+ const { scope, ignore } = this.options;\nif (scope) {\nlog.info(\"scope\", scope);\n@@ -280,25 +280,33 @@ class Command {\nlog.info(\"ignore\", ignore);\n}\n- if (registry) {\n- this.npmRegistry = registry;\n- }\n+ let chain = Promise.resolve();\n- this.packages = collectPackages({ rootPath, packageConfigs });\n- this.packageGraph = new PackageGraph(this.packages);\n- this.filteredPackages = filterPackages(this.packages, { scope, ignore });\n+ chain = chain.then(() => collectPackages({ rootPath, packageConfigs }));\n+ chain = chain.then(packages => {\n+ this.packages = packages;\n+ this.packageGraph = new PackageGraph(packages);\n+ this.filteredPackages = filterPackages(packages, { scope, ignore });\n+ });\n// collectUpdates requires that filteredPackages be present prior to checking for\n// updates. That's okay because it further filters based on what's already been filtered.\n- if (typeof since === \"string\") {\n- const updated = collectUpdates(this).map(({ pkg }) => pkg.name);\n+ if (typeof this.options.since === \"string\") {\n+ chain = chain.then(() => collectUpdates(this));\n+ chain = chain.then(updates => {\n+ const updated = new Set(updates.map(({ pkg }) => pkg.name));\n- this.filteredPackages = this.filteredPackages.filter(pkg => updated.indexOf(pkg.name) > -1);\n+ this.filteredPackages = this.filteredPackages.filter(pkg => updated.has(pkg.name));\n+ });\n}\nif (this.options.includeFilteredDependencies) {\n+ chain = chain.then(() => {\nthis.filteredPackages = this.packageGraph.addDependencies(this.filteredPackages);\n+ });\n}\n+\n+ return chain;\n}\nrunCommand() {\n",
"new_path": "src/Command.js",
"old_path": "src/Command.js"
}
] |
JavaScript
|
MIT License
|
lerna/lerna
|
refactor(Command): run preparations inside Promise chain
| 1
|
refactor
|
Command
|
679,913
|
27.02.2018 14:22:02
| 0
|
6e0dfa1d4d5eb64c5b9f053a04d594657791b9f4
|
feat(diff): update diffArray, generic types
avoid nested array in DiffLogEntry, now flat 3-tuple
add generics for ArrayDiff, DiffLogEntry, DiffKeyMap
remove DiffLogPair
minor optimization diffArray
BREAKING CHANGE: update DiffLogEntry structure
|
[
{
"change_type": "MODIFY",
"diff": "-export interface DiffLogPair extends Array<any> {\n- [0]: number;\n- [1]: any;\n-}\n+import { IObjectOf } from \"@thi.ng/api/api\";\n-export interface DiffLogEntry extends Array<any> {\n+export interface DiffLogEntry<T> extends Array<any> {\n[0]: number;\n- [1]: DiffLogPair;\n+ [1]: number;\n+ [2]: T;\n}\n-export interface DiffKeyMap {\n- [id: number]: any;\n-}\n+export type DiffKeyMap<T> = IObjectOf<T>;\n-export interface ArrayDiff {\n+export interface ArrayDiff<T> {\ndistance: number;\n- adds: DiffKeyMap;\n- dels: DiffKeyMap;\n- const: DiffKeyMap;\n- linear: DiffLogEntry[];\n+ adds: DiffKeyMap<T>;\n+ dels: DiffKeyMap<T>;\n+ const: DiffKeyMap<T>;\n+ linear: DiffLogEntry<T>[];\n}\nexport interface ObjectDiff {\n",
"new_path": "packages/diff/src/api.ts",
"old_path": "packages/diff/src/api.ts"
},
{
"change_type": "MODIFY",
"diff": "-import { equiv } from \"@thi.ng/api/equiv\";\n+import { equiv as _equiv } from \"@thi.ng/api/equiv\";\nimport { ArrayDiff } from \"./api\";\n@@ -6,14 +6,14 @@ import { ArrayDiff } from \"./api\";\n* Based on \"An O(NP) Sequence Comparison Algorithm\"\"\n* by Wu, Manber, Myers and Miller\n*\n- * http://www.itu.dk/stud/speciale/bepjea/xwebtex/litt/an-onp-sequence-comparison-algorithm.pdf\n- * https://github.com/cubicdaiya/onp\n+ * - http://www.itu.dk/stud/speciale/bepjea/xwebtex/litt/an-onp-sequence-comparison-algorithm.pdf\n+ * - https://github.com/cubicdaiya/onp\n*\n* Various optimizations, fixes & refactorings.\n- * Uses `equiv` for equality checks.\n+ * By default uses `@thi.ng/api/equiv` for equality checks.\n*/\n-export function diffArray(_a, _b) {\n- const state = <ArrayDiff>{\n+export function diffArray<T>(_a: T[], _b: T[], equiv = _equiv) {\n+ const state = <ArrayDiff<T>>{\ndistance: 0,\nadds: {},\ndels: {},\n@@ -26,6 +26,7 @@ export function diffArray(_a, _b) {\nconst reverse = _a.length >= _b.length;\nconst adds = state[reverse ? \"dels\" : \"adds\"];\nconst dels = state[reverse ? \"adds\" : \"dels\"];\n+ const linear = state.linear;\nconst aID = reverse ? -1 : 1;\nconst dID = reverse ? 1 : -1;\nlet a, b, na, nb;\n@@ -51,8 +52,14 @@ export function diffArray(_a, _b) {\nfunction snake(k, p, pp) {\nconst koff = k + offset;\n- const r = path[koff + ((p > pp) ? -1 : 1)];\n- let y = p > pp ? p : pp;\n+ let r, y;\n+ if (p > pp) {\n+ r = path[koff - 1];\n+ y = p;\n+ } else {\n+ r = path[koff + 1];\n+ y = pp;\n+ }\nlet x = y - k;\nwhile (x < na && y < nb && equiv(a[x], b[y])) {\nx++;\n@@ -89,15 +96,15 @@ export function diffArray(_a, _b) {\nconst d = e[1] - e[0];\nif (d > py - px) {\nadds[py] = v = b[py];\n- state.linear.push([aID, [py, v]]);\n+ linear.push([aID, py, v]);\npy++;\n} else if (d < py - px) {\ndels[px] = v = a[px];\n- state.linear.push([dID, [px, v]]);\n+ linear.push([dID, px, v]);\npx++;\n} else {\nstate.const[px] = v = a[px];\n- state.linear.push([0, [px, v]]);\n+ linear.push([0, px, v]);\npx++;\npy++;\n}\n",
"new_path": "packages/diff/src/array.ts",
"old_path": "packages/diff/src/array.ts"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
feat(diff): update diffArray, generic types
- avoid nested array in DiffLogEntry, now flat 3-tuple
- add generics for ArrayDiff, DiffLogEntry, DiffKeyMap
- remove DiffLogPair
- minor optimization diffArray
BREAKING CHANGE: update DiffLogEntry structure
| 1
|
feat
|
diff
|
807,849
|
27.02.2018 14:26:36
| 28,800
|
3b6d6a788e42916d2ff91d4f61e2345476887152
|
refactor(collect-packages): sync -> async
|
[
{
"change_type": "MODIFY",
"diff": "const globby = require(\"globby\");\nconst loadJsonFile = require(\"load-json-file\");\nconst path = require(\"path\");\n+const pMap = require(\"p-map\");\nconst Package = require(\"../Package\");\nconst ValidationError = require(\"./validation-error\");\n@@ -10,7 +11,6 @@ const ValidationError = require(\"./validation-error\");\nmodule.exports = collectPackages;\nfunction collectPackages({ packageConfigs, rootPath }) {\n- const packages = [];\nconst globOpts = {\ncwd: rootPath,\nstrict: true,\n@@ -35,18 +35,24 @@ function collectPackages({ packageConfigs, rootPath }) {\n];\n}\n- packageConfigs.forEach(globPath => {\n- globby.sync(path.join(globPath, \"package.json\"), globOpts).forEach(globResult => {\n+ return pMap(\n+ packageConfigs,\n+ globPath =>\n+ globby(path.join(globPath, \"package.json\"), globOpts).then(\n+ globResults =>\n+ pMap(globResults, globResult => {\n// https://github.com/isaacs/node-glob/blob/master/common.js#L104\n// glob always returns \"\\\\\" as \"/\" in windows, so everyone\n// gets normalized because we can't have nice things.\nconst packageConfigPath = path.normalize(globResult);\nconst packageDir = path.dirname(packageConfigPath);\n- const packageJson = loadJsonFile.sync(packageConfigPath);\n- packages.push(new Package(packageJson, packageDir, rootPath));\n- });\n- });\n-\n- return packages;\n+ return loadJsonFile(packageConfigPath).then(\n+ packageJson => new Package(packageJson, packageDir, rootPath)\n+ );\n+ }),\n+ { concurrency: 50 }\n+ ),\n+ { concurrency: 4 }\n+ ).then(results => results.reduce((packages, result) => packages.concat(result), []));\n}\n",
"new_path": "src/utils/collect-packages.js",
"old_path": "src/utils/collect-packages.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -82,7 +82,7 @@ describe(\"CleanCommand\", () => {\nconst testDir = await initFixture(\"CleanCommand/basic\");\n- FileSystemUtilities.rimraf.mockRejectedValueOnce(new Error(\"whoops\"));\n+ FileSystemUtilities.rimraf.mockImplementationOnce(() => Promise.reject(new Error(\"whoops\")));\ntry {\nawait lernaClean(testDir)();\n",
"new_path": "test/CleanCommand.js",
"old_path": "test/CleanCommand.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -88,10 +88,12 @@ describe(\"DiffCommand\", () => {\nit(\"should error when git diff exits non-zero\", async () => {\nconst testDir = await initFixture(\"DiffCommand/basic\");\n+ ChildProcessUtilities.spawn.mockImplementationOnce(() => {\nconst nonZero = new Error(\"An actual non-zero, not git diff pager SIGPIPE\");\nnonZero.code = 1;\n- ChildProcessUtilities.spawn.mockRejectedValueOnce(nonZero);\n+ throw nonZero;\n+ });\ntry {\nawait lernaDiff(testDir)(\"package-1\");\n",
"new_path": "test/DiffCommand.js",
"old_path": "test/DiffCommand.js"
},
{
"change_type": "MODIFY",
"diff": "@@ -29,7 +29,8 @@ const execInPackagesStreaming = testDir =>\n}, []);\ndescribe(\"ExecCommand\", () => {\n- ChildProcessUtilities.spawn.mockResolvedValue();\n+ // TODO: it's very suspicious that mockResolvedValue() doesn't work here\n+ ChildProcessUtilities.spawn.mockImplementation(() => Promise.resolve());\nChildProcessUtilities.spawnStreaming.mockResolvedValue();\ndescribe(\"in a basic repo\", () => {\n@@ -50,11 +51,13 @@ describe(\"ExecCommand\", () => {\nconst testDir = await initFixture(\"ExecCommand/basic\");\n+ ChildProcessUtilities.spawn.mockImplementationOnce(() => {\nconst boom = new Error(\"execa error\");\nboom.code = 1;\nboom.cmd = \"boom\";\n- ChildProcessUtilities.spawn.mockRejectedValueOnce(boom);\n+ throw boom;\n+ });\ntry {\nawait lernaExec(testDir)(\"boom\");\n",
"new_path": "test/ExecCommand.js",
"old_path": "test/ExecCommand.js"
}
] |
JavaScript
|
MIT License
|
lerna/lerna
|
refactor(collect-packages): sync -> async
| 1
|
refactor
|
collect-packages
|
679,913
|
27.02.2018 14:31:46
| 0
|
f5b66753d33a95d23ce894e337e046c1a5d4407b
|
feat(hiccup-dom): fix update normalizeTree/normalizeElement
update diffArray handling
extract normalize fns to own file
support using arrays of hiccup elements (in child position only, never as root)
|
[
{
"change_type": "MODIFY",
"diff": "import { isArray } from \"@thi.ng/checks/is-array\";\n-import { isFunction } from \"@thi.ng/checks/is-function\";\n-import { isIterable } from \"@thi.ng/checks/is-iterable\";\nimport { isString } from \"@thi.ng/checks/is-string\";\nimport * as diff from \"@thi.ng/diff\";\n-import { TAG_REGEXP } from \"@thi.ng/hiccup/api\";\n-import { DEBUG } from \"./api\";\n+// import { DEBUG } from \"./api\";\nimport { createDOM, removeAttribs, setAttrib, removeChild } from \"./dom\";\n+const diffArray = diff.diffArray;\n+const diffObject = diff.diffObject;\n+\nexport function diffElement(parent: Element, prev: any, curr: any) {\n_diffElement(parent, prev, curr, 0);\n}\nfunction _diffElement(parent: Element, prev: any, curr: any, child: number) {\n- const delta = diff.diffArray(prev, curr);\n- const edits = delta.linear;\n- const el = parent.children[child];\n+ const delta = diffArray(prev, curr);\nif (delta.distance === 0) {\nreturn;\n}\n+ const edits = delta.linear;\n+ const el = parent.children[child];\nif (edits[0][0] !== 0 || prev[1].key !== curr[1].key || hasChangedEvents(prev[1], curr[1])) {\n- DEBUG && console.log(\"replace:\", prev, curr);\n+ // DEBUG && console.log(\"replace:\", prev, curr);\nreleaseDeep(prev);\nremoveChild(parent, child);\ncreateDOM(parent, curr, undefined, child);\n@@ -30,9 +30,8 @@ function _diffElement(parent: Element, prev: any, curr: any, child: number) {\nreleaseDeep(prev);\n}\nif (curr.__init && prev.__init !== curr.__init) {\n- DEBUG && console.log(\"call __init\", curr);\n- const args = [el, ...(curr.__args)]; // Safari https://bugs.webkit.org/show_bug.cgi?format=multiple&id=162003\n- curr.__init.apply(curr, args);\n+ // DEBUG && console.log(\"call __init\", curr);\n+ curr.__init.apply(curr, [el, ...(curr.__args)]);\n}\nif (edits[1][0] !== 0) {\ndiffAttributes(el, prev[1], curr[1]);\n@@ -46,7 +45,7 @@ function _diffElement(parent: Element, prev: any, curr: any, child: number) {\noffsets[i] = i - 2;\n}\nfor (i = 2; i < n; i++) {\n- const e = edits[i], status = e[0], idx = e[1][0], val = e[1][1];\n+ const e = edits[i], status = e[0], idx = e[1], val = e[2];\n// DEBUG && console.log(`edit: o:[${offsets.toString()}] i:${idx} s:${status}`, val);\nif (status === -1) {\nif (isArray(val)) {\n@@ -97,99 +96,6 @@ function releaseDeep(tag: any) {\n}\n}\n-function normalizeElement(spec: any[]) {\n- let tag = spec[0];\n- let content = spec.slice(1), c;\n- let match, id, clazz;\n- const attribs: any = {};\n- if (!isString(tag) || !(match = TAG_REGEXP.exec(tag))) {\n- throw new Error(`${tag} is not a valid tag name`);\n- }\n- tag = match[1];\n- id = match[2];\n- clazz = match[3];\n- if (id) {\n- attribs.id = id;\n- }\n- if (clazz) {\n- attribs.class = clazz.replace(/\\./g, \" \");\n- }\n- c = content[0];\n- if (c != null && c.constructor === Object) {\n- Object.assign(attribs, c);\n- content.shift();\n- }\n- return [tag, attribs, content.length > 0 ? 
content : undefined];\n-}\n-\n-const NO_SPANS = { text: 1, textarea: 1 };\n-\n-export function normalizeTree(el: any, path = [0], keys = true, span = true) {\n- if (el == null) {\n- return;\n- }\n- if (isArray(el)) {\n- if (el.length === 0) {\n- return;\n- }\n- const tag = el[0];\n- let norm;\n- if (isFunction(tag)) {\n- return normalizeTree(tag.apply(null, el.slice(1)), path.slice(), keys, span);\n- }\n- if (!isString(tag)) {\n- const args = el.slice(1);\n- norm = normalizeTree(tag.render.apply(null, args), path.slice(), keys, span);\n- if (norm !== undefined) {\n- if (keys && norm[1].key === undefined) {\n- norm[1].key = path.join(\"-\");\n- }\n- norm.__init = tag.init;\n- norm.__release = tag.release;\n- norm.__args = args;\n- }\n- return norm;\n- }\n- norm = normalizeElement(el);\n- if (keys && norm[1].key === undefined) {\n- norm[1].key = path.join(\"-\");\n- }\n- if (norm[2]) {\n- const children = norm[2].slice();\n- const n = children.length;\n- norm.length = 2;\n- span = span && !NO_SPANS[norm[0]];\n- for (let i = 0, j = 2, k = 0; i < n; i++) {\n- let el = children[i];\n- if (el != null) {\n- if (!isArray(el) && !isString(el) && isIterable(el)) {\n- for (let c of el) {\n- c = normalizeTree(c, [...path, k], keys, span);\n- if (c !== undefined) {\n- norm[j++] = c;\n- }\n- k++;\n- }\n- } else {\n- el = normalizeTree(el, [...path, k], keys, span);\n- if (el !== undefined) {\n- norm[j++] = el;\n- }\n- k++;\n- }\n- }\n- }\n- }\n- return norm;\n- }\n- if (isFunction(el)) {\n- return normalizeTree(el(), path, keys, span);\n- }\n- return span ?\n- [\"span\", keys ? { key: path.join(\"-\") } : {}, el.toString()] :\n- el.toString();\n-}\n-\nfunction hasChangedEvents(prev: any, curr: any) {\nfor (let k in curr) {\nif (k.indexOf(\"on\") === 0 && prev[k] !== curr[k]) {\n@@ -200,29 +106,28 @@ function hasChangedEvents(prev: any, curr: any) {\n}\nfunction diffAttributes(el: Element, prev: any, curr: any) {\n- const delta = diff.diffObject(prev, curr);\n- let i, a, attribs;\n- DEBUG && console.log(\"diff attribs:\", delta);\n+ let i, e, edits;\n+ const delta = diffObject(prev, curr);\nremoveAttribs(el, delta.dels);\n- for (attribs = delta.edits, i = attribs.length - 1; i >= 0; i--) {\n- a = attribs[i];\n- setAttrib(el, a[0], a[1]);\n+ for (edits = delta.edits, i = edits.length - 1; i >= 0; i--) {\n+ e = edits[i];\n+ setAttrib(el, e[0], e[1]);\n}\n- for (attribs = delta.adds, i = attribs.length - 1; i >= 0; i--) {\n- a = attribs[i];\n- setAttrib(el, a, curr[a]);\n+ for (edits = delta.adds, i = edits.length - 1; i >= 0; i--) {\n+ e = edits[i];\n+ setAttrib(el, e, curr[e]);\n}\n}\n-function extractEquivElements(edits: diff.DiffLogEntry[]) {\n- const equiv = {};\n+function extractEquivElements(edits: diff.DiffLogEntry<any>[]) {\nlet k;\n+ const equiv = {};\nfor (let i = edits.length - 1; i >= 0; i--) {\nconst e = edits[i];\n- const v = e[1][1];\n- if (isArray(v) && (k = v[1].key)) {\n+ const v = e[2];\n+ if (isArray(v) && (k = v[1].key) !== undefined) {\nequiv[k] = equiv[k] || [, ,];\n- equiv[k][e[0] + 1] = e[1][0];\n+ equiv[k][e[0] + 1] = e[1];\n}\n}\nreturn equiv;\n",
"new_path": "packages/hiccup-dom/src/diff.ts",
"old_path": "packages/hiccup-dom/src/diff.ts"
},
{
"change_type": "MODIFY",
"diff": "export * from \"./api\";\nexport * from \"./diff\";\nexport * from \"./dom\";\n+export * from \"./normalize\";\nexport * from \"./start\";\n",
"new_path": "packages/hiccup-dom/src/index.ts",
"old_path": "packages/hiccup-dom/src/index.ts"
},
{
"change_type": "ADD",
"diff": "+import { isArray } from \"@thi.ng/checks/is-array\";\n+import { implementsFunction } from \"@thi.ng/checks/implements-function\";\n+import { isFunction } from \"@thi.ng/checks/is-function\";\n+import { isIterable } from \"@thi.ng/checks/is-iterable\";\n+import { isPlainObject } from \"@thi.ng/checks/is-plain-object\";\n+import { isString } from \"@thi.ng/checks/is-string\";\n+import { TAG_REGEXP } from \"@thi.ng/hiccup/api\";\n+\n+export function normalizeElement(spec: any[], keys: boolean) {\n+ let match, id, clazz, attribs;\n+ let tag = spec[0];\n+ let hasAttribs = isPlainObject(spec[1]);\n+ if (!isString(tag) || !(match = TAG_REGEXP.exec(tag))) {\n+ throw new Error(`${tag} is not a valid tag name`);\n+ }\n+ // return orig if already normalized and satifies key requirement\n+ if (tag === match[1] && hasAttribs && (!keys || spec[1].key)) {\n+ return spec;\n+ }\n+ attribs = hasAttribs ? { ...spec[1] } : {};\n+ id = match[2];\n+ clazz = match[3];\n+ if (id) {\n+ attribs.id = id;\n+ }\n+ if (clazz) {\n+ clazz = clazz.replace(/\\./g, \" \");\n+ if (attribs.class) {\n+ attribs.class += \" \" + clazz;\n+ } else {\n+ attribs.class = clazz;\n+ }\n+ }\n+ return [match[1], attribs, ...spec.slice(hasAttribs ? 2 : 1)];\n+}\n+\n+const NO_SPANS = {\n+ option: 1,\n+ text: 1,\n+ textarea: 1,\n+};\n+\n+export function normalizeTree(el: any, path = [0], keys = true, span = true) {\n+ if (el == null) {\n+ return;\n+ }\n+ if (isArray(el)) {\n+ if (el.length === 0) {\n+ return;\n+ }\n+ const tag = el[0];\n+ let norm;\n+ // use result of function call & pass remaining array elements as args\n+ if (isFunction(tag)) {\n+ return normalizeTree(tag.apply(null, el.slice(1)), path.slice(), keys, span);\n+ }\n+ // component object w/ life cycle methods (render() is the only required hook)\n+ if (implementsFunction(tag, \"render\")) {\n+ const args = el.slice(1);\n+ norm = normalizeTree(tag.render.apply(null, args), path.slice(), keys, span);\n+ if (norm !== undefined) {\n+ if (keys && norm[1].key === undefined) {\n+ norm[1].key = path.join(\"-\");\n+ }\n+ norm.__init = tag.init;\n+ norm.__release = tag.release;\n+ norm.__args = args;\n+ }\n+ return norm;\n+ }\n+ norm = normalizeElement(el, keys);\n+ if (keys && norm[1].key === undefined) {\n+ norm[1].key = path.join(\"-\");\n+ }\n+ if (norm.length > 2) {\n+ const tag = norm[0];\n+ const res = [tag, norm[1]];\n+ span = span && !NO_SPANS[tag];\n+ for (let i = 2, j = 2, k = 0, n = norm.length; i < n; i++) {\n+ let el = norm[i];\n+ if (el != null) {\n+ const isarray = isArray(el);\n+ if ((isarray && isArray(el[0])) || (!isarray && !isString(el) && isIterable(el))) {\n+ for (let c of el) {\n+ c = normalizeTree(c, [...path, k], keys, span);\n+ if (c !== undefined) {\n+ res[j++] = c;\n+ }\n+ k++;\n+ }\n+ } else {\n+ el = normalizeTree(el, [...path, k], keys, span);\n+ if (el !== undefined) {\n+ res[j++] = el;\n+ }\n+ k++;\n+ }\n+ }\n+ }\n+ return res;\n+ }\n+ return norm;\n+ }\n+ if (isFunction(el)) {\n+ return normalizeTree(el(), path, keys, span);\n+ }\n+ return span ?\n+ [\"span\", keys ? { key: path.join(\"-\") } : {}, el.toString()] :\n+ el.toString();\n+}\n",
"new_path": "packages/hiccup-dom/src/normalize.ts",
"old_path": null
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
feat(hiccup-dom): fix #11, update normalizeTree/normalizeElement
- update diffArray handling
- extract normalize fns to own file
- support using arrays of hiccup elements (in child position only, never as root)
| 1
|
feat
|
hiccup-dom
|
679,913
|
27.02.2018 14:33:30
| 0
|
9b2c1606b264fc4bb9b888243e6c88dcb0afb5c9
|
minor(hiccup-dom): update arg types
|
[
{
"change_type": "MODIFY",
"diff": "@@ -121,7 +121,7 @@ export function updateValueAttrib(el: HTMLInputElement, v: any) {\nel.value = v;\n}\n-export function removeAttribs(el: Element, attribs: any[]) {\n+export function removeAttribs(el: Element, attribs: string[]) {\nfor (let i = attribs.length - 1; i >= 0; i--) {\nel.removeAttribute(attribs[i]);\n}\n",
"new_path": "packages/hiccup-dom/src/dom.ts",
"old_path": "packages/hiccup-dom/src/dom.ts"
}
] |
TypeScript
|
Apache License 2.0
|
thi-ng/umbrella
|
minor(hiccup-dom): update arg types
| 1
|
minor
|
hiccup-dom
|